language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IdentityHashMapBoxingTest.java | {
"start": 2179,
"end": 2973
} | class ____ {
void test() {
Map<String, Integer> map1 = new IdentityHashMap<>();
Map<String, Float> map2 = new IdentityHashMap<>();
Map<String, Double> map3 = new IdentityHashMap<>();
Map<String, Long> map4 = new IdentityHashMap<>();
Map<String, Object> map5 = new IdentityHashMap<>();
Map<Object, String> map6 = new IdentityHashMap<>();
}
}
""")
.doTest();
}
@Test
public void mapsPositiveCases() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.collect.Maps;
import java.util.IdentityHashMap;
import java.util.Map;
| Test |
java | elastic__elasticsearch | modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java | {
"start": 8199,
"end": 12063
} | class ____ implements MetricDetail {
private static ParseField DCG_FIELD = new ParseField("dcg");
private static ParseField IDCG_FIELD = new ParseField("ideal_dcg");
private static ParseField NDCG_FIELD = new ParseField("normalized_dcg");
private static ParseField UNRATED_FIELD = new ParseField("unrated_docs");
private final double dcg;
private final double idcg;
private final int unratedDocs;
Detail(double dcg, double idcg, int unratedDocs) {
this.dcg = dcg;
this.idcg = idcg;
this.unratedDocs = unratedDocs;
}
Detail(StreamInput in) throws IOException {
this.dcg = in.readDouble();
this.idcg = in.readDouble();
this.unratedDocs = in.readVInt();
}
@Override
public String getMetricName() {
return NAME;
}
@Override
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(DCG_FIELD.getPreferredName(), this.dcg);
if (this.idcg != 0) {
builder.field(IDCG_FIELD.getPreferredName(), this.idcg);
builder.field(NDCG_FIELD.getPreferredName(), this.dcg / this.idcg);
}
builder.field(UNRATED_FIELD.getPreferredName(), this.unratedDocs);
return builder;
}
private static final ConstructingObjectParser<Detail, Void> PARSER = new ConstructingObjectParser<>(NAME, true, args -> {
return new Detail((Double) args[0], (Double) args[1] != null ? (Double) args[1] : 0.0d, (Integer) args[2]);
});
static {
PARSER.declareDouble(constructorArg(), DCG_FIELD);
PARSER.declareDouble(optionalConstructorArg(), IDCG_FIELD);
PARSER.declareInt(constructorArg(), UNRATED_FIELD);
}
public static Detail fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(this.dcg);
out.writeDouble(this.idcg);
out.writeVInt(this.unratedDocs);
}
@Override
public String getWriteableName() {
return NAME;
}
/**
* @return the discounted cumulative gain
*/
public double getDCG() {
return this.dcg;
}
/**
* @return the ideal discounted cumulative gain, can be 0 if nothing was computed, e.g. because no normalization was required
*/
public double getIDCG() {
return this.idcg;
}
/**
* @return the normalized discounted cumulative gain, can be 0 if nothing was computed, e.g. because no normalization was required
*/
public double getNDCG() {
return (this.idcg != 0) ? this.dcg / this.idcg : 0;
}
/**
* @return the number of unrated documents in the search results
*/
public Object getUnratedDocs() {
return this.unratedDocs;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
DiscountedCumulativeGain.Detail other = (DiscountedCumulativeGain.Detail) obj;
return Double.compare(this.dcg, other.dcg) == 0
&& Double.compare(this.idcg, other.idcg) == 0
&& this.unratedDocs == other.unratedDocs;
}
@Override
public int hashCode() {
return Objects.hash(this.dcg, this.idcg, this.unratedDocs);
}
}
}
| Detail |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/admin/internals/AllBrokersStrategyTest.java | {
"start": 1393,
"end": 5558
} | class ____ {
private final LogContext logContext = new LogContext();
@Test
public void testBuildRequest() {
AllBrokersStrategy strategy = new AllBrokersStrategy(logContext);
MetadataRequest.Builder builder = strategy.buildRequest(AllBrokersStrategy.LOOKUP_KEYS);
assertEquals(Collections.emptyList(), builder.topics());
}
@Test
public void testBuildRequestWithInvalidLookupKeys() {
AllBrokersStrategy strategy = new AllBrokersStrategy(logContext);
AllBrokersStrategy.BrokerKey key1 = new AllBrokersStrategy.BrokerKey(OptionalInt.empty());
AllBrokersStrategy.BrokerKey key2 = new AllBrokersStrategy.BrokerKey(OptionalInt.of(1));
assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(Set.of(key1)));
assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(Set.of(key2)));
assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(Set.of(key1, key2)));
Set<AllBrokersStrategy.BrokerKey> keys = new HashSet<>(AllBrokersStrategy.LOOKUP_KEYS);
keys.add(key2);
assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(keys));
}
@Test
public void testHandleResponse() {
AllBrokersStrategy strategy = new AllBrokersStrategy(logContext);
MetadataResponseData response = new MetadataResponseData();
response.brokers().add(new MetadataResponseData.MetadataResponseBroker()
.setNodeId(1)
.setHost("host1")
.setPort(9092)
);
response.brokers().add(new MetadataResponseData.MetadataResponseBroker()
.setNodeId(2)
.setHost("host2")
.setPort(9092)
);
AdminApiLookupStrategy.LookupResult<AllBrokersStrategy.BrokerKey> lookupResult = strategy.handleResponse(
AllBrokersStrategy.LOOKUP_KEYS,
new MetadataResponse(response, ApiKeys.METADATA.latestVersion())
);
assertEquals(Collections.emptyMap(), lookupResult.failedKeys);
Set<AllBrokersStrategy.BrokerKey> expectedMappedKeys = Set.of(
new AllBrokersStrategy.BrokerKey(OptionalInt.of(1)),
new AllBrokersStrategy.BrokerKey(OptionalInt.of(2))
);
assertEquals(expectedMappedKeys, lookupResult.mappedKeys.keySet());
lookupResult.mappedKeys.forEach((brokerKey, brokerId) ->
assertEquals(OptionalInt.of(brokerId), brokerKey.brokerId)
);
}
@Test
public void testHandleResponseWithNoBrokers() {
AllBrokersStrategy strategy = new AllBrokersStrategy(logContext);
MetadataResponseData response = new MetadataResponseData();
AdminApiLookupStrategy.LookupResult<AllBrokersStrategy.BrokerKey> lookupResult = strategy.handleResponse(
AllBrokersStrategy.LOOKUP_KEYS,
new MetadataResponse(response, ApiKeys.METADATA.latestVersion())
);
assertEquals(Collections.emptyMap(), lookupResult.failedKeys);
assertEquals(Collections.emptyMap(), lookupResult.mappedKeys);
}
@Test
public void testHandleResponseWithInvalidLookupKeys() {
AllBrokersStrategy strategy = new AllBrokersStrategy(logContext);
AllBrokersStrategy.BrokerKey key1 = new AllBrokersStrategy.BrokerKey(OptionalInt.empty());
AllBrokersStrategy.BrokerKey key2 = new AllBrokersStrategy.BrokerKey(OptionalInt.of(1));
MetadataResponse response = new MetadataResponse(new MetadataResponseData(), ApiKeys.METADATA.latestVersion());
assertThrows(IllegalArgumentException.class, () -> strategy.handleResponse(Set.of(key1), response));
assertThrows(IllegalArgumentException.class, () -> strategy.handleResponse(Set.of(key2), response));
assertThrows(IllegalArgumentException.class, () -> strategy.handleResponse(Set.of(key1, key2), response));
Set<AllBrokersStrategy.BrokerKey> keys = new HashSet<>(AllBrokersStrategy.LOOKUP_KEYS);
keys.add(key2);
assertThrows(IllegalArgumentException.class, () -> strategy.handleResponse(keys, response));
}
}
| AllBrokersStrategyTest |
java | processing__processing4 | java/src/processing/mode/java/lsp/PdeAdapter.java | {
"start": 13711,
"end": 13844
} | class ____ {
int line;
int col;
Offset(int line, int col) {
this.line = line;
this.col = col;
}
}
}
| Offset |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/converters/ConverterInfo.java | {
"start": 206,
"end": 745
} | class ____ {
private final Deployment deployment;
private final RuntimeResource resource;
private final int param;
public ConverterInfo(Deployment deployment, RuntimeResource resource, int param) {
this.deployment = deployment;
this.resource = resource;
this.param = param;
}
public Deployment getDeployment() {
return deployment;
}
public RuntimeResource getResource() {
return resource;
}
public int getParam() {
return param;
}
}
| ConverterInfo |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/notify/NotifyCenter.java | {
"start": 12235,
"end": 12652
} | class ____ type of the event type.
* @param queueMaxSize the publisher's queue max size.
*/
public static EventPublisher registerToPublisher(final Class<? extends Event> eventType, final int queueMaxSize) {
return registerToPublisher(eventType, DEFAULT_PUBLISHER_FACTORY, queueMaxSize);
}
/**
* Register publisher with specified factory.
*
* @param eventType | Instances |
java | elastic__elasticsearch | modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java | {
"start": 59196,
"end": 59656
} | class ____ extends Plugin implements SearchPlugin {
@Override
public List<QuerySpec<?>> getQueries() {
return Collections.singletonList(
new QuerySpec<QueryBuilder>(
CustomParserQueryBuilder.NAME,
CustomParserQueryBuilder::new,
CustomParserQueryBuilder::fromXContent
)
);
}
}
public static final | CustomQueriesPlugin |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InDoubleEvaluator.java | {
"start": 7128,
"end": 8126
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory lhs;
private final EvalOperator.ExpressionEvaluator.Factory[] rhs;
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory[] rhs) {
this.source = source;
this.lhs = lhs;
this.rhs = rhs;
}
@Override
public InDoubleEvaluator get(DriverContext context) {
EvalOperator.ExpressionEvaluator[] rhs = Arrays.stream(this.rhs)
.map(a -> a.get(context))
.toArray(EvalOperator.ExpressionEvaluator[]::new);
return new InDoubleEvaluator(source, lhs.get(context), rhs, context);
}
@Override
public String toString() {
return "InDoubleEvaluator[" + "lhs=" + lhs + ", rhs=" + Arrays.toString(rhs) + "]";
}
}
}
| Factory |
java | junit-team__junit5 | documentation/src/test/java/example/callbacks/DatabaseTestsDemo.java | {
"start": 1100,
"end": 1809
} | class ____ extends AbstractDatabaseTests {
@BeforeAll
static void beforeAll() {
beforeAllMethod(DatabaseTestsDemo.class.getSimpleName() + ".beforeAll()");
}
@BeforeEach
void insertTestDataIntoDatabase() {
beforeEachMethod(getClass().getSimpleName() + ".insertTestDataIntoDatabase()");
}
@Test
void testDatabaseFunctionality() {
testMethod(getClass().getSimpleName() + ".testDatabaseFunctionality()");
}
@AfterEach
void deleteTestDataFromDatabase() {
afterEachMethod(getClass().getSimpleName() + ".deleteTestDataFromDatabase()");
}
@AfterAll
static void afterAll() {
beforeAllMethod(DatabaseTestsDemo.class.getSimpleName() + ".afterAll()");
}
}
// end::user_guide[]
| DatabaseTestsDemo |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/JPACacheDisabledTest.java | {
"start": 468,
"end": 1151
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(Address.class))
.withConfigurationResource("application.properties")
.overrideConfigKey("quarkus.hibernate-orm.second-level-caching-enabled", "false");
@Inject
Session session;
@Test
@Transactional
public void testNTransaction() {
CacheImplementor cache = (CacheImplementor) session.getSessionFactory().getCache();
TimestampsCache timestampsCache = cache.getTimestampsCache();
Assertions.assertNull(timestampsCache);
}
}
| JPACacheDisabledTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/ScalarResultNativeQueryTest.java | {
"start": 938,
"end": 1266
} | class ____ {
@Entity(name="Person")
@Table(name="person")
@NamedNativeQuery(name = "personAge", query = "select p.age from person p", resultSetMapping = "ageStringMapping")
@SqlResultSetMapping(name = "ageStringMapping", columns = { @ColumnResult(name = "age", type = String.class) })
public static | ScalarResultNativeQueryTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/classloading/jar/StreamingProgram.java | {
"start": 2947,
"end": 3319
} | class ____ implements FlatMapFunction<String, Word> {
@Override
public void flatMap(String value, Collector<Word> out) throws Exception {
StringTokenizer tokenizer = new StringTokenizer(value);
while (tokenizer.hasMoreTokens()) {
out.collect(new Word(tokenizer.nextToken(), 1));
}
}
}
}
| Tokenizer |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/api/FixedKeyProcessor.java | {
"start": 1273,
"end": 2893
} | interface ____<KIn, VIn, VOut> {
/**
* Initialize this processor with the given context. The framework ensures this is called once per processor when the topology
* that contains it is initialized. When the framework is done with the processor, {@link #close()} will be called on it; the
* framework may later re-use the processor by calling {@code #init()} again.
* <p>
* The provided {@link FixedKeyProcessorContext context} can be used to access topology and record metadata, to
* {@link FixedKeyProcessorContext#schedule(Duration, PunctuationType, Punctuator) schedule} a method to be
* {@link Punctuator#punctuate(long) called periodically} and to access attached {@link StateStore}s.
*
* @param context the context; may not be null
*/
default void init(final FixedKeyProcessorContext<KIn, VOut> context) {}
/**
* Process the record. Note that record metadata is undefined in cases such as a forward call from a punctuator.
*
* @param record the record to process
*/
void process(FixedKeyRecord<KIn, VIn> record);
/**
* Close this processor and clean up any resources. Be aware that {@code #close()} is called after an internal cleanup.
* Thus, it is not possible to write anything to Kafka as underlying clients are already closed. The framework may
* later re-use this processor by calling {@code #init()} on it again.
* <p>
* Note: Do not close any streams managed resources, like {@link StateStore}s here, as they are managed by the library.
*/
default void close() {}
}
| FixedKeyProcessor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/spi/DirtyCheckEvent.java | {
"start": 142,
"end": 272
} | class ____ {@link org.hibernate.Session#isDirty}.
*
* @author Steve Ebersole
*
* @see org.hibernate.Session#isDirty
*/
public | for |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ProgrammaticExtensionRegistrationTests.java | {
"start": 19718,
"end": 19904
} | class ____ extends AbstractTestCase {
@RegisterExtension
static Extension field = new ExplosiveExtension(new Exception("boom"));
}
static | ClassLevelExplosiveCheckedExceptionTestCase |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/StreamGroupedReduceAsyncStateOperatorTest.java | {
"start": 5988,
"end": 6271
} | class ____ implements ReduceFunction<Integer> {
private static final long serialVersionUID = 1L;
@Override
public Integer reduce(Integer value1, Integer value2) throws Exception {
return value1 + value2;
}
}
private static | MyReducer |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/test/java/org/springframework/boot/maven/IncludeFilterTests.java | {
"start": 1269,
"end": 4757
} | class ____ {
@Test
void includeSimple() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar")));
Artifact artifact = createArtifact("com.foo", "bar");
Set result = filter.filter(Collections.singleton(artifact));
assertThat(result).hasSize(1);
assertThat(result.iterator().next()).isSameAs(artifact);
}
@Test
void includeGroupIdNoMatch() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar")));
Artifact artifact = createArtifact("com.baz", "bar");
Set result = filter.filter(Collections.singleton(artifact));
assertThat(result).isEmpty();
}
@Test
void includeArtifactIdNoMatch() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar")));
Artifact artifact = createArtifact("com.foo", "biz");
Set result = filter.filter(Collections.singleton(artifact));
assertThat(result).isEmpty();
}
@Test
void includeClassifier() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar", "jdk5")));
Artifact artifact = createArtifact("com.foo", "bar", "jdk5");
Set result = filter.filter(Collections.singleton(artifact));
assertThat(result).hasSize(1);
assertThat(result.iterator().next()).isSameAs(artifact);
}
@Test
void includeClassifierNoTargetClassifier() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar", "jdk5")));
Artifact artifact = createArtifact("com.foo", "bar");
Set result = filter.filter(Collections.singleton(artifact));
assertThat(result).isEmpty();
}
@Test
void includeClassifierNoMatch() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar", "jdk5")));
Artifact artifact = createArtifact("com.foo", "bar", "jdk6");
Set result = filter.filter(Collections.singleton(artifact));
assertThat(result).isEmpty();
}
@Test
void includeMulti() throws ArtifactFilterException {
IncludeFilter filter = new IncludeFilter(Arrays.asList(createInclude("com.foo", "bar"),
createInclude("com.foo", "bar2"), createInclude("org.acme", "app")));
Set<Artifact> artifacts = new HashSet<>();
artifacts.add(createArtifact("com.foo", "bar"));
artifacts.add(createArtifact("com.foo", "bar"));
Artifact anotherAcme = createArtifact("org.acme", "another-app");
artifacts.add(anotherAcme);
Set result = filter.filter(artifacts);
assertThat(result).hasSize(2);
}
private Include createInclude(String groupId, String artifactId) {
return createInclude(groupId, artifactId, null);
}
private Include createInclude(String groupId, String artifactId, @Nullable String classifier) {
Include include = new Include();
include.setGroupId(groupId);
include.setArtifactId(artifactId);
if (classifier != null) {
include.setClassifier(classifier);
}
return include;
}
private Artifact createArtifact(String groupId, String artifactId, @Nullable String classifier) {
Artifact a = mock(Artifact.class);
given(a.getGroupId()).willReturn(groupId);
given(a.getArtifactId()).willReturn(artifactId);
given(a.getClassifier()).willReturn(classifier);
return a;
}
private Artifact createArtifact(String groupId, String artifactId) {
return createArtifact(groupId, artifactId, null);
}
}
| IncludeFilterTests |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/util/reflection/GenericTypeExtractorTest.java | {
"start": 1086,
"end": 1147
} | class ____ extends Deeper implements Cloneable {}
| EvenDeeper |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/testrunner/TestFailingBeforeAllTestCase.java | {
"start": 524,
"end": 2515
} | class ____ {
@RegisterExtension
static QuarkusDevModeTest test = new QuarkusDevModeTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class).addClasses(HelloResource.class)
.add(new StringAsset(ContinuousTestingTestUtils.appProperties()),
"application.properties");
}
})
.setTestArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class).addClasses(SimpleET.class);
}
});
@Test
public void testBrokenBeforeAllHandling() throws InterruptedException {
ContinuousTestingTestUtils utils = new ContinuousTestingTestUtils();
TestStatus ts = utils.waitForNextCompletion();
Assertions.assertEquals(1L, ts.getTestsFailed());
Assertions.assertEquals(1L, ts.getTestsPassed());
Assertions.assertEquals(0L, ts.getTestsSkipped());
Assertions.assertEquals(1L, ts.getTotalTestsFailed());
Assertions.assertEquals(1L, ts.getTotalTestsPassed());
Assertions.assertEquals(0L, ts.getTotalTestsSkipped());
test.modifyTestSourceFile(SimpleET.class, s -> s.replaceFirst("\\{", "{ \n" +
" @org.junit.jupiter.api.BeforeAll public static void error() { throw new RuntimeException(); }"));
ts = utils.waitForNextCompletion();
Assertions.assertEquals(2L, ts.getTestsFailed());
Assertions.assertEquals(0L, ts.getTestsPassed());
Assertions.assertEquals(0L, ts.getTestsSkipped());
Assertions.assertEquals(2L, ts.getTotalTestsFailed());
Assertions.assertEquals(0L, ts.getTotalTestsPassed());
Assertions.assertEquals(0L, ts.getTotalTestsSkipped());
}
}
| TestFailingBeforeAllTestCase |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java | {
"start": 9665,
"end": 9903
} | interface ____ extends GrpcServerConfiguration.Xds {
/**
* Optional explicit target.
*/
Optional<String> target();
}
/**
* Stork config for new Vert.x gRPC
*/
@ConfigGroup
| ClientXds |
java | apache__rocketmq | auth/src/main/java/org/apache/rocketmq/auth/authorization/model/RequestContext.java | {
"start": 976,
"end": 1725
} | class ____ {
private Subject subject;
private Resource resource;
private Action action;
private String sourceIp;
public Subject getSubject() {
return subject;
}
public void setSubject(Subject subject) {
this.subject = subject;
}
public Resource getResource() {
return resource;
}
public void setResource(Resource resource) {
this.resource = resource;
}
public Action getAction() {
return action;
}
public void setAction(Action action) {
this.action = action;
}
public String getSourceIp() {
return sourceIp;
}
public void setSourceIp(String sourceIp) {
this.sourceIp = sourceIp;
}
}
| RequestContext |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/config/ServiceLocatorFactoryBeanTests.java | {
"start": 11014,
"end": 11427
} | class ____
}
@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testWhenServiceLocatorExceptionClassIsNotAnExceptionSubclass() {
ServiceLocatorFactoryBean factory = new ServiceLocatorFactoryBean();
assertThatIllegalArgumentException().isThrownBy(() ->
factory.setServiceLocatorExceptionClass((Class) getClass()));
// should throw, bad (non-Exception-type) serviceLocatorException | supplied |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/method/ResolvableMethod.java | {
"start": 4909,
"end": 10255
} | class ____ {
private static final Log logger = LogFactory.getLog(ResolvableMethod.class);
private static final SpringObjenesis objenesis = new SpringObjenesis();
private static final ParameterNameDiscoverer nameDiscoverer = new DefaultParameterNameDiscoverer();
private final Method method;
private ResolvableMethod(Method method) {
Assert.notNull(method, "method is required");
this.method = method;
}
/**
* Return the resolved method.
*/
public Method method() {
return this.method;
}
/**
* Return the declared return type of the resolved method.
*/
public MethodParameter returnType() {
return new SynthesizingMethodParameter(this.method, -1);
}
/**
* Find a unique argument matching the given type.
* @param type the expected type
* @param generics optional array of generic types
*/
public MethodParameter arg(Class<?> type, Class<?>... generics) {
return new ArgResolver().arg(type, generics);
}
/**
* Find a unique argument matching the given type.
* @param type the expected type
* @param generic at least one generic type
* @param generics optional array of generic types
*/
public MethodParameter arg(Class<?> type, ResolvableType generic, ResolvableType... generics) {
return new ArgResolver().arg(type, generic, generics);
}
/**
* Find a unique argument matching the given type.
* @param type the expected type
*/
public MethodParameter arg(ResolvableType type) {
return new ArgResolver().arg(type);
}
/**
* Filter on method arguments with annotation. See {@link MvcAnnotationPredicates}.
*/
@SafeVarargs
public final ArgResolver annot(Predicate<MethodParameter>... filter) {
return new ArgResolver(filter);
}
@SafeVarargs
public final ArgResolver annotPresent(Class<? extends Annotation>... annotationTypes) {
return new ArgResolver().annotPresent(annotationTypes);
}
/**
* Filter on method arguments that don't have the given annotation type(s).
* @param annotationTypes the annotation types
*/
@SafeVarargs
public final ArgResolver annotNotPresent(Class<? extends Annotation>... annotationTypes) {
return new ArgResolver().annotNotPresent(annotationTypes);
}
@Override
public String toString() {
return "ResolvableMethod=" + formatMethod();
}
private String formatMethod() {
return this.method().getName() + Arrays.stream(this.method.getParameters())
.map(this::formatParameter)
.collect(Collectors.joining(",\n\t", "(\n\t", "\n)"));
}
private String formatParameter(Parameter param) {
Annotation[] annot = param.getAnnotations();
return (annot.length > 0)
? Arrays.stream(annot).map(this::formatAnnotation).collect(Collectors.joining(",", "[", "]")) + " "
+ param
: param.toString();
}
private String formatAnnotation(Annotation annotation) {
Map<String, Object> map = AnnotationUtils.getAnnotationAttributes(annotation);
map.forEach((key, value) -> {
if (value.equals(ValueConstants.DEFAULT_NONE)) {
map.put(key, "NONE");
}
});
return annotation.annotationType().getName() + map;
}
private static ResolvableType toResolvableType(Class<?> type, Class<?>... generics) {
return ObjectUtils.isEmpty(generics) ? ResolvableType.forClass(type)
: ResolvableType.forClassWithGenerics(type, generics);
}
private static ResolvableType toResolvableType(Class<?> type, ResolvableType generic, ResolvableType... generics) {
ResolvableType[] genericTypes = new ResolvableType[generics.length + 1];
genericTypes[0] = generic;
System.arraycopy(generics, 0, genericTypes, 1, generics.length);
return ResolvableType.forClassWithGenerics(type, genericTypes);
}
/**
* Main entry point providing access to a {@code ResolvableMethod} builder.
*/
public static <T> Builder<T> on(Class<T> objectClass) {
return new Builder<>(objectClass);
}
@SuppressWarnings("unchecked")
private static <T> T initProxy(Class<?> type, MethodInvocationInterceptor interceptor) {
Assert.notNull(type, "'type' must not be null");
if (type.isInterface()) {
ProxyFactory factory = new ProxyFactory(EmptyTargetSource.INSTANCE);
factory.addInterface(type);
factory.addInterface(Supplier.class);
factory.addAdvice(interceptor);
return (T) factory.getProxy();
}
else {
Enhancer enhancer = new Enhancer();
enhancer.setSuperclass(type);
enhancer.setInterfaces(new Class<?>[] { Supplier.class });
enhancer.setNamingPolicy(SpringNamingPolicy.INSTANCE);
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
Class<?> proxyClass = enhancer.createClass();
Object proxy = null;
if (objenesis.isWorthTrying()) {
try {
proxy = objenesis.newInstance(proxyClass, enhancer.getUseCache());
}
catch (ObjenesisException ex) {
logger.debug("Objenesis failed, falling back to default constructor", ex);
}
}
if (proxy == null) {
try {
proxy = ReflectionUtils.accessibleConstructor(proxyClass).newInstance();
}
catch (Throwable ex) {
throw new IllegalStateException(
"Unable to instantiate proxy " + "via both Objenesis and default constructor fails as well",
ex);
}
}
((Factory) proxy).setCallbacks(new Callback[] { interceptor });
return (T) proxy;
}
}
/**
* Builder for {@code ResolvableMethod}.
*/
public static final | ResolvableMethod |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tool/schema/SourceType.java | {
"start": 1812,
"end": 2506
} | enum ____. An empty value will return {@code null}.
*
* @throws IllegalArgumentException If the incoming value is unrecognized
*/
public static SourceType interpret(Object value, SourceType defaultValue) {
if ( value == null ) {
return defaultValue;
}
if ( value instanceof SourceType sourceType ) {
return sourceType;
}
final String name = value.toString().trim().replace('-', '_');
if ( name.isEmpty() ) {
return METADATA;
}
for ( var sourceType: values() ) {
if ( sourceType.toString().equalsIgnoreCase(name) ) {
return sourceType;
}
}
throw new IllegalArgumentException( "Unrecognized schema generation source type: '" + value + "'");
}
}
| value |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/function/server/DispatcherHandlerIntegrationTests.java | {
"start": 5264,
"end": 6467
} | class ____ {
@Bean
public PersonHandler personHandler() {
return new PersonHandler();
}
@Bean
public PersonController personController() {
return new PersonController();
}
@Bean
public AttributesHandler attributesHandler() {
return new AttributesHandler();
}
@Bean
public RouterFunction<EntityResponse<Person>> monoRouterFunction(PersonHandler personHandler) {
return route(RequestPredicates.GET("/mono"), personHandler::mono);
}
@Bean
public RouterFunction<ServerResponse> fluxRouterFunction(PersonHandler personHandler) {
return route(RequestPredicates.GET("/flux"), personHandler::flux);
}
@Bean
public RouterFunction<ServerResponse> attributesRouterFunction(AttributesHandler attributesHandler) {
return nest(RequestPredicates.GET("/attributes"),
route(RequestPredicates.GET("/{foo}"), attributesHandler::attributes));
}
@Bean
public RouterFunction<ServerResponse> nested() {
return route()
.path("/foo", () -> route()
.nest(accept(MediaType.APPLICATION_JSON), builder -> builder
.GET("/bar", request -> ServerResponse.ok().build()))
.build())
.build();
}
}
private static | TestConfiguration |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java | {
"start": 1114,
"end": 4103
} | class ____ {
private BytesRef text;
private BytesRef prefix;
private BytesRef regex;
private String field;
private Analyzer analyzer;
private int size = 5;
private int shardSize = -1;
private final SearchExecutionContext searchExecutionContext;
private final Suggester<?> suggester;
protected SuggestionContext(Suggester<?> suggester, SearchExecutionContext searchExecutionContext) {
this.suggester = suggester;
this.searchExecutionContext = searchExecutionContext;
}
public BytesRef getText() {
return text;
}
public void setText(BytesRef text) {
this.text = text;
}
public BytesRef getPrefix() {
return prefix;
}
public void setPrefix(BytesRef prefix) {
this.prefix = prefix;
}
public BytesRef getRegex() {
return regex;
}
public void setRegex(BytesRef regex) {
this.regex = regex;
}
@SuppressWarnings("unchecked")
public Suggester<SuggestionContext> getSuggester() {
return ((Suggester<SuggestionContext>) suggester);
}
public Analyzer getAnalyzer() {
return analyzer;
}
public void setAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
public String getField() {
return field;
}
public void setField(String field) {
this.field = field;
}
public int getSize() {
return size;
}
public void setSize(int size) {
if (size <= 0) {
throw new IllegalArgumentException("Size must be positive but was: " + size);
}
this.size = size;
}
public Integer getShardSize() {
return shardSize;
}
public void setShardSize(int shardSize) {
if (shardSize <= 0) {
throw new IllegalArgumentException("ShardSize must be positive but was: " + shardSize);
}
this.shardSize = shardSize;
}
public SearchExecutionContext getSearchExecutionContext() {
return this.searchExecutionContext;
}
@Override
public String toString() {
return "["
+ "text="
+ text
+ ",field="
+ field
+ ",prefix="
+ prefix
+ ",regex="
+ regex
+ ",size="
+ size
+ ",shardSize="
+ shardSize
+ ",suggester="
+ suggester
+ ",analyzer="
+ analyzer
+ ",searchExecutionContext="
+ searchExecutionContext
+ "]";
}
}
}
| SuggestionContext |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/HttpConnectOptions.java | {
"start": 1041,
"end": 7726
} | class ____ {
/**
* The default value for proxy options = {@code null}
*/
public static final ProxyOptions DEFAULT_PROXY_OPTIONS = null;
/**
* The default value for server method = {@code null}
*/
public static final SocketAddress DEFAULT_SERVER = null;
/**
* The default value for host name = {@code null}
*/
public static final String DEFAULT_HOST = null;
/**
* The default value for port = {@code null}
*/
public static final Integer DEFAULT_PORT = null;
/**
* The default value for SSL = {@code null}
*/
public static final Boolean DEFAULT_SSL = null;
/**
* The default connect timeout = {@code -1L} (disabled)
*/
public static final long DEFAULT_CONNECT_TIMEOUT = -1L;
private ProxyOptions proxyOptions;
private Address server;
private String host;
private Integer port;
private Boolean ssl;
private ClientSSLOptions sslOptions;;
private long connectTimeout;
/**
* Default constructor
*/
public HttpConnectOptions() {
init();
}
/**
* Copy constructor
*
* @param other the options to copy
*/
public HttpConnectOptions(HttpConnectOptions other) {
init();
setProxyOptions(other.proxyOptions);
setServer(other.server);
setHost(other.host);
setPort(other.port);
setSsl(other.ssl);
sslOptions = other.sslOptions != null ? new ClientSSLOptions(other.sslOptions) : null;
setConnectTimeout(other.connectTimeout);
}
/**
* Create options from JSON
*
* @param json the JSON
*/
public HttpConnectOptions(JsonObject json) {
init();
HttpConnectOptionsConverter.fromJson(json, this);
JsonObject server = json.getJsonObject("server");
if (server != null) {
this.server = SocketAddress.fromJson(server);
}
}
protected void init() {
proxyOptions = DEFAULT_PROXY_OPTIONS;
server = DEFAULT_SERVER;
host = DEFAULT_HOST;
port = DEFAULT_PORT;
ssl = DEFAULT_SSL;
sslOptions = null;
connectTimeout = DEFAULT_CONNECT_TIMEOUT;
}
/**
* Get the proxy options override for connections
*
* @return proxy options override
*/
public ProxyOptions getProxyOptions() {
return proxyOptions;
}
/**
* Override the {@link HttpClientOptions#setProxyOptions(ProxyOptions)} proxy options
* for connections.
*
* @param proxyOptions proxy options override object
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setProxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Get the server address to be used by the client request.
*
* @return the server address
*/
public Address getServer() {
return server;
}
/**
* Set the server address to be used by the client request.
*
* <p> When the server address is {@code null}, the address will be resolved after the {@code host}
* property by the Vert.x resolver.
*
* <p> Use this when you want to connect to a specific server address without name resolution.
*
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setServer(Address server) {
this.server = server;
return this;
}
/**
* Get the host name to be used by the client request.
*
* @return the host name
*/
public String getHost() {
return host;
}
/**
* Set the host name to be used by the client request.
*
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setHost(String host) {
this.host = host;
return this;
}
/**
* Get the port to be used by the client request.
*
* @return the port
*/
public Integer getPort() {
return port;
}
/**
* Set the port to be used by the client request.
*
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setPort(Integer port) {
this.port = port;
return this;
}
/**
* @return is SSL/TLS enabled?
*/
public Boolean isSsl() {
return ssl;
}
/**
* Set whether SSL/TLS is enabled.
*
* @param ssl true if enabled
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setSsl(Boolean ssl) {
this.ssl = ssl;
return this;
}
/**
* @return the SSL options
*/
public ClientSSLOptions getSslOptions() {
return sslOptions;
}
/**
* Set the SSL options to use.
* <p>
* When none is provided, the client SSL options will be used instead.
* @param sslOptions the SSL options to use
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setSslOptions(ClientSSLOptions sslOptions) {
this.sslOptions = sslOptions;
return this;
}
/**
* @return the amount of time after which, if the request is not obtained from the client within the timeout period,
* the {@code Future<HttpClientRequest>} obtained from the client is failed with a {@link java.util.concurrent.TimeoutException}
*/
public long getConnectTimeout() {
return connectTimeout;
}
/**
* Sets the amount of time after which, if the request is not obtained from the client within the timeout period,
* the {@code Future<HttpClientRequest>} obtained from the client is failed with a {@link java.util.concurrent.TimeoutException}.
*
* Note this is not related to the TCP {@link HttpClientOptions#setConnectTimeout(int)} option, when a request is made against
* a pooled HTTP client, the timeout applies to the duration to obtain a connection from the pool to serve the request, the timeout
* might fire because the server does not respond in time or the pool is too busy to serve a request.
*
* @param timeout the amount of time in milliseconds.
* @return a reference to this, so the API can be used fluently
*/
public HttpConnectOptions setConnectTimeout(long timeout) {
this.connectTimeout = timeout;
return this;
}
private URL parseUrl(String surl) {
// Note - parsing a URL this way is slower than specifying host, port and relativeURI
try {
return new URL(surl);
} catch (MalformedURLException e) {
throw new VertxException("Invalid url: " + surl, e);
}
}
public JsonObject toJson() {
JsonObject json = new JsonObject();
HttpConnectOptionsConverter.toJson(this, json);
Address serverAddr = this.server;
if (serverAddr instanceof SocketAddress) {
SocketAddress socketAddr = (SocketAddress) serverAddr;
json.put("server", socketAddr.toJson());
}
return json;
}
}
| HttpConnectOptions |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java | {
"start": 2556,
"end": 7652
} | class ____ extends Plugin implements ScriptPlugin {}
static PluginsService newPluginsService(Settings settings) {
return new PluginsService(
settings,
null,
PluginsLoader.createPluginsLoader(
Set.of(),
PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsDir()),
Map.of(),
false
)
);
}
static PluginsService newMockPluginsService(List<Class<? extends Plugin>> classpathPlugins) {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put("my.setting", "test")
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.NIOFS.getSettingsKey())
.build();
return new MockPluginsService(settings, TestEnvironment.newEnvironment(settings), classpathPlugins);
}
// This test uses a mock in order to use plugins from the classpath
public void testFilterPlugins() {
PluginsService service = newMockPluginsService(List.of(FakePlugin.class, FilterablePlugin.class));
List<ScriptPlugin> scriptPlugins = service.filterPlugins(ScriptPlugin.class).toList();
assertThat(scriptPlugins, contains(instanceOf(FilterablePlugin.class)));
}
// This test uses a mock in order to use plugins from the classpath
public void testMapPlugins() {
PluginsService service = newMockPluginsService(List.of(FakePlugin.class, FilterablePlugin.class));
List<String> mapResult = service.map(p -> p.getClass().getSimpleName()).toList();
assertThat(mapResult, containsInAnyOrder("FakePlugin", "FilterablePlugin"));
List<String> flatmapResult = service.flatMap(p -> List.of(p.getClass().getSimpleName())).toList();
assertThat(flatmapResult, containsInAnyOrder("FakePlugin", "FilterablePlugin"));
List<String> forEachConsumer = new ArrayList<>();
service.forEach(p -> forEachConsumer.add(p.getClass().getSimpleName()));
assertThat(forEachConsumer, containsInAnyOrder("FakePlugin", "FilterablePlugin"));
}
public void testHiddenFiles() throws IOException {
final Path home = createTempDir();
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
final Path hidden = home.resolve("plugins").resolve(".hidden");
Files.createDirectories(hidden);
final IllegalStateException e = expectThrows(IllegalStateException.class, () -> newPluginsService(settings));
final String expected = "Plugin [.hidden] is missing a descriptor properties file";
assertThat(e, hasToString(containsString(expected)));
}
public void testDesktopServicesStoreFiles() throws IOException {
final Path home = createTempDir();
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
final Path plugins = home.resolve("plugins");
Files.createDirectories(plugins);
final Path desktopServicesStore = plugins.resolve(".DS_Store");
Files.createFile(desktopServicesStore);
if (Constants.MAC_OS_X) {
final PluginsService pluginsService = newPluginsService(settings);
assertNotNull(pluginsService);
} else {
final IllegalStateException e = expectThrows(IllegalStateException.class, () -> newPluginsService(settings));
assertThat(e.getMessage(), containsString("Plugin [.DS_Store] is missing a descriptor properties file"));
}
}
public void testStartupWithRemovingMarker() throws IOException {
final Path home = createTempDir();
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
final Path fake = home.resolve("plugins").resolve("fake");
Files.createDirectories(fake);
Files.createFile(fake.resolve("plugin.jar"));
final Path removing = home.resolve("plugins").resolve(".removing-fake");
Files.createFile(removing);
PluginTestUtil.writePluginProperties(
fake,
"description",
"fake",
"name",
"fake",
"version",
"1.0.0",
"elasticsearch.version",
Version.CURRENT.toString(),
"java.version",
System.getProperty("java.specification.version"),
"classname",
"Fake",
"has.native.controller",
"false"
);
final IllegalStateException e = expectThrows(IllegalStateException.class, () -> newPluginsService(settings));
final String expected = Strings.format(
"found file [%s] from a failed attempt to remove the plugin [fake]; execute [elasticsearch-plugin remove fake]",
removing
);
assertThat(e, hasToString(containsString(expected)));
}
public void testLoadPluginWithNoPublicConstructor() {
| FilterablePlugin |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/merge/PropertyMergeTest.java | {
"start": 1255,
"end": 1520
} | class ____ {
public int a;
public int b;
protected AB() { }
protected AB(int a0, int b0) {
a = a0;
b = b0;
}
}
@JsonPropertyOrder(alphabetic=true)
@JsonFormat(shape=Shape.ARRAY)
static | AB |
java | quarkusio__quarkus | extensions/websockets/server/deployment/src/test/java/io/quarkus/websockets/test/WebsocketDevModeTestCase.java | {
"start": 876,
"end": 3247
} | class ____ {
@TestHTTPResource("api/echo")
URI echoUri;
@RegisterExtension
public static final QuarkusDevModeTest test = new QuarkusDevModeTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(EchoWebSocket.class, EchoService.class)
.addAsResource(new StringAsset("quarkus.http.root-path=/api/"), "application.properties");
}
});
@Test
public void testWebsocketHotReplacement() throws Exception {
LinkedBlockingDeque<String> message = new LinkedBlockingDeque<>();
Session session = ContainerProvider.getWebSocketContainer().connectToServer(new Endpoint() {
@Override
public void onOpen(Session session, EndpointConfig endpointConfig) {
session.addMessageHandler(new MessageHandler.Whole<String>() {
@Override
public void onMessage(String s) {
message.add(s);
}
});
session.getAsyncRemote().sendText("hello");
}
}, ClientEndpointConfig.Builder.create().build(), echoUri);
try {
Assertions.assertEquals("hello", message.poll(20, TimeUnit.SECONDS));
} finally {
session.close();
}
test.modifySourceFile(EchoService.class, (s) -> s.replace("return msg;", "return \"changed:\" + msg;"));
session = ContainerProvider.getWebSocketContainer().connectToServer(new Endpoint() {
@Override
public void onOpen(Session session, EndpointConfig endpointConfig) {
session.addMessageHandler(new MessageHandler.Whole<String>() {
@Override
public void onMessage(String s) {
message.add(s);
}
});
session.getAsyncRemote().sendText("hello");
}
}, ClientEndpointConfig.Builder.create().build(), echoUri);
try {
Assertions.assertEquals("changed:hello", message.poll(20, TimeUnit.SECONDS));
} finally {
session.close();
}
}
}
| WebsocketDevModeTestCase |
java | quarkusio__quarkus | extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/UnknownDriverConfigTest.java | {
"start": 653,
"end": 3202
} | class ____ {
@Inject
AgroalDataSource defaultDataSource;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("application-default-datasource-unknown-driver.properties");
@Test
public void testDefaultDataSourceInjection() throws SQLException {
testDataSource(defaultDataSource, "username-default", 3, 13, 7, Duration.ofSeconds(53), Duration.ofSeconds(54),
Duration.ofSeconds(55), Duration.ofSeconds(56), Duration.ofSeconds(57),
"create schema if not exists schema_default");
}
private static void testDataSource(AgroalDataSource dataSource, String username, int minSize, int maxSize,
int initialSize, Duration backgroundValidationInterval, Duration acquisitionTimeout, Duration leakDetectionInterval,
Duration idleRemovalInterval, Duration maxLifetime, String newConnectionSql) throws SQLException {
AgroalConnectionPoolConfiguration configuration = dataSource.getConfiguration().connectionPoolConfiguration();
AgroalConnectionFactoryConfiguration agroalConnectionFactoryConfiguration = configuration
.connectionFactoryConfiguration();
assertEquals("jdbc:h2:tcp://localhost/mem:default", agroalConnectionFactoryConfiguration.jdbcUrl());
assertEquals(username, agroalConnectionFactoryConfiguration.principal().getName());
assertEquals(maxSize, configuration.maxSize());
assertEquals(initialSize, configuration.initialSize());
assertEquals(backgroundValidationInterval, configuration.validationTimeout());
assertEquals(acquisitionTimeout, configuration.acquisitionTimeout());
assertEquals(leakDetectionInterval, configuration.leakTimeout());
assertEquals(idleRemovalInterval, configuration.reapTimeout());
assertEquals(maxLifetime, configuration.maxLifetime());
assertTrue(configuration.transactionIntegration() instanceof NarayanaTransactionIntegration);
assertEquals(AgroalConnectionFactoryConfiguration.TransactionIsolation.SERIALIZABLE,
agroalConnectionFactoryConfiguration.jdbcTransactionIsolation());
assertTrue(agroalConnectionFactoryConfiguration.trackJdbcResources());
assertTrue(dataSource.getConfiguration().metricsEnabled());
assertEquals(newConnectionSql, agroalConnectionFactoryConfiguration.initialSql());
try (Connection connection = dataSource.getConnection()) {
}
}
}
| UnknownDriverConfigTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/IncrementalRemoteKeyedStateHandleTest.java | {
"start": 1822,
"end": 14944
} | class ____ {
/**
* This test checks, that for an unregistered {@link IncrementalRemoteKeyedStateHandle} all
* state (including shared) is discarded.
*/
@Test
void testUnregisteredDiscarding() throws Exception {
IncrementalRemoteKeyedStateHandle stateHandle = create(new Random(42));
stateHandle.discardState();
for (HandleAndLocalPath handleAndLocalPath : stateHandle.getPrivateState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.TRUE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.TRUE);
}
verify(stateHandle.getMetaDataStateHandle()).discardState();
}
/**
* This test checks, that for a registered {@link IncrementalRemoteKeyedStateHandle} discards
* respect all shared state and only discard it one all references are released.
*/
@Test
void testSharedStateDeRegistration() throws Exception {
SharedStateRegistry registry = spy(new SharedStateRegistryImpl());
// Create two state handles with overlapping shared state
IncrementalRemoteKeyedStateHandle stateHandle1 = create(new Random(42));
IncrementalRemoteKeyedStateHandle stateHandle2 = create(new Random(42));
// Both handles should not be registered and not discarded by now.
for (HandleAndLocalPath handleAndLocalPath : stateHandle1.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.FALSE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle2.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.FALSE);
}
// Now we register both ...
stateHandle1.registerSharedStates(registry, 0L);
registry.checkpointCompleted(0L);
stateHandle2.registerSharedStates(registry, 0L);
for (HandleAndLocalPath handleAndLocalPath : stateHandle1.getSharedState()) {
StreamStateHandle handle = handleAndLocalPath.getHandle();
SharedStateRegistryKey registryKey =
SharedStateRegistryKey.forStreamStateHandle(handle);
// stateHandle1 and stateHandle2 has same shared states, so same key register 2 times
verify(registry, times(2)).registerReference(registryKey, handle, 0L);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle2.getSharedState()) {
StreamStateHandle handle = handleAndLocalPath.getHandle();
SharedStateRegistryKey registryKey =
SharedStateRegistryKey.forStreamStateHandle(handle);
// stateHandle1 and stateHandle2 has same shared states, so same key register 2 times
verify(registry, times(2)).registerReference(registryKey, handle, 0L);
}
// We discard the first
stateHandle1.discardState();
// Should be unregistered, non-shared discarded, shared not discarded
for (HandleAndLocalPath handleAndLocalPath : stateHandle1.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.FALSE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle2.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.FALSE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle1.getPrivateState()) {
verify(handleAndLocalPath.getHandle(), times(1)).discardState();
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle2.getPrivateState()) {
verify(handleAndLocalPath.getHandle(), times(0)).discardState();
}
verify(stateHandle1.getMetaDataStateHandle(), times(1)).discardState();
verify(stateHandle2.getMetaDataStateHandle(), times(0)).discardState();
// We discard the second
stateHandle2.discardState();
// Now everything should be unregistered and discarded
registry.unregisterUnusedState(Long.MAX_VALUE);
for (HandleAndLocalPath handleAndLocalPath : stateHandle1.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.TRUE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandle2.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.TRUE);
}
verify(stateHandle1.getMetaDataStateHandle(), times(1)).discardState();
verify(stateHandle2.getMetaDataStateHandle(), times(1)).discardState();
}
/**
* This tests that re-registration of shared state with another registry works as expected. This
* simulates a recovery from a checkpoint, when the checkpoint coordinator creates a new shared
* state registry and re-registers all live checkpoint states.
*/
@Test
void testSharedStateReRegistration() throws Exception {
SharedStateRegistry stateRegistryA = spy(new SharedStateRegistryImpl());
IncrementalRemoteKeyedStateHandle stateHandleX = create(new Random(1));
IncrementalRemoteKeyedStateHandle stateHandleY = create(new Random(2));
IncrementalRemoteKeyedStateHandle stateHandleZ = create(new Random(3));
// Now we register first time ...
stateHandleX.registerSharedStates(stateRegistryA, 0L);
stateHandleY.registerSharedStates(stateRegistryA, 0L);
stateHandleZ.registerSharedStates(stateRegistryA, 0L);
// Second attempt should fail
assertThatThrownBy(() -> stateHandleX.registerSharedStates(stateRegistryA, 0L))
.withFailMessage("Should not be able to register twice with the same registry.")
.isInstanceOf(IllegalStateException.class);
// Everything should be discarded for this handle
stateHandleZ.discardState();
verify(stateHandleZ.getMetaDataStateHandle(), times(1)).discardState();
// Close the first registry
stateRegistryA.close();
// Attempt to register to closed registry should trigger exception
assertThatThrownBy(() -> create(new Random(4)).registerSharedStates(stateRegistryA, 0L))
.withFailMessage("Should not be able to register new state to closed registry.")
.isInstanceOf(IllegalStateException.class);
// Private state should still get discarded
stateHandleY.discardState();
verify(stateHandleY.getMetaDataStateHandle(), times(1)).discardState();
// This should still be unaffected
verify(stateHandleX.getMetaDataStateHandle(), never()).discardState();
// We re-register the handle with a new registry
SharedStateRegistry sharedStateRegistryB = spy(new SharedStateRegistryImpl());
stateHandleX.registerSharedStates(sharedStateRegistryB, 0L);
stateHandleX.discardState();
verify(stateHandleX.getMetaDataStateHandle(), times(1)).discardState();
// Should be completely discarded because it is tracked through the new registry
sharedStateRegistryB.unregisterUnusedState(1L);
for (HandleAndLocalPath handleAndLocalPath : stateHandleX.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.TRUE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandleY.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.FALSE);
}
for (HandleAndLocalPath handleAndLocalPath : stateHandleZ.getSharedState()) {
verifyDiscard(handleAndLocalPath.getHandle(), TernaryBoolean.FALSE);
}
sharedStateRegistryB.close();
}
@Test
void testCheckpointedSize() {
IncrementalRemoteKeyedStateHandle stateHandle1 = create(ThreadLocalRandom.current());
assertThat(stateHandle1.getCheckpointedSize()).isEqualTo(stateHandle1.getStateSize());
long checkpointedSize = 123L;
IncrementalRemoteKeyedStateHandle stateHandle2 =
create(ThreadLocalRandom.current(), checkpointedSize);
assertThat(stateHandle2.getCheckpointedSize()).isEqualTo(checkpointedSize);
}
@Test
void testNonEmptyIntersection() {
IncrementalRemoteKeyedStateHandle handle = create(ThreadLocalRandom.current());
KeyGroupRange expectedRange = new KeyGroupRange(0, 3);
KeyedStateHandle newHandle = handle.getIntersection(expectedRange);
assertThat(newHandle).isInstanceOf(IncrementalRemoteKeyedStateHandle.class);
assertThat(newHandle.getStateHandleId()).isEqualTo(handle.getStateHandleId());
}
@Test
void testCollectSizeStats() {
IncrementalRemoteKeyedStateHandle handle = create(ThreadLocalRandom.current());
StateObject.StateObjectSizeStatsCollector statsCollector =
StateObject.StateObjectSizeStatsCollector.create();
handle.collectSizeStats(statsCollector);
Assertions.assertEquals(
new HashMap<StateObject.StateObjectLocation, Long>() {
{
// Location is LOCAL_MEMORY, even though the handle is called remote because
// we test against a local file system
put(StateObject.StateObjectLocation.LOCAL_MEMORY, handle.getStateSize());
}
},
statsCollector.getStats());
}
@Test
void testConcurrentCheckpointSharedStateRegistration() throws Exception {
String localPath = "1.sst";
StreamStateHandle streamHandle1 = new ByteStreamStateHandle("file-1", new byte[] {'s'});
StreamStateHandle streamHandle2 = new ByteStreamStateHandle("file-2", new byte[] {'s'});
SharedStateRegistry registry = new SharedStateRegistryImpl();
UUID backendID = UUID.randomUUID();
IncrementalRemoteKeyedStateHandle handle1 =
new IncrementalRemoteKeyedStateHandle(
backendID,
KeyGroupRange.of(0, 0),
1L,
placeSpies(
Collections.singletonList(
HandleAndLocalPath.of(streamHandle1, localPath))),
Collections.emptyList(),
new ByteStreamStateHandle("", new byte[] {'s'}));
handle1.registerSharedStates(registry, handle1.getCheckpointId());
IncrementalRemoteKeyedStateHandle handle2 =
new IncrementalRemoteKeyedStateHandle(
backendID,
KeyGroupRange.of(0, 0),
2L,
placeSpies(
Collections.singletonList(
HandleAndLocalPath.of(streamHandle2, localPath))),
Collections.emptyList(),
new ByteStreamStateHandle("", new byte[] {'s'}));
handle2.registerSharedStates(registry, handle2.getCheckpointId());
registry.checkpointCompleted(1L);
// checkpoint 2 failed
handle2.discardState();
for (HandleAndLocalPath handleAndLocalPath : handle1.getSharedState()) {
verify(handleAndLocalPath.getHandle(), never()).discardState();
}
for (HandleAndLocalPath handleAndLocalPath : handle2.getSharedState()) {
verify(handleAndLocalPath.getHandle(), never()).discardState();
}
registry.close();
}
private static IncrementalRemoteKeyedStateHandle create(Random rnd) {
return new IncrementalRemoteKeyedStateHandle(
UUID.nameUUIDFromBytes("test".getBytes(StandardCharsets.UTF_8)),
KeyGroupRange.of(0, 0),
1L,
// not place spies on shared state handle
CheckpointTestUtils.createRandomHandleAndLocalPathList(rnd),
placeSpies(CheckpointTestUtils.createRandomHandleAndLocalPathList(rnd)),
spy(CheckpointTestUtils.createDummyStreamStateHandle(rnd, null)));
}
private static IncrementalRemoteKeyedStateHandle create(Random rnd, long checkpointedSize) {
return new IncrementalRemoteKeyedStateHandle(
UUID.nameUUIDFromBytes("test".getBytes()),
KeyGroupRange.of(0, 0),
1L,
// not place spies on shared state handle
CheckpointTestUtils.createRandomHandleAndLocalPathList(rnd),
placeSpies(CheckpointTestUtils.createRandomHandleAndLocalPathList(rnd)),
spy(CheckpointTestUtils.createDummyStreamStateHandle(rnd, null)),
checkpointedSize);
}
private static List<HandleAndLocalPath> placeSpies(List<HandleAndLocalPath> list) {
return list.stream()
.map(e -> HandleAndLocalPath.of(spy(e.getHandle()), e.getLocalPath()))
.collect(Collectors.toList());
}
}
| IncrementalRemoteKeyedStateHandleTest |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/RequestMatcherRedirectFilterTests.java | {
"start": 1476,
"end": 3562
} | class ____ {
@Test
public void doFilterWhenRequestMatchThenRedirectToSpecifiedUrl() throws Exception {
RequestMatcherRedirectFilter filter = new RequestMatcherRedirectFilter(pathPattern("/context"), "/test");
MockHttpServletRequest request = get("/context").build();
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
filter.doFilter(request, response, filterChain);
assertThat(response.getStatus()).isEqualTo(HttpStatus.FOUND.value());
assertThat(response.getRedirectedUrl()).isEqualTo("/test");
verifyNoInteractions(filterChain);
}
@Test
public void doFilterWhenRequestNotMatchThenNextFilter() throws Exception {
RequestMatcherRedirectFilter filter = new RequestMatcherRedirectFilter(pathPattern("/context"), "/test");
MockHttpServletRequest request = get("/test").build();
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
filter.doFilter(request, response, filterChain);
assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
verify(filterChain).doFilter(request, response);
}
@Test
public void constructWhenRequestMatcherNull() {
assertThatIllegalArgumentException().isThrownBy(() -> new RequestMatcherRedirectFilter(null, "/test"))
.withMessage("requestMatcher cannot be null");
}
@Test
public void constructWhenRedirectUrlNull() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new RequestMatcherRedirectFilter(pathPattern("/**"), null))
.withMessage("redirectUrl cannot be empty");
}
@Test
public void constructWhenRedirectUrlEmpty() {
assertThatIllegalArgumentException().isThrownBy(() -> new RequestMatcherRedirectFilter(pathPattern("/**"), ""))
.withMessage("redirectUrl cannot be empty");
}
@Test
public void constructWhenRedirectUrlBlank() {
assertThatIllegalArgumentException().isThrownBy(() -> new RequestMatcherRedirectFilter(pathPattern("/**"), " "))
.withMessage("redirectUrl cannot be empty");
}
}
| RequestMatcherRedirectFilterTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/health/node/HealthIndicatorDisplayValues.java | {
"start": 1290,
"end": 7785
} | class ____ {
/**
* Formats the display name of a discovery node in the following way:
* - [id][name] or
* - [id] if name is null
*/
public static String getNodeName(DiscoveryNode node) {
if (node.getName() != null) {
return String.format(Locale.ROOT, "[%s][%s]", node.getId(), node.getName());
}
return String.format(Locale.ROOT, "[%s]", node.getId());
}
/**
* Creates a string that displays max 10 indices from the given set to be used as examples in
* logging or user messages. The indices are sorted by priority and then by name to ensure a
* deterministic message. If there are more indices than 10, it adds the '...' suffix.
*/
@Deprecated
public static String getTruncatedIndices(Set<String> indices, Metadata clusterMetadata) {
final int maxIndices = 10;
String truncatedIndicesString = indices.stream()
.sorted(indicesComparatorByPriorityAndName(clusterMetadata))
.limit(maxIndices)
.collect(joining(", "));
if (maxIndices < indices.size()) {
truncatedIndicesString = truncatedIndicesString + ", ...";
}
return truncatedIndicesString;
}
/**
* Creates a string that displays max 10 indices from the given set to be used as examples in
* logging or user messages. The indices are sorted by priority and then by name to ensure a
* deterministic message. If there are more indices than 10, it adds the '...' suffix.
*/
public static String getTruncatedProjectIndices(
Set<ProjectIndexName> indices,
Metadata clusterMetadata,
boolean supportsMultipleProjects
) {
final int maxIndices = 10;
String truncatedIndicesString = indices.stream()
.sorted(indicesComparatorByPriorityAndProjectIndex(clusterMetadata, supportsMultipleProjects))
.limit(maxIndices)
.map(projectIndexName -> projectIndexName.toString(supportsMultipleProjects))
.collect(joining(", "));
if (maxIndices < indices.size()) {
truncatedIndicesString = truncatedIndicesString + ", ...";
}
return truncatedIndicesString;
}
/**
* Creates a string that displays all the values that fulfilled the predicate sorted in the natural order.
* @param values, the values to be displayed
* @param predicate, the predicated by which all values will be filtered
* @param toString, the desired way to convert the type 'T' to string for the purpose of this message.
*/
public static <T> String getSortedUniqueValuesString(Collection<T> values, Predicate<T> predicate, Function<T, String> toString) {
return values.stream().filter(predicate).map(toString).distinct().sorted().collect(Collectors.joining(", "));
}
/**
* Creates a string that displays all the values sorted in the natural order.
* @param values, the values to be displayed
* @param toString, the desired way to convert the type 'T' to string for the purpose of this message.
*/
public static <T> String getSortedUniqueValuesString(Collection<T> values, Function<T, String> toString) {
return values.stream().map(toString).distinct().sorted().collect(Collectors.joining(", "));
}
/**
* Provides the correct form (singular or plural) of the word index depending on the given count.
*/
public static String indices(int count) {
return count == 1 ? "index" : "indices";
}
/**
* Provides the correct form (singular or plural) of the verb to be depending on the given count.
*/
public static String are(int count) {
return count == 1 ? "is" : "are";
}
/**
* Provides the correct form (singular or plural) of the word this depending on the given count.
*/
public static String these(int count) {
return count == 1 ? "this" : "these";
}
/**
* Provides the correct form (singular or plural) of a regular noun depending on the given count.
*/
public static String regularNoun(String noun, int count) {
return count == 1 ? noun : noun + "s";
}
/**
* Provides the correct form (singular or plural) of a regular verb depending on the given count.
*/
public static String regularVerb(String verb, int count) {
return count == 1 ? verb + "s" : verb;
}
/**
* Sorts index names by their priority first, then alphabetically by name. If the priority cannot be determined for an index then
* a priority of -1 is used to sort it behind other index names.
* @param clusterMetadata Used to look up index priority.
* @return Comparator instance
*/
@Deprecated
public static Comparator<String> indicesComparatorByPriorityAndName(Metadata clusterMetadata) {
// We want to show indices with a numerically higher index.priority first (since lower priority ones might get truncated):
return Comparator.comparingInt((String indexName) -> {
IndexMetadata indexMetadata = clusterMetadata.getProject().index(indexName);
return indexMetadata == null ? -1 : indexMetadata.priority();
}).reversed().thenComparing(Comparator.naturalOrder());
}
/**
* Sorts index names by their priority first, then alphabetically by name. If the priority cannot be determined for an index then
* a priority of -1 is used to sort it behind other index names.
* @param clusterMetadata Used to look up index priority.
* @param supportsMultipleProjects Whether cluster supports multi-project
* @return Comparator instance
*/
public static Comparator<ProjectIndexName> indicesComparatorByPriorityAndProjectIndex(
Metadata clusterMetadata,
boolean supportsMultipleProjects
) {
// We want to show indices with a numerically higher index.priority first (since lower priority ones might get truncated):
return Comparator.comparingInt((ProjectIndexName projectIndexName) -> {
ProjectMetadata projectMetadata = clusterMetadata.getProject(projectIndexName.projectId());
IndexMetadata indexMetadata = projectMetadata.index(projectIndexName.indexName());
return indexMetadata == null ? -1 : indexMetadata.priority();
}).reversed().thenComparing(projectIndex -> projectIndex.toString(supportsMultipleProjects));
}
}
| HealthIndicatorDisplayValues |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/LastValueWithRetractAggFunctionWithOrderTest.java | {
"start": 4901,
"end": 5445
} | class ____
extends NumberLastValueWithRetractAggFunctionWithOrderTestBase<Float> {
@Override
protected Float getValue(String v) {
return Float.valueOf(v);
}
@Override
protected AggregateFunction<Float, LastValueWithRetractAccumulator<Float>> getAggregator() {
return new LastValueWithRetractAggFunction<>(DataTypes.FLOAT().getLogicalType());
}
}
/** Test for {@link DoubleType}. */
@Nested
final | FloatLastValueWithRetractAggFunctionWithOrderTest |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-metadata/src/main/java/org/springframework/boot/configurationmetadata/ValueProvider.java | {
"start": 1113,
"end": 1706
} | class ____ implements Serializable {
private String name;
private final Map<String, Object> parameters = new LinkedHashMap<>();
/**
* Return the name of the provider.
* @return the name
*/
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
/**
* Return the parameters.
* @return the parameters
*/
public Map<String, Object> getParameters() {
return this.parameters;
}
@Override
public String toString() {
return "ValueProvider{name='" + this.name + ", parameters=" + this.parameters + '}';
}
}
| ValueProvider |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/AbortingRequestFilterTest.java | {
"start": 505,
"end": 1180
} | class ____ {
@RegisterExtension
static QuarkusUnitTest testExtension = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
JavaArchive war = ShrinkWrap.create(JavaArchive.class);
war.addClasses(ClassLevelMediaTypeResource.class, AbortingRequestFilter.class);
return war;
}
});
@Test
public void testAbortingRequestFilter() {
RestAssured.get("/test")
.then().body(Matchers.equalTo("aborted"))
.statusCode(555);
}
}
| AbortingRequestFilterTest |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/annotation/AnnotationTransactionAttributeSourceTests.java | {
"start": 21443,
"end": 21933
} | class ____ implements ITestBean1 {
private String name;
private int age;
public Empty() {
}
public Empty(String name, int age) {
this.name = name;
this.age = age;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public int getAge() {
return age;
}
@Override
public void setAge(int age) {
this.age = age;
}
}
@SuppressWarnings("serial")
static | Empty |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/write/WriteObjectFlags.java | {
"start": 1487,
"end": 2465
} | enum ____ {
ConditionalOverwrite(FS_OPTION_CREATE_CONDITIONAL_OVERWRITE),
ConditionalOverwriteEtag(FS_OPTION_CREATE_CONDITIONAL_OVERWRITE_ETAG),
CreateMultipart(FS_S3A_CREATE_MULTIPART),
Performance(FS_S3A_CREATE_PERFORMANCE),
Recursive("");
/** Configuration key, or "" if not configurable. */
private final String key;
/**
* Constructor.
* @param key key configuration key, or "" if not configurable.
*/
WriteObjectFlags(final String key) {
this.key = key;
}
/**
* does the configuration contain this option as a boolean?
* @param options options to scan
* @return true if this is defined as a boolean
*/
public boolean isEnabled(Configuration options) {
return options.getBoolean(key, false);
}
/**
* Does the key of this option match the parameter?
* @param k key
* @return true if there is a match.
*/
public boolean hasKey(String k) {
return !key.isEmpty() && key.equals(k);
}
}
| WriteObjectFlags |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/watch/Watch.java | {
"start": 1191,
"end": 6651
} | class ____ implements ToXContentObject {
public static final String INCLUDE_STATUS_KEY = "include_status";
public static final String INDEX = ".watches";
private final String id;
private final Trigger trigger;
private final ExecutableInput<? extends Input, ? extends Input.Result> input;
private final ExecutableCondition condition;
@Nullable
private final ExecutableTransform<? extends Transform, ? extends Transform.Result> transform;
private final List<ActionWrapper> actions;
@Nullable
private final TimeValue throttlePeriod;
@Nullable
private final Map<String, Object> metadata;
private final WatchStatus status;
private final long sourceSeqNo;
private final long sourcePrimaryTerm;
public Watch(
String id,
Trigger trigger,
ExecutableInput<? extends Input, ? extends Input.Result> input,
ExecutableCondition condition,
@Nullable ExecutableTransform<? extends Transform, ? extends Transform.Result> transform,
@Nullable TimeValue throttlePeriod,
List<ActionWrapper> actions,
@Nullable Map<String, Object> metadata,
WatchStatus status,
long sourceSeqNo,
long sourcePrimaryTerm
) {
this.id = id;
this.trigger = trigger;
this.input = input;
this.condition = condition;
this.transform = transform;
this.actions = actions;
this.throttlePeriod = throttlePeriod;
this.metadata = metadata;
this.status = status;
this.sourceSeqNo = sourceSeqNo;
this.sourcePrimaryTerm = sourcePrimaryTerm;
}
public String id() {
return id;
}
public Trigger trigger() {
return trigger;
}
public ExecutableInput<? extends Input, ? extends Input.Result> input() {
return input;
}
public ExecutableCondition condition() {
return condition;
}
public ExecutableTransform<? extends Transform, ? extends Transform.Result> transform() {
return transform;
}
public TimeValue throttlePeriod() {
return throttlePeriod;
}
public List<ActionWrapper> actions() {
return actions;
}
public Map<String, Object> metadata() {
return metadata;
}
public WatchStatus status() {
return status;
}
/**
* The sequence number of the document that was used to create this watch, {@link SequenceNumbers#UNASSIGNED_SEQ_NO}
* if the watch wasn't read from a document
***/
public long getSourceSeqNo() {
return sourceSeqNo;
}
/**
* The primary term of the document that was used to create this watch, {@link SequenceNumbers#UNASSIGNED_PRIMARY_TERM}
* if the watch wasn't read from a document
***/
public long getSourcePrimaryTerm() {
return sourcePrimaryTerm;
}
/**
* Sets the state of this watch to in/active
*
* @return {@code true} if the status of this watch changed, {@code false} otherwise.
*/
public boolean setState(boolean active, ZonedDateTime now) {
return status.setActive(active, now);
}
/**
* Acks this watch.
*
* @return {@code true} if the status of this watch changed, {@code false} otherwise.
*/
public boolean ack(ZonedDateTime now, String... actionIds) {
return status.onAck(now, actionIds);
}
public boolean acked(String actionId) {
ActionStatus actionStatus = status.actionStatus(actionId);
return actionStatus.ackStatus().state() == ActionStatus.AckStatus.State.ACKED;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Watch watch = (Watch) o;
return watch.id.equals(id);
}
@Override
public int hashCode() {
return id.hashCode();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(WatchField.TRIGGER.getPreferredName()).startObject().field(trigger.type(), trigger, params).endObject();
builder.field(WatchField.INPUT.getPreferredName()).startObject().field(input.type(), input, params).endObject();
builder.field(WatchField.CONDITION.getPreferredName()).startObject().field(condition.type(), condition, params).endObject();
if (transform != null) {
builder.field(WatchField.TRANSFORM.getPreferredName()).startObject().field(transform.type(), transform, params).endObject();
}
if (throttlePeriod != null) {
builder.humanReadableField(
WatchField.THROTTLE_PERIOD.getPreferredName(),
WatchField.THROTTLE_PERIOD_HUMAN.getPreferredName(),
throttlePeriod
);
}
builder.startObject(WatchField.ACTIONS.getPreferredName());
for (ActionWrapper action : actions) {
builder.field(action.id(), action, params);
}
builder.endObject();
if (metadata != null) {
builder.field(WatchField.METADATA.getPreferredName(), metadata);
}
if (params.paramAsBoolean(INCLUDE_STATUS_KEY, false)) {
builder.field(WatchField.STATUS.getPreferredName(), status, params);
}
builder.endObject();
return builder;
}
}
| Watch |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArrays.java | {
"start": 6951,
"end": 9417
} | class ____ extends AbstractMemoryTrackingArray implements TDigestIntArray {
static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MemoryTrackingTDigestIntArray.class);
private int[] array;
private int size;
public MemoryTrackingTDigestIntArray(CircuitBreaker breaker, int initialSize) {
this(breaker, new int[initialSize]);
}
public MemoryTrackingTDigestIntArray(CircuitBreaker breaker, int[] array) {
super(breaker);
this.array = array;
this.size = array.length;
}
public static long estimatedRamBytesUsed(int size) {
return SHALLOW_SIZE + estimatedArraySize(size, Integer.BYTES);
}
@Override
public long ramBytesUsed() {
return estimatedRamBytesUsed(array.length);
}
@Override
public int size() {
return size;
}
@Override
public int get(int index) {
assert index >= 0 && index < size;
return array[index];
}
@Override
public void set(int index, int value) {
assert index >= 0 && index < size;
array[index] = value;
}
@Override
public void resize(int newSize) {
ensureCapacity(newSize);
if (newSize > size) {
Arrays.fill(array, size, newSize, 0);
}
size = newSize;
}
private void ensureCapacity(int requiredCapacity) {
if (requiredCapacity > array.length) {
int[] oldArray = array;
// Used for used bytes assertion
long oldRamBytesUsed = ramBytesUsed();
long oldArraySize = RamUsageEstimator.sizeOf(oldArray);
int newSize = ArrayUtil.oversize(requiredCapacity, Integer.BYTES);
long newArraySize = estimatedArraySize(newSize, Integer.BYTES);
breaker.addEstimateBytesAndMaybeBreak(newArraySize, "tdigest-new-capacity-int-array");
array = Arrays.copyOf(array, newSize);
breaker.addWithoutBreaking(-RamUsageEstimator.sizeOf(oldArray));
assert ramBytesUsed() - oldRamBytesUsed == newArraySize - oldArraySize
: "ramBytesUsed() should be aligned with manual array calculations";
}
}
}
public static | MemoryTrackingTDigestIntArray |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/MapperConfig.java | {
"start": 2346,
"end": 2536
} | interface ____ {
* // ...
* }
* </code></pre>
*
* @author Sjaak Derksen
* @see Mapper#config()
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.CLASS)
public @ | SourceTargetMapper |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java | {
"start": 2495,
"end": 2752
} | class ____ extends FileOutputCommitter {
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
// does nothing
}
}
/**
* Special committer that always requires commit.
*/
static | CommitterWithoutCleanup |
java | netty__netty | transport-native-io_uring/src/test/java/io/netty/channel/uring/IoUringDatagramConnectNotExistsTest.java | {
"start": 970,
"end": 1331
} | class ____ extends DatagramConnectNotExistsTest {
@BeforeAll
public static void loadJNI() {
assumeTrue(IoUring.isAvailable());
}
@Override
protected List<TestsuitePermutation.BootstrapFactory<Bootstrap>> newFactories() {
return IoUringSocketTestPermutation.INSTANCE.datagramSocket();
}
}
| IoUringDatagramConnectNotExistsTest |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/interceptor/TransactionAttributeSourceAdvisorTests.java | {
"start": 849,
"end": 1202
} | class ____ {
@Test
void serializability() throws Exception {
TransactionInterceptor ti = new TransactionInterceptor();
ti.setTransactionAttributes(new Properties());
TransactionAttributeSourceAdvisor tas = new TransactionAttributeSourceAdvisor(ti);
SerializationTestUtils.serializeAndDeserialize(tas);
}
}
| TransactionAttributeSourceAdvisorTests |
java | elastic__elasticsearch | x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/GetInferenceFieldsIT.java | {
"start": 2441,
"end": 25116
} | class ____ extends ESIntegTestCase {
private static final Map<String, Object> SPARSE_EMBEDDING_SERVICE_SETTINGS = Map.of("model", "my_model", "api_key", "my_api_key");
private static final Map<String, Object> TEXT_EMBEDDING_SERVICE_SETTINGS = Map.of(
"model",
"my_model",
"dimensions",
256,
"similarity",
"cosine",
"api_key",
"my_api_key"
);
private static final String SPARSE_EMBEDDING_INFERENCE_ID = "sparse-embedding-id";
private static final String TEXT_EMBEDDING_INFERENCE_ID = "text-embedding-id";
private static final String INDEX_1 = "index-1";
private static final String INDEX_2 = "index-2";
private static final Set<String> ALL_INDICES = Set.of(INDEX_1, INDEX_2);
private static final String INDEX_ALIAS = "index-alias";
private static final String INFERENCE_FIELD_1 = "inference-field-1";
private static final String INFERENCE_FIELD_2 = "inference-field-2";
private static final String INFERENCE_FIELD_3 = "inference-field-3";
private static final String INFERENCE_FIELD_4 = "inference-field-4";
private static final String TEXT_FIELD_1 = "text-field-1";
private static final String TEXT_FIELD_2 = "text-field-2";
private static final Map<String, Float> ALL_FIELDS = Collections.unmodifiableMap(
generateDefaultWeightFieldMap(
Set.of(INFERENCE_FIELD_1, INFERENCE_FIELD_2, INFERENCE_FIELD_3, INFERENCE_FIELD_4, TEXT_FIELD_1, TEXT_FIELD_2)
)
);
private static final Set<InferenceFieldWithTestMetadata> INDEX_1_EXPECTED_INFERENCE_FIELDS = Set.of(
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, SPARSE_EMBEDDING_INFERENCE_ID, 1.0f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_2, TEXT_EMBEDDING_INFERENCE_ID, 1.0f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_3, SPARSE_EMBEDDING_INFERENCE_ID, 1.0f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_4, TEXT_EMBEDDING_INFERENCE_ID, 1.0f)
);
private static final Set<InferenceFieldWithTestMetadata> INDEX_2_EXPECTED_INFERENCE_FIELDS = Set.of(
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, TEXT_EMBEDDING_INFERENCE_ID, 1.0f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_2, SPARSE_EMBEDDING_INFERENCE_ID, 1.0f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_3, SPARSE_EMBEDDING_INFERENCE_ID, 1.0f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_4, TEXT_EMBEDDING_INFERENCE_ID, 1.0f)
);
private static final Map<String, Class<? extends InferenceResults>> ALL_EXPECTED_INFERENCE_RESULTS = Map.of(
SPARSE_EMBEDDING_INFERENCE_ID,
TextExpansionResults.class,
TEXT_EMBEDDING_INFERENCE_ID,
MlDenseEmbeddingResults.class
);
private boolean clusterConfigured = false;
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder().put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial").build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(LocalStateInferencePlugin.class, TestInferenceServicePlugin.class, FakeMlPlugin.class);
}
@Before
public void setUpCluster() throws Exception {
if (clusterConfigured == false) {
createInferenceEndpoints();
createTestIndices();
clusterConfigured = true;
}
}
public void testNullQuery() {
explicitIndicesAndFieldsTestCase(null);
}
public void testNonNullQuery() {
explicitIndicesAndFieldsTestCase("foo");
}
public void testBlankQuery() {
explicitIndicesAndFieldsTestCase(" ");
}
public void testFieldWeight() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(
ALL_INDICES,
Map.of(INFERENCE_FIELD_1, 2.0f, "inference-*", 1.5f, TEXT_FIELD_1, 1.75f),
false,
false,
"foo"
),
Map.of(
INDEX_1,
Set.of(new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, SPARSE_EMBEDDING_INFERENCE_ID, 2.0f)),
INDEX_2,
Set.of(new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, TEXT_EMBEDDING_INFERENCE_ID, 2.0f))
),
ALL_EXPECTED_INFERENCE_RESULTS
);
}
public void testNoInferenceFields() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(
ALL_INDICES,
generateDefaultWeightFieldMap(Set.of(TEXT_FIELD_1, TEXT_FIELD_2)),
false,
false,
"foo"
),
Map.of(INDEX_1, Set.of(), INDEX_2, Set.of()),
Map.of()
);
}
public void testResolveFieldWildcards() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(ALL_INDICES, generateDefaultWeightFieldMap(Set.of("*")), true, false, "foo"),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS, INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(
ALL_INDICES,
Map.of("*-field-1", 2.0f, "*-1", 1.75f, "inference-*-3", 2.0f),
true,
false,
"foo"
),
Map.of(
INDEX_1,
Set.of(
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, SPARSE_EMBEDDING_INFERENCE_ID, 3.5f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_3, SPARSE_EMBEDDING_INFERENCE_ID, 2.0f)
),
INDEX_2,
Set.of(
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, TEXT_EMBEDDING_INFERENCE_ID, 3.5f),
new InferenceFieldWithTestMetadata(INFERENCE_FIELD_3, SPARSE_EMBEDDING_INFERENCE_ID, 2.0f)
)
),
ALL_EXPECTED_INFERENCE_RESULTS
);
}
public void testUseDefaultFields() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of(INDEX_1), Map.of(), true, true, "foo"),
Map.of(INDEX_1, Set.of(new InferenceFieldWithTestMetadata(INFERENCE_FIELD_1, SPARSE_EMBEDDING_INFERENCE_ID, 5.0f))),
filterExpectedInferenceResults(ALL_EXPECTED_INFERENCE_RESULTS, Set.of(SPARSE_EMBEDDING_INFERENCE_ID))
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of(INDEX_2), Map.of(), true, true, "foo"),
Map.of(INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
}
public void testMissingIndexName() {
Set<String> indicesWithIndex1 = Set.of(INDEX_1, "missing-index");
assertFailedRequest(
new GetInferenceFieldsAction.Request(indicesWithIndex1, ALL_FIELDS, false, false, "foo"),
IndexNotFoundException.class,
e -> assertThat(e.getMessage(), containsString("no such index [missing-index]"))
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(indicesWithIndex1, ALL_FIELDS, false, false, "foo", IndicesOptions.LENIENT_EXPAND_OPEN),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
Set<String> indicesWithoutIndex1 = Set.of("missing-index");
assertFailedRequest(
new GetInferenceFieldsAction.Request(indicesWithoutIndex1, ALL_FIELDS, false, false, "foo"),
IndexNotFoundException.class,
e -> assertThat(e.getMessage(), containsString("no such index [missing-index]"))
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(indicesWithoutIndex1, ALL_FIELDS, false, false, "foo", IndicesOptions.LENIENT_EXPAND_OPEN),
Map.of(),
Map.of()
);
}
public void testMissingFieldName() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(ALL_INDICES, generateDefaultWeightFieldMap(Set.of("missing-field")), false, false, "foo"),
Map.of(INDEX_1, Set.of(), INDEX_2, Set.of()),
Map.of()
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(ALL_INDICES, generateDefaultWeightFieldMap(Set.of("missing-*")), true, false, "foo"),
Map.of(INDEX_1, Set.of(), INDEX_2, Set.of()),
Map.of()
);
}
public void testNoIndices() {
// By default, an empty index set will be interpreted as _all
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of(), ALL_FIELDS, false, false, "foo"),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS, INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
// We can provide an IndicesOptions that changes this behavior to interpret an empty index set as no indices
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of(), ALL_FIELDS, false, false, "foo", IndicesOptions.STRICT_NO_EXPAND_FORBID_CLOSED),
Map.of(),
Map.of()
);
}
public void testAllIndices() {
// By default, _all expands to all indices
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of("_all"), ALL_FIELDS, false, false, "foo"),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS, INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
// We can provide an IndicesOptions that changes this behavior to interpret it as no indices
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(
Set.of("_all"),
ALL_FIELDS,
false,
false,
"foo",
IndicesOptions.STRICT_NO_EXPAND_FORBID_CLOSED
),
Map.of(),
Map.of()
);
}
public void testIndexAlias() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of(INDEX_ALIAS), ALL_FIELDS, false, false, "foo"),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS, INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
}
public void testResolveIndexWildcards() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of("index-*"), ALL_FIELDS, false, false, "foo"),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS, INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(Set.of("*-1"), ALL_FIELDS, false, false, "foo"),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS),
ALL_EXPECTED_INFERENCE_RESULTS
);
assertFailedRequest(
new GetInferenceFieldsAction.Request(
Set.of("index-*"),
ALL_FIELDS,
false,
false,
"foo",
IndicesOptions.STRICT_NO_EXPAND_FORBID_CLOSED
),
IndexNotFoundException.class,
e -> assertThat(e.getMessage(), containsString("no such index [index-*]"))
);
}
public void testNoFields() {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(ALL_INDICES, Map.of(), false, false, "foo"),
Map.of(INDEX_1, Set.of(), INDEX_2, Set.of()),
Map.of()
);
}
public void testInvalidRequest() {
final BiConsumer<ActionRequestValidationException, List<String>> validator = (e, l) -> l.forEach(
s -> assertThat(e.getMessage(), containsString(s))
);
assertFailedRequest(
new GetInferenceFieldsAction.Request(null, Map.of(), false, false, null),
ActionRequestValidationException.class,
e -> validator.accept(e, List.of("indices must not be null"))
);
assertFailedRequest(
new GetInferenceFieldsAction.Request(Set.of(), null, false, false, null),
ActionRequestValidationException.class,
e -> validator.accept(e, List.of("fields must not be null"))
);
assertFailedRequest(
new GetInferenceFieldsAction.Request(null, null, false, false, null),
ActionRequestValidationException.class,
e -> validator.accept(e, List.of("indices must not be null", "fields must not be null"))
);
Map<String, Float> fields = new HashMap<>();
fields.put(INFERENCE_FIELD_1, null);
assertFailedRequest(
new GetInferenceFieldsAction.Request(Set.of(), fields, false, false, null),
ActionRequestValidationException.class,
e -> validator.accept(e, List.of("weight for field [" + INFERENCE_FIELD_1 + "] must not be null"))
);
}
private void explicitIndicesAndFieldsTestCase(String query) {
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(ALL_INDICES, ALL_FIELDS, false, false, query),
Map.of(INDEX_1, INDEX_1_EXPECTED_INFERENCE_FIELDS, INDEX_2, INDEX_2_EXPECTED_INFERENCE_FIELDS),
query == null || query.isBlank() ? Map.of() : ALL_EXPECTED_INFERENCE_RESULTS
);
Map<String, Class<? extends InferenceResults>> expectedInferenceResultsSparseOnly = filterExpectedInferenceResults(
ALL_EXPECTED_INFERENCE_RESULTS,
Set.of(SPARSE_EMBEDDING_INFERENCE_ID)
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(
ALL_INDICES,
generateDefaultWeightFieldMap(Set.of(INFERENCE_FIELD_3)),
false,
false,
query
),
Map.of(
INDEX_1,
filterExpectedInferenceFieldSet(INDEX_1_EXPECTED_INFERENCE_FIELDS, Set.of(INFERENCE_FIELD_3)),
INDEX_2,
filterExpectedInferenceFieldSet(INDEX_2_EXPECTED_INFERENCE_FIELDS, Set.of(INFERENCE_FIELD_3))
),
query == null || query.isBlank() ? Map.of() : expectedInferenceResultsSparseOnly
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(
Set.of(INDEX_1),
generateDefaultWeightFieldMap(Set.of(INFERENCE_FIELD_3)),
false,
false,
query
),
Map.of(INDEX_1, filterExpectedInferenceFieldSet(INDEX_1_EXPECTED_INFERENCE_FIELDS, Set.of(INFERENCE_FIELD_3))),
query == null || query.isBlank() ? Map.of() : expectedInferenceResultsSparseOnly
);
assertSuccessfulRequest(
new GetInferenceFieldsAction.Request(ALL_INDICES, generateDefaultWeightFieldMap(Set.of("*")), false, false, query),
Map.of(INDEX_1, Set.of(), INDEX_2, Set.of()),
Map.of()
);
}
private void createInferenceEndpoints() throws IOException {
createInferenceEndpoint(client(), TaskType.SPARSE_EMBEDDING, SPARSE_EMBEDDING_INFERENCE_ID, SPARSE_EMBEDDING_SERVICE_SETTINGS);
createInferenceEndpoint(client(), TaskType.TEXT_EMBEDDING, TEXT_EMBEDDING_INFERENCE_ID, TEXT_EMBEDDING_SERVICE_SETTINGS);
}
private void createTestIndices() throws IOException {
createTestIndex(INDEX_1, List.of("*-field-1^5"));
createTestIndex(INDEX_2, null);
assertAcked(
indicesAdmin().prepareAliases(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
.addAlias(new String[] { INDEX_1, INDEX_2 }, INDEX_ALIAS)
);
}
private void createTestIndex(String indexName, @Nullable List<String> defaultFields) throws IOException {
final String inferenceField1InferenceId = switch (indexName) {
case INDEX_1 -> SPARSE_EMBEDDING_INFERENCE_ID;
case INDEX_2 -> TEXT_EMBEDDING_INFERENCE_ID;
default -> throw new AssertionError("Unhandled index name [" + indexName + "]");
};
final String inferenceField2InferenceId = switch (indexName) {
case INDEX_1 -> TEXT_EMBEDDING_INFERENCE_ID;
case INDEX_2 -> SPARSE_EMBEDDING_INFERENCE_ID;
default -> throw new AssertionError("Unhandled index name [" + indexName + "]");
};
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties");
addSemanticTextField(INFERENCE_FIELD_1, inferenceField1InferenceId, mapping);
addSemanticTextField(INFERENCE_FIELD_2, inferenceField2InferenceId, mapping);
addSemanticTextField(INFERENCE_FIELD_3, SPARSE_EMBEDDING_INFERENCE_ID, mapping);
addSemanticTextField(INFERENCE_FIELD_4, TEXT_EMBEDDING_INFERENCE_ID, mapping);
addTextField(TEXT_FIELD_1, mapping);
addTextField(TEXT_FIELD_2, mapping);
mapping.endObject().endObject();
var createIndexRequest = prepareCreate(indexName).setMapping(mapping);
if (defaultFields != null) {
Settings settings = Settings.builder().putList(DEFAULT_FIELD_SETTING.getKey(), defaultFields).build();
createIndexRequest.setSettings(settings);
}
assertAcked(createIndexRequest);
}
private void addSemanticTextField(String fieldName, String inferenceId, XContentBuilder mapping) throws IOException {
mapping.startObject(fieldName);
mapping.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
mapping.field("inference_id", inferenceId);
mapping.endObject();
}
private void addTextField(String fieldName, XContentBuilder mapping) throws IOException {
mapping.startObject(fieldName);
mapping.field("type", TextFieldMapper.CONTENT_TYPE);
mapping.endObject();
}
private static GetInferenceFieldsAction.Response executeRequest(GetInferenceFieldsAction.Request request) {
return client().execute(GetInferenceFieldsAction.INSTANCE, request).actionGet(TEST_REQUEST_TIMEOUT);
}
private static void assertSuccessfulRequest(
GetInferenceFieldsAction.Request request,
Map<String, Set<InferenceFieldWithTestMetadata>> expectedInferenceFields,
Map<String, Class<? extends InferenceResults>> expectedInferenceResults
) {
var response = executeRequest(request);
assertInferenceFieldsMap(response.getInferenceFieldsMap(), expectedInferenceFields);
assertInferenceResultsMap(response.getInferenceResultsMap(), expectedInferenceResults);
}
private static <T extends Exception> void assertFailedRequest(
GetInferenceFieldsAction.Request request,
Class<T> expectedException,
Consumer<T> exceptionValidator
) {
T exception = assertThrows(expectedException, () -> executeRequest(request));
exceptionValidator.accept(exception);
}
static void assertInferenceFieldsMap(
Map<String, List<GetInferenceFieldsAction.ExtendedInferenceFieldMetadata>> inferenceFieldsMap,
Map<String, Set<InferenceFieldWithTestMetadata>> expectedInferenceFields
) {
assertThat(inferenceFieldsMap.size(), equalTo(expectedInferenceFields.size()));
for (var entry : inferenceFieldsMap.entrySet()) {
String indexName = entry.getKey();
List<GetInferenceFieldsAction.ExtendedInferenceFieldMetadata> indexInferenceFields = entry.getValue();
Set<InferenceFieldWithTestMetadata> expectedIndexInferenceFields = expectedInferenceFields.get(indexName);
assertThat(expectedIndexInferenceFields, notNullValue());
Set<InferenceFieldWithTestMetadata> remainingExpectedIndexInferenceFields = new HashSet<>(expectedIndexInferenceFields);
for (var indexInferenceField : indexInferenceFields) {
InferenceFieldWithTestMetadata inferenceFieldWithTestMetadata = new InferenceFieldWithTestMetadata(
indexInferenceField.inferenceFieldMetadata().getName(),
indexInferenceField.inferenceFieldMetadata().getSearchInferenceId(),
indexInferenceField.weight()
);
assertThat(remainingExpectedIndexInferenceFields.remove(inferenceFieldWithTestMetadata), is(true));
}
assertThat(remainingExpectedIndexInferenceFields, empty());
}
}
static void assertInferenceResultsMap(
Map<String, InferenceResults> inferenceResultsMap,
Map<String, Class<? extends InferenceResults>> expectedInferenceResults
) {
assertThat(inferenceResultsMap.size(), equalTo(expectedInferenceResults.size()));
for (var entry : inferenceResultsMap.entrySet()) {
String inferenceId = entry.getKey();
InferenceResults inferenceResults = entry.getValue();
Class<? extends InferenceResults> expectedInferenceResultsClass = expectedInferenceResults.get(inferenceId);
assertThat(expectedInferenceResultsClass, notNullValue());
assertThat(inferenceResults, instanceOf(expectedInferenceResultsClass));
}
}
static Map<String, Float> generateDefaultWeightFieldMap(Set<String> fieldList) {
Map<String, Float> fieldMap = new HashMap<>();
fieldList.forEach(field -> fieldMap.put(field, 1.0f));
return fieldMap;
}
private static Set<InferenceFieldWithTestMetadata> filterExpectedInferenceFieldSet(
Set<InferenceFieldWithTestMetadata> inferenceFieldSet,
Set<String> fieldNames
) {
return inferenceFieldSet.stream().filter(i -> fieldNames.contains(i.field())).collect(Collectors.toSet());
}
private static Map<String, Class<? extends InferenceResults>> filterExpectedInferenceResults(
Map<String, Class<? extends InferenceResults>> expectedInferenceResults,
Set<String> inferenceIds
) {
return expectedInferenceResults.entrySet()
.stream()
.filter(e -> inferenceIds.contains(e.getKey()))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
record InferenceFieldWithTestMetadata(String field, String inferenceId, float weight) {}
}
| GetInferenceFieldsIT |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/boot/model/PluralAttribute.java | {
"start": 242,
"end": 291
} | interface ____ extends Attribute {
}
| PluralAttribute |
java | apache__maven | compat/maven-settings/src/main/java/org/apache/maven/settings/io/xpp3/SettingsXpp3Writer.java | {
"start": 1277,
"end": 2594
} | class ____ {
private final SettingsStaxWriter delegate;
public SettingsXpp3Writer() {
delegate = new SettingsStaxWriter();
delegate.setAddLocationInformation(false);
}
/**
* Method setFileComment.
*
* @param fileComment a fileComment object.
*/
public void setFileComment(String fileComment) {
delegate.setFileComment(fileComment);
}
/**
* Method write.
*
* @param writer a writer object.
* @param settings a settings object.
* @throws IOException java.io.IOException if any.
*/
public void write(Writer writer, Settings settings) throws IOException {
try {
delegate.write(writer, settings.getDelegate());
} catch (XMLStreamException e) {
throw new IOException("Error writing settings to " + writer, e);
}
}
/**
* Method write.
*
* @param stream a stream object.
* @param settings a settings object.
* @throws IOException java.io.IOException if any.
*/
public void write(OutputStream stream, Settings settings) throws IOException {
try {
delegate.write(stream, settings.getDelegate());
} catch (XMLStreamException e) {
throw new IOException(e);
}
}
}
| SettingsXpp3Writer |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/NullCheckTest.java | {
"start": 139,
"end": 387
} | class ____ extends TestCase {
public void test_0() throws Exception {
Assert.assertEquals(null, JSON.parse(null));
Assert.assertEquals(null, JSON.parse(""));
Assert.assertEquals(null, JSON.parse(" "));
}
}
| NullCheckTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhance/internal/bytebuddy/DirtyCheckingWithEmbeddableAndMappedSuperclassTest.java | {
"start": 2926,
"end": 6594
} | class ____ {
@Test
public void shouldDeclareFieldsInEntityClass() {
assertThat( CardGame.class )
.hasDeclaredFields( ENTITY_ENTRY_FIELD_NAME, PREVIOUS_FIELD_NAME, NEXT_FIELD_NAME, TRACKER_FIELD_NAME );
}
@Test
public void shouldDeclareMethodsInEntityClass() {
assertThat( CardGame.class )
.hasDeclaredMethods( PERSISTENT_FIELD_READER_PREFIX + "id", PERSISTENT_FIELD_WRITER_PREFIX + "id" )
.hasDeclaredMethods( PERSISTENT_FIELD_READER_PREFIX + "name", PERSISTENT_FIELD_WRITER_PREFIX + "name" )
.hasDeclaredMethods( ENTITY_INSTANCE_GETTER_NAME, ENTITY_ENTRY_GETTER_NAME )
.hasDeclaredMethods( PREVIOUS_GETTER_NAME, PREVIOUS_SETTER_NAME, NEXT_GETTER_NAME, NEXT_SETTER_NAME )
.hasDeclaredMethods( TRACKER_HAS_CHANGED_NAME, TRACKER_CLEAR_NAME, TRACKER_SUSPEND_NAME, TRACKER_GET_NAME );
}
@Test
public void shouldDeclareFieldsInEmbeddedClass() {
assertThat( Component.class )
.hasDeclaredFields( TRACKER_COMPOSITE_FIELD_NAME );
}
@Test
public void shouldDeclareMethodsInEmbeddedClass() {
assertThat(Component.class )
.hasDeclaredMethods( PERSISTENT_FIELD_READER_PREFIX + "component", PERSISTENT_FIELD_WRITER_PREFIX + "component" )
.hasDeclaredMethods( TRACKER_COMPOSITE_SET_OWNER, TRACKER_COMPOSITE_CLEAR_OWNER );
}
@Test
public void shouldCreateTheTracker() throws Exception {
CardGame entity = new CardGame( "MTG", "Magic the Gathering" );
assertThat( entity )
.extracting( NEXT_FIELD_NAME ).isNull();
assertThat( entity )
.extracting( PREVIOUS_FIELD_NAME ).isNull();
assertThat( entity )
.extracting( ENTITY_ENTRY_FIELD_NAME ).isNull();
assertThat( entity )
.extracting( TRACKER_FIELD_NAME ).isInstanceOf( SimpleFieldTracker.class );
assertThat( entity.getFirstPlayerToken() )
.extracting( TRACKER_COMPOSITE_FIELD_NAME ).isInstanceOf( CompositeOwnerTracker.class);
assertThat( entity ).extracting( resultOf( TRACKER_HAS_CHANGED_NAME ) ).isEqualTo( true );
assertThat( entity ).extracting( resultOf( TRACKER_GET_NAME ) )
.isEqualTo( new String[] { "name", "firstPlayerToken" } );
assertThat( entity.getFirstPlayerToken() )
.extracting( TRACKER_COMPOSITE_FIELD_NAME + ".names" ).isEqualTo( new String[] { "firstPlayerToken" } );
}
@Test
public void shouldResetTheTracker() throws Exception {
CardGame entity = new CardGame( "7WD", "7 Wonders duel" );
Method trackerClearMethod = CardGame.class.getMethod( TRACKER_CLEAR_NAME );
trackerClearMethod.invoke( entity );
assertThat( entity ).extracting( resultOf( TRACKER_HAS_CHANGED_NAME ) ).isEqualTo( false );
assertThat( entity ).extracting( resultOf( TRACKER_GET_NAME ) ).isEqualTo( new String[0] );
}
@Test
public void shouldUpdateTheTracker() throws Exception {
CardGame entity = new CardGame( "SPL", "Splendor" );
Method trackerClearMethod = CardGame.class.getMethod( TRACKER_CLEAR_NAME );
trackerClearMethod.invoke( entity );
entity.setName( "Splendor: Cities of Splendor" );
assertThat( entity ).extracting( resultOf( TRACKER_HAS_CHANGED_NAME ) ).isEqualTo( true );
assertThat( entity ).extracting( resultOf( TRACKER_GET_NAME ) )
.isEqualTo( new String[] { "name", "firstPlayerToken" } );
trackerClearMethod.invoke( entity );
entity.setFirstPlayerToken( new Component( "FIRST PLAYER!!!!!!!!" ) );
assertThat( entity ).extracting( resultOf( TRACKER_GET_NAME ) )
.isEqualTo( new String[] { "firstPlayerToken" } );
assertThat( entity.getFirstPlayerToken() )
.extracting( TRACKER_COMPOSITE_FIELD_NAME + ".names" ).isEqualTo( new String[] { "firstPlayerToken" } );
}
@MappedSuperclass
public static abstract | DirtyCheckingWithEmbeddableAndMappedSuperclassTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/ReflectHelper.java | {
"start": 8931,
"end": 9822
} | class ____ which to retrieve the default ctor.
* @return The default constructor.
* @throws PropertyNotFoundException Indicates there was not publicly accessible, no-arg constructor (todo : why PropertyNotFoundException???)
*/
public static <T> Constructor<T> getDefaultConstructor(Class<T> clazz) throws PropertyNotFoundException {
if ( isAbstractClass( clazz ) ) {
return null;
}
try {
final var constructor = clazz.getDeclaredConstructor( NO_PARAM_SIGNATURE );
ensureAccessibility( constructor );
return constructor;
}
catch ( NoSuchMethodException nme ) {
throw new PropertyNotFoundException(
"Object class [" + clazz.getName() + "] must declare a default (no-argument) constructor"
);
}
}
public static <T> Supplier<T> getDefaultSupplier(Class<T> clazz) {
if ( isAbstractClass( clazz ) ) {
throw new IllegalArgumentException( "Abstract | for |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java | {
"start": 6222,
"end": 6484
} | class ____ {
/** Counter for retries. */
private int retries;
/** Counter for method invocation has been failed over. */
private int failovers;
boolean isZeros() {
return retries == 0 && failovers == 0;
}
}
private static | Counters |
java | apache__camel | components/camel-disruptor/src/test/java/org/apache/camel/component/disruptor/vm/DisruptorVmComponentReferenceEndpointTest.java | {
"start": 1284,
"end": 3477
} | class ____ extends CamelTestSupport {
@Test
void testDisruptorVmComponentReference() throws Exception {
DisruptorVmComponent vm = context.getComponent("disruptor-vm", DisruptorVmComponent.class);
String key = DisruptorComponent.getDisruptorKey("disruptor-vm://foo");
assertEquals(1, vm.getDisruptors().get(key).getEndpointCount());
assertEquals(2, numberOfReferences(vm));
// add a second consumer on the endpoint
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("disruptor-vm:foo?blockWhenFull=true").routeId("foo2").to("mock:foo2");
}
});
assertEquals(2, vm.getDisruptors().get(key).getEndpointCount());
assertEquals(3, numberOfReferences(vm));
// remove the 1st route
context.getRouteController().stopRoute("foo");
context.removeRoute("foo");
assertEquals(1, vm.getDisruptors().get(key).getEndpointCount());
assertEquals(2, numberOfReferences(vm));
// remove the 2nd route
context.getRouteController().stopRoute("foo2");
context.removeRoute("foo2");
// and there is no longer queues for the foo key
assertNull(vm.getDisruptors().get(key));
// there should still be a bar
assertEquals(1, numberOfReferences(vm));
key = DisruptorComponent.getDisruptorKey("disruptor-vm://bar");
assertEquals(1, vm.getDisruptors().get(key).getEndpointCount());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("disruptor-vm:foo").routeId("foo").to("mock:foo");
from("disruptor-vm:bar").routeId("bar").to("mock:bar");
}
};
}
private int numberOfReferences(DisruptorVmComponent vm) {
int num = 0;
Iterator<DisruptorReference> it = vm.getDisruptors().values().iterator();
while (it.hasNext()) {
num += it.next().getEndpointCount();
}
return num;
}
}
| DisruptorVmComponentReferenceEndpointTest |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/csv2/BindyUnmarshalCommaIssueTest.java | {
"start": 1258,
"end": 4521
} | class ____ extends CamelTestSupport {
@Test
public void testBindyUnmarshalNoCommaIssue() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
String body = "123,\"Wednesday November 9 2011\",\"Central California\"";
template.sendBody("direct:start", body);
MockEndpoint.assertIsSatisfied(context);
WeatherModel model = mock.getReceivedExchanges().get(0).getIn().getBody(WeatherModel.class);
assertEquals(123, model.getId());
assertEquals("Wednesday November 9 2011", model.getDate());
assertEquals("Central California", model.getPlace());
}
@Test
public void testBindyUnmarshalCommaIssue() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
String body = "123,\"Wednesday, November 9, 2011\",\"Central California\"";
template.sendBody("direct:start", body);
MockEndpoint.assertIsSatisfied(context);
WeatherModel model = mock.getReceivedExchanges().get(0).getIn().getBody(WeatherModel.class);
assertEquals(123, model.getId());
assertEquals("Wednesday, November 9, 2011", model.getDate());
assertEquals("Central California", model.getPlace());
}
@Test
public void testBindyUnmarshalCommaIssueTwo() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
String body = "123,\"Wednesday, November 9, 2011\",\"Central California, United States\"";
template.sendBody("direct:start", body);
MockEndpoint.assertIsSatisfied(context);
WeatherModel model = mock.getReceivedExchanges().get(0).getIn().getBody(WeatherModel.class);
assertEquals(123, model.getId());
assertEquals("Wednesday, November 9, 2011", model.getDate());
assertEquals("Central California, United States", model.getPlace());
}
@Test
@Disabled("To fix CAMEL-5871. doesn't support the signle quote test case any more")
public void testBindyUnmarshalSingleQuoteCommaIssueTwo() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
String body = "123,'Wednesday, November 9, 2011','Central California, United States'";
template.sendBody("direct:start", body);
MockEndpoint.assertIsSatisfied(context);
Map<?, ?> map = (Map<?, ?>) mock.getReceivedExchanges().get(0).getIn().getBody(List.class).get(0);
WeatherModel model = (WeatherModel) map.values().iterator().next();
assertEquals(123, model.getId());
assertEquals("Wednesday, November 9, 2011", model.getDate());
assertEquals("Central California, United States", model.getPlace());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.unmarshal().bindy(BindyType.Csv, org.apache.camel.dataformat.bindy.csv2.WeatherModel.class)
.to("mock:result");
}
};
}
}
| BindyUnmarshalCommaIssueTest |
java | apache__hadoop | hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/Statistics.java | {
"start": 7677,
"end": 10133
} | class ____ extends SubjectInheritingThread {
StatCollector() {
super("StatsCollectorThread");
}
public void work() {
try {
startFlag.await();
if (Thread.currentThread().isInterrupted()) {
return;
}
} catch (InterruptedException ie) {
LOG.error(
"Statistics Error while waiting for other threads to get ready ", ie);
return;
}
while (!shutdown) {
lock.lock();
try {
jobCompleted.await(jtPollingInterval, TimeUnit.MILLISECONDS);
} catch (InterruptedException ie) {
if (!shutdown) {
LOG.error("Statistics interrupt while waiting for completion of "
+ "a job.", ie);
}
return;
} finally {
lock.unlock();
}
//Fetch cluster data only if required.i.e .
// only if there are clusterStats listener.
if (clusterStatlisteners.size() > 0) {
try {
ClusterStatus clusterStatus = cluster.getClusterStatus();
updateAndNotifyClusterStatsListeners(clusterStatus);
} catch (IOException e) {
LOG.error(
"Statistics io exception while polling JT ", e);
return;
}
}
}
}
private void updateAndNotifyClusterStatsListeners(
ClusterStatus clusterStatus) {
ClusterStats stats = ClusterStats.getClusterStats();
stats.setClusterMetric(clusterStatus);
for (StatListener<ClusterStats> listener : clusterStatlisteners) {
listener.update(stats);
}
}
}
/**
* Wait until the service completes. It is assumed that either a
* {@link #shutdown} or {@link #abort} has been requested.
*/
@Override
public void join(long millis) throws InterruptedException {
statistics.join(millis);
}
@Override
public void shutdown() {
shutdown = true;
submittedJobsMap.clear();
clusterStatlisteners.clear();
jobStatListeners.clear();
statistics.interrupt();
}
@Override
public void abort() {
shutdown = true;
submittedJobsMap.clear();
clusterStatlisteners.clear();
jobStatListeners.clear();
statistics.interrupt();
}
/**
* Class to encapsulate the JobStats information.
* Current we just need information about completedJob.
* TODO: In future we need to extend this to send more information.
*/
static | StatCollector |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/OAuth2AuthorizeRequest.java | {
"start": 1670,
"end": 4133
} | class ____ {
private String clientRegistrationId;
private OAuth2AuthorizedClient authorizedClient;
private Authentication principal;
private Map<String, Object> attributes;
private OAuth2AuthorizeRequest() {
}
/**
* Returns the identifier for the {@link ClientRegistration client registration}.
* @return the identifier for the client registration
*/
public String getClientRegistrationId() {
return this.clientRegistrationId;
}
/**
* Returns the {@link OAuth2AuthorizedClient authorized client} or {@code null} if it
* was not provided.
* @return the {@link OAuth2AuthorizedClient} or {@code null} if it was not provided
*/
@Nullable
public OAuth2AuthorizedClient getAuthorizedClient() {
return this.authorizedClient;
}
/**
* Returns the {@code Principal} (to be) associated to the authorized client.
* @return the {@code Principal} (to be) associated to the authorized client
*/
public Authentication getPrincipal() {
return this.principal;
}
/**
* Returns the attributes associated to the request.
* @return a {@code Map} of the attributes associated to the request
*/
public Map<String, Object> getAttributes() {
return this.attributes;
}
/**
* Returns the value of an attribute associated to the request or {@code null} if not
* available.
* @param name the name of the attribute
* @param <T> the type of the attribute
* @return the value of the attribute associated to the request
*/
@Nullable
@SuppressWarnings("unchecked")
public <T> T getAttribute(String name) {
return (T) this.getAttributes().get(name);
}
/**
* Returns a new {@link Builder} initialized with the identifier for the
* {@link ClientRegistration client registration}.
* @param clientRegistrationId the identifier for the {@link ClientRegistration client
* registration}
* @return the {@link Builder}
*/
public static Builder withClientRegistrationId(String clientRegistrationId) {
return new Builder(clientRegistrationId);
}
/**
* Returns a new {@link Builder} initialized with the {@link OAuth2AuthorizedClient
* authorized client}.
* @param authorizedClient the {@link OAuth2AuthorizedClient authorized client}
* @return the {@link Builder}
*/
public static Builder withAuthorizedClient(OAuth2AuthorizedClient authorizedClient) {
return new Builder(authorizedClient);
}
/**
* A builder for {@link OAuth2AuthorizeRequest}.
*/
public static final | OAuth2AuthorizeRequest |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/PooledExchangeTaskFactory.java | {
"start": 1057,
"end": 2103
} | interface ____ extends PooledObjectFactory<PooledExchangeTask> {
/**
* Creates a new task to use for processing the exchange.
*
* @param exchange the current exchange
* @param callback the callback for the exchange
* @return the task
*/
PooledExchangeTask create(Exchange exchange, AsyncCallback callback);
/**
* Attempts to acquire a pooled task to use for processing the exchange, if not possible then a new task is created.
*
* @param exchange the current exchange
* @param callback the callback for the exchange
* @return the task
*/
PooledExchangeTask acquire(Exchange exchange, AsyncCallback callback);
/**
* Releases the task after its done being used
*
* @param task the task
* @return true if the task was released, and false if the task failed to be released or no space in pool, and
* the task was discarded.
*/
boolean release(PooledExchangeTask task);
}
| PooledExchangeTaskFactory |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java | {
"start": 9275,
"end": 11222
} | class ____. */
private final double avgPrecision;
public Result(List<PerClassSingleValue> classes, double avgPrecision) {
this.classes = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(classes, CLASSES));
this.avgPrecision = avgPrecision;
}
public Result(StreamInput in) throws IOException {
this.classes = in.readCollectionAsImmutableList(PerClassSingleValue::new);
this.avgPrecision = in.readDouble();
}
@Override
public String getWriteableName() {
return registeredMetricName(Classification.NAME, NAME);
}
@Override
public String getMetricName() {
return NAME.getPreferredName();
}
public List<PerClassSingleValue> getClasses() {
return classes;
}
public double getAvgPrecision() {
return avgPrecision;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(classes);
out.writeDouble(avgPrecision);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CLASSES.getPreferredName(), classes);
builder.field(AVG_PRECISION.getPreferredName(), avgPrecision);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Result that = (Result) o;
return Objects.equals(this.classes, that.classes) && this.avgPrecision == that.avgPrecision;
}
@Override
public int hashCode() {
return Objects.hash(classes, avgPrecision);
}
}
}
| precisions |
java | google__gson | gson/src/test/java/com/google/gson/functional/DefaultTypeAdaptersTest.java | {
"start": 27345,
"end": 27608
} | class ____ {
BigDecimal value;
ClassWithBigDecimal(String value) {
this.value = new BigDecimal(value);
}
String getExpectedJson() {
return "{\"value\":" + value.toEngineeringString() + "}";
}
}
private static | ClassWithBigDecimal |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SimplifyComparisonsArithmetics.java | {
"start": 9104,
"end": 11270
} | class ____ extends OperationSimplifier {
private final boolean isDiv; // and not MUL.
private final int opRightSign; // sign of the right operand in: (left) (op) (right) (comp) (literal)
MulDivSimplifier(FoldContext foldContext, BinaryComparison comparison) {
super(foldContext, comparison);
isDiv = operation.symbol().equals(DIV.symbol());
opRightSign = sign(opRight);
}
@Override
boolean isOpUnsafe() {
// Integer divisions are not safe to optimise: x / 5 > 1 <=/=> x > 5 for x in [6, 9]; same for the `==` comp
if (operation.dataType().isWholeNumber() && isDiv) {
return true;
}
// If current operation is a multiplication, it's inverse will be a division: safe only if outcome is still integral.
if (isDiv == false && opLeft.dataType().isWholeNumber()) {
long opLiteralValue = ((Number) opLiteral.value()).longValue();
return opLiteralValue == 0 || ((Number) bcLiteral.value()).longValue() % opLiteralValue != 0;
}
// can't move a 0 in Mul/Div comparisons
return opRightSign == 0;
}
@Override
Expression postProcess(BinaryComparison binaryComparison) {
// negative multiplication/division changes the direction of the comparison
return opRightSign < 0 ? binaryComparison.reverse() : binaryComparison;
}
private static int sign(Object obj) {
int sign = 1;
if (obj instanceof Number) {
sign = (int) signum(((Number) obj).doubleValue());
} else if (obj instanceof Literal) {
sign = sign(((Literal) obj).value());
} else if (obj instanceof Neg) {
sign = -sign(((Neg) obj).field());
} else if (obj instanceof ArithmeticOperation operation) {
if (isMulOrDiv(operation.symbol())) {
sign = sign(operation.left()) * sign(operation.right());
}
}
return sign;
}
}
}
| MulDivSimplifier |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/ai/remote/request/AbstractAgentRequestTest.java | {
"start": 2493,
"end": 2562
} | class ____ extends AbstractAgentRequest {
}
} | TestAbstractAgentRequest |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/ImmutableSubstringMap.java | {
"start": 377,
"end": 4054
} | class ____<V> {
private static final int ALL_BUT_LAST_BIT = ~1;
private final Object[] table;
ImmutableSubstringMap(Object[] table) {
this.table = Arrays.copyOf(table, table.length);
}
@SuppressWarnings("unchecked")
public SubstringMatch<V> get(String key, int length) {
if (key.length() < length) {
throw new IllegalArgumentException();
}
int hash = hash(key, length);
int pos = tablePos(table, hash);
int start = pos;
while (table[pos] != null) {
if (doEquals((String) table[pos], key, length)) {
SubstringMatch<V> match = (SubstringMatch<V>) table[pos + 1];
if (match == null) {
return null;
}
if (match.hasSubPathMatcher) {
// consider request path '/one/two/three/four/five'
// 'match.key' (which is prefix path) never ends with a slash, e.g. 'match.key=/one/two'
// which means index 'match.key.length()' is index of the last char of the '/one/two/' sub-path
// considering we are looking for a path segment after '/one/two/*', that is the first char
// of the '/four/five' sub-path, the separator index must be greater than 'match.key.length() + 1'
if (key.length() > (match.key.length() + 1)) {
// let say match key is '/one/two'
// then next path segment is '/four' and '/three' is skipped
// for path pattern was like: '/one/two/*/four/five'
int nextPathSegmentIdx = key.indexOf('/', match.key.length() + 1);
if (nextPathSegmentIdx != -1) {
// following the example above, 'nextPath' would be '/four/five'
// and * matched 'three' path segment characters
String nextPath = key.substring(nextPathSegmentIdx);
PathMatch<SubstringMatch<V>> subMatch = match.subPathMatcher.match(nextPath);
if (subMatch.getValue() != null) {
return subMatch.getValue();
}
}
}
if (match.value == null) {
// paths with inner wildcard didn't match
// and there is no prefix path with ending wildcard either
return null;
}
}
// prefix path with ending wildcard: /one/two*
return match;
}
pos += 2;
if (pos >= table.length) {
pos = 0;
}
if (pos == start) {
return null;
}
}
return null;
}
static int tablePos(Object[] table, int hash) {
return (hash & (table.length - 1)) & ALL_BUT_LAST_BIT;
}
static boolean doEquals(String s1, String s2, int length) {
if (s1.length() != length || s2.length() < length) {
return false;
}
for (int i = 0; i < length; ++i) {
if (s1.charAt(i) != s2.charAt(i)) {
return false;
}
}
return true;
}
static int hash(String value, int length) {
if (length == 0) {
return 0;
}
int h = 0;
for (int i = 0; i < length; i++) {
h = 31 * h + value.charAt(i);
}
return h;
}
public static final | ImmutableSubstringMap |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java | {
"start": 887,
"end": 1879
} | class ____ extends ScriptTestCase {
@Override
protected Map<ScriptContext<?>, List<Whitelist>> scriptContexts() {
Map<ScriptContext<?>, List<Whitelist>> contexts = new HashMap<>();
List<Whitelist> whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST);
whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.alias"));
contexts.put(PainlessTestScript.CONTEXT, whitelists);
return contexts;
}
public void testNoShadowing() {
IllegalArgumentException err = expectThrows(
IllegalArgumentException.class,
() -> PainlessLookupBuilder.buildFromWhitelists(
List.of(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.alias-shadow")),
new HashMap<>(),
new HashMap<>()
)
);
assertEquals(
"Cannot add alias [AliasedTestInnerClass] for [ | AliasTests |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/context/PropertyMapping.java | {
"start": 1853,
"end": 2507
} | interface ____ {
/**
* Defines the property mapping. When used at the type-level, this value will be used
* as a prefix for all mapped attributes. When used on an attribute, the value
* overrides the generated (kebab case) name.
* @return the property mapping
*/
String value() default "";
/**
* Determines if mapping should be skipped. When specified at the type-level indicates
* if skipping should occur by default or not. When used at the attribute-level,
* overrides the type-level default.
* @return if mapping should be skipped
*/
Skip skip() default Skip.NO;
/**
* Controls when mapping is skipped.
*/
| PropertyMapping |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/JdbcOAuth2AuthorizationService.java | {
"start": 40909,
"end": 41266
} | class ____ {
private final String columnName;
private final int dataType;
private ColumnMetadata(String columnName, int dataType) {
this.columnName = columnName;
this.dataType = dataType;
}
private String getColumnName() {
return this.columnName;
}
private int getDataType() {
return this.dataType;
}
}
static | ColumnMetadata |
java | apache__kafka | streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/StreamsUncaughtExceptionHandlerIntegrationTest.java | {
"start": 9438,
"end": 18092
} | class ____<KIn, VIn, KOut, VOut> implements Processor<KIn, VIn, KOut, VOut> {
final List<String> valueList;
ShutdownProcessor(final List<String> valueList) {
this.valueList = valueList;
}
@Override
public void init(final ProcessorContext<KOut, VOut> context) {
}
@Override
public void close() {
// do nothing
}
@Override
public void process(final Record<KIn, VIn> record) {
valueList.add(record.value().toString());
if (THROW_ERROR.get()) {
throw new StreamsException(Thread.currentThread().getName());
}
THROW_ERROR.set(true);
}
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void shouldShutDownClientIfGlobalStreamThreadWantsToReplaceThread(final boolean streamsRebalanceProtocolEnabled) throws Exception {
properties = basicProps(streamsRebalanceProtocolEnabled);
builder.addGlobalStore(
new KeyValueStoreBuilder<>(
Stores.persistentKeyValueStore("globalStore"),
Serdes.String(),
Serdes.String(),
CLUSTER.time
),
inputTopic2,
Consumed.with(Serdes.String(), Serdes.String()),
() -> new ShutdownProcessor<String, String, Void, Void>(processorValueCollector)
);
properties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 0);
try (final KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), properties)) {
kafkaStreams.setUncaughtExceptionHandler(exception -> REPLACE_THREAD);
startApplicationAndWaitUntilRunning(kafkaStreams);
produceMessages(NOW, inputTopic2, "A");
waitForApplicationState(Collections.singletonList(kafkaStreams), KafkaStreams.State.ERROR, DEFAULT_DURATION);
assertThat(processorValueCollector.size(), equalTo(1));
}
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void shouldEmitSameRecordAfterFailover(final boolean streamsRebalanceProtocolEnabled) throws Exception {
properties = basicProps(streamsRebalanceProtocolEnabled);
properties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
properties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 300000L);
properties.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class);
properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 10000);
final AtomicBoolean shouldThrow = new AtomicBoolean(true);
final StreamsBuilder builder = new StreamsBuilder();
builder.table(inputTopic, Materialized.as("test-store"))
.toStream()
.map((key, value) -> {
if (shouldThrow.compareAndSet(true, false)) {
throw new RuntimeException("Kaboom");
} else {
return new KeyValue<>(key, value);
}
})
.to(outputTopic);
builder.stream(inputTopic2).to(outputTopic2);
try (final KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), properties)) {
kafkaStreams.setUncaughtExceptionHandler(exception -> StreamThreadExceptionResponse.REPLACE_THREAD);
startApplicationAndWaitUntilRunning(kafkaStreams);
IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
inputTopic,
asList(
new KeyValue<>(1, "A"),
new KeyValue<>(1, "B")
),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
IntegerSerializer.class,
StringSerializer.class,
new Properties()),
NOW);
IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
inputTopic2,
asList(
new KeyValue<>(1, "A"),
new KeyValue<>(1, "B")
),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
IntegerSerializer.class,
StringSerializer.class,
new Properties()),
NOW);
IntegrationTestUtils.waitUntilFinalKeyValueRecordsReceived(
TestUtils.consumerConfig(
CLUSTER.bootstrapServers(),
IntegerDeserializer.class,
StringDeserializer.class
),
outputTopic,
asList(
new KeyValue<>(1, "A"),
new KeyValue<>(1, "B")
)
);
IntegrationTestUtils.waitUntilFinalKeyValueRecordsReceived(
TestUtils.consumerConfig(
CLUSTER.bootstrapServers(),
IntegerDeserializer.class,
StringDeserializer.class
),
outputTopic2,
asList(
new KeyValue<>(1, "A"),
new KeyValue<>(1, "B")
)
);
}
}
private void produceMessages(final long timestamp, final String streamOneInput, final String msg) {
IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
streamOneInput,
Collections.singletonList(new KeyValue<>("1", msg)),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
StringSerializer.class,
StringSerializer.class,
new Properties()),
timestamp);
}
    /**
     * Verifies that returning SHUTDOWN_APPLICATION from the stream uncaught exception
     * handler brings every instance of the application to ERROR state, that exactly one
     * record was processed before the shutdown, and that the shutdown warning is logged
     * exactly once across both instances.
     *
     * @param numThreads number of stream threads configured for each KafkaStreams instance
     * @throws Exception if starting the instances or awaiting a condition fails
     */
    private void testShutdownApplication(final int numThreads) throws Exception {
        properties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numThreads);
        final Topology topology = builder.build();
        // Deterministic clock starting at 0 for both instances.
        final MockTime time = new MockTime(0L);
        // Two instances of the same application, so the test can assert the whole
        // application (not just one instance) is shut down. The log appender captures
        // StreamThread WARN output for the exactly-once log assertion below.
        try (final KafkaStreams kafkaStreams1 = new KafkaStreams(topology, properties, time);
            final KafkaStreams kafkaStreams2 = new KafkaStreams(topology, properties, time);
            final LogCaptureAppender logCaptureAppender = LogCaptureAppender.createAndRegister()) {
            // Both instances request application-wide shutdown on any stream-thread error.
            kafkaStreams1.setUncaughtExceptionHandler(exception -> SHUTDOWN_APPLICATION);
            kafkaStreams2.setUncaughtExceptionHandler(exception -> SHUTDOWN_APPLICATION);
            logCaptureAppender.setClassLogger(StreamThread.class, Level.WARN);
            startApplicationAndWaitUntilRunning(asList(kafkaStreams1, kafkaStreams2));
            // A single record triggers the failure path (the processor throws — see the
            // processor wired into `builder` elsewhere in this class; confirm there).
            produceMessages(NOW, inputTopic, "A");
            waitForApplicationState(asList(kafkaStreams1, kafkaStreams2), KafkaStreams.State.ERROR, DEFAULT_DURATION);
            // Only the one record produced above should have reached the processor.
            assertThat(processorValueCollector.size(), equalTo(1));
            assertThat("Shutdown warning log message should be exported exactly once",
                logCaptureAppender.getMessages("WARN").stream().filter(msg -> msg.contains("Detected that shutdown was requested")).count(), equalTo(1L));
        }
    }
private void testReplaceThreads(final int numThreads) throws Exception {
properties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numThreads);
try (final KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), properties)) {
final AtomicInteger count = new AtomicInteger();
kafkaStreams.setUncaughtExceptionHandler(exception -> {
if (count.incrementAndGet() == numThreads) {
THROW_ERROR.set(false);
}
return REPLACE_THREAD;
});
startApplicationAndWaitUntilRunning(kafkaStreams);
produceMessages(NOW, inputTopic, "A");
TestUtils.waitForCondition(() -> count.get() == numThreads, "finished replacing threads");
TestUtils.waitForCondition(THROW_ERROR::get, "finished replacing threads");
kafkaStreams.close();
waitForApplicationState(Collections.singletonList(kafkaStreams), KafkaStreams.State.NOT_RUNNING, DEFAULT_DURATION);
assertThat("All initial threads have failed and the replacement thread had processed on record",
processorValueCollector.size(), equalTo(numThreads + 1));
}
}
}
| ShutdownProcessor |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilder.java | {
"start": 16839,
"end": 17353
} | class ____ PersonController with method getPerson. In case the
* naming convention does not produce unique results, an explicit name may
* be assigned through the name attribute of the {@code @RequestMapping}
* annotation.
* <p>This is aimed primarily for use in view rendering technologies and EL
* expressions. The Spring URL tag library registers this method as a function
* called "mvcUrl".
* <p>For example, given this controller:
* <pre class="code">
* @RequestMapping("/people")
* | named |
java | quarkusio__quarkus | independent-projects/qute/debug/src/test/java/io/quarkus/qute/debug/completions/CompletionListTest.java | {
"start": 858,
"end": 8857
} | class ____ {
private static final String TEMPLATE_ID = "hello.qute";
@Test
public void debuggingTemplate() throws Exception {
int port = DebuggerUtils.findAvailableSocketPort();
// Server side :
// - create a Qute engine and set the debugging port as 1234
Engine engine = Engine.builder() //
.enableTracing(true) // enable tracing required by debugger
.addEngineListener(new RegisterDebugServerAdapter(port, false)) // debug engine on the given port
.addDefaults()//
.addValueResolver(new ReflectionValueResolver()) //
.build();
// - create a Qute template
Template template = engine.parse("<html>\n" + //
" Hello {name}!\n" + //
" {#for item in items}\n" + //
" {item}\n" + //
" {item_count}\n" + //
" {/for}\n" + //
"</html>", null, TEMPLATE_ID);
// Client side
// - connect the remote debugger client on the given port
DAPClient client = new DAPClient();
client.connectToServer(port) //
.get(10000, TimeUnit.MILLISECONDS);
// Set a breakpoint on line 5: --> {item_count}
client.setBreakpoint("src/main/resources/templates/" + TEMPLATE_ID, 5);
// Render template with breakpoint on line 5
final StringBuilder renderResult = new StringBuilder(1028);
new RenderTemplateInThread(template, renderResult, instance -> {
instance.data("name", "Quarkus") //
.data("items", List.of("foo", "bar", "baz"));
});
// Collect debuggee Thread (one thread)
var threads = client.getThreads();
assertEquals(1, threads.length);
var thread = threads[0];
int threadId = thread.getId();
assertEquals("Qute render thread", thread.getName());
// Get stack trace of the debuggee Thread
StackFrame[] stackFrames = client.getStackFrames(threadId);
StackFrame currentFrame = stackFrames[0];
int frameId = currentFrame.getId();
String frameName = currentFrame.getName();
assertEquals("ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]",
frameName);
// Execute completion on current frame context
// completion with data model root
CompletionItem[] items = client.completion("", 1, 1, frameId);
assertNotNull(items, "");
assertCompletion(new CompletionItem[] { //
c("name", CompletionItemType.REFERENCE), //
c("items", CompletionItemType.REFERENCE), //
c("item", CompletionItemType.REFERENCE), //
c("item_count", CompletionItemType.REFERENCE), //
c("item_index", CompletionItemType.REFERENCE) }, items);
// completion with value resolvers
items = client.completion("items.", 1, 7, frameId);
assertNotNull(items, "");
assertCompletion(new CompletionItem[] { //
c("size", CompletionItemType.PROPERTY), //
c("take(|index|)", CompletionItemType.FUNCTION), //
c("get(|index|)", CompletionItemType.FUNCTION) }, items);
// completion with reflection
items = client.completion("item.", 1, 6, frameId);
assertNotNull(items, "");
assertCompletion(new CompletionItem[] { //
c("compareTo(|arg0|)", CompletionItemType.METHOD), //
c("length()", CompletionItemType.METHOD), //
c("toString()", CompletionItemType.METHOD), //
c("charAt(|arg0|)", CompletionItemType.METHOD), //
c("isEmpty()", CompletionItemType.METHOD), //
c("codePoints()", CompletionItemType.METHOD), //
c("subSequence(|arg0|,arg1)", CompletionItemType.METHOD), //
c("chars()", CompletionItemType.METHOD), //
c("describeConstable()", CompletionItemType.METHOD), //
c("codePoints()", CompletionItemType.METHOD), //
c("resolveConstantDesc(|arg0|)", CompletionItemType.METHOD), //
c("equals(|arg0|)", CompletionItemType.METHOD), //
c("hashCode()", CompletionItemType.METHOD), //
// c("indexOf(|arg0|,arg1,arg2)", CompletionItemType.METHOD), // Only available with Java 21
c("indexOf(|arg0|,arg1)", CompletionItemType.METHOD), //
c("indexOf(|arg0|)", CompletionItemType.METHOD), //
c("codePointAt(|arg0|)", CompletionItemType.METHOD), //
c("codePointBefore(|arg0|)", CompletionItemType.METHOD), //
c("codePointCount(|arg0|,arg1)", CompletionItemType.METHOD), //
c("offsetByCodePoints(|arg0|,arg1)", CompletionItemType.METHOD), //
c("getBytes()", CompletionItemType.METHOD), //
c("contentEquals(|arg0|)", CompletionItemType.METHOD), //
c("regionMatches(|arg0|,arg1,arg2,arg3)", CompletionItemType.METHOD), //
c("regionMatches(|arg0|,arg1,arg2,arg3,arg4)", CompletionItemType.METHOD), //
c("startsWith(|arg0|)", CompletionItemType.METHOD), //
c("startsWith(|arg0|,arg1)", CompletionItemType.METHOD), //
c("lastIndexOf(|arg0|)", CompletionItemType.METHOD), //
c("lastIndexOf(|arg0|,arg1)", CompletionItemType.METHOD), //
c("substring(|arg0|,arg1)", CompletionItemType.METHOD), //
c("substring(|arg0|)", CompletionItemType.METHOD), //
c("replace(|arg0|,arg1)", CompletionItemType.METHOD), //
c("matches(|arg0|)", CompletionItemType.METHOD), //
c("replaceFirst(|arg0|,arg1)", CompletionItemType.METHOD), //
c("replaceAll(|arg0|,arg1)", CompletionItemType.METHOD), //
c("split(|arg0|)", CompletionItemType.METHOD), //
c("split(|arg0|,arg1)", CompletionItemType.METHOD), //
// c("splitWithDelimiters(|arg0|,arg1)", CompletionItemType.METHOD), // Only available with Java 21
c("toLowerCase()", CompletionItemType.METHOD), //
c("toLowerCase(|arg0|)", CompletionItemType.METHOD), //
c("toUpperCase()", CompletionItemType.METHOD), //
c("toUpperCase(|arg0|)", CompletionItemType.METHOD), //
c("trim()", CompletionItemType.METHOD), //
c("strip()", CompletionItemType.METHOD), //
c("stripLeading()", CompletionItemType.METHOD), //
c("stripTrailing()", CompletionItemType.METHOD), //
c("lines()", CompletionItemType.METHOD), //
c("repeat(|arg0|)", CompletionItemType.METHOD), //
c("isBlank()", CompletionItemType.METHOD), //
c("toCharArray()", CompletionItemType.METHOD), //
c("equalsIgnoreCase(|arg0|)", CompletionItemType.METHOD), //
c("compareToIgnoreCase(|arg0|)", CompletionItemType.METHOD), //
c("endsWith(|arg0|)", CompletionItemType.METHOD), //
c("concat(|arg0|)", CompletionItemType.METHOD), //
c("contains(|arg0|)", CompletionItemType.METHOD), //
c("indent(|arg0|)", CompletionItemType.METHOD), //
c("stripIndent()", CompletionItemType.METHOD), //
c("translateEscapes()", CompletionItemType.METHOD), //
c("transform(|arg0|)", CompletionItemType.METHOD), //
c("formatted(|arg0|)", CompletionItemType.METHOD), //
c("intern()", CompletionItemType.METHOD) }, items);
// On client side, disconnect the client
client.terminate();
// On server side, terminate the server
// server.terminate();
}
} | CompletionListTest |
java | quarkusio__quarkus | integration-tests/oidc-client/src/main/java/io/quarkus/it/keycloak/OidcClientRequestCustomFilter.java | {
"start": 430,
"end": 780
} | class ____ implements ClientRequestFilter {
@Inject
@NamedOidcClient("named")
Tokens grantTokens;
@Override
public void filter(ClientRequestContext requestContext) throws IOException {
requestContext.getHeaders().add(HttpHeaders.AUTHORIZATION, "Bearer " + grantTokens.getAccessToken());
}
}
| OidcClientRequestCustomFilter |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchSecurityTests.java | {
"start": 857,
"end": 7522
} | class ____ extends ESSingleNodeTestCase {
public void testEnsuredAuthenticatedUserIsSame() throws IOException {
final ThreadContext threadContext = client().threadPool().getThreadContext();
final AsyncSearchSecurity security = new AsyncSearchSecurity(
".async-search",
new SecurityContext(Settings.EMPTY, threadContext),
client(),
"async_origin"
);
Authentication original = AuthenticationTestHelper.builder()
.user(new User("test", "role"))
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.build(false);
Authentication current = randomBoolean()
? original
: AuthenticationTestHelper.builder()
.user(new User("test", "role"))
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.build(false);
current.writeToContext(threadContext);
assertThat(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)), is(true));
// "original" search was unauthenticated (e.g. security was turned off when it was performed)
assertThat(security.currentUserHasAccessToTaskWithHeaders(Collections.emptyMap()), is(true));
// current is not authenticated
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
assertThat(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)), is(false));
assertThat(security.currentUserHasAccessToTaskWithHeaders(Map.of()), is(true));
}
// original user being run as
final User authenticatingUser = new User("authenticated", "runas");
final User effectiveUser = new User("test", "role");
assertThat(
security.currentUserHasAccessToTaskWithHeaders(
getAuthenticationAsHeaders(
AuthenticationTestHelper.builder()
.user(authenticatingUser)
.realmRef(new Authentication.RealmRef(randomAlphaOfLengthBetween(1, 16), "file", "node"))
.runAs()
.user(effectiveUser)
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.build()
)
),
is(true)
);
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
// current user being run as
current = AuthenticationTestHelper.builder()
.user(authenticatingUser)
.realmRef(new Authentication.RealmRef(randomAlphaOfLengthBetween(1, 16), "file", "node"))
.runAs()
.user(effectiveUser)
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.build();
current.writeToContext(threadContext);
assertThat(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)), is(true));
// both users are run as
assertThat(
security.currentUserHasAccessToTaskWithHeaders(
getAuthenticationAsHeaders(
AuthenticationTestHelper.builder()
.user(authenticatingUser)
.realmRef(new Authentication.RealmRef(randomAlphaOfLengthBetween(1, 16), "file", "node"))
.runAs()
.user(effectiveUser)
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.build()
)
),
is(true)
);
}
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
// different authenticated by type
final Authentication differentRealmType = AuthenticationTestHelper.builder()
.user(new User("test", "role"))
.realmRef(new Authentication.RealmRef("realm", randomAlphaOfLength(10), "node"))
.build(false);
differentRealmType.writeToContext(threadContext);
assertFalse(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)));
}
// different user
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
final Authentication differentUser = AuthenticationTestHelper.builder()
.user(new User("test2", "role"))
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.build(false);
differentUser.writeToContext(threadContext);
assertFalse(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)));
}
// run as different user
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
final Authentication differentRunAs = AuthenticationTestHelper.builder()
.user(new User("authenticated", "runas"))
.realmRef(new Authentication.RealmRef("realm_runas", "file", "node1"))
.runAs()
.user(new User("test2", "role"))
.realmRef(new Authentication.RealmRef("realm", "file", "node1"))
.build();
differentRunAs.writeToContext(threadContext);
assertFalse(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)));
}
// run as different looked up by type
try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
final Authentication runAsDiffType = AuthenticationTestHelper.builder()
.user(authenticatingUser)
.realmRef(new Authentication.RealmRef("realm", "file", "node"))
.runAs()
.user(effectiveUser)
.realmRef(new Authentication.RealmRef(randomAlphaOfLengthBetween(1, 16), randomAlphaOfLengthBetween(5, 12), "node"))
.build();
runAsDiffType.writeToContext(threadContext);
assertFalse(security.currentUserHasAccessToTaskWithHeaders(getAuthenticationAsHeaders(original)));
}
}
private Map<String, String> getAuthenticationAsHeaders(Authentication authentication) throws IOException {
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
authentication.writeToContext(threadContext);
return threadContext.getHeaders();
}
}
| AsyncSearchSecurityTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/sps/ExternalStoragePolicySatisfier.java | {
"start": 2191,
"end": 4818
} | class ____ start and run external sps.
}
/**
* Main method to start SPS service.
*/
public static void main(String[] args) throws Exception {
NameNodeConnector nnc = null;
ExternalSPSContext context = null;
try {
StringUtils.startupShutdownMessage(StoragePolicySatisfier.class, args,
LOG);
HdfsConfiguration spsConf = new HdfsConfiguration();
// login with SPS keytab
secureLogin(spsConf);
StoragePolicySatisfier sps = new StoragePolicySatisfier(spsConf);
nnc = getNameNodeConnector(spsConf);
context = new ExternalSPSContext(sps, nnc);
sps.init(context);
sps.start(StoragePolicySatisfierMode.EXTERNAL);
context.initMetrics(sps);
if (sps != null) {
sps.join();
}
} catch (Throwable e) {
LOG.error("Failed to start storage policy satisfier.", e);
terminate(1, e);
} finally {
if (nnc != null) {
nnc.close();
}
if (context!= null) {
if (context.getSpsBeanMetrics() != null) {
context.closeMetrics();
}
}
}
}
private static void secureLogin(Configuration conf)
throws IOException {
UserGroupInformation.setConfiguration(conf);
String addr = conf.get(DFSConfigKeys.DFS_SPS_ADDRESS_KEY,
DFSConfigKeys.DFS_SPS_ADDRESS_DEFAULT);
InetSocketAddress socAddr = NetUtils.createSocketAddr(addr, 0,
DFSConfigKeys.DFS_SPS_ADDRESS_KEY);
SecurityUtil.login(conf, DFSConfigKeys.DFS_SPS_KEYTAB_FILE_KEY,
DFSConfigKeys.DFS_SPS_KERBEROS_PRINCIPAL_KEY,
socAddr.getHostName());
}
public static NameNodeConnector getNameNodeConnector(Configuration conf)
throws InterruptedException {
final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
final Path externalSPSPathId = HdfsServerConstants.MOVER_ID_PATH;
String serverName = ExternalStoragePolicySatisfier.class.getSimpleName();
while (true) {
try {
final List<NameNodeConnector> nncs = NameNodeConnector
.newNameNodeConnectors(namenodes,
serverName,
externalSPSPathId, conf,
NameNodeConnector.DEFAULT_MAX_IDLE_ITERATIONS);
return nncs.get(0);
} catch (IOException e) {
LOG.warn("Failed to connect with namenode", e);
if (e.getMessage().equals("Another " + serverName + " is running.")) {
ExitUtil.terminate(-1,
"Exit immediately because another " + serverName + " is running");
}
Thread.sleep(3000); // retry the connection after few secs
}
}
}
}
| to |
java | grpc__grpc-java | examples/example-jwt-auth/src/test/java/io/grpc/examples/jwtauth/AuthClientTest.java | {
"start": 1845,
"end": 4239
} | class ____ {
/**
* This rule manages automatic graceful shutdown for the registered servers and channels at the
* end of test.
*/
@Rule
public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule();
private final ServerInterceptor mockServerInterceptor = mock(ServerInterceptor.class, delegatesTo(
new ServerInterceptor() {
@Override
public <ReqT, RespT> Listener<ReqT> interceptCall(
ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) {
return next.startCall(call, headers);
}
}));
private AuthClient client;
@Before
public void setUp() throws IOException {
// Generate a unique in-process server name.
String serverName = InProcessServerBuilder.generateName();
// Create a server, add service, start, and register for automatic graceful shutdown.
grpcCleanup.register(InProcessServerBuilder.forName(serverName).directExecutor()
.addService(ServerInterceptors.intercept(
new GreeterGrpc.GreeterImplBase() {
@Override
public void sayHello(
HelloRequest request, StreamObserver<HelloReply> responseObserver) {
HelloReply reply = HelloReply.newBuilder()
.setMessage("AuthClientTest user=" + request.getName()).build();
responseObserver.onNext(reply);
responseObserver.onCompleted();
}
},
mockServerInterceptor))
.build().start());
CallCredentials credentials = new JwtCredential("test-client");
ManagedChannel channel = InProcessChannelBuilder.forName(serverName).directExecutor().build();
client = new AuthClient(credentials, channel);
}
@Test
public void greet() {
ArgumentCaptor<Metadata> metadataCaptor = ArgumentCaptor.forClass(Metadata.class);
String retVal = client.greet("John");
verify(mockServerInterceptor).interceptCall(
ArgumentMatchers.<ServerCall<HelloRequest, HelloReply>>any(),
metadataCaptor.capture(),
ArgumentMatchers.<ServerCallHandler<HelloRequest, HelloReply>>any());
String token = metadataCaptor.getValue().get(Constant.AUTHORIZATION_METADATA_KEY);
assertNotNull(token);
assertTrue(token.startsWith("Bearer"));
assertEquals("AuthClientTest user=John", retVal);
}
}
| AuthClientTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java | {
"start": 3948,
"end": 4831
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory left;
private final EvalOperator.ExpressionEvaluator.Factory right;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left,
EvalOperator.ExpressionEvaluator.Factory right) {
this.source = source;
this.left = left;
this.right = right;
}
@Override
public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) {
return new SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(source, left.get(context), right.get(context), context);
}
@Override
public String toString() {
return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]";
}
}
}
| Factory |
java | google__guava | guava/src/com/google/common/collect/ImmutableSet.java | {
"start": 25472,
"end": 34219
} | class ____<E> extends SetBuilderImpl<E> {
// null until at least two elements are present
private @Nullable Object @Nullable [] hashTable;
private int maxRunBeforeFallback;
private int expandTableThreshold;
private int hashCode;
RegularSetBuilderImpl(int expectedCapacity) {
super(expectedCapacity);
this.hashTable = null;
this.maxRunBeforeFallback = 0;
this.expandTableThreshold = 0;
}
RegularSetBuilderImpl(RegularSetBuilderImpl<E> toCopy) {
super(toCopy);
this.hashTable = (toCopy.hashTable == null) ? null : toCopy.hashTable.clone();
this.maxRunBeforeFallback = toCopy.maxRunBeforeFallback;
this.expandTableThreshold = toCopy.expandTableThreshold;
this.hashCode = toCopy.hashCode;
}
@Override
SetBuilderImpl<E> add(E e) {
checkNotNull(e);
if (hashTable == null) {
if (distinct == 0) {
addDedupedElement(e);
return this;
} else {
ensureTableCapacity(dedupedElements.length);
E elem = dedupedElements[0];
distinct--;
return insertInHashTable(elem).add(e);
}
}
return insertInHashTable(e);
}
private SetBuilderImpl<E> insertInHashTable(E e) {
requireNonNull(hashTable);
int eHash = e.hashCode();
int i0 = Hashing.smear(eHash);
int mask = hashTable.length - 1;
for (int i = i0; i - i0 < maxRunBeforeFallback; i++) {
int index = i & mask;
Object tableEntry = hashTable[index];
if (tableEntry == null) {
addDedupedElement(e);
hashTable[index] = e;
hashCode += eHash;
ensureTableCapacity(distinct); // rebuilds table if necessary
return this;
} else if (tableEntry.equals(e)) { // not a new element, ignore
return this;
}
}
// we fell out of the loop due to a long run; fall back to JDK impl
return new JdkBackedSetBuilderImpl<E>(this).add(e);
}
@Override
SetBuilderImpl<E> copy() {
return new RegularSetBuilderImpl<>(this);
}
@Override
SetBuilderImpl<E> review() {
if (hashTable == null) {
return this;
}
int targetTableSize = chooseTableSize(distinct);
if (targetTableSize * 2 < hashTable.length) {
hashTable = rebuildHashTable(targetTableSize, dedupedElements, distinct);
maxRunBeforeFallback = maxRunBeforeFallback(targetTableSize);
expandTableThreshold = (int) (DESIRED_LOAD_FACTOR * targetTableSize);
}
return hashFloodingDetected(hashTable) ? new JdkBackedSetBuilderImpl<E>(this) : this;
}
@Override
ImmutableSet<E> build() {
switch (distinct) {
case 0:
return of();
case 1:
/*
* requireNonNull is safe because we ensure that the first `distinct` elements have been
* populated.
*/
return of(requireNonNull(dedupedElements[0]));
default:
/*
* The suppression is safe because we ensure that the first `distinct` elements have been
* populated.
*/
@SuppressWarnings("nullness")
Object[] elements =
(distinct == dedupedElements.length)
? dedupedElements
: Arrays.copyOf(dedupedElements, distinct);
return new RegularImmutableSet<>(
elements, hashCode, requireNonNull(hashTable), hashTable.length - 1);
}
}
/** Builds a new open-addressed hash table from the first n objects in elements. */
static @Nullable Object[] rebuildHashTable(int newTableSize, Object[] elements, int n) {
@Nullable Object[] hashTable = new @Nullable Object[newTableSize];
int mask = hashTable.length - 1;
for (int i = 0; i < n; i++) {
// requireNonNull is safe because we ensure that the first n elements have been populated.
Object e = requireNonNull(elements[i]);
int j0 = Hashing.smear(e.hashCode());
for (int j = j0; ; j++) {
int index = j & mask;
if (hashTable[index] == null) {
hashTable[index] = e;
break;
}
}
}
return hashTable;
}
void ensureTableCapacity(int minCapacity) {
int newTableSize;
if (hashTable == null) {
newTableSize = chooseTableSize(minCapacity);
hashTable = new Object[newTableSize];
} else if (minCapacity > expandTableThreshold && hashTable.length < MAX_TABLE_SIZE) {
newTableSize = hashTable.length * 2;
hashTable = rebuildHashTable(newTableSize, dedupedElements, distinct);
} else {
return;
}
maxRunBeforeFallback = maxRunBeforeFallback(newTableSize);
expandTableThreshold = (int) (DESIRED_LOAD_FACTOR * newTableSize);
}
/**
* We attempt to detect deliberate hash flooding attempts. If one is detected, we fall back to a
* wrapper around j.u.HashSet, which has built-in flooding protection. MAX_RUN_MULTIPLIER was
* determined experimentally to match our desired probability of false positives.
*/
// NB: yes, this is surprisingly high, but that's what the experiments said was necessary
// Raising this number slows the worst-case contains behavior, speeds up hashFloodingDetected,
// and reduces the false-positive probability.
static final int MAX_RUN_MULTIPLIER = 13;
/**
* Checks the whole hash table for poor hash distribution. Takes O(n) in the worst case, O(n /
* log n) on average.
*
* <p>The online hash flooding detecting in RegularSetBuilderImpl.add can detect e.g. many
* exactly matching hash codes, which would cause construction to take O(n^2), but can't detect
* e.g. hash codes adversarially designed to go into ascending table locations, which keeps
* construction O(n) (as desired) but then can have O(n) queries later.
*
* <p>If this returns false, then no query can take more than O(log n).
*
* <p>Note that for a RegularImmutableSet with elements with truly random hash codes, contains
* operations take expected O(1) time but with high probability take O(log n) for at least some
* element. (https://en.wikipedia.org/wiki/Linear_probing#Analysis)
*
* <p>This method may return {@code true} even on truly random input, but {@code
* ImmutableSetTest} tests that the probability of that is low.
*/
static boolean hashFloodingDetected(@Nullable Object[] hashTable) {
int maxRunBeforeFallback = maxRunBeforeFallback(hashTable.length);
int mask = hashTable.length - 1;
// Invariant: all elements at indices in [knownRunStart, knownRunEnd) are nonnull.
// If knownRunStart == knownRunEnd, this is vacuously true.
// When knownRunEnd exceeds hashTable.length, it "wraps", detecting runs around the end
// of the table.
int knownRunStart = 0;
int knownRunEnd = 0;
outerLoop:
while (knownRunStart < hashTable.length) {
if (knownRunStart == knownRunEnd && hashTable[knownRunStart] == null) {
if (hashTable[(knownRunStart + maxRunBeforeFallback - 1) & mask] == null) {
// There are only maxRunBeforeFallback - 1 elements between here and there,
// so even if they were all nonnull, we wouldn't detect a hash flood. Therefore,
// we can skip them all.
knownRunStart += maxRunBeforeFallback;
} else {
knownRunStart++; // the only case in which maxRunEnd doesn't increase by mRBF
// happens about f * (1-f) for f = DESIRED_LOAD_FACTOR, so around 21% of the time
}
knownRunEnd = knownRunStart;
} else {
for (int j = knownRunStart + maxRunBeforeFallback - 1; j >= knownRunEnd; j--) {
if (hashTable[j & mask] == null) {
knownRunEnd = knownRunStart + maxRunBeforeFallback;
knownRunStart = j + 1;
continue outerLoop;
}
}
return true;
}
}
return false;
}
/**
* If more than this many consecutive positions are filled in a table of the specified size,
* report probable hash flooding. ({@link #hashFloodingDetected} may also report hash flooding
* if fewer consecutive positions are filled; see that method for details.)
*/
static int maxRunBeforeFallback(int tableSize) {
return MAX_RUN_MULTIPLIER * IntMath.log2(tableSize, RoundingMode.UNNECESSARY);
}
}
/**
* SetBuilderImpl version that uses a JDK HashSet, which has built in hash flooding protection.
*/
private static final | RegularSetBuilderImpl |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/web/servlet/DynamicRegistrationBeanTests.java | {
"start": 980,
"end": 1915
} | class ____ {
@Test
void shouldUseNameIfSet() {
DynamicRegistrationBean<?> bean = createBean();
bean.setName("givenName");
assertThat(bean.getOrDeduceName("dummy")).isEqualTo("givenName");
}
@Test
void shouldUseBeanNameIfNameIsNotSet() {
DynamicRegistrationBean<?> bean = createBean();
bean.setBeanName("beanName");
assertThat(bean.getOrDeduceName("dummy")).isEqualTo("beanName");
}
@Test
void shouldUseConventionBasedNameIfNoNameOrBeanNameIsSet() {
DynamicRegistrationBean<?> bean = createBean();
assertThat(bean.getOrDeduceName("dummy")).isEqualTo("string");
}
private static DynamicRegistrationBean<?> createBean() {
return new DynamicRegistrationBean<>() {
@Override
protected @Nullable Dynamic addRegistration(String description, ServletContext servletContext) {
return null;
}
@Override
protected String getDescription() {
return "";
}
};
}
}
| DynamicRegistrationBeanTests |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/client/ClientConnectionEventsTest.java | {
"start": 1176,
"end": 2691
} | class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Endpoint.class, ObservingBean.class, WSClient.class);
});
@TestHTTPResource("/")
URI baseUri;
@Inject
WebSocketConnector<EndpointClient> connector;
@Test
void testEvents() throws Exception {
// Open connection, EndpointClient sends a message with client connection id
WebSocketClientConnection connection = connector
.baseUri(baseUri)
.connectAndAwait();
// Wait for the message
assertTrue(Endpoint.MESSAGE_LATCH.await(5, TimeUnit.SECONDS));
// Assert the @Open event was fired
assertTrue(ObservingBean.OPEN_LATCH.await(5, TimeUnit.SECONDS));
assertNotNull(ObservingBean.OPEN_CONN.get());
assertEquals(connection.id(), ObservingBean.OPEN_CONN.get().id());
assertEquals(connection.id(), Endpoint.MESSAGE.get());
// Close the connection
connection.closeAndAwait();
assertTrue(EndpointClient.CLOSED_LATCH.await(5, TimeUnit.SECONDS));
// Assert the @Closed event was fired
assertTrue(ObservingBean.CLOSED_LATCH.await(5, TimeUnit.SECONDS));
assertNotNull(ObservingBean.CLOSED_CONN.get());
assertEquals(connection.id(), ObservingBean.CLOSED_CONN.get().id());
}
@WebSocket(path = "/endpoint")
public static | ClientConnectionEventsTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SleepJob.java | {
"start": 2202,
"end": 2431
} | class ____ extends
Partitioner<IntWritable, NullWritable> {
public int getPartition(IntWritable k, NullWritable v, int numPartitions) {
return k.get() % numPartitions;
}
}
public static | SleepJobPartitioner |
java | elastic__elasticsearch | x-pack/plugin/rank-vectors/src/main/java/org/elasticsearch/xpack/rank/vectors/mapper/RankVectorsIndexFieldData.java | {
"start": 2872,
"end": 3653
} | class ____ implements IndexFieldData.Builder {
private final String name;
private final ValuesSourceType valuesSourceType;
private final int dims;
private final DenseVectorFieldMapper.ElementType elementType;
public Builder(String name, ValuesSourceType valuesSourceType, int dims, DenseVectorFieldMapper.ElementType elementType) {
this.name = name;
this.valuesSourceType = valuesSourceType;
this.dims = dims;
this.elementType = elementType;
}
@Override
public IndexFieldData<?> build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new RankVectorsIndexFieldData(name, dims, valuesSourceType, elementType);
}
}
}
| Builder |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SqlEndpointBuilderFactory.java | {
"start": 102806,
"end": 105194
} | interface ____ {
/**
* SQL (camel-sql)
* Perform SQL queries using Spring JDBC.
*
* Category: database
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-sql
*
* @return the dsl builder for the headers' name.
*/
default SqlHeaderNameBuilder sql() {
return SqlHeaderNameBuilder.INSTANCE;
}
/**
* SQL (camel-sql)
* Perform SQL queries using Spring JDBC.
*
* Category: database
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-sql
*
* Syntax: <code>sql:query</code>
*
* Path parameter: query (required)
* Sets the SQL query to perform. You can externalize the query by using
* file: or classpath: as prefix and specify the location of the file.
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* @param path query
* @return the dsl builder
*/
default SqlEndpointBuilder sql(String path) {
return SqlEndpointBuilderFactory.endpointBuilder("sql", path);
}
/**
* SQL (camel-sql)
* Perform SQL queries using Spring JDBC.
*
* Category: database
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-sql
*
* Syntax: <code>sql:query</code>
*
* Path parameter: query (required)
* Sets the SQL query to perform. You can externalize the query by using
* file: or classpath: as prefix and specify the location of the file.
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path query
* @return the dsl builder
*/
default SqlEndpointBuilder sql(String componentName, String path) {
return SqlEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the SQL component.
*/
public static | SqlBuilders |
java | google__dagger | javatests/dagger/internal/codegen/PluginsVisitFullBindingGraphTest.java | {
"start": 4629,
"end": 4802
} | interface ____");
});
}
/** A test plugin that just reports each component with the given {@link Diagnostic.Kind}. */
private static final | ModuleWithoutErrors |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/PGWallTest.java | {
"start": 126,
"end": 349
} | class ____ extends TestCase {
public void test_false() throws Exception {
assertTrue(WallUtils.isValidatePostgres(//
"select wm_concat(article_id) over() from t_nds_web_article"));
}
}
| PGWallTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java | {
"start": 1431,
"end": 2689
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(YarnRPC.class);
public abstract Object getProxy(Class protocol, InetSocketAddress addr,
Configuration conf);
public abstract void stopProxy(Object proxy, Configuration conf);
public abstract Server getServer(Class protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager,
int numHandlers, String portRangeConfig);
public Server getServer(Class protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager,
int numHandlers) {
return getServer(protocol, instance, addr, conf, secretManager, numHandlers,
null);
}
public static YarnRPC create(Configuration conf) {
LOG.debug("Creating YarnRPC for {}",
conf.get(YarnConfiguration.IPC_RPC_IMPL));
String clazzName = conf.get(YarnConfiguration.IPC_RPC_IMPL);
if (clazzName == null) {
clazzName = YarnConfiguration.DEFAULT_IPC_RPC_IMPL;
}
try {
return (YarnRPC) Class.forName(clazzName).newInstance();
} catch (Exception e) {
throw new YarnRuntimeException(e);
}
}
}
| YarnRPC |
java | apache__rocketmq | tools/src/test/java/org/apache/rocketmq/tools/command/offset/SkipAccumulationCommandTest.java | {
"start": 1622,
"end": 3498
} | class ____ {
private static DefaultMQAdminExt defaultMQAdminExt;
private static DefaultMQAdminExtImpl defaultMQAdminExtImpl;
private static MQClientInstance mqClientInstance = MQClientManager.getInstance().getOrCreateMQClientInstance(new ClientConfig());
private static MQClientAPIImpl mQClientAPIImpl;
@BeforeClass
public static void init() throws Exception {
mQClientAPIImpl = mock(MQClientAPIImpl.class);
defaultMQAdminExt = new DefaultMQAdminExt();
defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000);
Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance");
field.setAccessible(true);
field.set(defaultMQAdminExtImpl, mqClientInstance);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mqClientInstance, mQClientAPIImpl);
field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl");
field.setAccessible(true);
field.set(defaultMQAdminExt, defaultMQAdminExtImpl);
}
@AfterClass
public static void terminate() {
defaultMQAdminExt.shutdown();
}
@Ignore
@Test
public void testExecute() throws SubCommandException {
System.setProperty("rocketmq.namesrv.addr", "127.0.0.1:9876");
SkipAccumulationSubCommand cmd = new SkipAccumulationSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-g group-test", "-t topic-test", "-f false"};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs,
cmd.buildCommandlineOptions(options), new DefaultParser());
cmd.execute(commandLine, options, null);
}
}
| SkipAccumulationCommandTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/MediaType.java | {
"start": 1914,
"end": 25601
} | class ____ extends MimeType implements Serializable {
private static final long serialVersionUID = 2069937152339670231L;
/**
* Media type for "*/*", including all media ranges.
*/
public static final MediaType ALL;
/**
* A String equivalent of {@link MediaType#ALL}.
*/
public static final String ALL_VALUE = "*/*";
/**
* Media type for {@code application/atom+xml}.
*/
public static final MediaType APPLICATION_ATOM_XML;
/**
* A String equivalent of {@link MediaType#APPLICATION_ATOM_XML}.
*/
public static final String APPLICATION_ATOM_XML_VALUE = "application/atom+xml";
/**
* Media type for {@code application/cbor}.
* @since 5.2
*/
public static final MediaType APPLICATION_CBOR;
/**
* A String equivalent of {@link MediaType#APPLICATION_CBOR}.
* @since 5.2
*/
public static final String APPLICATION_CBOR_VALUE = "application/cbor";
/**
* Media type for {@code application/x-www-form-urlencoded}.
*/
public static final MediaType APPLICATION_FORM_URLENCODED;
/**
* A String equivalent of {@link MediaType#APPLICATION_FORM_URLENCODED}.
*/
public static final String APPLICATION_FORM_URLENCODED_VALUE = "application/x-www-form-urlencoded";
/**
* Media type for {@code application/graphql-response+json}.
* @since 6.0.3
* @see <a href="https://github.com/graphql/graphql-over-http">GraphQL over HTTP spec</a>
*/
public static final MediaType APPLICATION_GRAPHQL_RESPONSE;
/**
* A String equivalent of {@link MediaType#APPLICATION_GRAPHQL_RESPONSE}.
* @since 6.0.3
*/
public static final String APPLICATION_GRAPHQL_RESPONSE_VALUE = "application/graphql-response+json";
/**
* Media type for {@code application/json}.
*/
public static final MediaType APPLICATION_JSON;
/**
* A String equivalent of {@link MediaType#APPLICATION_JSON}.
*/
public static final String APPLICATION_JSON_VALUE = "application/json";
/**
* Media type for {@code application/octet-stream}.
*/
public static final MediaType APPLICATION_OCTET_STREAM;
/**
* A String equivalent of {@link MediaType#APPLICATION_OCTET_STREAM}.
*/
public static final String APPLICATION_OCTET_STREAM_VALUE = "application/octet-stream";
/**
* Media type for {@code application/pdf}.
* @since 4.3
*/
public static final MediaType APPLICATION_PDF;
/**
* A String equivalent of {@link MediaType#APPLICATION_PDF}.
* @since 4.3
*/
public static final String APPLICATION_PDF_VALUE = "application/pdf";
/**
* Media type for {@code application/problem+json}.
* @since 5.0
* @see <a href="https://www.iana.org/assignments/media-types/application/problem+json">
* Problem Details for HTTP APIs, 6.1. application/problem+json</a>
*/
public static final MediaType APPLICATION_PROBLEM_JSON;
/**
* A String equivalent of {@link MediaType#APPLICATION_PROBLEM_JSON}.
* @since 5.0
*/
public static final String APPLICATION_PROBLEM_JSON_VALUE = "application/problem+json";
/**
* Media type for {@code application/problem+xml}.
* @since 5.0
* @see <a href="https://www.iana.org/assignments/media-types/application/problem+xml">
* Problem Details for HTTP APIs, 6.2. application/problem+xml</a>
*/
public static final MediaType APPLICATION_PROBLEM_XML;
/**
* A String equivalent of {@link MediaType#APPLICATION_PROBLEM_XML}.
* @since 5.0
*/
public static final String APPLICATION_PROBLEM_XML_VALUE = "application/problem+xml";
/**
* Media type for {@code application/x-protobuf}.
* @since 6.0
*/
public static final MediaType APPLICATION_PROTOBUF;
/**
* A String equivalent of {@link MediaType#APPLICATION_PROTOBUF}.
* @since 6.0
*/
public static final String APPLICATION_PROTOBUF_VALUE = "application/x-protobuf";
/**
* Media type for {@code application/rss+xml}.
* @since 4.3.6
*/
public static final MediaType APPLICATION_RSS_XML;
/**
* A String equivalent of {@link MediaType#APPLICATION_RSS_XML}.
* @since 4.3.6
*/
public static final String APPLICATION_RSS_XML_VALUE = "application/rss+xml";
/**
* Media type for {@code application/x-ndjson}.
* @since 5.3
*/
public static final MediaType APPLICATION_NDJSON;
/**
* A String equivalent of {@link MediaType#APPLICATION_NDJSON}.
* @since 5.3
*/
public static final String APPLICATION_NDJSON_VALUE = "application/x-ndjson";
/**
* Media type for {@code application/xhtml+xml}.
*/
public static final MediaType APPLICATION_XHTML_XML;
/**
* A String equivalent of {@link MediaType#APPLICATION_XHTML_XML}.
*/
public static final String APPLICATION_XHTML_XML_VALUE = "application/xhtml+xml";
/**
* Media type for {@code application/xml}.
*/
public static final MediaType APPLICATION_XML;
/**
* A String equivalent of {@link MediaType#APPLICATION_XML}.
*/
public static final String APPLICATION_XML_VALUE = "application/xml";
/**
* Media type for {@code application/yaml}.
* @since 6.2
*/
public static final MediaType APPLICATION_YAML;
/**
* A String equivalent of {@link MediaType#APPLICATION_YAML}.
* @since 6.2
*/
public static final String APPLICATION_YAML_VALUE = "application/yaml";
/**
* Media type for {@code image/gif}.
*/
public static final MediaType IMAGE_GIF;
/**
* A String equivalent of {@link MediaType#IMAGE_GIF}.
*/
public static final String IMAGE_GIF_VALUE = "image/gif";
/**
* Media type for {@code image/jpeg}.
*/
public static final MediaType IMAGE_JPEG;
/**
* A String equivalent of {@link MediaType#IMAGE_JPEG}.
*/
public static final String IMAGE_JPEG_VALUE = "image/jpeg";
/**
* Media type for {@code image/png}.
*/
public static final MediaType IMAGE_PNG;
/**
* A String equivalent of {@link MediaType#IMAGE_PNG}.
*/
public static final String IMAGE_PNG_VALUE = "image/png";
/**
* Media type for {@code multipart/form-data}.
*/
public static final MediaType MULTIPART_FORM_DATA;
/**
* A String equivalent of {@link MediaType#MULTIPART_FORM_DATA}.
*/
public static final String MULTIPART_FORM_DATA_VALUE = "multipart/form-data";
/**
* Media type for {@code multipart/mixed}.
* @since 5.2
*/
public static final MediaType MULTIPART_MIXED;
/**
* A String equivalent of {@link MediaType#MULTIPART_MIXED}.
* @since 5.2
*/
public static final String MULTIPART_MIXED_VALUE = "multipart/mixed";
/**
* Media type for {@code multipart/related}.
* @since 5.2.5
*/
public static final MediaType MULTIPART_RELATED;
/**
* A String equivalent of {@link MediaType#MULTIPART_RELATED}.
* @since 5.2.5
*/
public static final String MULTIPART_RELATED_VALUE = "multipart/related";
/**
* Media type for {@code text/event-stream}.
* @since 4.3.6
* @see <a href="https://html.spec.whatwg.org/multipage/server-sent-events.html">Server-Sent Events</a>
*/
public static final MediaType TEXT_EVENT_STREAM;
/**
* A String equivalent of {@link MediaType#TEXT_EVENT_STREAM}.
* @since 4.3.6
*/
public static final String TEXT_EVENT_STREAM_VALUE = "text/event-stream";
/**
* Media type for {@code text/html}.
*/
public static final MediaType TEXT_HTML;
/**
* A String equivalent of {@link MediaType#TEXT_HTML}.
*/
public static final String TEXT_HTML_VALUE = "text/html";
/**
* Media type for {@code text/markdown}.
* @since 4.3
*/
public static final MediaType TEXT_MARKDOWN;
/**
* A String equivalent of {@link MediaType#TEXT_MARKDOWN}.
* @since 4.3
*/
public static final String TEXT_MARKDOWN_VALUE = "text/markdown";
/**
* Media type for {@code text/plain}.
*/
public static final MediaType TEXT_PLAIN;
/**
* A String equivalent of {@link MediaType#TEXT_PLAIN}.
*/
public static final String TEXT_PLAIN_VALUE = "text/plain";
/**
* Media type for {@code text/xml}.
*/
public static final MediaType TEXT_XML;
/**
* A String equivalent of {@link MediaType#TEXT_XML}.
*/
public static final String TEXT_XML_VALUE = "text/xml";
private static final String PARAM_QUALITY_FACTOR = "q";
static {
// Not using "valueOf" to avoid static init cost
ALL = new MediaType(MimeType.WILDCARD_TYPE, MimeType.WILDCARD_TYPE);
APPLICATION_ATOM_XML = new MediaType("application", "atom+xml");
APPLICATION_CBOR = new MediaType("application", "cbor");
APPLICATION_FORM_URLENCODED = new MediaType("application", "x-www-form-urlencoded");
APPLICATION_GRAPHQL_RESPONSE = new MediaType("application", "graphql-response+json");
APPLICATION_JSON = new MediaType("application", "json");
APPLICATION_NDJSON = new MediaType("application", "x-ndjson");
APPLICATION_OCTET_STREAM = new MediaType("application", "octet-stream");
APPLICATION_PDF = new MediaType("application", "pdf");
APPLICATION_PROBLEM_JSON = new MediaType("application", "problem+json");
APPLICATION_PROBLEM_XML = new MediaType("application", "problem+xml");
APPLICATION_PROTOBUF = new MediaType("application", "x-protobuf");
APPLICATION_RSS_XML = new MediaType("application", "rss+xml");
APPLICATION_XHTML_XML = new MediaType("application", "xhtml+xml");
APPLICATION_XML = new MediaType("application", "xml");
APPLICATION_YAML = new MediaType("application", "yaml");
IMAGE_GIF = new MediaType("image", "gif");
IMAGE_JPEG = new MediaType("image", "jpeg");
IMAGE_PNG = new MediaType("image", "png");
MULTIPART_FORM_DATA = new MediaType("multipart", "form-data");
MULTIPART_MIXED = new MediaType("multipart", "mixed");
MULTIPART_RELATED = new MediaType("multipart", "related");
TEXT_EVENT_STREAM = new MediaType("text", "event-stream");
TEXT_HTML = new MediaType("text", "html");
TEXT_MARKDOWN = new MediaType("text", "markdown");
TEXT_PLAIN = new MediaType("text", "plain");
TEXT_XML = new MediaType("text", "xml");
}
/**
* Create a new {@code MediaType} for the given primary type.
* <p>The {@linkplain #getSubtype() subtype} is set to "*", parameters empty.
* @param type the primary type
* @throws IllegalArgumentException if any of the parameters contain illegal characters
*/
public MediaType(String type) {
super(type);
}
/**
* Create a new {@code MediaType} for the given primary type and subtype.
* <p>The parameters are empty.
* @param type the primary type
* @param subtype the subtype
* @throws IllegalArgumentException if any of the parameters contain illegal characters
*/
public MediaType(String type, String subtype) {
super(type, subtype, Collections.emptyMap());
}
/**
* Create a new {@code MediaType} for the given type, subtype, and character set.
* @param type the primary type
* @param subtype the subtype
* @param charset the character set
* @throws IllegalArgumentException if any of the parameters contain illegal characters
*/
public MediaType(String type, String subtype, Charset charset) {
super(type, subtype, charset);
}
/**
* Create a new {@code MediaType} for the given type, subtype, and quality value.
* @param type the primary type
* @param subtype the subtype
* @param qualityValue the quality value
* @throws IllegalArgumentException if any of the parameters contain illegal characters
*/
public MediaType(String type, String subtype, double qualityValue) {
this(type, subtype, Collections.singletonMap(PARAM_QUALITY_FACTOR, Double.toString(qualityValue)));
}
/**
* Copy-constructor that copies the type, subtype and parameters of the given
* {@code MediaType}, and allows to set the specified character set.
* @param other the other media type
* @param charset the character set
* @throws IllegalArgumentException if any of the parameters contain illegal characters
* @since 4.3
*/
public MediaType(MediaType other, Charset charset) {
super(other, charset);
}
/**
* Copy-constructor that copies the type and subtype of the given {@code MediaType},
* and allows for different parameters.
* @param other the other media type
* @param parameters the parameters, may be {@code null}
* @throws IllegalArgumentException if any of the parameters contain illegal characters
*/
public MediaType(MediaType other, @Nullable Map<String, String> parameters) {
super(other.getType(), other.getSubtype(), parameters);
}
/**
* Create a new {@code MediaType} for the given type, subtype, and parameters.
* @param type the primary type
* @param subtype the subtype
* @param parameters the parameters, may be {@code null}
* @throws IllegalArgumentException if any of the parameters contain illegal characters
*/
public MediaType(String type, String subtype, @Nullable Map<String, String> parameters) {
super(type, subtype, parameters);
}
/**
* Create a new {@code MediaType} for the given {@link MimeType}.
* The type, subtype and parameters information is copied and {@code MediaType}-specific
* checks on parameters are performed.
* @param mimeType the MIME type
* @throws IllegalArgumentException if any of the parameters contain illegal characters
* @since 5.3
*/
public MediaType(MimeType mimeType) {
super(mimeType);
getParameters().forEach(this::checkParameters);
}
@Override
protected void checkParameters(String parameter, String value) {
super.checkParameters(parameter, value);
if (PARAM_QUALITY_FACTOR.equals(parameter)) {
String unquotedValue = unquote(value);
double d = Double.parseDouble(unquotedValue);
Assert.isTrue(d >= 0D && d <= 1D,
() -> "Invalid quality value \"" + unquotedValue + "\": should be between 0.0 and 1.0");
}
}
/**
* Return the quality factor, as indicated by a {@code q} parameter, if any.
* Defaults to {@code 1.0}.
* @return the quality factor as double value
*/
public double getQualityValue() {
String qualityFactor = getParameter(PARAM_QUALITY_FACTOR);
return (qualityFactor != null ? Double.parseDouble(unquote(qualityFactor)) : 1D);
}
/**
* Indicates whether this {@code MediaType} more specific than the given type.
* <ol>
* <li>if this media type has a {@linkplain #getQualityValue() quality factor} higher than the other,
* then this method returns {@code true}.</li>
* <li>if this media type has a {@linkplain #getQualityValue() quality factor} lower than the other,
* then this method returns {@code false}.</li>
* <li>if this mime type has a {@linkplain #isWildcardType() wildcard type},
* and the other does not, then this method returns {@code false}.</li>
* <li>if this mime type does not have a {@linkplain #isWildcardType() wildcard type},
* and the other does, then this method returns {@code true}.</li>
* <li>if this mime type has a {@linkplain #isWildcardType() wildcard type},
* and the other does not, then this method returns {@code false}.</li>
* <li>if this mime type does not have a {@linkplain #isWildcardType() wildcard type},
* and the other does, then this method returns {@code true}.</li>
* <li>if the two mime types have identical {@linkplain #getType() type} and
* {@linkplain #getSubtype() subtype}, then the mime type with the most
* parameters is more specific than the other.</li>
* <li>Otherwise, this method returns {@code false}.</li>
* </ol>
* @param other the {@code MimeType} to be compared
* @return the result of the comparison
* @since 6.0
* @see #isLessSpecific(MimeType)
* @see <a href="https://tools.ietf.org/html/rfc7231#section-5.3.2">HTTP 1.1: Semantics
* and Content, section 5.3.2</a>
*/
@Override
public boolean isMoreSpecific(MimeType other) {
Assert.notNull(other, "Other must not be null");
if (other instanceof MediaType otherMediaType) {
double quality1 = getQualityValue();
double quality2 = otherMediaType.getQualityValue();
if (quality1 > quality2) {
return true;
}
else if (quality1 < quality2) {
return false;
}
}
return super.isMoreSpecific(other);
}
/**
* Indicates whether this {@code MediaType} more specific than the given type.
* <ol>
* <li>if this media type has a {@linkplain #getQualityValue() quality factor} higher than the other,
* then this method returns {@code false}.</li>
* <li>if this media type has a {@linkplain #getQualityValue() quality factor} lower than the other,
* then this method returns {@code true}.</li>
* <li>if this mime type has a {@linkplain #isWildcardType() wildcard type},
* and the other does not, then this method returns {@code true}.</li>
* <li>if this mime type does not have a {@linkplain #isWildcardType() wildcard type},
* and the other does, then this method returns {@code false}.</li>
* <li>if this mime type has a {@linkplain #isWildcardType() wildcard type},
* and the other does not, then this method returns {@code true}.</li>
* <li>if this mime type does not have a {@linkplain #isWildcardType() wildcard type},
* and the other does, then this method returns {@code false}.</li>
* <li>if the two mime types have identical {@linkplain #getType() type} and
* {@linkplain #getSubtype() subtype}, then the mime type with the least
* parameters is less specific than the other.</li>
* <li>Otherwise, this method returns {@code false}.</li>
* </ol>
* @param other the {@code MimeType} to be compared
* @return the result of the comparison
* @since 6.0
* @see #isMoreSpecific(MimeType)
* @see <a href="https://tools.ietf.org/html/rfc7231#section-5.3.2">HTTP 1.1: Semantics
* and Content, section 5.3.2</a>
*/
@Override
public boolean isLessSpecific(MimeType other) {
Assert.notNull(other, "Other must not be null");
return other.isMoreSpecific(this);
}
/**
* Indicate whether this {@code MediaType} includes the given media type.
* <p>For instance, {@code text/*} includes {@code text/plain} and {@code text/html},
* and {@code application/*+xml} includes {@code application/soap+xml}, etc.
* This method is <b>not</b> symmetric.
* <p>Simply calls {@link MimeType#includes(MimeType)} but declared with a
* {@code MediaType} parameter for binary backwards compatibility.
* @param other the reference media type with which to compare
* @return {@code true} if this media type includes the given media type;
* {@code false} otherwise
*/
public boolean includes(@Nullable MediaType other) {
return super.includes(other);
}
/**
* Indicate whether this {@code MediaType} is compatible with the given media type.
* <p>For instance, {@code text/*} is compatible with {@code text/plain},
* {@code text/html}, and vice versa. In effect, this method is similar to
* {@link #includes}, except that it <b>is</b> symmetric.
* <p>Simply calls {@link MimeType#isCompatibleWith(MimeType)} but declared with a
* {@code MediaType} parameter for binary backwards compatibility.
* @param other the reference media type with which to compare
* @return {@code true} if this media type is compatible with the given media type;
* {@code false} otherwise
*/
public boolean isCompatibleWith(@Nullable MediaType other) {
return super.isCompatibleWith(other);
}
/**
* Return a replica of this instance with the quality value of the given {@code MediaType}.
* @return the same instance if the given MediaType doesn't have a quality value,
* or a new one otherwise
*/
public MediaType copyQualityValue(MediaType mediaType) {
if (!mediaType.getParameters().containsKey(PARAM_QUALITY_FACTOR)) {
return this;
}
Map<String, String> params = new LinkedHashMap<>(getParameters());
params.put(PARAM_QUALITY_FACTOR, mediaType.getParameters().get(PARAM_QUALITY_FACTOR));
return new MediaType(this, params);
}
/**
* Return a replica of this instance with its quality value removed.
* @return the same instance if the media type doesn't contain a quality value,
* or a new one otherwise
*/
public MediaType removeQualityValue() {
if (!getParameters().containsKey(PARAM_QUALITY_FACTOR)) {
return this;
}
Map<String, String> params = new LinkedHashMap<>(getParameters());
params.remove(PARAM_QUALITY_FACTOR);
return new MediaType(this, params);
}
/**
* Parse the given String value into a {@code MediaType} object,
* with this method name following the 'valueOf' naming convention
* (as supported by {@link org.springframework.core.convert.ConversionService}).
* @param value the string to parse
* @throws InvalidMediaTypeException if the media type value cannot be parsed
* @see #parseMediaType(String)
*/
public static MediaType valueOf(String value) {
return parseMediaType(value);
}
/**
* Parse the given String into a single {@code MediaType}.
* @param mediaType the string to parse
* @return the media type
* @throws InvalidMediaTypeException if the media type value cannot be parsed
*/
public static MediaType parseMediaType(String mediaType) {
MimeType type;
try {
type = MimeTypeUtils.parseMimeType(mediaType);
}
catch (InvalidMimeTypeException ex) {
throw new InvalidMediaTypeException(ex);
}
try {
return new MediaType(type);
}
catch (IllegalArgumentException ex) {
throw new InvalidMediaTypeException(mediaType, ex.getMessage());
}
}
/**
* Parse the comma-separated string into a list of {@code MediaType} objects.
* <p>This method can be used to parse an Accept or Content-Type header.
* @param mediaTypes the string to parse
* @return the list of media types
* @throws InvalidMediaTypeException if the media type value cannot be parsed
*/
public static List<MediaType> parseMediaTypes(@Nullable String mediaTypes) {
if (!StringUtils.hasLength(mediaTypes)) {
return Collections.emptyList();
}
// Avoid using java.util.stream.Stream in hot paths
List<String> tokenizedTypes = MimeTypeUtils.tokenize(mediaTypes);
List<MediaType> result = new ArrayList<>(tokenizedTypes.size());
for (String type : tokenizedTypes) {
if (StringUtils.hasText(type)) {
result.add(parseMediaType(type));
}
}
return result;
}
/**
* Parse the given list of (potentially) comma-separated strings into a
* list of {@code MediaType} objects.
* <p>This method can be used to parse an Accept or Content-Type header.
* @param mediaTypes the string to parse
* @return the list of media types
* @throws InvalidMediaTypeException if the media type value cannot be parsed
* @since 4.3.2
*/
public static List<MediaType> parseMediaTypes(@Nullable List<String> mediaTypes) {
if (CollectionUtils.isEmpty(mediaTypes)) {
return Collections.emptyList();
}
else if (mediaTypes.size() == 1) {
return parseMediaTypes(mediaTypes.get(0));
}
else {
List<MediaType> result = new ArrayList<>(8);
for (String mediaType : mediaTypes) {
result.addAll(parseMediaTypes(mediaType));
}
return result;
}
}
/**
* Re-create the given mime types as media types.
* @since 5.0
*/
public static List<MediaType> asMediaTypes(List<MimeType> mimeTypes) {
List<MediaType> mediaTypes = new ArrayList<>(mimeTypes.size());
for (MimeType mimeType : mimeTypes) {
mediaTypes.add(MediaType.asMediaType(mimeType));
}
return mediaTypes;
}
/**
* Re-create the given mime type as a media type.
* @since 5.0
*/
public static MediaType asMediaType(MimeType mimeType) {
if (mimeType instanceof MediaType mediaType) {
return mediaType;
}
return new MediaType(mimeType.getType(), mimeType.getSubtype(), mimeType.getParameters());
}
/**
* Return a string representation of the given list of {@code MediaType} objects.
* <p>This method can be used to for an {@code Accept} or {@code Content-Type} header.
* @param mediaTypes the media types to create a string representation for
* @return the string representation
*/
public static String toString(Collection<MediaType> mediaTypes) {
return MimeTypeUtils.toString(mediaTypes);
}
}
| MediaType |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/LogVerificationAppender.java | {
"start": 1123,
"end": 2164
} | class ____ extends AppenderSkeleton {
private final List<LoggingEvent> log = new ArrayList<LoggingEvent>();
@Override
public boolean requiresLayout() {
return false;
}
@Override
protected void append(final LoggingEvent loggingEvent) {
log.add(loggingEvent);
}
@Override
public void close() {
}
public List<LoggingEvent> getLog() {
return new ArrayList<LoggingEvent>(log);
}
public int countExceptionsWithMessage(final String text) {
int count = 0;
for (LoggingEvent e: getLog()) {
ThrowableInformation t = e.getThrowableInformation();
if (t != null) {
String m = t.getThrowable().getMessage();
if (m.contains(text)) {
count++;
}
}
}
return count;
}
public int countLinesWithMessage(final String text) {
int count = 0;
for (LoggingEvent e: getLog()) {
String msg = e.getRenderedMessage();
if (msg != null && msg.contains(text)) {
count++;
}
}
return count;
}
}
| LogVerificationAppender |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/snapshot/RocksNativeFullSnapshotStrategy.java | {
"start": 2946,
"end": 5590
} | class ____ to upload state files. */
private final RocksDBStateUploader stateUploader;
public RocksNativeFullSnapshotStrategy(
@Nonnull RocksDB db,
@Nonnull ResourceGuard rocksDBResourceGuard,
@Nonnull TypeSerializer<K> keySerializer,
@Nonnull LinkedHashMap<String, RocksDbKvStateInfo> kvStateInformation,
@Nonnull KeyGroupRange keyGroupRange,
@Nonnegative int keyGroupPrefixBytes,
@Nonnull LocalRecoveryConfig localRecoveryConfig,
@Nonnull File instanceBasePath,
@Nonnull UUID backendUID,
@Nonnull RocksDBStateUploader rocksDBStateUploader) {
super(
DESCRIPTION,
db,
rocksDBResourceGuard,
keySerializer,
kvStateInformation,
keyGroupRange,
keyGroupPrefixBytes,
localRecoveryConfig,
instanceBasePath,
backendUID);
this.stateUploader = rocksDBStateUploader;
}
@Override
public SnapshotResultSupplier<KeyedStateHandle> asyncSnapshot(
NativeRocksDBSnapshotResources snapshotResources,
long checkpointId,
long timestamp,
@Nonnull CheckpointStreamFactory checkpointStreamFactory,
@Nonnull CheckpointOptions checkpointOptions) {
if (snapshotResources.stateMetaInfoSnapshots.isEmpty()) {
return registry -> SnapshotResult.empty();
}
return new RocksDBNativeFullSnapshotOperation(
checkpointId,
checkpointStreamFactory,
snapshotResources.snapshotDirectory,
snapshotResources.stateMetaInfoSnapshots);
}
@Override
public void notifyCheckpointComplete(long completedCheckpointId) {
// nothing to do
}
@Override
public void notifyCheckpointAborted(long abortedCheckpointId) {
// nothing to do
}
@Override
protected PreviousSnapshot snapshotMetaData(
long checkpointId, @Nonnull List<StateMetaInfoSnapshot> stateMetaInfoSnapshots) {
for (Map.Entry<String, RocksDbKvStateInfo> stateMetaInfoEntry :
kvStateInformation.entrySet()) {
stateMetaInfoSnapshots.add(stateMetaInfoEntry.getValue().metaInfo.snapshot());
}
return EMPTY_PREVIOUS_SNAPSHOT;
}
@Override
public void close() throws IOException {
stateUploader.close();
}
/** Encapsulates the process to perform a full snapshot of a RocksDBKeyedStateBackend. */
private final | used |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/test-selection/src/test/java/com/example/NotEnabledTest.java | {
"start": 212,
"end": 861
} | class ____ {
@Inject
MyBean myBean;
@Test
public void test() {
assertEquals("hello", myBean.hello());
}
@Test
public void executeAnyway() {
assertEquals("hello", myBean.hello());
}
@Test
public void executeAnywayAgain() {
assertEquals("hello", myBean.hello());
}
@Test
public void alwaysExecute() {
assertEquals("hello", myBean.hello());
}
@Test
public void alwaysExecuteButNotThis() {
assertEquals("hello", myBean.hello());
}
@Test
public void neverExecute() {
assertEquals("hello", myBean.hello());
}
}
| NotEnabledTest |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/dependent/listeners/PreDestroyAnotherBeanB.java | {
"start": 284,
"end": 643
} | class ____ implements BeanPreDestroyEventListener<AnotherBeanB> {
@Override
public AnotherBeanB onPreDestroy(BeanPreDestroyEvent<AnotherBeanB> event) {
TestData.DESTRUCTION_ORDER.add(AnotherBeanB.class.getSimpleName());
AnotherBeanB bean = event.getBean();
bean.destroyed = true;
return bean;
}
}
| PreDestroyAnotherBeanB |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/matchers/apachecommons/ReflectionEquals.java | {
"start": 248,
"end": 786
} | class ____ implements ArgumentMatcher<Object>, Serializable {
private final Object wanted;
private final String[] excludeFields;
public ReflectionEquals(Object wanted, String... excludeFields) {
this.wanted = wanted;
this.excludeFields = excludeFields;
}
@Override
public boolean matches(Object actual) {
return EqualsBuilder.reflectionEquals(wanted, actual, excludeFields);
}
@Override
public String toString() {
return "refEq(" + wanted + ")";
}
}
| ReflectionEquals |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/export/assembler/InterfaceBasedMBeanInfoAssemblerTests.java | {
"start": 707,
"end": 1267
} | class ____ extends AbstractJmxAssemblerTests {
@Override
protected String getObjectName() {
return "bean:name=testBean4";
}
@Override
protected int getExpectedOperationCount() {
return 7;
}
@Override
protected int getExpectedAttributeCount() {
return 2;
}
@Override
protected MBeanInfoAssembler getAssembler() {
return new InterfaceBasedMBeanInfoAssembler();
}
@Override
protected String getApplicationContextPath() {
return "org/springframework/jmx/export/assembler/interfaceAssembler.xml";
}
}
| InterfaceBasedMBeanInfoAssemblerTests |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/search/aggregate/GroupBy.java | {
"start": 728,
"end": 1140
} | interface ____ {
/**
* Defines field names used to group.
*
* @param names field names
* @return config object
*/
static GroupBy fieldNames(String... names) {
return new GroupParams(Arrays.asList(names));
}
/**
* Defines reducer objects.
*
* @param reducers reducer objects
* @return
*/
GroupBy reducers(Reducer... reducers);
}
| GroupBy |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryCoordinationAction.java | {
"start": 1740,
"end": 2064
} | class ____ {
public static final String NAME = "internal:admin/repository/verify/coordinate";
public static final ActionType<VerifyNodeRepositoryCoordinationAction.Response> TYPE = new ActionType<>(NAME);
private VerifyNodeRepositoryCoordinationAction() {}
public static | VerifyNodeRepositoryCoordinationAction |
java | quarkusio__quarkus | integration-tests/test-extension/extension/deployment/src/test/java/io/quarkus/config/StaticInitConfigSourceFactory.java | {
"start": 429,
"end": 965
} | class ____ implements ConfigSourceFactory {
@Override
public Iterable<ConfigSource> getConfigSources(final ConfigSourceContext context) {
ConfigValue value = context.getValue("skip.build.sources");
if (value.getValue() != null && value.getValue().equals("true")) {
return List.of(new MapBackedConfigSource("StaticInitConfigSource", Map.of("config.static.init.my-prop", "1234")) {
});
} else {
return Collections.emptyList();
}
}
}
| StaticInitConfigSourceFactory |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/test/java/org/springframework/boot/docs/testing/springbootapplications/jmx/MyJmxTestsTests.java | {
"start": 767,
"end": 813
} | class ____ extends MyJmxTests {
}
| MyJmxTestsTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/connections/SuppliedConnectionTest.java | {
"start": 2541,
"end": 2748
} | class ____ implements SettingProvider.Provider<String> {
@Override
public String getSetting() {
return UserSuppliedConnectionProviderImpl.class.getName();
}
}
public static | ConnectionProviderProvider |
java | elastic__elasticsearch | test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java | {
"start": 2115,
"end": 2432
} | class ____ extends AbstractSnapshotIntegTestCase {
@Override
protected boolean addMockInternalEngine() {
return false;
}
public static final String REPO_TYPE = "countingFs";
public static final LongAdder COUNTS = new LongAdder();
public static | LatencySimulatingBlobStoreRepositoryTests |
java | apache__camel | components/camel-aws/camel-aws2-sqs/src/test/java/org/apache/camel/component/aws2/sqs/SqsDoesNotExtendMessageVisibilityTest.java | {
"start": 1314,
"end": 2912
} | class ____ extends CamelTestSupport {
private static final int TIMEOUT = 4; // 4 seconds.
private static final String RECEIPT_HANDLE = "0NNAq8PwvXsyZkR6yu4nQ07FGxNmOBWi5";
@EndpointInject("mock:result")
private MockEndpoint mock;
@BindToRegistry("amazonSQSClient")
private AmazonSQSClientMock client = new AmazonSQSClientMock();
@Test
public void defaultsToDisabled() throws Exception {
this.mock.expectedMessageCount(1);
this.mock.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
// Simulate message that takes a while to receive.
Thread.sleep(TIMEOUT * 1500L); // 150% of TIMEOUT.
}
});
Message.Builder message = Message.builder();
message.body("Message 1");
message.md5OfBody("6a1559560f67c5e7a7d5d838bf0272ee");
message.messageId("f6fb6f99-5eb2-4be4-9b15-144774141458");
message.receiptHandle(RECEIPT_HANDLE);
this.client.addMessage(message.build());
// Wait for message to arrive.
MockEndpoint.assertIsSatisfied(context);
assertEquals(0, this.client.getChangeMessageVisibilityBatchRequests().size());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("aws2-sqs://MyQueue?amazonSQSClient=#amazonSQSClient").to("mock:result");
}
};
}
}
| SqsDoesNotExtendMessageVisibilityTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterNamingTest.java | {
"start": 9765,
"end": 9937
} | class ____ {
public <_T> void method(_T t) {}
}
""")
.addOutputLines(
"in/Test.java",
"""
| Test |
java | spring-projects__spring-security | oauth2/oauth2-core/src/test/java/org/springframework/security/oauth2/core/endpoint/OAuth2AuthorizationResponseTests.java | {
"start": 954,
"end": 5056
} | class ____ {
private static final String AUTH_CODE = "auth-code";
private static final String REDIRECT_URI = "https://example.com";
private static final String STATE = "state";
private static final String ERROR_CODE = "error-code";
private static final String ERROR_DESCRIPTION = "error-description";
private static final String ERROR_URI = "error-uri";
@Test
public void buildSuccessResponseWhenAuthCodeIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() ->
// @formatter:off
OAuth2AuthorizationResponse.success(null)
.redirectUri(REDIRECT_URI)
.state(STATE)
.build()
// @formatter:on
);
}
@Test
public void buildSuccessResponseWhenRedirectUriIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() ->
// @formatter:off
OAuth2AuthorizationResponse.success(AUTH_CODE)
.redirectUri(null)
.state(STATE)
.build()
// @formatter:on
);
}
@Test
public void buildSuccessResponseWhenStateIsNullThenDoesNotThrowAnyException() {
// @formatter:off
OAuth2AuthorizationResponse.success(AUTH_CODE)
.redirectUri(REDIRECT_URI)
.state(null)
.build();
// @formatter:on
}
@Test
public void buildSuccessResponseWhenAllAttributesProvidedThenAllAttributesAreSet() {
// @formatter:off
OAuth2AuthorizationResponse authorizationResponse = OAuth2AuthorizationResponse.success(AUTH_CODE)
.redirectUri(REDIRECT_URI)
.state(STATE)
.build();
assertThat(authorizationResponse.getCode())
.isEqualTo(AUTH_CODE);
assertThat(authorizationResponse.getRedirectUri())
.isEqualTo(REDIRECT_URI);
assertThat(authorizationResponse.getState())
.isEqualTo(STATE);
// @formatter:on
}
@Test
public void buildSuccessResponseWhenErrorCodeIsSetThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() ->
// @formatter:off
OAuth2AuthorizationResponse.success(AUTH_CODE)
.redirectUri(REDIRECT_URI)
.state(STATE)
.errorCode(ERROR_CODE)
.build()
// @formatter:on
);
}
@Test
public void buildErrorResponseWhenErrorCodeIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() ->
// @formatter:off
OAuth2AuthorizationResponse.error(null)
.redirectUri(REDIRECT_URI)
.state(STATE)
.build()
// @formatter:on
);
}
@Test
public void buildErrorResponseWhenRedirectUriIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() ->
// @formatter:off
OAuth2AuthorizationResponse.error(ERROR_CODE)
.redirectUri(null)
.state(STATE)
.build()
// @formatter:on
);
}
@Test
public void buildErrorResponseWhenStateIsNullThenDoesNotThrowAnyException() {
// @formatter:off
OAuth2AuthorizationResponse.error(ERROR_CODE)
.redirectUri(REDIRECT_URI)
.state(null)
.build();
// @formatter:on
}
@Test
public void buildErrorResponseWhenAllAttributesProvidedThenAllAttributesAreSet() {
// @formatter:off
OAuth2AuthorizationResponse authorizationResponse = OAuth2AuthorizationResponse.error(ERROR_CODE)
.errorDescription(ERROR_DESCRIPTION)
.errorUri(ERROR_URI)
.redirectUri(REDIRECT_URI)
.state(STATE)
.build();
assertThat(authorizationResponse.getError().getErrorCode())
.isEqualTo(ERROR_CODE);
assertThat(authorizationResponse.getError().getDescription())
.isEqualTo(ERROR_DESCRIPTION);
assertThat(authorizationResponse.getError().getUri())
.isEqualTo(ERROR_URI);
assertThat(authorizationResponse.getRedirectUri())
.isEqualTo(REDIRECT_URI);
assertThat(authorizationResponse.getState())
.isEqualTo(STATE);
// @formatter:on
}
@Test
public void buildErrorResponseWhenAuthCodeIsSetThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() ->
// @formatter:off
OAuth2AuthorizationResponse.error(ERROR_CODE)
.redirectUri(REDIRECT_URI)
.state(STATE)
.code(AUTH_CODE)
.build()
// @formatter:on
);
}
}
| OAuth2AuthorizationResponseTests |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/remote/client/grpc/DefaultGrpcClientConfig.java | {
"start": 1123,
"end": 5655
} | class ____ implements GrpcClientConfig {
private String name;
private final int retryTimes;
private final long timeOutMills;
private final long connectionKeepAlive;
private final long channelKeepAliveTimeout;
private final long threadPoolKeepAlive;
private final int threadPoolCoreSize;
private final int threadPoolMaxSize;
private final long serverCheckTimeOut;
private final int threadPoolQueueSize;
private final int maxInboundMessageSize;
private final int channelKeepAlive;
private final int healthCheckRetryTimes;
private final long healthCheckTimeOut;
private final long capabilityNegotiationTimeout;
private final boolean allowCoreThreadTimeOut;
private final Map<String, String> labels;
private RpcClientTlsConfig tlsConfig = new RpcClientTlsConfig();
/**
* constructor.
*
* @param builder builder of DefaultGrpcClientConfig builder.
*/
private DefaultGrpcClientConfig(Builder builder) {
this.name = builder.name;
this.retryTimes = builder.retryTimes;
this.timeOutMills = builder.timeOutMills;
this.connectionKeepAlive = builder.connectionKeepAlive;
this.threadPoolKeepAlive = builder.threadPoolKeepAlive;
this.threadPoolCoreSize = builder.threadPoolCoreSize;
this.threadPoolMaxSize = builder.threadPoolMaxSize;
this.serverCheckTimeOut = builder.serverCheckTimeOut;
this.threadPoolQueueSize = builder.threadPoolQueueSize;
this.maxInboundMessageSize = builder.maxInboundMessageSize;
this.channelKeepAlive = builder.channelKeepAlive;
this.healthCheckRetryTimes = builder.healthCheckRetryTimes;
this.healthCheckTimeOut = builder.healthCheckTimeOut;
this.channelKeepAliveTimeout = builder.channelKeepAliveTimeout;
this.capabilityNegotiationTimeout = builder.capabilityNegotiationTimeout;
this.allowCoreThreadTimeOut = builder.allowCoreThreadTimeOut;
this.labels = builder.labels;
this.labels.put("tls.enable", "false");
if (Objects.nonNull(builder.tlsConfig)) {
this.tlsConfig = builder.tlsConfig;
if (Objects.nonNull(builder.tlsConfig.getEnableTls()) && builder.tlsConfig.getEnableTls()) {
this.labels.put("tls.enable", "true");
}
}
}
@Override
public String name() {
return this.name;
}
@Override
public int retryTimes() {
return retryTimes;
}
@Override
public long timeOutMills() {
return timeOutMills;
}
@Override
public long connectionKeepAlive() {
return connectionKeepAlive;
}
@Override
public int threadPoolCoreSize() {
return threadPoolCoreSize;
}
@Override
public int threadPoolMaxSize() {
return threadPoolMaxSize;
}
@Override
public long threadPoolKeepAlive() {
return threadPoolKeepAlive;
}
@Override
public long serverCheckTimeOut() {
return serverCheckTimeOut;
}
@Override
public int threadPoolQueueSize() {
return threadPoolQueueSize;
}
@Override
public int maxInboundMessageSize() {
return maxInboundMessageSize;
}
@Override
public int channelKeepAlive() {
return channelKeepAlive;
}
@Override
public long channelKeepAliveTimeout() {
return channelKeepAliveTimeout;
}
@Override
public TlsConfig tlsConfig() {
return tlsConfig;
}
public void setTlsConfig(RpcClientTlsConfig tlsConfig) {
this.tlsConfig = tlsConfig;
}
public void setName(String name) {
this.name = name;
}
@Override
public long capabilityNegotiationTimeout() {
return this.capabilityNegotiationTimeout;
}
@Override
public boolean allowCoreThreadTimeOut() {
return this.allowCoreThreadTimeOut;
}
@Override
public int healthCheckRetryTimes() {
return healthCheckRetryTimes;
}
@Override
public long healthCheckTimeOut() {
return healthCheckTimeOut;
}
@Override
public Map<String, String> labels() {
return this.labels;
}
public static Builder newBuilder() {
return new Builder();
}
public static | DefaultGrpcClientConfig |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.