language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/JeeConfigurerTests.java | {
"start": 7813,
"end": 8210
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().hasRole("USER")
)
.jee((jee) -> jee
.mappableRoles("USER")
);
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
public static | JeeMappableRolesConfig |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java | {
"start": 1909,
"end": 1976
} | class ____ {
// private constructor for utility | DataGeneratorForTest |
java | spring-projects__spring-boot | module/spring-boot-graphql-test/src/main/java/org/springframework/boot/graphql/test/autoconfigure/tester/AutoConfigureHttpGraphQlTester.java | {
"start": 1210,
"end": 1516
} | class ____ enable an {@link HttpGraphQlTester}.
*
* @author Brian Clozel
* @since 4.0.0
* @see HttpGraphQlTesterAutoConfiguration
*/
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@AutoConfigureWebTestClient
@ImportAutoConfiguration
public @ | to |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateTable_refactor_test.java | {
"start": 433,
"end": 3620
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "create table test001\n" +
"(" +
"a varchar(10)," +
"b varchar(10)," +
"index c using btree (a(4) desc) comment 'hehe' key_block_size 4," +
"fulltext d (b) with parser ngram," +
"constraint symb primary key (a)," +
"constraint symb unique key e (b)," +
"constraint symb foreign key (b) references tb (a)" +
")" +
"auto_increment 1\n" +
"avg_row_length 1\n" +
"default character set utf8\n" +
"checksum 0\n" +
"default collate utf8_unicode_ci\n" +
"comment 'hehe'\n" +
"compression 'LZ4'\n" +
"connection 'conn'\n" +
"index directory 'path'\n" +
"delay_key_write 1\n" +
"encryption 'N'\n" +
"engine innodb\n" +
"insert_method no\n" +
"key_block_size 32\n" +
"max_rows 999\n" +
"min_rows 1\n" +
"pack_keys default\n" +
"password 'psw'\n" +
"row_format dynamic\n" +
"stats_auto_recalc default\n" +
"stats_persistent default\n" +
"stats_sample_pages 10\n" +
"tablespace `tbs_name` storage memory\n" +
"union (tb1,tb2,tb3)\n" +
"auto_increment 1,\n" +
"dbpartition by hash(a) tbpartition by hash(b) tbpartitions 4 partition by hash(b)";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
assertEquals(1, statementList.size());
assertEquals("CREATE TABLE test001 (\n" +
"\ta varchar(10),\n" +
"\tb varchar(10),\n" +
"\tINDEX c USING btree(a(4) DESC) KEY_BLOCK_SIZE = 4 COMMENT 'hehe',\n" +
"\tFULLTEXT INDEX d(b) WITH PARSER ngram,\n" +
"\tPRIMARY KEY (a),\n" +
"\tUNIQUE KEY e (b),\n" +
"\tCONSTRAINT symb FOREIGN KEY (b) REFERENCES tb (a)\n" +
") AUTO_INCREMENT = 1 AVG_ROW_LENGTH = 1 CHARACTER SET = utf8 CHECKSUM = 0 COLLATE = utf8_unicode_ci COMPRESSION = 'LZ4' CONNECTION = 'conn' INDEX DIRECTORY = 'path' DELAY_KEY_WRITE = 1 ENCRYPTION = 'N' ENGINE = innodb INSERT_METHOD = no KEY_BLOCK_SIZE = 32 MAX_ROWS = 999 MIN_ROWS = 1 PACK_KEYS = DEFAULT PASSWORD = 'psw' ROW_FORMAT = dynamic STATS_AUTO_RECALC = DEFAULT STATS_PERSISTENT = DEFAULT STATS_SAMPLE_PAGES = 10 TABLESPACE `tbs_name` STORAGE memory UNION = (tb1, tb2, tb3) AUTO_INCREMENT = 1 COMMENT 'hehe'\n" +
"PARTITION BY HASH (b)\n" +
"DBPARTITION BY hash(a)\n" +
"TBPARTITION BY hash(b) TBPARTITIONS 4",
SQLUtils.toMySqlString(statementList.get(0)));
}
}
| MySqlCreateTable_refactor_test |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java | {
"start": 52384,
"end": 54604
} | class ____<T> extends IsEqual<T> {
@SuppressWarnings("unchecked")
IsEqualIgnoringIds(T operand) {
super((T) ignoreIds(operand));
}
@Override
public boolean matches(Object actualValue) {
return super.matches(ignoreIds(actualValue));
}
}
public static Object ignoreIds(Object node) {
return switch (node) {
case Expression expression -> ignoreIdsInExpression(expression);
case LogicalPlan plan -> ignoreIdsInLogicalPlan(plan);
case PhysicalPlan pplan -> ignoreIdsInPhysicalPlan(pplan);
case List<?> list -> list.stream().map(EsqlTestUtils::ignoreIds).toList();
case null, default -> node;
};
}
private static final NameId DUMMY_ID = new NameId();
private static Expression ignoreIdsInExpression(Expression expression) {
return expression.transformDown(
NamedExpression.class,
ne -> ne instanceof Alias alias ? alias.withId(DUMMY_ID) : ne instanceof Attribute attr ? attr.withId(DUMMY_ID) : ne
);
}
private static LogicalPlan ignoreIdsInLogicalPlan(LogicalPlan plan) {
if (plan instanceof Explain explain) {
return new Explain(explain.source(), ignoreIdsInLogicalPlan(explain.query()));
}
return plan.transformExpressionsDown(
NamedExpression.class,
ne -> ne instanceof Alias alias ? alias.withId(DUMMY_ID) : ne instanceof Attribute attr ? attr.withId(DUMMY_ID) : ne
);
}
private static PhysicalPlan ignoreIdsInPhysicalPlan(PhysicalPlan plan) {
PhysicalPlan ignoredInPhysicalNodes = plan.transformExpressionsDown(
NamedExpression.class,
ne -> ne instanceof Alias alias ? alias.withId(DUMMY_ID) : ne instanceof Attribute attr ? attr.withId(DUMMY_ID) : ne
);
return ignoredInPhysicalNodes.transformDown(FragmentExec.class, fragmentExec -> {
LogicalPlan fragment = fragmentExec.fragment();
LogicalPlan ignoredInFragment = ignoreIdsInLogicalPlan(fragment);
return fragmentExec.withFragment(ignoredInFragment);
});
}
}
| IsEqualIgnoringIds |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/statistics/TestErrorCodeMapping.java | {
"start": 2359,
"end": 3280
} | class ____ extends AbstractHadoopTestBase {
/**
* Parameterization.
*/
public static Collection<Object[]> params() {
return Arrays.asList(new Object[][]{
{200, null},
{302, null},
{SC_400_BAD_REQUEST, HTTP_RESPONSE_400},
{SC_404_NOT_FOUND, null},
{416, HTTP_RESPONSE_4XX},
{SC_429_TOO_MANY_REQUESTS_GCS, HTTP_RESPONSE_503},
{SC_500_INTERNAL_SERVER_ERROR, HTTP_RESPONSE_500},
{SC_503_SERVICE_UNAVAILABLE, HTTP_RESPONSE_503},
{510, HTTP_RESPONSE_5XX},
});
}
private final int code;
private final String name;
public TestErrorCodeMapping(final int code, final String name) {
this.code = code;
this.name = name;
}
@Test
public void testMapping() {
assertThat(mapErrorStatusCodeToStatisticName(code))
.describedAs("Mapping of status code %d", code)
.isEqualTo(name);
}
}
| TestErrorCodeMapping |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/transformation/AnnotationsTransformer.java | {
"start": 866,
"end": 2833
} | interface ____ extends AnnotationTransformation {
int DEFAULT_PRIORITY = 1000;
static int compare(AnnotationsTransformer e1, AnnotationsTransformer e2) {
return Integer.compare(e2.getPriority(), e1.getPriority());
}
/**
* Processors with higher priority are called first.
*
* @return the priority
*/
default int getPriority() {
return DEFAULT_PRIORITY;
}
/**
* By default, the transformation is applied to all kinds of targets.
*
* @param kind
* @return {@code true} if the transformation applies to the specified kind, {@code false} otherwise
*/
default boolean appliesTo(Kind kind) {
return true;
}
/**
*
* @param transformationContext
*/
void transform(TransformationContext transformationContext);
// ---
// implementation of `AnnotationTransformation` methods
@Override
default int priority() {
return getPriority();
}
@Override
default boolean supports(Kind kind) {
return appliesTo(kind);
}
@Override
default void apply(AnnotationTransformation.TransformationContext context) {
transform(new TransformationContext() {
@Override
public AnnotationTarget getTarget() {
return context.declaration();
}
@Override
public Collection<AnnotationInstance> getAnnotations() {
return context.annotations();
}
@Override
public Transformation transform() {
return new Transformation(context);
}
});
}
@Override
default boolean requiresCompatibleMode() {
return true;
}
// ---
/**
*
* @return a new builder instance
*/
static Builder builder() {
return new Builder();
}
/**
* A transformation context.
*/
| AnnotationsTransformer |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/model/DefaultDependencyManagementImporter.java | {
"start": 1855,
"end": 8275
} | class ____ implements DependencyManagementImporter {
@Override
public Model importManagement(
Model target,
List<? extends DependencyManagement> sources,
ModelBuilderRequest request,
ModelProblemCollector problems) {
if (sources != null && !sources.isEmpty()) {
Map<String, Dependency> dependencies = new LinkedHashMap<>();
DependencyManagement depMgmt = target.getDependencyManagement();
if (depMgmt != null) {
for (Dependency dependency : depMgmt.getDependencies()) {
dependencies.put(dependency.getManagementKey(), dependency);
}
} else {
depMgmt = DependencyManagement.newInstance();
}
Set<String> directDependencies = new HashSet<>(dependencies.keySet());
for (DependencyManagement source : sources) {
for (Dependency dependency : source.getDependencies()) {
String key = dependency.getManagementKey();
Dependency present = dependencies.putIfAbsent(key, dependency);
if (present != null && !equals(dependency, present) && !directDependencies.contains(key)) {
// TODO: https://issues.apache.org/jira/browse/MNG-8004
problems.add(
Severity.WARNING,
Version.V40,
"Ignored POM import for: " + toString(dependency) + " as already imported "
+ toString(present) + ". Add the conflicting managed dependency directly "
+ "to the dependencyManagement section of the POM.");
}
if (present == null && request.isLocationTracking()) {
Dependency updatedDependency = updateWithImportedFrom(dependency, source);
dependencies.put(key, updatedDependency);
}
}
}
return target.withDependencyManagement(depMgmt.withDependencies(dependencies.values()));
}
return target;
}
private String toString(Dependency dependency) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder
.append(dependency.getGroupId())
.append(":")
.append(dependency.getArtifactId())
.append(":")
.append(dependency.getType());
if (dependency.getClassifier() != null && !dependency.getClassifier().isEmpty()) {
stringBuilder.append(":").append(dependency.getClassifier());
}
stringBuilder
.append(":")
.append(dependency.getVersion())
.append("@")
.append(dependency.getScope() == null ? "compile" : dependency.getScope());
if (dependency.isOptional()) {
stringBuilder.append("[optional]");
}
if (!dependency.getExclusions().isEmpty()) {
stringBuilder.append("[").append(dependency.getExclusions().size()).append(" exclusions]");
}
return stringBuilder.toString();
}
private boolean equals(Dependency d1, Dependency d2) {
return Objects.equals(d1.getGroupId(), d2.getGroupId())
&& Objects.equals(d1.getArtifactId(), d2.getArtifactId())
&& Objects.equals(d1.getVersion(), d2.getVersion())
&& Objects.equals(d1.getType(), d2.getType())
&& Objects.equals(d1.getClassifier(), d2.getClassifier())
&& Objects.equals(d1.getScope(), d2.getScope())
&& Objects.equals(d1.getSystemPath(), d2.getSystemPath())
&& Objects.equals(d1.getOptional(), d2.getOptional())
&& equals(d1.getExclusions(), d2.getExclusions());
}
private boolean equals(Collection<Exclusion> ce1, Collection<Exclusion> ce2) {
if (ce1.size() == ce2.size()) {
Iterator<Exclusion> i1 = ce1.iterator();
Iterator<Exclusion> i2 = ce2.iterator();
while (i1.hasNext() && i2.hasNext()) {
if (!equals(i1.next(), i2.next())) {
return false;
}
}
return !i1.hasNext() && !i2.hasNext();
}
return false;
}
private boolean equals(Exclusion e1, Exclusion e2) {
return Objects.equals(e1.getGroupId(), e2.getGroupId())
&& Objects.equals(e1.getArtifactId(), e2.getArtifactId());
}
static Dependency updateWithImportedFrom(Dependency dependency, DependencyManagement bom) {
// We are only interested in the InputSource, so the location of the <dependency> element is sufficient
InputLocation dependencyLocation = dependency.getLocation("");
InputLocation bomLocation = bom.getLocation("");
if (dependencyLocation == null || bomLocation == null) {
return dependency;
}
InputSource dependencySource = dependencyLocation.getSource();
InputSource bomSource = bomLocation.getSource();
// If the dependency and BOM have the same source, it means we found the root where the dependency is declared.
if (dependencySource == null
|| bomSource == null
|| Objects.equals(dependencySource.getModelId(), bomSource.getModelId())) {
return Dependency.newBuilder(dependency, true)
.importedFrom(bomLocation)
.build();
}
while (dependencySource.getImportedFrom() != null) {
InputLocation importedFrom = dependencySource.getImportedFrom();
// Stop if the BOM is already in the list, no update necessary
if (Objects.equals(importedFrom.getSource().getModelId(), bomSource.getModelId())) {
return dependency;
}
dependencySource = importedFrom.getSource();
}
// We modify the input location that is used for the whole file.
// This is likely correct because the POM hierarchy applies to the whole POM, not just one dependency.
return Dependency.newBuilder(dependency, true).importedFrom(bomLocation).build();
}
}
| DefaultDependencyManagementImporter |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/subclassmapping/SubclassOrderWarningMapper.java | {
"start": 887,
"end": 1338
} | interface ____ {
SubclassOrderWarningMapper INSTANCE = Mappers.getMapper( SubclassOrderWarningMapper.class );
VehicleCollectionDto map(VehicleCollection vehicles);
@SubclassMapping( source = Car.class, target = CarDto.class )
@SubclassMapping( source = HatchBack.class, target = HatchBackDto.class )
@Mapping( source = "vehicleManufacturingCompany", target = "maker")
VehicleDto map(Vehicle vehicle);
}
| SubclassOrderWarningMapper |
java | elastic__elasticsearch | modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java | {
"start": 2621,
"end": 8580
} | class ____ extends ESTestCase {
private long now;
private ClusterService clusterService;
private TestThreadPool threadPool;
private CopyOnWriteArrayList<UpdateHealthInfoCacheAction.Request> clientSeenRequests;
private DataStreamLifecycleHealthInfoPublisher dslHealthInfoPublisher;
private final DiscoveryNode node1 = DiscoveryNodeUtils.builder("node_1")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE))
.build();
private final DiscoveryNode node2 = DiscoveryNodeUtils.builder("node_2")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE))
.build();
private final DiscoveryNode[] allNodes = new DiscoveryNode[] { node1, node2 };
private DataStreamLifecycleErrorStore errorStore;
@Before
public void setupServices() {
threadPool = new TestThreadPool(getTestName());
Set<Setting<?>> builtInClusterSettings = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
builtInClusterSettings.add(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL_SETTING);
builtInClusterSettings.add(DATA_STREAM_MERGE_POLICY_TARGET_FLOOR_SEGMENT_SETTING);
builtInClusterSettings.add(DATA_STREAM_MERGE_POLICY_TARGET_FACTOR_SETTING);
builtInClusterSettings.add(DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING);
ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, builtInClusterSettings);
clusterService = createClusterService(threadPool, clusterSettings);
now = System.currentTimeMillis();
clientSeenRequests = new CopyOnWriteArrayList<>();
final Client client = getTransportRequestsRecordingClient();
errorStore = new DataStreamLifecycleErrorStore(() -> now);
dslHealthInfoPublisher = new DataStreamLifecycleHealthInfoPublisher(Settings.EMPTY, client, clusterService, errorStore);
}
@After
public void cleanup() {
clientSeenRequests.clear();
clusterService.close();
threadPool.shutdownNow();
}
public void testPublishDslErrorEntries() {
final var projectId = randomProjectIdOrDefault();
for (int i = 0; i < 11; i++) {
errorStore.recordError(projectId, "testIndexOverSignalThreshold", new NullPointerException("ouch"));
}
errorStore.recordError(projectId, "testIndex", new IllegalStateException("bad state"));
ClusterState stateWithHealthNode = ClusterStateCreationUtils.state(node1, node1, node1, allNodes);
ClusterServiceUtils.setState(clusterService, stateWithHealthNode);
dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {}
@Override
public void onFailure(Exception e) {
}
});
assertThat(clientSeenRequests.size(), is(1));
DataStreamLifecycleHealthInfo dslHealthInfo = clientSeenRequests.get(0).getDslHealthInfo();
assertThat(dslHealthInfo, is(notNullValue()));
List<DslErrorInfo> dslErrorsInfo = dslHealthInfo.dslErrorsInfo();
assertThat(dslErrorsInfo.size(), is(1));
assertThat(dslErrorsInfo.get(0).indexName(), is("testIndexOverSignalThreshold"));
assertThat(dslErrorsInfo.get(0).projectId(), is(projectId));
assertThat(dslHealthInfo.totalErrorEntriesCount(), is(2));
}
public void testPublishDslErrorEntriesNoHealthNode() {
final var projectId = randomProjectIdOrDefault();
// no requests are being executed
for (int i = 0; i < 11; i++) {
errorStore.recordError(projectId, "testIndexOverSignalThreshold", new NullPointerException("ouch"));
}
errorStore.recordError(projectId, "testIndex", new IllegalStateException("bad state"));
ClusterState stateNoHealthNode = ClusterStateCreationUtils.state(node1, node1, null, allNodes);
ClusterServiceUtils.setState(clusterService, stateNoHealthNode);
dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {}
@Override
public void onFailure(Exception e) {
}
});
assertThat(clientSeenRequests.size(), is(0));
}
public void testPublishDslErrorEntriesEmptyErrorStore() {
// publishes the empty error store (this is the "back to healthy" state where all errors have been fixed)
ClusterState state = ClusterStateCreationUtils.state(node1, node1, node1, allNodes);
ClusterServiceUtils.setState(clusterService, state);
dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {}
@Override
public void onFailure(Exception e) {
}
});
assertThat(clientSeenRequests.size(), is(1));
DataStreamLifecycleHealthInfo dslHealthInfo = clientSeenRequests.get(0).getDslHealthInfo();
assertThat(dslHealthInfo, is(notNullValue()));
List<DslErrorInfo> dslErrorsInfo = dslHealthInfo.dslErrorsInfo();
assertThat(dslErrorsInfo.size(), is(0));
assertThat(dslHealthInfo.totalErrorEntriesCount(), is(0));
}
private Client getTransportRequestsRecordingClient() {
return new NoOpClient(threadPool) {
@Override
protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
clientSeenRequests.add((UpdateHealthInfoCacheAction.Request) request);
}
};
}
}
| DataStreamLifecycleHealthInfoPublisherTests |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/AnnotationUtils.java | {
"start": 7876,
"end": 8077
} | class ____ which to search for the annotation; may be {@code null}
* @param annotationType the annotation type to search for; never {@code null}
* @param searchEnclosingClasses whether the enclosing | on |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/Serializers.java | {
"start": 9417,
"end": 9517
} | class ____ that
* sub-classes only need to override methods they need.
*/
public static | so |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java | {
"start": 1352,
"end": 1391
} | class ____ unit tests.
*/
public final | for |
java | apache__spark | sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java | {
"start": 48401,
"end": 49742
} | class ____ extends DecimalUpdater {
private final int parquetScale;
IntegerToDecimalUpdater(ColumnDescriptor descriptor, DecimalType sparkType) {
super(sparkType);
LogicalTypeAnnotation typeAnnotation =
descriptor.getPrimitiveType().getLogicalTypeAnnotation();
if (typeAnnotation instanceof DecimalLogicalTypeAnnotation) {
this.parquetScale = ((DecimalLogicalTypeAnnotation) typeAnnotation).getScale();
} else {
this.parquetScale = 0;
}
}
@Override
public void skipValues(int total, VectorizedValuesReader valuesReader) {
valuesReader.skipIntegers(total);
}
@Override
public void readValue(
int offset,
WritableColumnVector values,
VectorizedValuesReader valuesReader) {
BigDecimal decimal = BigDecimal.valueOf(valuesReader.readInteger(), parquetScale);
writeDecimal(offset, values, decimal);
}
@Override
public void decodeSingleDictionaryId(
int offset,
WritableColumnVector values,
WritableColumnVector dictionaryIds,
Dictionary dictionary) {
BigDecimal decimal =
BigDecimal.valueOf(dictionary.decodeToInt(dictionaryIds.getDictId(offset)), parquetScale);
writeDecimal(offset, values, decimal);
}
}
private static | IntegerToDecimalUpdater |
java | netty__netty | transport/src/main/java/io/netty/channel/embedded/EmbeddedChannel.java | {
"start": 2292,
"end": 34921
} | enum ____ { OPEN, ACTIVE, CLOSED }
private static final InternalLogger logger = InternalLoggerFactory.getInstance(EmbeddedChannel.class);
private static final ChannelMetadata METADATA_NO_DISCONNECT = new ChannelMetadata(false);
private static final ChannelMetadata METADATA_DISCONNECT = new ChannelMetadata(true);
private final EmbeddedEventLoop loop;
private final ChannelFutureListener recordExceptionListener = new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
recordException(future);
}
};
private final ChannelMetadata metadata;
private final ChannelConfig config;
private Queue<Object> inboundMessages;
private Queue<Object> outboundMessages;
private Throwable lastException;
private State state;
private int executingStackCnt;
private boolean cancelRemainingScheduledTasks;
/**
* Create a new instance with an {@link EmbeddedChannelId} and an empty pipeline.
*/
public EmbeddedChannel() {
this(builder());
}
/**
* Create a new instance with the specified ID and an empty pipeline.
*
* @param channelId the {@link ChannelId} that will be used to identify this channel
*/
public EmbeddedChannel(ChannelId channelId) {
this(builder().channelId(channelId));
}
/**
* Create a new instance with the pipeline initialized with the specified handlers.
*
* @param handlers the {@link ChannelHandler}s which will be add in the {@link ChannelPipeline}
*/
public EmbeddedChannel(ChannelHandler... handlers) {
this(builder().handlers(handlers));
}
/**
* Create a new instance with the pipeline initialized with the specified handlers.
*
* @param hasDisconnect {@code false} if this {@link Channel} will delegate {@link #disconnect()}
* to {@link #close()}, {@code true} otherwise.
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(boolean hasDisconnect, ChannelHandler... handlers) {
this(builder().hasDisconnect(hasDisconnect).handlers(handlers));
}
/**
* Create a new instance with the pipeline initialized with the specified handlers.
*
* @param register {@code true} if this {@link Channel} is registered to the {@link EventLoop} in the
* constructor. If {@code false} the user will need to call {@link #register()}.
* @param hasDisconnect {@code false} if this {@link Channel} will delegate {@link #disconnect()}
* to {@link #close()}, {@code true} otherwise.
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(boolean register, boolean hasDisconnect, ChannelHandler... handlers) {
this(builder().register(register).hasDisconnect(hasDisconnect).handlers(handlers));
}
/**
* Create a new instance with the channel ID set to the given ID and the pipeline
* initialized with the specified handlers.
*
* @param channelId the {@link ChannelId} that will be used to identify this channel
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(ChannelId channelId, ChannelHandler... handlers) {
this(builder().channelId(channelId).handlers(handlers));
}
/**
* Create a new instance with the channel ID set to the given ID and the pipeline
* initialized with the specified handlers.
*
* @param channelId the {@link ChannelId} that will be used to identify this channel
* @param hasDisconnect {@code false} if this {@link Channel} will delegate {@link #disconnect()}
* to {@link #close()}, {@code true} otherwise.
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(ChannelId channelId, boolean hasDisconnect, ChannelHandler... handlers) {
this(builder().channelId(channelId).hasDisconnect(hasDisconnect).handlers(handlers));
}
/**
* Create a new instance with the channel ID set to the given ID and the pipeline
* initialized with the specified handlers.
*
* @param channelId the {@link ChannelId} that will be used to identify this channel
* @param register {@code true} if this {@link Channel} is registered to the {@link EventLoop} in the
* constructor. If {@code false} the user will need to call {@link #register()}.
* @param hasDisconnect {@code false} if this {@link Channel} will delegate {@link #disconnect()}
* to {@link #close()}, {@code true} otherwise.
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(ChannelId channelId, boolean register, boolean hasDisconnect,
ChannelHandler... handlers) {
this(builder().channelId(channelId).register(register).hasDisconnect(hasDisconnect).handlers(handlers));
}
/**
* Create a new instance with the channel ID set to the given ID and the pipeline
* initialized with the specified handlers.
*
* @param parent the parent {@link Channel} of this {@link EmbeddedChannel}.
* @param channelId the {@link ChannelId} that will be used to identify this channel
* @param register {@code true} if this {@link Channel} is registered to the {@link EventLoop} in the
* constructor. If {@code false} the user will need to call {@link #register()}.
* @param hasDisconnect {@code false} if this {@link Channel} will delegate {@link #disconnect()}
* to {@link #close()}, {@code true} otherwise.
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(Channel parent, ChannelId channelId, boolean register, boolean hasDisconnect,
final ChannelHandler... handlers) {
this(builder()
.parent(parent)
.channelId(channelId)
.register(register)
.hasDisconnect(hasDisconnect)
.handlers(handlers));
}
/**
* Create a new instance with the channel ID set to the given ID and the pipeline
* initialized with the specified handlers.
*
* @param channelId the {@link ChannelId} that will be used to identify this channel
* @param hasDisconnect {@code false} if this {@link Channel} will delegate {@link #disconnect()}
* to {@link #close()}, {@code true} otherwise.
* @param config the {@link ChannelConfig} which will be returned by {@link #config()}.
* @param handlers the {@link ChannelHandler}s which will be added to the {@link ChannelPipeline}
*/
public EmbeddedChannel(ChannelId channelId, boolean hasDisconnect, final ChannelConfig config,
final ChannelHandler... handlers) {
this(builder().channelId(channelId).hasDisconnect(hasDisconnect).config(config).handlers(handlers));
}
/**
* Create a new instance with the configuration from the given builder. This method is {@code protected} for use by
* subclasses; Otherwise, please use {@link Builder#build()}.
*
* @param builder The builder
*/
protected EmbeddedChannel(Builder builder) {
super(builder.parent, builder.channelId);
loop = new EmbeddedEventLoop(builder.ticker == null ? new EmbeddedEventLoop.FreezableTicker() : builder.ticker);
metadata = metadata(builder.hasDisconnect);
config = builder.config == null ? new DefaultChannelConfig(this) : builder.config;
if (builder.handler == null) {
setup(builder.register, builder.handlers);
} else {
setup(builder.register, builder.handler);
}
}
private static ChannelMetadata metadata(boolean hasDisconnect) {
return hasDisconnect ? METADATA_DISCONNECT : METADATA_NO_DISCONNECT;
}
private void setup(boolean register, final ChannelHandler... handlers) {
ChannelPipeline p = pipeline();
p.addLast(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) {
ChannelPipeline pipeline = ch.pipeline();
for (ChannelHandler h: handlers) {
if (h == null) {
break;
}
pipeline.addLast(h);
}
}
});
if (register) {
ChannelFuture future = loop.register(this);
assert future.isDone();
}
}
private void setup(boolean register, final ChannelHandler handler) {
ChannelPipeline p = pipeline();
p.addLast(handler);
if (register) {
ChannelFuture future = loop.register(this);
assert future.isDone();
}
}
/**
* Register this {@code Channel} on its {@link EventLoop}.
*/
public void register() throws Exception {
ChannelFuture future = loop.register(this);
assert future.isDone();
Throwable cause = future.cause();
if (cause != null) {
PlatformDependent.throwException(cause);
}
}
@Override
protected final DefaultChannelPipeline newChannelPipeline() {
return new EmbeddedChannelPipeline(this);
}
@Override
public ChannelMetadata metadata() {
return metadata;
}
@Override
public ChannelConfig config() {
return config;
}
@Override
public boolean isOpen() {
return state != State.CLOSED;
}
@Override
public boolean isActive() {
return state == State.ACTIVE;
}
/**
* Returns the {@link Queue} which holds all the {@link Object}s that were received by this {@link Channel}.
*/
public Queue<Object> inboundMessages() {
if (inboundMessages == null) {
inboundMessages = new ArrayDeque<Object>();
}
return inboundMessages;
}
/**
* @deprecated use {@link #inboundMessages()}
*/
@Deprecated
public Queue<Object> lastInboundBuffer() {
return inboundMessages();
}
/**
* Returns the {@link Queue} which holds all the {@link Object}s that were written by this {@link Channel}.
*/
public Queue<Object> outboundMessages() {
if (outboundMessages == null) {
outboundMessages = new ArrayDeque<Object>();
}
return outboundMessages;
}
/**
* @deprecated use {@link #outboundMessages()}
*/
@Deprecated
public Queue<Object> lastOutboundBuffer() {
return outboundMessages();
}
/**
* Return received data from this {@link Channel}
*/
@SuppressWarnings("unchecked")
public <T> T readInbound() {
T message = (T) poll(inboundMessages);
if (message != null) {
ReferenceCountUtil.touch(message, "Caller of readInbound() will handle the message from this point");
}
return message;
}
/**
* Read data from the outbound. This may return {@code null} if nothing is readable.
*/
@SuppressWarnings("unchecked")
public <T> T readOutbound() {
T message = (T) poll(outboundMessages);
if (message != null) {
ReferenceCountUtil.touch(message, "Caller of readOutbound() will handle the message from this point.");
}
return message;
}
/**
 * Write messages to the inbound of this {@link Channel}.
 *
 * @param msgs the messages to be written
 *
 * @return {@code true} if the write operation did add something to the inbound buffer
 */
public boolean writeInbound(Object... msgs) {
    ensureOpen();
    if (msgs.length == 0) {
        return isNotEmpty(inboundMessages);
    }
    // Track call depth so that pending tasks are only run once the outermost
    // public channel call unwinds (see maybeRunPendingTasks()).
    executingStackCnt++;
    try {
        ChannelPipeline p = pipeline();
        for (Object m : msgs) {
            p.fireChannelRead(m);
        }
        // recordException=false: a close that happened meanwhile is not recorded here.
        flushInbound(false, voidPromise());
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    return isNotEmpty(inboundMessages);
}

/**
 * Writes one message to the inbound of this {@link Channel} and does not flush it. This
 * method is conceptually equivalent to {@link #write(Object)}.
 *
 * @see #writeOneOutbound(Object)
 */
public ChannelFuture writeOneInbound(Object msg) {
    return writeOneInbound(msg, newPromise());
}

/**
 * Writes one message to the inbound of this {@link Channel} and does not flush it. This
 * method is conceptually equivalent to {@link #write(Object, ChannelPromise)}.
 *
 * @see #writeOneOutbound(Object, ChannelPromise)
 */
public ChannelFuture writeOneInbound(Object msg, ChannelPromise promise) {
    executingStackCnt++;
    try {
        // If the channel is closed, a ClosedChannelException is recorded and surfaced
        // through the promise by checkException(promise) below.
        if (checkOpen(true)) {
            pipeline().fireChannelRead(msg);
        }
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    return checkException(promise);
}

/**
 * Flushes the inbound of this {@link Channel}. This method is conceptually equivalent to {@link #flush()}.
 *
 * @see #flushOutbound()
 */
public EmbeddedChannel flushInbound() {
    flushInbound(true, voidPromise());
    return this;
}

// Fires channelReadComplete and drains pending tasks; the result (or any recorded
// exception) is reported through the given promise.
private ChannelFuture flushInbound(boolean recordException, ChannelPromise promise) {
    executingStackCnt++;
    try {
        if (checkOpen(recordException)) {
            pipeline().fireChannelReadComplete();
            runPendingTasks();
        }
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    return checkException(promise);
}
/**
 * Write messages to the outbound of this {@link Channel}.
 *
 * @param msgs the messages to be written
 * @return bufferReadable returns {@code true} if the write operation did add something to the outbound buffer
 */
public boolean writeOutbound(Object... msgs) {
    ensureOpen();
    if (msgs.length == 0) {
        return isNotEmpty(outboundMessages);
    }
    executingStackCnt++;
    // Recycled list avoids per-call allocation for the collected write futures.
    RecyclableArrayList futures = RecyclableArrayList.newInstance(msgs.length);
    try {
        try {
            for (Object m : msgs) {
                if (m == null) {
                    // A null entry terminates the batch early.
                    break;
                }
                futures.add(write(m));
            }
            flushOutbound0();
            int size = futures.size();
            for (int i = 0; i < size; i++) {
                ChannelFuture future = (ChannelFuture) futures.get(i);
                if (future.isDone()) {
                    recordException(future);
                } else {
                    // The write may be delayed to run later by runPendingTasks()
                    future.addListener(recordExceptionListener);
                }
            }
        } finally {
            executingStackCnt--;
            maybeRunPendingTasks();
        }
        // Rethrow the first recorded failure, if any.
        checkException();
        return isNotEmpty(outboundMessages);
    } finally {
        futures.recycle();
    }
}

/**
 * Writes one message to the outbound of this {@link Channel} and does not flush it. This
 * method is conceptually equivalent to {@link #write(Object)}.
 *
 * @see #writeOneInbound(Object)
 */
public ChannelFuture writeOneOutbound(Object msg) {
    return writeOneOutbound(msg, newPromise());
}

/**
 * Writes one message to the outbound of this {@link Channel} and does not flush it. This
 * method is conceptually equivalent to {@link #write(Object, ChannelPromise)}.
 *
 * @see #writeOneInbound(Object, ChannelPromise)
 */
public ChannelFuture writeOneOutbound(Object msg, ChannelPromise promise) {
    executingStackCnt++;
    try {
        if (checkOpen(true)) {
            return write(msg, promise);
        }
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    // Channel was closed: surface the recorded exception through the promise.
    return checkException(promise);
}

/**
 * Flushes the outbound of this {@link Channel}. This method is conceptually equivalent to {@link #flush()}.
 *
 * @see #flushInbound()
 */
public EmbeddedChannel flushOutbound() {
    executingStackCnt++;
    try {
        if (checkOpen(true)) {
            flushOutbound0();
        }
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    checkException(voidPromise());
    return this;
}

private void flushOutbound0() {
    // We need to call runPendingTasks first as a ChannelOutboundHandler may used eventloop.execute(...) to
    // delay the write on the next eventloop run.
    runPendingTasks();
    flush();
}
/**
 * Mark this {@link Channel} as finished. Any further try to write data to it will fail.
 *
 * @return bufferReadable returns {@code true} if any of the used buffers has something left to read
 */
public boolean finish() {
    return finish(false);
}

/**
 * Mark this {@link Channel} as finished and release all pending message in the inbound and outbound buffer.
 * Any further try to write data to it will fail.
 *
 * @return bufferReadable returns {@code true} if any of the used buffers has something left to read
 */
public boolean finishAndReleaseAll() {
    return finish(true);
}

/**
 * Mark this {@link Channel} as finished. Any further try to write data to it will fail.
 *
 * @param releaseAll if {@code true} all pending message in the inbound and outbound buffer are released.
 * @return bufferReadable returns {@code true} if any of the used buffers has something left to read
 */
private boolean finish(boolean releaseAll) {
    executingStackCnt++;
    try {
        close();
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    try {
        // Rethrow any recorded exception before reporting remaining buffered data.
        checkException();
        return isNotEmpty(inboundMessages) || isNotEmpty(outboundMessages);
    } finally {
        if (releaseAll) {
            releaseAll(inboundMessages);
            releaseAll(outboundMessages);
        }
    }
}

/**
 * Release all buffered inbound messages and return {@code true} if any were in the inbound buffer, {@code false}
 * otherwise.
 */
public boolean releaseInbound() {
    return releaseAll(inboundMessages);
}

/**
 * Release all buffered outbound messages and return {@code true} if any were in the outbound buffer, {@code false}
 * otherwise.
 */
public boolean releaseOutbound() {
    return releaseAll(outboundMessages);
}

// Drains the queue, releasing each reference-counted message; returns whether it held anything.
private static boolean releaseAll(Queue<Object> queue) {
    if (isNotEmpty(queue)) {
        for (;;) {
            Object msg = queue.poll();
            if (msg == null) {
                break;
            }
            ReferenceCountUtil.release(msg);
        }
        return true;
    }
    return false;
}
@Override
public final ChannelFuture close() {
    return close(newPromise());
}

@Override
public final ChannelFuture disconnect() {
    return disconnect(newPromise());
}

@Override
public final ChannelFuture close(ChannelPromise promise) {
    // We need to call runPendingTasks() before calling super.close() as there may be something in the queue
    // that needs to be run before the actual close takes place.
    executingStackCnt++;
    ChannelFuture future;
    try {
        runPendingTasks();
        future = super.close(promise);
        // After close, any still-scheduled tasks are cancelled once the call stack unwinds.
        cancelRemainingScheduledTasks = true;
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    return future;
}

@Override
public final ChannelFuture disconnect(ChannelPromise promise) {
    executingStackCnt++;
    ChannelFuture future;
    try {
        future = super.disconnect(promise);
        // Without DISCONNECT support, disconnect degrades to close (see doDisconnect()),
        // so remaining scheduled tasks must be cancelled as well.
        if (!metadata.hasDisconnect()) {
            cancelRemainingScheduledTasks = true;
        }
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
    return future;
}
// The overrides below all follow the same pattern: bump the re-entrancy depth around the
// delegated operation so that pending tasks are only executed once the outermost public
// channel call returns (see maybeRunPendingTasks()).

@Override
public ChannelFuture bind(SocketAddress localAddress) {
    executingStackCnt++;
    try {
        return super.bind(localAddress);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress) {
    executingStackCnt++;
    try {
        return super.connect(remoteAddress);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress) {
    executingStackCnt++;
    try {
        return super.connect(remoteAddress, localAddress);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture deregister() {
    executingStackCnt++;
    try {
        return super.deregister();
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public Channel flush() {
    executingStackCnt++;
    try {
        return super.flush();
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) {
    executingStackCnt++;
    try {
        return super.bind(localAddress, promise);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) {
    executingStackCnt++;
    try {
        return super.connect(remoteAddress, promise);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) {
    executingStackCnt++;
    try {
        return super.connect(remoteAddress, localAddress, promise);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture deregister(ChannelPromise promise) {
    executingStackCnt++;
    try {
        return super.deregister(promise);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public Channel read() {
    executingStackCnt++;
    try {
        return super.read();
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture write(Object msg) {
    executingStackCnt++;
    try {
        return super.write(msg);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture write(Object msg, ChannelPromise promise) {
    executingStackCnt++;
    try {
        return super.write(msg, promise);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture writeAndFlush(Object msg) {
    executingStackCnt++;
    try {
        return super.writeAndFlush(msg);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}

@Override
public ChannelFuture writeAndFlush(Object msg, ChannelPromise promise) {
    executingStackCnt++;
    try {
        return super.writeAndFlush(msg, promise);
    } finally {
        executingStackCnt--;
        maybeRunPendingTasks();
    }
}
// Null-safe emptiness check for the lazily-created message queues.
private static boolean isNotEmpty(Queue<Object> queue) {
    return queue != null && !queue.isEmpty();
}

// Null-safe poll for the lazily-created message queues.
private static Object poll(Queue<Object> queue) {
    return queue != null ? queue.poll() : null;
}

// Runs pending (and, after close/disconnect, cancels remaining scheduled) tasks, but only
// when the outermost public channel call has fully unwound (executingStackCnt == 0).
private void maybeRunPendingTasks() {
    if (executingStackCnt == 0) {
        runPendingTasks();
        if (cancelRemainingScheduledTasks) {
            // Cancel all scheduled tasks that are left.
            embeddedEventLoop().cancelScheduledTasks();
        }
    }
}
/**
 * Run all tasks (which also includes scheduled tasks) that are pending in the {@link EventLoop}
 * for this {@link Channel}
 */
public void runPendingTasks() {
    // Exceptions thrown by tasks are recorded (not rethrown) so both task groups still run;
    // callers observe them later via checkException().
    try {
        embeddedEventLoop().runTasks();
    } catch (Exception e) {
        recordException(e);
    }
    try {
        embeddedEventLoop().runScheduledTasks();
    } catch (Exception e) {
        recordException(e);
    }
}

/**
 * Check whether this channel has any pending tasks that would be executed by a call to {@link #runPendingTasks()}.
 * This includes normal tasks, and scheduled tasks where the deadline has expired. If this method returns
 * {@code false}, a call to {@link #runPendingTasks()} would do nothing.
 *
 * @return {@code true} if there are any pending tasks, {@code false} otherwise.
 */
public boolean hasPendingTasks() {
    // nextScheduledTask() == 0 means a scheduled task's deadline has already expired.
    return embeddedEventLoop().hasPendingNormalTasks() ||
            embeddedEventLoop().nextScheduledTask() == 0;
}

/**
 * Run all pending scheduled tasks in the {@link EventLoop} for this {@link Channel} and return the
 * {@code nanoseconds} when the next scheduled task is ready to run. If no other task was scheduled it will return
 * {@code -1}.
 */
public long runScheduledPendingTasks() {
    try {
        return embeddedEventLoop().runScheduledTasks();
    } catch (Exception e) {
        recordException(e);
        return embeddedEventLoop().nextScheduledTask();
    }
}
// Records the cause of a failed future; successful futures are ignored.
private void recordException(ChannelFuture future) {
    if (!future.isSuccess()) {
        recordException(future.cause());
    }
}

// Only the first exception is kept for later rethrow by checkException();
// subsequent ones are logged so they are not silently lost.
private void recordException(Throwable cause) {
    if (lastException == null) {
        lastException = cause;
    } else {
        logger.warn(
                "More than one exception was raised. " +
                        "Will report only the first one and log others.", cause);
    }
}

// Returns the event loop's ticker, which must be freezable for the time-manipulation
// methods below; fails fast when a custom ticker was supplied at construction.
private EmbeddedEventLoop.FreezableTicker freezableTicker() {
    Ticker ticker = eventLoop().ticker();
    if (ticker instanceof EmbeddedEventLoop.FreezableTicker) {
        return (EmbeddedEventLoop.FreezableTicker) ticker;
    } else {
        throw new IllegalStateException(
                "EmbeddedChannel constructed with custom ticker, time manipulation methods are unavailable.");
    }
}
/**
 * Advance the clock of the event loop of this channel by the given duration. Any scheduled tasks will execute
 * sooner by the given time (but {@link #runScheduledPendingTasks()} still needs to be called).
 */
public void advanceTimeBy(long duration, TimeUnit unit) {
    freezableTicker().advance(duration, unit);
}

/**
 * Freeze the clock of this channel's event loop. Any scheduled tasks that are not already due will not run on
 * future {@link #runScheduledPendingTasks()} calls. While the event loop is frozen, it is still possible to
 * {@link #advanceTimeBy(long, TimeUnit) advance time} manually so that scheduled tasks execute.
 */
public void freezeTime() {
    freezableTicker().freezeTime();
}

/**
 * Unfreeze an event loop that was {@link #freezeTime() frozen}. Time will continue at the point where
 * {@link #freezeTime()} stopped it: if a task was scheduled ten minutes in the future and {@link #freezeTime()}
 * was called, it will run ten minutes after this method is called again (assuming no
 * {@link #advanceTimeBy(long, TimeUnit)} calls, and assuming pending scheduled tasks are run at that time using
 * {@link #runScheduledPendingTasks()}).
 */
public void unfreezeTime() {
    freezableTicker().unfreezeTime();
}
/**
 * Checks for the presence of an {@link Exception}.
 *
 * Completes the given promise with the first recorded exception (clearing it),
 * or marks it successful if none was recorded. For void promises the throwable
 * is rethrown directly since a void promise cannot carry a failure result.
 */
private ChannelFuture checkException(ChannelPromise promise) {
    Throwable t = lastException;
    if (t != null) {
        // Consume the exception so it is only reported once.
        lastException = null;
        if (promise.isVoid()) {
            PlatformDependent.throwException(t);
        }
        return promise.setFailure(t);
    }
    return promise.setSuccess();
}

/**
 * Check if there was any {@link Throwable} received and if so rethrow it.
 */
public void checkException() {
    checkException(voidPromise());
}

/**
 * Returns {@code true} if the {@link Channel} is open and records optionally
 * an {@link Exception} if it isn't.
 */
private boolean checkOpen(boolean recordException) {
    if (!isOpen()) {
        if (recordException) {
            recordException(new ClosedChannelException());
        }
        return false;
    }
    return true;
}

// Before registration the event loop is the construction-time 'loop' field;
// afterwards it is the registered loop from AbstractChannel.
private EmbeddedEventLoop embeddedEventLoop() {
    if (isRegistered()) {
        return (EmbeddedEventLoop) super.eventLoop();
    }
    return loop;
}

/**
 * Ensure the {@link Channel} is open and if not throw an exception.
 */
protected final void ensureOpen() {
    if (!checkOpen(true)) {
        // Rethrows the ClosedChannelException recorded by checkOpen(true).
        checkException();
    }
}
// AbstractChannel template-method overrides: an embedded channel only pairs with an
// EmbeddedEventLoop and has no real transport, so most operations are no-ops or
// simple state transitions.

@Override
protected boolean isCompatible(EventLoop loop) {
    return loop instanceof EmbeddedEventLoop;
}

@Override
protected SocketAddress localAddress0() {
    // Fixed placeholder address while active; null once inactive.
    return isActive()? LOCAL_ADDRESS : null;
}

@Override
protected SocketAddress remoteAddress0() {
    return isActive()? REMOTE_ADDRESS : null;
}

@Override
protected void doRegister() throws Exception {
    // Registration immediately activates the channel (no real connect phase).
    state = State.ACTIVE;
}

@Override
protected void doBind(SocketAddress localAddress) throws Exception {
    // NOOP
}

@Override
protected void doDisconnect() throws Exception {
    // Channels without DISCONNECT semantics treat disconnect as close.
    if (!metadata.hasDisconnect()) {
        doClose();
    }
}

@Override
protected void doClose() throws Exception {
    state = State.CLOSED;
}

@Override
protected void doBeginRead() throws Exception {
    // NOOP
}

@Override
protected AbstractUnsafe newUnsafe() {
    return new EmbeddedUnsafe();
}

@Override
public Unsafe unsafe() {
    // Expose the wrapped Unsafe of EmbeddedUnsafe rather than the raw one.
    return ((EmbeddedUnsafe) super.unsafe()).wrapped;
}
@Override
protected void doWrite(ChannelOutboundBuffer in) throws Exception {
    // Drain the outbound buffer, handing each message to handleOutboundMessage().
    for (;;) {
        Object msg = in.current();
        if (msg == null) {
            break;
        }
        // Retain before in.remove() releases the buffer's reference, so the
        // message survives for the outbound queue / subclass handler.
        ReferenceCountUtil.retain(msg);
        handleOutboundMessage(msg);
        in.remove();
    }
}

/**
 * Called for each outbound message.
 *
 * @see #doWrite(ChannelOutboundBuffer)
 */
protected void handleOutboundMessage(Object msg) {
    outboundMessages().add(msg);
}

/**
 * Called for each inbound message.
 */
protected void handleInboundMessage(Object msg) {
    inboundMessages().add(msg);
}

// Entry point for the fluent builder-based construction of this channel.
public static Builder builder() {
    return new Builder();
}
public static final | State |
java | elastic__elasticsearch | build-tools/src/main/java/org/elasticsearch/gradle/FileSystemOperationsAware.java | {
"start": 658,
"end": 740
} | interface ____ {
WorkResult delete(Object... objects);
}
| FileSystemOperationsAware |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java | {
"start": 6279,
"end": 41258
} | interface ____ {
RMStateStore getRMStateStore() throws Exception;
boolean isFinalStateValid() throws Exception;
void writeVersion(Version version) throws Exception;
Version getCurrentVersion() throws Exception;
boolean appExists(RMApp app) throws Exception;
boolean attemptExists(RMAppAttempt attempt) throws Exception;
}
// Returns the epoch wrap-around range used by the epoch test.
public long getEpochRange() {
  return epochRange;
}

// Blocks until the TestDispatcher has been notified of an attempt-store event,
// failing the test after roughly one minute.
void waitNotify(TestDispatcher dispatcher) {
  long startTime = System.currentTimeMillis();
  while(!dispatcher.notified) {
    synchronized (dispatcher) {
      try {
        dispatcher.wait(1000);
      } catch (InterruptedException e) {
        // NOTE(review): interrupt status is swallowed here (only printed);
        // consider Thread.currentThread().interrupt() — verify no caller relies
        // on the current behavior before changing.
        e.printStackTrace();
      }
    }
    if(System.currentTimeMillis() - startTime > 1000*60) {
      fail("Timed out attempt store notification");
    }
  }
  // Reset for the next waiter.
  dispatcher.notified = false;
}
// Creates a mock RMApp with the given id/times and persists it in the store.
protected RMApp storeApp(RMStateStore store, ApplicationId appId,
    long submitTime, long startTime) throws Exception {
  ApplicationSubmissionContext context =
      new ApplicationSubmissionContextPBImpl();
  context.setApplicationId(appId);
  context.setAMContainerSpec(new ContainerLaunchContextPBImpl());

  RMApp mockApp = mock(RMApp.class);
  when(mockApp.getApplicationId()).thenReturn(appId);
  when(mockApp.getSubmitTime()).thenReturn(submitTime);
  when(mockApp.getStartTime()).thenReturn(startTime);
  when(mockApp.getApplicationSubmissionContext()).thenReturn(context);
  when(mockApp.getUser()).thenReturn("test");
  when(mockApp.getCallerContext())
      .thenReturn(new CallerContext.Builder("context").build());
  store.storeNewApplication(mockApp);
  return mockApp;
}

// Creates a mock RMAppAttempt (with master container, AMRM token and client
// token key), persists it, and waits for the store notification.
protected RMAppAttempt storeAttempt(RMStateStore store,
    ApplicationAttemptId attemptId,
    String containerIdStr, Token<AMRMTokenIdentifier> appToken,
    SecretKey clientTokenMasterKey, TestDispatcher dispatcher)
    throws Exception {
  RMAppAttemptMetrics mockRmAppAttemptMetrics =
      mock(RMAppAttemptMetrics.class);
  Container container = new ContainerPBImpl();
  container.setId(ContainerId.fromString(containerIdStr));
  RMAppAttempt mockAttempt = mock(RMAppAttempt.class);
  when(mockAttempt.getAppAttemptId()).thenReturn(attemptId);
  when(mockAttempt.getMasterContainer()).thenReturn(container);
  when(mockAttempt.getAMRMToken()).thenReturn(appToken);
  when(mockAttempt.getClientTokenMasterKey())
      .thenReturn(clientTokenMasterKey);
  when(mockAttempt.getRMAppAttemptMetrics())
      .thenReturn(mockRmAppAttemptMetrics);
  when(mockRmAppAttemptMetrics.getAggregateAppResourceUsage())
      .thenReturn(new AggregateAppResourceUsage(new HashMap<>()));
  dispatcher.attemptId = attemptId;
  store.storeNewApplicationAttempt(mockAttempt);
  // Block until the store has acknowledged the attempt.
  waitNotify(dispatcher);
  return mockAttempt;
}

// Updates an attempt's persisted state and waits for the store notification.
protected void updateAttempt(RMStateStore store, TestDispatcher dispatcher,
    ApplicationAttemptStateData attemptState) {
  dispatcher.attemptId = attemptState.getAttemptId();
  store.updateApplicationAttemptState(attemptState);
  waitNotify(dispatcher);
}
// Convenience overload using a no-op verifier.
void testRMAppStateStore(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  testRMAppStateStore(stateStoreHelper, new StoreStateVerifier());
}

// End-to-end store/load/update/remove exercise for application and attempt state.
// The verifier hook lets store-specific tests check intermediate on-disk state.
void testRMAppStateStore(RMStateStoreHelper stateStoreHelper,
    StoreStateVerifier verifier)
    throws Exception {
  long submitTime = System.currentTimeMillis();
  long startTime = System.currentTimeMillis() + 1234;
  Configuration conf = new YarnConfiguration();
  RMStateStore store = stateStoreHelper.getRMStateStore();
  TestDispatcher dispatcher = new TestDispatcher();
  store.setRMDispatcher(dispatcher);

  RMContext rmContext = mock(RMContext.class);
  when(rmContext.getStateStore()).thenReturn(store);

  AMRMTokenSecretManager appTokenMgr =
      spy(new AMRMTokenSecretManager(conf, rmContext));

  MasterKeyData masterKeyData = appTokenMgr.createNewMasterKey();
  when(appTokenMgr.getMasterKey()).thenReturn(masterKeyData);

  ClientToAMTokenSecretManagerInRM clientToAMTokenMgr =
      new ClientToAMTokenSecretManagerInRM();

  // --- Store app1 with two attempts ---
  ApplicationAttemptId attemptId1 = ApplicationAttemptId.fromString(
      "appattempt_1352994193343_0001_000001");
  ApplicationId appId1 = attemptId1.getApplicationId();
  storeApp(store, appId1, submitTime, startTime);
  verifier.afterStoreApp(store, appId1);

  // create application token and client token key for attempt1
  Token<AMRMTokenIdentifier> appAttemptToken1 =
      generateAMRMToken(attemptId1, appTokenMgr);
  SecretKey clientTokenKey1 =
      clientToAMTokenMgr.createMasterKey(attemptId1);

  ContainerId containerId1 = storeAttempt(store, attemptId1,
        "container_1352994193343_0001_01_000001",
        appAttemptToken1, clientTokenKey1, dispatcher)
      .getMasterContainer().getId();

  String appAttemptIdStr2 = "appattempt_1352994193343_0001_000002";
  ApplicationAttemptId attemptId2 = ApplicationAttemptId.fromString(
      appAttemptIdStr2);

  // create application token and client token key for attempt2
  Token<AMRMTokenIdentifier> appAttemptToken2 =
      generateAMRMToken(attemptId2, appTokenMgr);
  SecretKey clientTokenKey2 =
      clientToAMTokenMgr.createMasterKey(attemptId2);

  ContainerId containerId2 = storeAttempt(store, attemptId2,
        "container_1352994193343_0001_02_000001",
        appAttemptToken2, clientTokenKey2, dispatcher)
      .getMasterContainer().getId();

  // --- Store app2 and remove it again (twice, to cover recursive removal) ---
  ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString(
      "appattempt_1352994193343_0002_000001");
  ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId();
  storeApp(store, appIdRemoved, submitTime, startTime);
  storeAttempt(store, attemptIdRemoved,
      "container_1352994193343_0002_01_000001", null, null, dispatcher);
  verifier.afterStoreAppAttempt(store, attemptIdRemoved);

  RMApp mockRemovedApp = mock(RMApp.class);
  RMAppAttemptMetrics mockRmAppAttemptMetrics =
      mock(RMAppAttemptMetrics.class);
  HashMap<ApplicationAttemptId, RMAppAttempt> attempts =
      new HashMap<ApplicationAttemptId, RMAppAttempt>();
  ApplicationSubmissionContext context =
      new ApplicationSubmissionContextPBImpl();
  context.setApplicationId(appIdRemoved);
  when(mockRemovedApp.getSubmitTime()).thenReturn(submitTime);
  when(mockRemovedApp.getApplicationSubmissionContext()).thenReturn(context);
  when(mockRemovedApp.getAppAttempts()).thenReturn(attempts);
  when(mockRemovedApp.getUser()).thenReturn("user1");
  RMAppAttempt mockRemovedAttempt = mock(RMAppAttempt.class);
  when(mockRemovedAttempt.getAppAttemptId()).thenReturn(attemptIdRemoved);
  when(mockRemovedAttempt.getRMAppAttemptMetrics())
      .thenReturn(mockRmAppAttemptMetrics);
  when(mockRmAppAttemptMetrics.getAggregateAppResourceUsage())
      .thenReturn(new AggregateAppResourceUsage(new HashMap<>()));
  attempts.put(attemptIdRemoved, mockRemovedAttempt);
  store.removeApplication(mockRemovedApp);

  // remove application directory recursively.
  storeApp(store, appIdRemoved, submitTime, startTime);
  storeAttempt(store, attemptIdRemoved,
      "container_1352994193343_0002_01_000001", null, null, dispatcher);
  store.removeApplication(mockRemovedApp);

  // let things settle down
  Thread.sleep(1000);
  store.close();

  // give tester a chance to modify app state in the store
  modifyAppState();

  // --- Reload and verify persisted state ---
  store = stateStoreHelper.getRMStateStore();
  store.setRMDispatcher(dispatcher);
  RMState state = store.loadState();
  Map<ApplicationId, ApplicationStateData> rmAppState =
      state.getApplicationState();

  ApplicationStateData appState = rmAppState.get(appId1);
  // app is loaded
  assertNotNull(appState);
  // app is loaded correctly
  assertEquals(submitTime, appState.getSubmitTime());
  assertEquals(startTime, appState.getStartTime());
  // submission context is loaded correctly
  assertEquals(appId1,
      appState.getApplicationSubmissionContext().getApplicationId());
  ApplicationAttemptStateData attemptState = appState.getAttempt(attemptId1);
  // attempt1 is loaded correctly
  assertNotNull(attemptState);
  assertEquals(attemptId1, attemptState.getAttemptId());
  assertEquals(-1000, attemptState.getAMContainerExitStatus());
  // attempt1 container is loaded correctly
  assertEquals(containerId1, attemptState.getMasterContainer().getId());
  // attempt1 client token master key is loaded correctly
  assertArrayEquals(
      clientTokenKey1.getEncoded(),
      attemptState.getAppAttemptTokens()
          .getSecretKey(RMStateStore.AM_CLIENT_TOKEN_MASTER_KEY_NAME));
  assertEquals("context", appState.getCallerContext().getContext());

  attemptState = appState.getAttempt(attemptId2);
  // attempt2 is loaded correctly
  assertNotNull(attemptState);
  assertEquals(attemptId2, attemptState.getAttemptId());
  // attempt2 container is loaded correctly
  assertEquals(containerId2, attemptState.getMasterContainer().getId());
  // attempt2 client token master key is loaded correctly
  assertArrayEquals(
      clientTokenKey2.getEncoded(),
      attemptState.getAppAttemptTokens()
          .getSecretKey(RMStateStore.AM_CLIENT_TOKEN_MASTER_KEY_NAME));

  //******* update application/attempt state *******//
  ApplicationStateData appState2 =
      ApplicationStateData.newInstance(appState.getSubmitTime(),
          appState.getStartTime(), appState.getUser(),
          appState.getApplicationSubmissionContext(), RMAppState.FINISHED,
          "appDiagnostics", 123, 1234, appState.getCallerContext());
  appState2.attempts.putAll(appState.attempts);
  store.updateApplicationState(appState2);

  ApplicationAttemptStateData oldAttemptState = attemptState;
  ApplicationAttemptStateData newAttemptState =
      ApplicationAttemptStateData.newInstance(
          oldAttemptState.getAttemptId(),
          oldAttemptState.getMasterContainer(),
          oldAttemptState.getAppAttemptTokens(),
          oldAttemptState.getStartTime(), RMAppAttemptState.FINISHED,
          "myTrackingUrl", "attemptDiagnostics",
          FinalApplicationStatus.SUCCEEDED, 100,
          oldAttemptState.getFinishTime(), new HashMap<>(), new HashMap<>(),
          0);
  store.updateApplicationAttemptState(newAttemptState);

  // test updating the state of an app/attempt whose initial state was not
  // saved.
  ApplicationId dummyAppId = ApplicationId.newInstance(1234, 10);
  ApplicationSubmissionContext dummyContext =
      new ApplicationSubmissionContextPBImpl();
  dummyContext.setApplicationId(dummyAppId);
  dummyContext.setAMContainerSpec(new ContainerLaunchContextPBImpl());
  ApplicationStateData dummyApp =
      ApplicationStateData.newInstance(appState.getSubmitTime(),
          appState.getStartTime(), appState.getUser(), dummyContext,
          RMAppState.FINISHED, "appDiagnostics", 123, 1234, null);
  store.updateApplicationState(dummyApp);

  ApplicationAttemptId dummyAttemptId =
      ApplicationAttemptId.newInstance(dummyAppId, 6);
  ApplicationAttemptStateData dummyAttempt =
      ApplicationAttemptStateData.newInstance(dummyAttemptId,
          oldAttemptState.getMasterContainer(),
          oldAttemptState.getAppAttemptTokens(),
          oldAttemptState.getStartTime(), RMAppAttemptState.FINISHED,
          "myTrackingUrl", "attemptDiagnostics",
          FinalApplicationStatus.SUCCEEDED, 111,
          oldAttemptState.getFinishTime(), new HashMap<>(), new HashMap<>(),
          0);
  store.updateApplicationAttemptState(dummyAttempt);

  // let things settle down
  Thread.sleep(1000);
  store.close();

  // --- Reload once more and verify the updates took effect ---
  store = stateStoreHelper.getRMStateStore();
  store.setRMDispatcher(dispatcher);
  RMState newRMState = store.loadState();
  Map<ApplicationId, ApplicationStateData> newRMAppState =
      newRMState.getApplicationState();
  assertNotNull(newRMAppState.get(
      dummyApp.getApplicationSubmissionContext().getApplicationId()));
  ApplicationStateData updatedAppState = newRMAppState.get(appId1);
  assertEquals(appState.getApplicationSubmissionContext().getApplicationId(),
      updatedAppState.getApplicationSubmissionContext().getApplicationId());
  assertEquals(appState.getSubmitTime(), updatedAppState.getSubmitTime());
  assertEquals(appState.getStartTime(), updatedAppState.getStartTime());
  assertEquals(appState.getUser(), updatedAppState.getUser());
  // new app state fields
  assertEquals(RMAppState.FINISHED, updatedAppState.getState());
  assertEquals("appDiagnostics", updatedAppState.getDiagnostics());
  assertEquals(1234, updatedAppState.getFinishTime());

  // check updated attempt state
  assertNotNull(newRMAppState.get(dummyApp.getApplicationSubmissionContext
      ().getApplicationId()).getAttempt(dummyAttemptId));
  ApplicationAttemptStateData updatedAttemptState =
      updatedAppState.getAttempt(newAttemptState.getAttemptId());
  assertEquals(oldAttemptState.getAttemptId(),
      updatedAttemptState.getAttemptId());
  assertEquals(containerId2, updatedAttemptState.getMasterContainer().getId());
  assertArrayEquals(
      clientTokenKey2.getEncoded(),
      attemptState.getAppAttemptTokens()
          .getSecretKey(RMStateStore.AM_CLIENT_TOKEN_MASTER_KEY_NAME));
  // new attempt state fields
  assertEquals(RMAppAttemptState.FINISHED, updatedAttemptState.getState());
  assertEquals("myTrackingUrl", updatedAttemptState.getFinalTrackingUrl());
  assertEquals("attemptDiagnostics", updatedAttemptState.getDiagnostics());
  assertEquals(100, updatedAttemptState.getAMContainerExitStatus());
  assertEquals(FinalApplicationStatus.SUCCEEDED,
      updatedAttemptState.getFinalApplicationStatus());

  // assert store is in expected state after everything is cleaned
  assertTrue(stateStoreHelper.isFinalStateValid());

  store.close();
}
/**
 * Exercises storing, updating, and removing RM delegation tokens and master
 * keys, verifying the secret-manager state survives a store reload after
 * each mutation.
 *
 * Fixes relative to the previous revision:
 * - last sequence-number assertion now checks the state loaded after the
 *   token removal ({@code noKeyAndTokenSecretManagerState}) instead of the
 *   earlier {@code noKeySecretManagerState} snapshot (copy-paste defect);
 * - deprecated {@code new Long(...)} boxing replaced with
 *   {@link Long#valueOf(long)};
 * - byte-array comparison uses {@code assertArrayEquals} for consistency
 *   with the rest of this class.
 */
public void testRMDTSecretManagerStateStore(
    RMStateStoreHelper stateStoreHelper) throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  TestDispatcher dispatcher = new TestDispatcher();
  store.setRMDispatcher(dispatcher);

  // store RM delegation token;
  RMDelegationTokenIdentifier dtId1 =
      new RMDelegationTokenIdentifier(new Text("owner1"),
          new Text("renewer1"), new Text("realuser1"));
  int sequenceNumber = 1111;
  dtId1.setSequenceNumber(sequenceNumber);
  byte[] tokenBeforeStore = dtId1.getBytes();
  Long renewDate1 = Long.valueOf(System.currentTimeMillis());
  store.storeRMDelegationToken(dtId1, renewDate1);
  modifyRMDelegationTokenState();
  Map<RMDelegationTokenIdentifier, Long> token1 =
      new HashMap<RMDelegationTokenIdentifier, Long>();
  token1.put(dtId1, renewDate1);

  // store delegation key;
  DelegationKey key = new DelegationKey(1234, 4321, "keyBytes".getBytes());
  HashSet<DelegationKey> keySet = new HashSet<DelegationKey>();
  keySet.add(key);
  store.storeRMDTMasterKey(key);

  // Reload and verify token, key and sequence number were all persisted.
  RMDTSecretManagerState secretManagerState =
      store.loadState().getRMDTSecretManagerState();
  assertEquals(token1, secretManagerState.getTokenState());
  assertEquals(keySet, secretManagerState.getMasterKeyState());
  assertEquals(sequenceNumber,
      secretManagerState.getDTSequenceNumber());
  RMDelegationTokenIdentifier tokenAfterStore =
      secretManagerState.getTokenState().keySet().iterator().next();
  // Serialized identifier must round-trip byte-for-byte.
  assertArrayEquals(tokenBeforeStore, tokenAfterStore.getBytes());

  // update RM delegation token;
  renewDate1 = Long.valueOf(System.currentTimeMillis());
  store.updateRMDelegationToken(dtId1, renewDate1);
  token1.put(dtId1, renewDate1);

  RMDTSecretManagerState updateSecretManagerState =
      store.loadState().getRMDTSecretManagerState();
  assertEquals(token1, updateSecretManagerState.getTokenState());
  assertEquals(keySet, updateSecretManagerState.getMasterKeyState());
  assertEquals(sequenceNumber,
      updateSecretManagerState.getDTSequenceNumber());

  // check to delete delegationKey
  store.removeRMDTMasterKey(key);
  keySet.clear();
  RMDTSecretManagerState noKeySecretManagerState =
      store.loadState().getRMDTSecretManagerState();
  assertEquals(token1, noKeySecretManagerState.getTokenState());
  assertEquals(keySet, noKeySecretManagerState.getMasterKeyState());
  assertEquals(sequenceNumber,
      noKeySecretManagerState.getDTSequenceNumber());

  // check to delete delegationToken
  store.removeRMDelegationToken(dtId1);
  RMDTSecretManagerState noKeyAndTokenSecretManagerState =
      store.loadState().getRMDTSecretManagerState();
  token1.clear();
  assertEquals(token1,
      noKeyAndTokenSecretManagerState.getTokenState());
  assertEquals(keySet,
      noKeyAndTokenSecretManagerState.getMasterKeyState());
  // Verify the state loaded AFTER token removal (previously this asserted on
  // the pre-removal noKeySecretManagerState by mistake).
  assertEquals(sequenceNumber,
      noKeyAndTokenSecretManagerState.getDTSequenceNumber());
  store.close();
}
// Creates an AMRM token for the attempt via the secret manager and tags it
// with a fixed service name so serialized bytes are deterministic across runs.
protected Token<AMRMTokenIdentifier> generateAMRMToken(
    ApplicationAttemptId attemptId,
    AMRMTokenSecretManager appTokenMgr) {
  Token<AMRMTokenIdentifier> appToken =
      appTokenMgr.createAndGetAMRMToken(attemptId);
  appToken.setService(new Text("appToken service"));
  return appToken;
}
// Verifies version handling: the default version is written on first check,
// a minor-version bump is accepted (and overwritten back to the default),
// and a major-version bump is rejected.
public void testCheckVersion(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  store.setRMDispatcher(new TestDispatcher());

  // default version
  Version defaultVersion = stateStoreHelper.getCurrentVersion();
  store.checkVersion();
  assertEquals(defaultVersion, store.loadVersion());

  // compatible version (same major, higher minor)
  Version compatibleVersion =
      Version.newInstance(defaultVersion.getMajorVersion(),
          defaultVersion.getMinorVersion() + 2);
  stateStoreHelper.writeVersion(compatibleVersion);
  assertEquals(compatibleVersion, store.loadVersion());
  store.checkVersion();
  // overwrite the compatible version
  assertEquals(defaultVersion, store.loadVersion());

  // incompatible version (different major)
  Version incompatibleVersion =
      Version.newInstance(defaultVersion.getMajorVersion() + 2,
          defaultVersion.getMinorVersion());
  stateStoreHelper.writeVersion(incompatibleVersion);
  try {
    store.checkVersion();
    fail("Invalid version, should fail.");
  } catch (Throwable t) {
    assertTrue(t instanceof RMStateVersionIncompatibleException);
  }
}
/**
 * Verifies that {@code getAndIncrementEpoch()} returns monotonically
 * increasing values starting at the base epoch, and that the counter wraps
 * around after {@code epochRange} further increments.
 */
public void testEpoch(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  store.setRMDispatcher(new TestDispatcher());

  // Each call hands back the current epoch and bumps the stored value.
  assertEquals(epoch, store.getAndIncrementEpoch());
  assertEquals(epoch + 1, store.getAndIncrementEpoch());
  assertEquals(epoch + 2, store.getAndIncrementEpoch());

  // Exhaust the configured range so the counter wraps around.
  for (int call = 0; call < epochRange; ++call) {
    store.getAndIncrementEpoch();
  }
  // Epoch should have wrapped around and then incremented once for a
  // total of + 3.
  assertEquals(epoch + 3, store.getAndIncrementEpoch());
}
/**
 * Stores five applications, removes each one, and polls until the store
 * confirms the entry is gone (removal may complete asynchronously).
 */
public void testAppDeletion(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  store.setRMDispatcher(new TestDispatcher());
  ArrayList<RMApp> storedApps =
      createAndStoreApps(stateStoreHelper, store, 5);
  for (RMApp app : storedApps) {
    store.removeApplication(app);
    // Poll until the app disappears from the backing store.
    while (stateStoreHelper.appExists(app)) {
      Thread.sleep(100);
    }
  }
}
/**
 * Creates {@code numApps} applications with distinct ids (all under the
 * same cluster timestamp), stores each one, and waits until the store
 * reports every app as persisted.
 *
 * @return the stored applications, in creation order
 */
private ArrayList<RMApp> createAndStoreApps(
    RMStateStoreHelper stateStoreHelper, RMStateStore store, int numApps)
    throws Exception {
  ArrayList<RMApp> storedApps = new ArrayList<RMApp>();
  for (int appIdx = 0; appIdx < numApps; appIdx++) {
    ApplicationId appId = ApplicationId.newInstance(1383183338, appIdx);
    storedApps.add(storeApp(store, appId, 123456789, 987654321));
  }
  assertEquals(numApps, storedApps.size());
  // Persistence may be asynchronous: poll until every app has landed.
  for (RMApp app : storedApps) {
    while (!stateStoreHelper.appExists(app)) {
      Thread.sleep(100);
    }
  }
  return storedApps;
}
/**
 * Verifies that wiping the whole store ({@code deleteStore()}) removes
 * every previously persisted application.
 */
public void testDeleteStore(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  ArrayList<RMApp> storedApps =
      createAndStoreApps(stateStoreHelper, store, 5);
  store.deleteStore();
  // Every app must be gone after the store is deleted.
  for (RMApp app : storedApps) {
    assertFalse(stateStoreHelper.appExists(app));
  }
}
/**
 * Verifies that removing one application deletes only that application:
 * the other stored application must remain untouched.
 */
public void testRemoveApplication(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  ArrayList<RMApp> storedApps =
      createAndStoreApps(stateStoreHelper, store, 2);
  RMApp removedApp = storedApps.get(0);
  RMApp survivingApp = storedApps.get(1);
  store.removeApplication(removedApp.getApplicationId());
  assertFalse(stateStoreHelper.appExists(removedApp));
  assertTrue(stateStoreHelper.appExists(survivingApp));
}
/**
 * Verifies that removing a single application attempt deletes exactly that
 * attempt: the sibling attempt and the parent application must still be
 * present after the store is closed and reloaded, and the application's
 * first-attempt id must reflect the earliest surviving attempt.
 */
public void testRemoveAttempt(RMStateStoreHelper stateStoreHelper)
    throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  TestDispatcher dispatcher = new TestDispatcher();
  store.setRMDispatcher(dispatcher);
  // One application with two attempts.
  ApplicationId appId = ApplicationId.newInstance(1383183339, 6);
  storeApp(store, appId, 123456, 564321);
  ApplicationAttemptId attemptId1 =
      ApplicationAttemptId.newInstance(appId, 1);
  RMAppAttempt attempt1 = storeAttempt(store, attemptId1,
      ContainerId.newContainerId(attemptId1, 1).toString(),
      null, null, dispatcher);
  ApplicationAttemptId attemptId2 =
      ApplicationAttemptId.newInstance(appId, 2);
  RMAppAttempt attempt2 = storeAttempt(store, attemptId2,
      ContainerId.newContainerId(attemptId2, 1).toString(),
      null, null, dispatcher);
  // Delete only the first attempt; the second must survive.
  store.removeApplicationAttemptInternal(attemptId1);
  assertFalse(stateStoreHelper.attemptExists(attempt1));
  assertTrue(stateStoreHelper.attemptExists(attempt2));
  // let things settle down
  Thread.sleep(1000);
  store.close();
  // load state from a fresh store instance so assertions run against
  // persisted (not in-memory) state
  store = stateStoreHelper.getRMStateStore();
  RMState state = store.loadState();
  Map<ApplicationId, ApplicationStateData> rmAppState =
      state.getApplicationState();
  ApplicationStateData appState = rmAppState.get(appId);
  // app is loaded
  assertNotNull(appState);
  // attempt 1 is gone, so the earliest surviving attempt id is 2
  assertEquals(2, appState.getFirstAttemptId());
  assertNull(appState.getAttempt(attemptId1));
  assertNotNull(appState.getAttempt(attemptId2));
}
/**
 * Subclass hook: mutate persisted application state between a store and a
 * reload to simulate out-of-band changes. No-op by default.
 */
protected void modifyAppState() throws Exception {
}
/**
 * Subclass hook: mutate persisted RM delegation-token state between a
 * store and a reload to simulate out-of-band changes. No-op by default.
 */
protected void modifyRMDelegationTokenState() throws Exception {
}
/**
 * Round-trips AMRMTokenSecretManager master keys through the store:
 * first a single (current) key, then a current+next key pair, and finally
 * verifies the secret manager recovers identical secret keys from the
 * reloaded state. After each write a fresh store instance is obtained so
 * assertions run against persisted state.
 */
public void testAMRMTokenSecretManagerStateStore(
    RMStateStoreHelper stateStoreHelper) throws Exception {
  // NOTE(review): leftover debug print — consider removing.
  System.out.println("Start testing");
  RMStateStore store = stateStoreHelper.getRMStateStore();
  TestDispatcher dispatcher = new TestDispatcher();
  store.setRMDispatcher(dispatcher);
  RMContext rmContext = mock(RMContext.class);
  when(rmContext.getStateStore()).thenReturn(store);
  Configuration conf = new YarnConfiguration();
  AMRMTokenSecretManager appTokenMgr =
      new AMRMTokenSecretManager(conf, rmContext);
  //create and save the first masterkey (no "next" key yet)
  MasterKeyData firstMasterKeyData = appTokenMgr.createNewMasterKey();
  AMRMTokenSecretManagerState state1 =
      AMRMTokenSecretManagerState.newInstance(
        firstMasterKeyData.getMasterKey(), null);
  rmContext.getStateStore()
      .storeOrUpdateAMRMTokenSecretManager(state1,
        false);
  // load state from a fresh store instance and verify only the current
  // key is present
  store = stateStoreHelper.getRMStateStore();
  when(rmContext.getStateStore()).thenReturn(store);
  store.setRMDispatcher(dispatcher);
  RMState state = store.loadState();
  assertNotNull(state.getAMRMTokenSecretManagerState());
  assertEquals(firstMasterKeyData.getMasterKey(), state
      .getAMRMTokenSecretManagerState().getCurrentMasterKey());
  assertNull(state
      .getAMRMTokenSecretManagerState().getNextMasterKey());
  //create and save the second masterkey as the "next" key
  MasterKeyData secondMasterKeyData = appTokenMgr.createNewMasterKey();
  AMRMTokenSecretManagerState state2 =
      AMRMTokenSecretManagerState
          .newInstance(firstMasterKeyData.getMasterKey(),
            secondMasterKeyData.getMasterKey());
  // second argument 'true' marks this as an update of existing state
  rmContext.getStateStore().storeOrUpdateAMRMTokenSecretManager(state2,
      true);
  // load state again; both current and next keys must round-trip
  store = stateStoreHelper.getRMStateStore();
  when(rmContext.getStateStore()).thenReturn(store);
  store.setRMDispatcher(dispatcher);
  RMState state_2 = store.loadState();
  assertNotNull(state_2.getAMRMTokenSecretManagerState());
  assertEquals(firstMasterKeyData.getMasterKey(), state_2
      .getAMRMTokenSecretManagerState().getCurrentMasterKey());
  assertEquals(secondMasterKeyData.getMasterKey(), state_2
      .getAMRMTokenSecretManagerState().getNextMasterKey());
  // re-create the masterKeyData based on the recovered masterkey
  // should have the same secretKey
  appTokenMgr.recover(state_2);
  assertEquals(appTokenMgr.getCurrnetMasterKeyData().getSecretKey(),
      firstMasterKeyData.getSecretKey());
  assertEquals(appTokenMgr.getNextMasterKeyData().getSecretKey(),
      secondMasterKeyData.getSecretKey());
  store.close();
}
/**
 * Exercises the reservation-system persistence APIs end to end: store a
 * reservation, update it (remove + re-store), add a second reservation and
 * delete the first, and verify that deleting a plan's last reservation
 * removes the plan's state entirely. After each mutation the store is
 * re-opened so assertions run against persisted state.
 */
public void testReservationStateStore(
    RMStateStoreHelper stateStoreHelper) throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  TestDispatcher dispatcher = new TestDispatcher();
  store.setRMDispatcher(dispatcher);
  RMContext rmContext = mock(RMContext.class);
  when(rmContext.getStateStore()).thenReturn(store);
  long ts = System.currentTimeMillis();
  ReservationId r1 = ReservationId.newInstance(ts, 1);
  int start = 1;
  // five steps of 10 resource-units each
  int[] alloc = { 10, 10, 10, 10, 10 };
  ResourceCalculator res = new DefaultResourceCalculator();
  Resource minAlloc = Resource.newInstance(1024, 1);
  boolean hasGang = true;
  String planName = "dedicated";
  ReservationDefinition rDef =
      ReservationSystemTestUtil.createSimpleReservationDefinition(
        start, start + alloc.length + 1, alloc.length);
  ReservationAllocation allocation = new InMemoryReservationAllocation(
      r1, rDef, "u3", planName, 0, 0 + alloc.length,
      ReservationSystemTestUtil.generateAllocation(0L, 1L, alloc), res,
      minAlloc, hasGang);
  // sanity-check the in-memory -> proto conversion before storing
  ReservationAllocationStateProto allocationStateProto =
      ReservationSystemUtil.buildStateProto(allocation);
  assertAllocationStateEqual(allocation, allocationStateProto);
  // 1. Load empty store and verify no errors
  store = stateStoreHelper.getRMStateStore();
  when(rmContext.getStateStore()).thenReturn(store);
  store.setRMDispatcher(dispatcher);
  RMState state = store.loadState();
  Map<String, Map<ReservationId, ReservationAllocationStateProto>>
      reservationState = state.getReservationState();
  assertNotNull(reservationState);
  // 2. Store single reservation and verify
  String reservationIdName = r1.toString();
  rmContext.getStateStore().storeNewReservation(
      allocationStateProto,
      planName, reservationIdName);
  // load state and verify new state
  validateStoredReservation(
      stateStoreHelper, dispatcher, rmContext, r1, planName, allocation,
      allocationStateProto);
  // 3. update state test (update is modeled as remove + re-store)
  alloc = new int[]{6, 6, 6};
  hasGang = false;
  allocation = new InMemoryReservationAllocation(
      r1, rDef, "u3", planName, 2, 2 + alloc.length,
      ReservationSystemTestUtil.generateAllocation(1L, 2L, alloc), res,
      minAlloc, hasGang);
  allocationStateProto =
      ReservationSystemUtil.buildStateProto(allocation);
  rmContext.getStateStore().removeReservation(planName, reservationIdName);
  rmContext.getStateStore().storeNewReservation(allocationStateProto, planName, reservationIdName);
  // load state and verify updated reservation
  validateStoredReservation(
      stateStoreHelper, dispatcher, rmContext, r1, planName, allocation,
      allocationStateProto);
  // 4. add a second one and remove the first one
  ReservationId r2 = ReservationId.newInstance(ts, 2);
  ReservationAllocation allocation2 = new InMemoryReservationAllocation(
      r2, rDef, "u3", planName, 0, 0 + alloc.length,
      ReservationSystemTestUtil.generateAllocation(0L, 1L, alloc), res,
      minAlloc, hasGang);
  ReservationAllocationStateProto allocationStateProto2 =
      ReservationSystemUtil.buildStateProto(allocation2);
  String reservationIdName2 = r2.toString();
  rmContext.getStateStore().storeNewReservation(
      allocationStateProto2,
      planName, reservationIdName2);
  rmContext.getStateStore().removeReservation(planName, reservationIdName);
  // load state and verify r1 is removed and r2 is still there
  Map<ReservationId, ReservationAllocationStateProto> reservations;
  store = stateStoreHelper.getRMStateStore();
  when(rmContext.getStateStore()).thenReturn(store);
  store.setRMDispatcher(dispatcher);
  state = store.loadState();
  reservationState = state.getReservationState();
  assertNotNull(reservationState);
  reservations = reservationState.get(planName);
  assertNotNull(reservations);
  ReservationAllocationStateProto storedReservationAllocation =
      reservations.get(r1);
  assertNull(storedReservationAllocation,
      "Removed reservation should not be available in store");
  storedReservationAllocation = reservations.get(r2);
  assertAllocationStateEqual(
      allocationStateProto2, storedReservationAllocation);
  assertAllocationStateEqual(allocation2, storedReservationAllocation);
  // 5. remove last reservation removes the plan state
  rmContext.getStateStore().removeReservation(planName, reservationIdName2);
  store = stateStoreHelper.getRMStateStore();
  when(rmContext.getStateStore()).thenReturn(store);
  store.setRMDispatcher(dispatcher);
  state = store.loadState();
  reservationState = state.getReservationState();
  assertNotNull(reservationState);
  reservations = reservationState.get(planName);
  assertNull(reservations);
}
/**
 * Verifies proxy-CA persistence: a stored CA cert/private-key pair
 * round-trips through the store, and storing a different pair replaces
 * the previous one.
 */
public void testProxyCA(
    RMStateStoreHelper stateStoreHelper) throws Exception {
  RMStateStore store = stateStoreHelper.getRMStateStore();
  TestDispatcher dispatcher = new TestDispatcher();
  store.setRMDispatcher(dispatcher);

  // Persist an initial CA and check it reads back unchanged.
  ProxyCA firstCA = new ProxyCA();
  firstCA.init();
  store.storeProxyCACert(firstCA.getCaCert(),
      firstCA.getCaKeyPair().getPrivate());
  RMStateStore.ProxyCAState loadedState =
      store.loadState().getProxyCAState();
  assertEquals(firstCA.getCaCert(), loadedState.getCaCert());
  assertEquals(firstCA.getCaKeyPair().getPrivate(),
      loadedState.getCaPrivateKey());

  // Storing a distinct CA must overwrite the previous one.
  ProxyCA secondCA = new ProxyCA();
  secondCA.init();
  assertNotEquals(firstCA.getCaCert(), secondCA.getCaCert());
  assertNotEquals(firstCA.getCaKeyPair().getPrivate(),
      secondCA.getCaKeyPair().getPrivate());
  store.storeProxyCACert(secondCA.getCaCert(),
      secondCA.getCaKeyPair().getPrivate());
  loadedState = store.loadState().getProxyCAState();
  assertEquals(secondCA.getCaCert(), loadedState.getCaCert());
  assertEquals(secondCA.getCaKeyPair().getPrivate(),
      loadedState.getCaPrivateKey());
}
/**
 * Re-opens the store and asserts that the reservation {@code r1} of plan
 * {@code planName} was persisted, matching both the expected proto and the
 * expected in-memory allocation.
 */
private void validateStoredReservation(
    RMStateStoreHelper stateStoreHelper, TestDispatcher dispatcher,
    RMContext rmContext, ReservationId r1, String planName,
    ReservationAllocation allocation,
    ReservationAllocationStateProto allocationStateProto) throws Exception {
  // A fresh store instance forces a read from persisted state.
  RMStateStore store = stateStoreHelper.getRMStateStore();
  when(rmContext.getStateStore()).thenReturn(store);
  store.setRMDispatcher(dispatcher);
  Map<String, Map<ReservationId, ReservationAllocationStateProto>>
      planStates = store.loadState().getReservationState();
  assertNotNull(planStates);
  Map<ReservationId, ReservationAllocationStateProto> planReservations =
      planStates.get(planName);
  assertNotNull(planReservations);
  ReservationAllocationStateProto storedProto = planReservations.get(r1);
  assertNotNull(storedProto);
  assertAllocationStateEqual(allocationStateProto, storedProto);
  assertAllocationStateEqual(allocation, storedProto);
}
/**
 * Field-by-field equality check between two reservation-allocation protos.
 */
void assertAllocationStateEqual(
    ReservationAllocationStateProto expected,
    ReservationAllocationStateProto actual) {
  assertEquals(expected.getAcceptanceTime(), actual.getAcceptanceTime());
  assertEquals(expected.getStartTime(), actual.getStartTime());
  assertEquals(expected.getEndTime(), actual.getEndTime());
  assertEquals(expected.getContainsGangs(), actual.getContainsGangs());
  assertEquals(expected.getUser(), actual.getUser());
  assertEquals(expected.getReservationDefinition(),
      actual.getReservationDefinition());
  assertEquals(expected.getAllocationRequestsList(),
      actual.getAllocationRequestsList());
}
/**
 * Compares an in-memory reservation allocation against its persisted proto
 * form, converting proto fields back to model types where needed.
 */
void assertAllocationStateEqual(
    ReservationAllocation expected,
    ReservationAllocationStateProto actual) {
  assertEquals(expected.getAcceptanceTime(), actual.getAcceptanceTime());
  assertEquals(expected.getStartTime(), actual.getStartTime());
  assertEquals(expected.getEndTime(), actual.getEndTime());
  assertEquals(expected.containsGangs(), actual.getContainsGangs());
  assertEquals(expected.getUser(), actual.getUser());
  assertEquals(expected.getReservationDefinition(),
      ReservationSystemUtil.convertFromProtoFormat(
          actual.getReservationDefinition()));
  assertEquals(expected.getAllocationRequests(),
      ReservationSystemUtil.toAllocations(
          actual.getAllocationRequestsList()));
}
}
| RMStateStoreHelper |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/beans/factory/ScopeBeanFactory.java | {
"start": 15334,
"end": 15577
} | class ____ {
private final String name;
private final Object instance;
BeanInfo(String name, Object instance) {
this.name = name;
this.instance = instance;
}
}
static final | BeanInfo |
java | hibernate__hibernate-orm | tooling/hibernate-maven-plugin/src/it/enhance/src/main/java/org/foo/Foo.java | {
"start": 25,
"end": 164
} | class ____ {
private Bar bar;
Bar getBar() {
return bar;
}
public void setBar(Bar b) {
bar = b;
}
}
| Foo |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/configuration/RuntimeOverrideConfigSourceBuilder.java | {
"start": 100,
"end": 371
} | class ____ implements ConfigBuilder {
@Override
public SmallRyeConfigBuilder configBuilder(final SmallRyeConfigBuilder builder) {
return builder.withSources(new RuntimeOverrideConfigSource(builder.getClassLoader()));
}
}
| RuntimeOverrideConfigSourceBuilder |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/processor/DefaultVersionInformation.java | {
"start": 4547,
"end": 7807
} | class ____
className = JDT_BATCH_PE_CLASS;
}
else {
InvocationHandler invocationHandler = Proxy.getInvocationHandler( processingEnv );
return "Proxy handler " + invocationHandler.getClass() + " from " +
getLibraryName( invocationHandler.getClass(), false );
}
}
else {
className = processingEnv.getClass().getName();
}
if ( className.equals( JAVAC_PE_CLASS ) ) {
return COMPILER_NAME_JAVAC;
}
if ( className.equals( JDT_IDE_PE_CLASS ) ) {
// the processing environment for the IDE integrated APT is in a different bundle than the APT classes
return COMPILER_NAME_ECLIPSE_JDT + " (IDE) "
+ getLibraryName( processingEnv.getTypeUtils().getClass(), true );
}
if ( className.equals( JDT_BATCH_PE_CLASS ) ) {
return COMPILER_NAME_ECLIPSE_JDT + " (Batch) " + getLibraryName( processingEnv.getClass(), true );
}
return processingEnv.getClass().getSimpleName() + " from " + getLibraryName( processingEnv.getClass(), false );
}
private static String getLibraryName(Class<?> clazz, boolean preferVersionOnly) {
String classFileName = asClassFileName( clazz.getName() );
URL resource = clazz.getClassLoader().getResource( classFileName );
Manifest manifest = openManifest( classFileName, resource );
if ( preferVersionOnly && manifest != null ) {
String version = manifest.getMainAttributes().getValue( "Bundle-Version" );
if ( version != null ) {
return version;
}
}
if ( resource == null ) {
return "";
}
else if ( "jar".equals( resource.getProtocol() ) ) {
return extractJarFileName( resource.getFile() );
}
else if ( "jrt".equals( resource.getProtocol() ) ) {
return extractJrtModuleName( resource );
}
else if ( "bundleresource".equals( resource.getProtocol() ) && manifest != null ) {
return extractBundleName( manifest );
}
return resource.toExternalForm();
}
private static Manifest openManifest(String classFileName, URL resource) {
if ( resource == null ) {
return null;
}
try {
URL manifestUrl = createManifestUrl( classFileName, resource );
return new Manifest( manifestUrl.openStream() );
}
catch ( IOException e ) {
return null;
}
}
private static String extractBundleName(Manifest manifest) {
String version = manifest.getMainAttributes().getValue( "Bundle-Version" );
String symbolicName = manifest.getMainAttributes().getValue( "Bundle-SymbolicName" );
int semicolon = symbolicName.indexOf( ';' );
if ( semicolon > 0 ) {
symbolicName = symbolicName.substring( 0, semicolon );
}
return symbolicName + "_" + version;
}
private static String extractJrtModuleName(URL resource) {
// JDK 9 style, e.g. jrt:/jdk.compiler/com/sun/tools/javac/processing/JavacProcessingEnvironment. | name |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/ToStringStyle.java | {
"start": 12551,
"end": 13348
} | class ____ extends ToStringStyle {
private static final long serialVersionUID = 1L;
/**
* Constructs a new instance.
*
* <p>
* Use the static constant rather than instantiating.
* </p>
*/
MultiLineToStringStyle() {
setContentStart("[");
setFieldSeparator(System.lineSeparator() + " ");
setFieldSeparatorAtStart(true);
setContentEnd(System.lineSeparator() + "]");
}
/**
* Ensure Singleton after serialization.
*
* @return the singleton.
*/
private Object readResolve() {
return MULTI_LINE_STYLE;
}
}
/**
* {@link ToStringStyle} that does not print out the | MultiLineToStringStyle |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/core/TypeUtils.java | {
"start": 906,
"end": 12355
} | class ____ {
private static final Map transforms = new HashMap();
private static final Map rtransforms = new HashMap();
private TypeUtils() {
}
static {
transforms.put("void", "V");
transforms.put("byte", "B");
transforms.put("char", "C");
transforms.put("double", "D");
transforms.put("float", "F");
transforms.put("int", "I");
transforms.put("long", "J");
transforms.put("short", "S");
transforms.put("boolean", "Z");
CollectionUtils.reverse(transforms, rtransforms);
}
public static Type getType(String className) {
return Type.getType("L" + className.replace('.', '/') + ";");
}
public static boolean isFinal(int access) {
return (Constants.ACC_FINAL & access) != 0;
}
public static boolean isStatic(int access) {
return (Constants.ACC_STATIC & access) != 0;
}
public static boolean isProtected(int access) {
return (Constants.ACC_PROTECTED & access) != 0;
}
public static boolean isPublic(int access) {
return (Constants.ACC_PUBLIC & access) != 0;
}
public static boolean isAbstract(int access) {
return (Constants.ACC_ABSTRACT & access) != 0;
}
public static boolean isInterface(int access) {
return (Constants.ACC_INTERFACE & access) != 0;
}
public static boolean isPrivate(int access) {
return (Constants.ACC_PRIVATE & access) != 0;
}
public static boolean isSynthetic(int access) {
return (Constants.ACC_SYNTHETIC & access) != 0;
}
public static boolean isBridge(int access) {
return (Constants.ACC_BRIDGE & access) != 0;
}
// getPackage returns null on JDK 1.2
public static String getPackageName(Type type) {
return getPackageName(getClassName(type));
}
public static String getPackageName(String className) {
int idx = className.lastIndexOf('.');
return (idx < 0) ? "" : className.substring(0, idx);
}
public static String upperFirst(String s) {
if (s == null || s.isEmpty()) {
return s;
}
return Character.toUpperCase(s.charAt(0)) + s.substring(1);
}
public static String getClassName(Type type) {
if (isPrimitive(type)) {
return (String)rtransforms.get(type.getDescriptor());
} else if (isArray(type)) {
return getClassName(getComponentType(type)) + "[]";
} else {
return type.getClassName();
}
}
public static Type[] add(Type[] types, Type extra) {
if (types == null) {
return new Type[]{ extra };
} else {
List list = Arrays.asList(types);
if (list.contains(extra)) {
return types;
}
Type[] copy = new Type[types.length + 1];
System.arraycopy(types, 0, copy, 0, types.length);
copy[types.length] = extra;
return copy;
}
}
public static Type[] add(Type[] t1, Type[] t2) {
// TODO: set semantics?
Type[] all = new Type[t1.length + t2.length];
System.arraycopy(t1, 0, all, 0, t1.length);
System.arraycopy(t2, 0, all, t1.length, t2.length);
return all;
}
public static Type fromInternalName(String name) {
// TODO; primitives?
return Type.getType("L" + name + ";");
}
public static Type[] fromInternalNames(String[] names) {
if (names == null) {
return null;
}
Type[] types = new Type[names.length];
for (int i = 0; i < names.length; i++) {
types[i] = fromInternalName(names[i]);
}
return types;
}
public static int getStackSize(Type[] types) {
int size = 0;
for (Type type : types) {
size += type.getSize();
}
return size;
}
public static String[] toInternalNames(Type[] types) {
if (types == null) {
return null;
}
String[] names = new String[types.length];
for (int i = 0; i < types.length; i++) {
names[i] = types[i].getInternalName();
}
return names;
}
public static Signature parseSignature(String s) {
int space = s.indexOf(' ');
int lparen = s.indexOf('(', space);
int rparen = s.indexOf(')', lparen);
String returnType = s.substring(0, space);
String methodName = s.substring(space + 1, lparen);
StringBuilder sb = new StringBuilder();
sb.append('(');
for (Iterator it = parseTypes(s, lparen + 1, rparen).iterator(); it.hasNext();) {
sb.append(it.next());
}
sb.append(')');
sb.append(map(returnType));
return new Signature(methodName, sb.toString());
}
public static Type parseType(String s) {
return Type.getType(map(s));
}
public static Type[] parseTypes(String s) {
List names = parseTypes(s, 0, s.length());
Type[] types = new Type[names.size()];
for (int i = 0; i < types.length; i++) {
types[i] = Type.getType((String)names.get(i));
}
return types;
}
public static Signature parseConstructor(Type[] types) {
StringBuilder sb = new StringBuilder();
sb.append("(");
for (Type type : types) {
sb.append(type.getDescriptor());
}
sb.append(")");
sb.append("V");
return new Signature(Constants.CONSTRUCTOR_NAME, sb.toString());
}
public static Signature parseConstructor(String sig) {
return parseSignature("void <init>(" + sig + ")"); // TODO
}
private static List parseTypes(String s, int mark, int end) {
List types = new ArrayList(5);
for (;;) {
int next = s.indexOf(',', mark);
if (next < 0) {
break;
}
types.add(map(s.substring(mark, next).trim()));
mark = next + 1;
}
types.add(map(s.substring(mark, end).trim()));
return types;
}
private static String map(String type) {
if (type.isEmpty()) {
return type;
}
String t = (String)transforms.get(type);
if (t != null) {
return t;
} else if (type.indexOf('.') < 0) {
return map("java.lang." + type);
} else {
StringBuilder sb = new StringBuilder();
int index = 0;
while ((index = type.indexOf("[]", index) + 1) > 0) {
sb.append('[');
}
type = type.substring(0, type.length() - sb.length() * 2);
sb.append('L').append(type.replace('.', '/')).append(';');
return sb.toString();
}
}
public static Type getBoxedType(Type type) {
return switch (type.getSort()) {
case Type.CHAR -> Constants.TYPE_CHARACTER;
case Type.BOOLEAN -> Constants.TYPE_BOOLEAN;
case Type.DOUBLE -> Constants.TYPE_DOUBLE;
case Type.FLOAT -> Constants.TYPE_FLOAT;
case Type.LONG -> Constants.TYPE_LONG;
case Type.INT -> Constants.TYPE_INTEGER;
case Type.SHORT -> Constants.TYPE_SHORT;
case Type.BYTE -> Constants.TYPE_BYTE;
default -> type;
};
}
public static Type getUnboxedType(Type type) {
if (Constants.TYPE_INTEGER.equals(type)) {
return Type.INT_TYPE;
} else if (Constants.TYPE_BOOLEAN.equals(type)) {
return Type.BOOLEAN_TYPE;
} else if (Constants.TYPE_DOUBLE.equals(type)) {
return Type.DOUBLE_TYPE;
} else if (Constants.TYPE_LONG.equals(type)) {
return Type.LONG_TYPE;
} else if (Constants.TYPE_CHARACTER.equals(type)) {
return Type.CHAR_TYPE;
} else if (Constants.TYPE_BYTE.equals(type)) {
return Type.BYTE_TYPE;
} else if (Constants.TYPE_FLOAT.equals(type)) {
return Type.FLOAT_TYPE;
} else if (Constants.TYPE_SHORT.equals(type)) {
return Type.SHORT_TYPE;
} else {
return type;
}
}
public static boolean isArray(Type type) {
return type.getSort() == Type.ARRAY;
}
public static Type getComponentType(Type type) {
if (!isArray(type)) {
throw new IllegalArgumentException("Type " + type + " is not an array");
}
return Type.getType(type.getDescriptor().substring(1));
}
public static boolean isPrimitive(Type type) {
return switch (type.getSort()) {
case Type.ARRAY, Type.OBJECT -> false;
default -> true;
};
}
public static String emulateClassGetName(Type type) {
if (isArray(type)) {
return type.getDescriptor().replace('/', '.');
} else {
return getClassName(type);
}
}
public static boolean isConstructor(MethodInfo method) {
return method.getSignature().getName().equals(Constants.CONSTRUCTOR_NAME);
}
public static Type[] getTypes(Class[] classes) {
if (classes == null) {
return null;
}
Type[] types = new Type[classes.length];
for (int i = 0; i < classes.length; i++) {
types[i] = Type.getType(classes[i]);
}
return types;
}
public static int ICONST(int value) {
return switch (value) {
case -1 -> Constants.ICONST_M1;
case 0 -> Constants.ICONST_0;
case 1 -> Constants.ICONST_1;
case 2 -> Constants.ICONST_2;
case 3 -> Constants.ICONST_3;
case 4 -> Constants.ICONST_4;
case 5 -> Constants.ICONST_5;
default -> -1; // error
};
}
public static int LCONST(long value) {
if (value == 0L) {
return Constants.LCONST_0;
} else if (value == 1L) {
return Constants.LCONST_1;
} else {
return -1; // error
}
}
public static int FCONST(float value) {
if (value == 0f) {
return Constants.FCONST_0;
} else if (value == 1f) {
return Constants.FCONST_1;
} else if (value == 2f) {
return Constants.FCONST_2;
} else {
return -1; // error
}
}
public static int DCONST(double value) {
if (value == 0d) {
return Constants.DCONST_0;
} else if (value == 1d) {
return Constants.DCONST_1;
} else {
return -1; // error
}
}
public static int NEWARRAY(Type type) {
return switch (type.getSort()) {
case Type.BYTE -> Constants.T_BYTE;
case Type.CHAR -> Constants.T_CHAR;
case Type.DOUBLE -> Constants.T_DOUBLE;
case Type.FLOAT -> Constants.T_FLOAT;
case Type.INT -> Constants.T_INT;
case Type.LONG -> Constants.T_LONG;
case Type.SHORT -> Constants.T_SHORT;
case Type.BOOLEAN -> Constants.T_BOOLEAN;
default -> -1; // error
};
}
public static String escapeType(String s) {
StringBuilder sb = new StringBuilder();
for (int i = 0, len = s.length(); i < len; i++) {
char c = s.charAt(i);
switch (c) {
case '$' -> sb.append("$24");
case '.' -> sb.append("$2E");
case '[' -> sb.append("$5B");
case ';' -> sb.append("$3B");
case '(' -> sb.append("$28");
case ')' -> sb.append("$29");
case '/' -> sb.append("$2F");
default -> sb.append(c);
}
}
return sb.toString();
}
}
| TypeUtils |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/annotation/AnnotationTransactionAttributeSourceTests.java | {
"start": 25465,
"end": 25634
} | class ____ implements TestInterface10 {
@Override
public int getAge() {
return 10;
}
}
@Transactional(label = {"retryable", "long-running"})
static | TestBean10 |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java | {
"start": 968,
"end": 1990
} | enum ____ {
// the order of the values below are significant.
RPC_CALL,
RPC_REPLY;
public int getValue() {
return ordinal();
}
public static Type fromValue(int value) {
if (value < 0 || value >= values().length) {
return null;
}
return values()[value];
}
}
protected final int xid;
protected final Type messageType;
RpcMessage(int xid, Type messageType) {
if (messageType != Type.RPC_CALL && messageType != Type.RPC_REPLY) {
throw new IllegalArgumentException("Invalid message type " + messageType);
}
this.xid = xid;
this.messageType = messageType;
}
public abstract XDR write(XDR xdr);
public int getXid() {
return xid;
}
public Type getMessageType() {
return messageType;
}
protected void validateMessageType(Type expected) {
if (expected != messageType) {
throw new IllegalArgumentException("Message type is expected to be "
+ expected + " but got " + messageType);
}
}
}
| Type |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/UninterruptiblesTest.java | {
"start": 19368,
"end": 21095
} | class ____ {
final Stopwatch stopwatch;
final long expectedCompletionWaitMillis;
Completion(long expectedCompletionWaitMillis) {
this.expectedCompletionWaitMillis = expectedCompletionWaitMillis;
stopwatch = Stopwatch.createStarted();
}
/**
* Asserts that the expected completion time has passed (and not "too much" time beyond that).
*/
void assertCompletionExpected() {
assertAtLeastTimePassed(stopwatch, expectedCompletionWaitMillis);
assertTimeNotPassed(stopwatch, expectedCompletionWaitMillis + LONG_DELAY_MS);
}
/**
* Asserts that at least {@code timeout} has passed but the expected completion time has not.
*/
void assertCompletionNotExpected(long timeout) {
Preconditions.checkArgument(timeout < expectedCompletionWaitMillis);
assertAtLeastTimePassed(stopwatch, timeout);
assertTimeNotPassed(stopwatch, expectedCompletionWaitMillis);
}
}
private static void assertAtLeastTimePassed(Stopwatch stopwatch, long expectedMillis) {
long elapsedMillis = stopwatch.elapsed(MILLISECONDS);
/*
* The "+ 5" below is to permit, say, sleep(10) to sleep only 9 milliseconds. We see such
* behavior sometimes when running these tests publicly as part of Guava. "+ 5" is probably more
* generous than it needs to be.
*/
assertTrue(
"Expected elapsed millis to be >= " + expectedMillis + " but was " + elapsedMillis,
elapsedMillis + 5 >= expectedMillis);
}
// TODO(cpovirk): Split this into separate CountDownLatch and IncrementableCountDownLatch classes.
/** Manages a {@link BlockingQueue} and associated timings for a {@code put} call. */
private static final | Completion |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenIT0130CleanLifecycleTest.java | {
"start": 959,
"end": 1606
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test default binding of goals for "clean" lifecycle.
*
* @throws Exception in case of failure
*/
@Test
public void testit0130() throws Exception {
File testDir = extractResources("/it0130");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.deleteDirectory("target");
verifier.setAutoclean(false);
verifier.addCliArgument("clean");
verifier.execute();
verifier.verifyFilePresent("target/clean-clean.txt");
verifier.verifyErrorFreeLog();
}
}
| MavenIT0130CleanLifecycleTest |
java | apache__camel | components/camel-jgroups/src/test/java/org/apache/camel/component/jgroups/JGroupsProducerTest.java | {
"start": 1237,
"end": 2921
} | class ____ extends CamelTestSupport {
static final String CLUSTER_NAME = "CLUSTER_NAME";
static final String MESSAGE = "MESSAGE";
// Fixtures
JChannel channel;
Object messageReceived;
// Routes fixture
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("jgroups:" + CLUSTER_NAME);
}
};
}
// Fixture setup
@Override
protected void doPreSetup() throws Exception {
super.doPreSetup();
channel = new JChannel();
channel.setReceiver(new Receiver() {
@Override
public void receive(Message msg) {
messageReceived = msg.getObject();
}
});
channel.connect(CLUSTER_NAME);
}
@Override
public void doPostTearDown() {
channel.close();
}
@Test
public void shouldReceiveMulticastedBody() throws Exception {
// When
sendBody("direct:start", MESSAGE);
// Then
waitForMulticastChannel(5);
assertEquals(MESSAGE, messageReceived);
}
@Test
public void shouldNotSendNullMessage() throws Exception {
// When
sendBody("direct:start", null);
// Then
waitForMulticastChannel(2);
assertNull(messageReceived);
}
// Helpers
private void waitForMulticastChannel(int attempts) throws InterruptedException {
while (messageReceived == null && attempts > 0) {
TimeUnit.SECONDS.sleep(1);
attempts--;
}
}
}
| JGroupsProducerTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mappingexception/MappingExceptionTest.java | {
"start": 1937,
"end": 6418
} | class ____ in npe, because no classloader exists for it
fail();
}
catch (org.hibernate.boot.MappingNotFoundException e) {
assertEquals( SourceType.RESOURCE, e.getOrigin().getType() );
assertEquals( "org/hibernate/Hibernate.hbm.xml", e.getOrigin().getName() );
}
try {
cfg.addFile( "completelybogus.hbm.xml" );
fail();
}
catch (org.hibernate.boot.MappingNotFoundException e) {
assertEquals( SourceType.FILE, e.getOrigin().getType() );
assertEquals( "completelybogus.hbm.xml", e.getOrigin().getName() );
}
try {
cfg.addFile( new File( "completelybogus.hbm.xml" ) );
fail();
}
catch (org.hibernate.boot.MappingNotFoundException e) {
assertEquals( SourceType.FILE, e.getOrigin().getType() );
assertEquals( "completelybogus.hbm.xml", e.getOrigin().getName() );
}
try {
cfg.addInputStream( new ByteArrayInputStream( new byte[0] ) );
fail();
}
catch (org.hibernate.boot.InvalidMappingException e) {
assertEquals( SourceType.INPUT_STREAM, e.getOrigin().getType() );
assertNull( null, e.getOrigin().getName() );
}
catch (InvalidMappingException inv) {
assertEquals( "input stream", inv.getType() );
assertNull( inv.getPath() );
}
try {
cfg.addResource( "nothere" );
fail();
}
catch (org.hibernate.boot.MappingNotFoundException e) {
assertEquals( SourceType.RESOURCE, e.getOrigin().getType() );
assertEquals( "nothere", e.getOrigin().getName() );
}
try {
cfg.addURL( new URL( "file://nothere" ) );
fail();
}
catch (org.hibernate.boot.MappingNotFoundException e) {
assertEquals( SourceType.URL, e.getOrigin().getType() );
assertEquals( "file://nothere", e.getOrigin().getName() );
}
catch (InvalidMappingException inv) {
assertEquals( "URL", inv.getType() );
assertEquals( "file://nothere", inv.getPath() );
}
catch (org.hibernate.boot.MappingException me) {
assertEquals( SourceType.URL, me.getOrigin().getType() );
assertEquals( "file://nothere", me.getOrigin().getName() );
}
}
@Test
public void testInvalidMapping() throws MappingException, IOException {
String resourceName = "org/hibernate/orm/test/mappingexception/InvalidMapping.hbm.xml";
File file = File.createTempFile( "TempInvalidMapping", ".hbm.xml" );
file.deleteOnExit();
copy( ConfigHelper.getConfigStream( resourceName ), file );
Configuration cfg = new Configuration();
try {
cfg.addCacheableFile( file.getAbsolutePath() );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "file", inv.getType() );
assertNotNull( inv.getPath() );
assertTrue( inv.getPath().endsWith( ".hbm.xml" ) );
}
try {
cfg.addCacheableFile( file );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "file", inv.getType() );
assertNotNull( inv.getPath() );
assertTrue( inv.getPath().endsWith( ".hbm.xml" ) );
}
try {
cfg.addClass( InvalidMapping.class );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "resource", inv.getType() );
assertEquals( "org/hibernate/orm/test/mappingexception/InvalidMapping.hbm.xml", inv.getPath() );
}
try {
cfg.addFile( file.getAbsolutePath() );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "file", inv.getType() );
assertEquals( file.getPath(), inv.getPath() );
}
try {
cfg.addFile( file );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "file", inv.getType() );
assertEquals( file.getPath(), inv.getPath() );
}
try {
cfg.addInputStream( ConfigHelper.getResourceAsStream( resourceName ) );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "input stream", inv.getType() );
assertNull( inv.getPath() );
}
try {
cfg.addResource( resourceName );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "resource", inv.getType() );
assertEquals( resourceName, inv.getPath() );
}
try {
cfg.addURL( ConfigHelper.findAsResource( resourceName ) );
fail();
}
catch (InvalidMappingException inv) {
assertEquals( "URL", inv.getType() );
assertTrue( inv.getPath().endsWith( "InvalidMapping.hbm.xml" ) );
}
}
void copy(InputStream in, File dst) throws IOException {
OutputStream out = new FileOutputStream( dst );
// Transfer bytes from in to out
byte[] buf = new byte[1024];
int len;
while ( ( len = in.read( buf ) ) > 0 ) {
out.write( buf, 0, len );
}
in.close();
out.close();
}
}
| result |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java | {
"start": 451,
"end": 642
} | class ____ responsible for publishing metrics related to ES|QL planning.
*
* @see <a href="https://github.com/elastic/elasticsearch/blob/main/modules/apm/METERING.md">METERING</a>
*/
public | is |
java | spring-projects__spring-boot | module/spring-boot-cache/src/main/java/org/springframework/boot/cache/autoconfigure/Cache2kBuilderCustomizer.java | {
"start": 733,
"end": 986
} | interface ____ can be implemented by beans wishing to customize the default
* setup for caches added to the manager through addCaches and for dynamically created
* caches.
*
* @author Jens Wilke
* @author Stephane Nicoll
* @since 4.0.0
*/
public | that |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/ClassLoaderTest.java | {
"start": 1340,
"end": 1557
} | class ____
* (which is what {@link FileSystemProvider#installedProviders()} uses to load {@link
* FileSystemProvider}s as services from the classpath).
*
* @author Colin Decker
*/
@RunWith(JUnit4.class)
public | loader |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/async/RouterAsyncProtocolTestBase.java | {
"start": 2243,
"end": 6008
} | class ____ {
private static Configuration routerConf;
/** Federated HDFS cluster. */
private static MiniRouterDFSCluster cluster;
private static String ns0;
/** Random Router for this federated cluster. */
private MiniRouterDFSCluster.RouterContext router;
private FileSystem routerFs;
private RouterRpcServer routerRpcServer;
private RouterRpcServer routerAsyncRpcServer;
protected static final String TEST_DIR_PATH = "/testdir";
@BeforeAll
public static void setUpCluster() throws Exception {
cluster = new MiniRouterDFSCluster(true, 1, 2,
DEFAULT_HEARTBEAT_INTERVAL_MS, 1000);
cluster.setNumDatanodesPerNameservice(3);
cluster.startCluster();
// Making one Namenode active per nameservice
if (cluster.isHighAvailability()) {
for (String ns : cluster.getNameservices()) {
cluster.switchToActive(ns, NAMENODES[0]);
cluster.switchToStandby(ns, NAMENODES[1]);
}
}
// Start routers with only an RPC service
routerConf = new RouterConfigBuilder()
.rpc()
.build();
// Reduce the number of RPC clients threads to overload the Router easy
routerConf.setInt(RBFConfigKeys.DFS_ROUTER_CLIENT_THREADS_SIZE, 1);
routerConf.setInt(DFS_ROUTER_ASYNC_RPC_HANDLER_COUNT_KEY, 1);
routerConf.setInt(DFS_ROUTER_ASYNC_RPC_RESPONDER_COUNT_KEY, 1);
// We decrease the DN cache times to make the test faster
routerConf.setTimeDuration(
RBFConfigKeys.DN_REPORT_CACHE_EXPIRE, 1, TimeUnit.SECONDS);
cluster.addRouterOverrides(routerConf);
// Start routers with only an RPC service
cluster.startRouters();
// Register and verify all NNs with all routers
cluster.registerNamenodes();
cluster.waitNamenodeRegistration();
cluster.waitActiveNamespaces();
ns0 = cluster.getNameservices().get(0);
}
@AfterAll
public static void shutdownCluster() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}
@BeforeEach
public void setUp() throws IOException {
router = cluster.getRandomRouter();
routerFs = router.getFileSystem();
routerRpcServer = router.getRouterRpcServer();
routerRpcServer.initAsyncThreadPools(routerConf);
RouterAsyncRpcClient asyncRpcClient = new RouterAsyncRpcClient(
routerConf, router.getRouter(), routerRpcServer.getNamenodeResolver(),
routerRpcServer.getRPCMonitor(),
routerRpcServer.getRouterStateIdContext());
routerAsyncRpcServer = Mockito.spy(routerRpcServer);
Mockito.when(routerAsyncRpcServer.getRPCClient()).thenReturn(asyncRpcClient);
Mockito.when(routerAsyncRpcServer.isAsync()).thenReturn(true);
// Create mock locations
MockResolver resolver = (MockResolver) router.getRouter().getSubclusterResolver();
resolver.addLocation("/", ns0, "/");
FsPermission permission = new FsPermission("705");
routerFs.mkdirs(new Path(TEST_DIR_PATH), permission);
}
@AfterEach
public void tearDown() throws IOException {
// clear client context
CallerContext.setCurrent(null);
boolean delete = routerFs.delete(new Path(TEST_DIR_PATH));
assertTrue(delete);
if (routerFs != null) {
routerFs.close();
}
}
public static Configuration getRouterConf() {
return routerConf;
}
public static MiniRouterDFSCluster getCluster() {
return cluster;
}
public static String getNs0() {
return ns0;
}
public MiniRouterDFSCluster.RouterContext getRouter() {
return router;
}
public FileSystem getRouterFs() {
return routerFs;
}
public RouterRpcServer getRouterRpcServer() {
return routerRpcServer;
}
public RouterRpcServer getRouterAsyncRpcServer() {
return routerAsyncRpcServer;
}
}
| RouterAsyncProtocolTestBase |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/spi/ClassLoaderAccess.java | {
"start": 436,
"end": 919
} | interface ____ {
/**
* Obtain a {@link Class} reference by name
*
* @param name The name of the class
* @return The {@code Class} object with the given name
*/
<T> Class<T> classForName(String name);
/**
* Locate a resource by name
*
* @param resourceName The name of the resource to resolve
* @return The located resource;
* may return {@code null} to indicate the resource was not found
*/
URL locateResource(String resourceName);
}
| ClassLoaderAccess |
java | netty__netty | transport/src/main/java/io/netty/channel/DefaultChannelPromise.java | {
"start": 1200,
"end": 4509
} | class ____ extends DefaultPromise<Void> implements ChannelPromise, FlushCheckpoint {
private final Channel channel;
private long checkpoint;
/**
* Creates a new instance.
*
* @param channel
* the {@link Channel} associated with this future
*/
public DefaultChannelPromise(Channel channel) {
this.channel = checkNotNull(channel, "channel");
}
/**
* Creates a new instance.
*
* @param channel
* the {@link Channel} associated with this future
*/
public DefaultChannelPromise(Channel channel, EventExecutor executor) {
super(executor);
this.channel = checkNotNull(channel, "channel");
}
@Override
protected EventExecutor executor() {
EventExecutor e = super.executor();
if (e == null) {
return channel().eventLoop();
} else {
return e;
}
}
@Override
public Channel channel() {
return channel;
}
@Override
public ChannelPromise setSuccess() {
return setSuccess(null);
}
@Override
public ChannelPromise setSuccess(Void result) {
super.setSuccess(result);
return this;
}
@Override
public boolean trySuccess() {
return trySuccess(null);
}
@Override
public ChannelPromise setFailure(Throwable cause) {
super.setFailure(cause);
return this;
}
@Override
public ChannelPromise addListener(GenericFutureListener<? extends Future<? super Void>> listener) {
super.addListener(listener);
return this;
}
@Override
public ChannelPromise addListeners(GenericFutureListener<? extends Future<? super Void>>... listeners) {
super.addListeners(listeners);
return this;
}
@Override
public ChannelPromise removeListener(GenericFutureListener<? extends Future<? super Void>> listener) {
super.removeListener(listener);
return this;
}
@Override
public ChannelPromise removeListeners(GenericFutureListener<? extends Future<? super Void>>... listeners) {
super.removeListeners(listeners);
return this;
}
@Override
public ChannelPromise sync() throws InterruptedException {
super.sync();
return this;
}
@Override
public ChannelPromise syncUninterruptibly() {
super.syncUninterruptibly();
return this;
}
@Override
public ChannelPromise await() throws InterruptedException {
super.await();
return this;
}
@Override
public ChannelPromise awaitUninterruptibly() {
super.awaitUninterruptibly();
return this;
}
@Override
public long flushCheckpoint() {
return checkpoint;
}
@Override
public void flushCheckpoint(long checkpoint) {
this.checkpoint = checkpoint;
}
@Override
public ChannelPromise promise() {
return this;
}
@Override
protected void checkDeadLock() {
if (channel().isRegistered()) {
super.checkDeadLock();
}
}
@Override
public ChannelPromise unvoid() {
return this;
}
@Override
public boolean isVoid() {
return false;
}
}
| DefaultChannelPromise |
java | netty__netty | microbench/src/main/java/io/netty/microbench/concurrent/BurstCostExecutorsBenchmark.java | {
"start": 2434,
"end": 6069
} | class ____ implements ExecutorService {
private static final Runnable POISON_PILL = new Runnable() {
@Override
public void run() {
}
};
private final Queue<Runnable> tasks;
private final AtomicBoolean poisoned = new AtomicBoolean();
private final Thread executorThread;
SpinExecutorService(int maxTasks) {
tasks = PlatformDependent.newFixedMpscQueue(maxTasks);
executorThread = new Thread(new Runnable() {
@Override
public void run() {
final Queue<Runnable> tasks = SpinExecutorService.this.tasks;
Runnable task;
while ((task = tasks.poll()) != POISON_PILL) {
if (task != null) {
task.run();
}
}
}
});
executorThread.start();
}
@Override
public void shutdown() {
if (poisoned.compareAndSet(false, true)) {
while (!tasks.offer(POISON_PILL)) {
// Just try again
}
try {
executorThread.join();
} catch (InterruptedException e) {
//We're quite trusty :)
}
}
}
@Override
public List<Runnable> shutdownNow() {
throw new UnsupportedOperationException();
}
@Override
public boolean isShutdown() {
throw new UnsupportedOperationException();
}
@Override
public boolean isTerminated() {
throw new UnsupportedOperationException();
}
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> Future<T> submit(Callable<T> task) {
throw new UnsupportedOperationException();
}
@Override
public <T> Future<T> submit(Runnable task, T result) {
throw new UnsupportedOperationException();
}
@Override
public Future<?> submit(Runnable task) {
throw new UnsupportedOperationException();
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
throws InterruptedException, ExecutionException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
throw new UnsupportedOperationException();
}
@Override
public void execute(Runnable command) {
if (!tasks.offer(command)) {
throw new RejectedExecutionException(
"If that happens, there is something wrong with the available capacity/burst size");
}
}
}
private | SpinExecutorService |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/test/java/org/springframework/security/oauth2/server/resource/web/authentication/BearerTokenAuthenticationFilterTests.java | {
"start": 21343,
"end": 21549
} | class ____ implements GrantedAuthority {
public static final String AUTHORITY = "CUSTOM_AUTHORITY";
@Override
public String getAuthority() {
return AUTHORITY;
}
}
}
| DefaultEqualsGrantedAuthority |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/issues/issue7003/Issue7003Test.java | {
"start": 3134,
"end": 3275
} | class ____ {
@DubboReference(check = false, version = "1.2.3", group = "demo")
private HelloService helloService;
}
}
| ClassB |
java | apache__kafka | generator/src/main/java/org/apache/kafka/message/StructRegistry.java | {
"start": 1196,
"end": 7249
} | class ____ {
/**
* The specification for this structure.
*/
private final StructSpec spec;
/**
* The versions which the parent(s) of this structure can have. If this is a
* top-level structure, this will be equal to the versions which the
* overall message can have.
*/
private final Versions parentVersions;
StructInfo(StructSpec spec, Versions parentVersions) {
this.spec = spec;
this.parentVersions = parentVersions;
}
public StructSpec spec() {
return spec;
}
public Versions parentVersions() {
return parentVersions;
}
}
public StructRegistry() {
this.structs = new TreeMap<>();
this.commonStructNames = new TreeSet<>();
}
/**
* Register all the structures contained a message spec.
*/
public void register(MessageSpec message) throws Exception {
// Register common structures.
for (StructSpec struct : message.commonStructs()) {
if (!MessageGenerator.firstIsCapitalized(struct.name())) {
throw new RuntimeException("Can't process structure " + struct.name() +
": the first letter of structure names must be capitalized.");
}
if (structs.containsKey(struct.name())) {
throw new RuntimeException("Common struct " + struct.name() + " was specified twice.");
}
structs.put(struct.name(), new StructInfo(struct, struct.versions()));
commonStructNames.add(struct.name());
}
// Register inline structures.
addStructSpecs(message.validVersions(), message.fields());
}
private void addStructSpecs(Versions parentVersions, List<FieldSpec> fields) {
for (FieldSpec field : fields) {
String typeName = null;
if (field.type().isStructArray()) {
FieldType.ArrayType arrayType = (FieldType.ArrayType) field.type();
typeName = arrayType.elementName();
} else if (field.type().isStruct()) {
FieldType.StructType structType = (FieldType.StructType) field.type();
typeName = structType.typeName();
}
if (typeName != null) {
if (commonStructNames.contains(typeName)) {
// If we're using a common structure, we can't specify its fields.
// The fields should be specified in the commonStructs area.
if (!field.fields().isEmpty()) {
throw new RuntimeException("Can't re-specify the common struct " +
typeName + " as an inline struct.");
}
} else if (structs.containsKey(typeName)) {
// Inline structures should only appear once.
throw new RuntimeException("Struct " + typeName +
" was specified twice.");
} else {
// Synthesize a StructSpec object out of the fields.
StructSpec spec = new StructSpec(typeName,
field.versions().toString(),
Versions.NONE_STRING, // version deprecations not supported at field level
field.fields());
structs.put(typeName, new StructInfo(spec, parentVersions));
}
addStructSpecs(parentVersions.intersect(field.versions()), field.fields());
}
}
}
/**
* Locate the struct corresponding to a field.
*/
public StructSpec findStruct(FieldSpec field) {
String structFieldName;
if (field.type().isArray()) {
FieldType.ArrayType arrayType = (FieldType.ArrayType) field.type();
structFieldName = arrayType.elementName();
} else if (field.type().isStruct()) {
FieldType.StructType structType = (FieldType.StructType) field.type();
structFieldName = structType.typeName();
} else {
throw new RuntimeException("Field " + field.name() +
" cannot be treated as a structure.");
}
return findStruct(structFieldName);
}
public StructSpec findStruct(String structFieldName) {
StructInfo structInfo = structs.get(structFieldName);
if (structInfo == null) {
throw new RuntimeException("Unable to locate a specification for the structure " +
structFieldName);
}
return structInfo.spec;
}
/**
* Return true if the field is a struct array with keys.
*/
public boolean isStructArrayWithKeys(FieldSpec field) {
if (!field.type().isArray()) {
return false;
}
FieldType.ArrayType arrayType = (FieldType.ArrayType) field.type();
if (!arrayType.isStructArray()) {
return false;
}
StructInfo structInfo = structs.get(arrayType.elementName());
if (structInfo == null) {
throw new RuntimeException("Unable to locate a specification for the structure " +
arrayType.elementName());
}
return structInfo.spec.hasKeys();
}
Set<String> commonStructNames() {
return commonStructNames;
}
/**
* Returns an iterator that will step through all the common structures.
*/
Iterator<StructSpec> commonStructs() {
return new Iterator<StructSpec>() {
private final Iterator<String> iter = commonStructNames.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public StructSpec next() {
return structs.get(iter.next()).spec;
}
};
}
Iterator<StructInfo> structs() {
return structs.values().iterator();
}
}
| StructInfo |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JettyComponentBuilderFactory.java | {
"start": 1371,
"end": 1801
} | interface ____ {
/**
* Jetty (camel-jetty)
* Expose HTTP endpoints using Jetty 12.
*
* Category: http
* Since: 1.2
* Maven coordinates: org.apache.camel:camel-jetty
*
* @return the dsl builder
*/
static JettyComponentBuilder jetty() {
return new JettyComponentBuilderImpl();
}
/**
* Builder for the Jetty component.
*/
| JettyComponentBuilderFactory |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/experimental/SocketStreamIteratorTest.java | {
"start": 1492,
"end": 3763
} | class ____ {
@Test
void testIterator() throws Exception {
final long seed = new Random().nextLong();
final int numElements = 1000;
final SocketStreamIterator<Long> iterator =
new SocketStreamIterator<>(LongSerializer.INSTANCE);
CheckedThread writer =
new CheckedThread() {
@Override
public void go() throws Exception {
try (Socket sock =
new Socket(iterator.getBindAddress(), iterator.getPort());
DataOutputViewStreamWrapper out =
new DataOutputViewStreamWrapper(sock.getOutputStream())) {
final TypeSerializer<Long> serializer = LongSerializer.INSTANCE;
final Random rnd = new Random(seed);
for (int i = 0; i < numElements; i++) {
serializer.serialize(rnd.nextLong(), out);
}
}
}
};
writer.start();
final Random validator = new Random(seed);
for (int i = 0; i < numElements; i++) {
assertThat(iterator).hasNext();
assertThat(iterator).hasNext();
assertThat(iterator.next()).isEqualTo(validator.nextLong());
}
assertThat(iterator).isExhausted();
writer.sync();
assertThat(iterator).isExhausted();
}
@Test
void testIteratorWithException() throws Exception {
final SocketStreamIterator<Long> iterator =
new SocketStreamIterator<>(LongSerializer.INSTANCE);
// asynchronously set an error
new Thread() {
@Override
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException ignored) {
}
iterator.notifyOfError(new Exception("test"));
}
}.start();
assertThatThrownBy(iterator::hasNext)
.isInstanceOf(RuntimeException.class)
.hasMessageContaining("test");
}
}
| SocketStreamIteratorTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorFactory.java | {
"start": 1323,
"end": 3340
} | class ____ extends ValuesSourceAggregatorFactory {
private final MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier;
private final double compression;
private final TDigestExecutionHint executionHint;
MedianAbsoluteDeviationAggregatorFactory(
String name,
ValuesSourceConfig config,
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metadata,
double compression,
TDigestExecutionHint executionHint,
MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier
) throws IOException {
super(name, config, context, parent, subFactoriesBuilder, metadata);
this.aggregatorSupplier = aggregatorSupplier;
this.compression = compression;
this.executionHint = executionHint;
}
static void registerAggregators(ValuesSourceRegistry.Builder builder) {
builder.register(
MedianAbsoluteDeviationAggregationBuilder.REGISTRY_KEY,
List.of(CoreValuesSourceType.NUMERIC, TimeSeriesValuesSourceType.COUNTER),
MedianAbsoluteDeviationAggregator::new,
true
);
}
@Override
protected Aggregator createUnmapped(Aggregator parent, Map<String, Object> metadata) throws IOException {
final InternalMedianAbsoluteDeviation empty = InternalMedianAbsoluteDeviation.empty(
name,
metadata,
config.format(),
compression,
executionHint
);
return new NonCollectingSingleMetricAggregator(name, context, parent, empty, metadata);
}
@Override
protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
return aggregatorSupplier.build(name, config, config.format(), context, parent, metadata, compression, executionHint);
}
}
| MedianAbsoluteDeviationAggregatorFactory |
java | quarkusio__quarkus | extensions/spring-scheduled/deployment/src/test/java/io/quarkus/spring/scheduled/deployment/InvalidCronExpressionTest.java | {
"start": 818,
"end": 917
} | class ____ {
@Scheduled(cron = "0 0 0")
void wrong() {
}
}
}
| InvalidBean |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java | {
"start": 42650,
"end": 43182
} | class ____ {
public int foo(Suit suit) {
switch (suit) {
case HEART:
case DIAMOND:
return 1;
case SPADE:
System.out.println("hello");
throw new RuntimeException();
default:
throw new NullPointerException();
}
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedMethodTest.java | {
"start": 2971,
"end": 3253
} | class ____ {
// BUG: Diagnostic contains:
private void notUsedMethod() {}
// BUG: Diagnostic contains:
private static void staticNotUsedMethod() {}
@SuppressWarnings({"deprecation", "unused"})
| Unuseds |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/FetchParent.java | {
"start": 885,
"end": 4017
} | interface ____ extends DomainResultGraphNode {
/**
* This parent's mapping type
*/
FetchableContainer getReferencedMappingContainer();
/**
* This parent's mapping type
*/
FetchableContainer getReferencedMappingType();
default NavigablePath resolveNavigablePath(Fetchable fetchable) {
final String fetchableName = fetchable.getFetchableName();
if ( fetchable instanceof EntityIdentifierMapping ) {
return new EntityIdentifierNavigablePath( getNavigablePath(), fetchableName );
}
else {
final FetchableContainer referencedMappingContainer = getReferencedMappingContainer();
final EntityMappingType fetchableEntityType = fetchable.findContainingEntityMapping();
final EntityMappingType fetchParentType;
if ( referencedMappingContainer instanceof EmbeddableMappingType
|| referencedMappingContainer instanceof EmbeddableValuedModelPart ) {
fetchParentType = referencedMappingContainer.findContainingEntityMapping();
}
else if ( referencedMappingContainer instanceof EntityMappingType entityMappingType ) {
fetchParentType = entityMappingType;
}
else {
fetchParentType = fetchableEntityType;
}
if ( fetchParentType != null && !fetchParentType.isTypeOrSuperType( fetchableEntityType ) ) {
return getNavigablePath().treatAs( fetchableEntityType.getEntityName() )
.append( fetchableName );
}
else {
return getNavigablePath().append( fetchableName );
}
}
}
/**
* Whereas {@link #getReferencedMappingContainer} and {@link #getReferencedMappingType} return the
* referenced container type, this method returns the referenced part.
*
* E.g. for a many-to-one this method returns the
* {@link ToOneAttributeMapping} while
* {@link #getReferencedMappingContainer} and {@link #getReferencedMappingType} return the referenced
* {@link org.hibernate.metamodel.mapping.EntityMappingType}.
*/
default ModelPart getReferencedModePart() {
return getReferencedMappingContainer();
}
/**
* Get the property path to this parent
*/
NavigablePath getNavigablePath();
/**
* Retrieve the fetches owned by this fetch source.
*/
ImmutableFetchList getFetches();
Fetch findFetch(Fetchable fetchable);
boolean hasJoinFetches();
boolean containsCollectionFetches();
default int getCollectionFetchesCount() {
return getFetches().getCollectionFetchesCount();
}
@Override
default void collectValueIndexesToCache(BitSet valueIndexes) {
for ( Fetch fetch : getFetches() ) {
fetch.collectValueIndexesToCache( valueIndexes );
}
}
Initializer<?> createInitializer(InitializerParent<?> parent, AssemblerCreationState creationState);
default FetchParent getRoot() {
return this instanceof Fetch fetch ? fetch.getFetchParent().getRoot() : this;
}
default Fetch generateFetchableFetch(
Fetchable fetchable,
NavigablePath fetchablePath,
FetchTiming fetchTiming,
boolean selected,
String resultVariable,
DomainResultCreationState creationState) {
return fetchable.generateFetch(
this,
fetchablePath,
fetchTiming,
selected,
resultVariable,
creationState
);
}
}
| FetchParent |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_126_ads_minus.java | {
"start": 315,
"end": 4355
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select q.case_id as case_id\n" +
"from (\n" +
"\tselect p.id as case_id,p.gmt_create as create_time,p.member_id as member_id\n" +
"\t\t,b.biz_2_name as biz_2_name,b.biz_3_name as biz_3_name,b.biz_4_name as biz_4_name,d.deal_nick as deal_nick,d.ad_account as ad_account,d.org_name as dept_name\n" +
"\t\t,case when d.org_level=7 then org_6_name when d.org_level=6 then org_5_name when d.org_level=5 then org_4_name when d.org_level=4 then org_3_name when d.org_level=3 then org_2_name when d.org_level=2 then org_1_name else '' end as group_name\n" +
"\t\t,d.company_id as company_id,d.company_type_id as company_type_id,p.biz_type as biz_type \n" +
"\tfrom tpp_case_phone p \n" +
"\t\tjoin dim_tb_crm_call_biz b on p.biz_type=b.biz_id \n" +
"\t\tjoin dim_tb_crm_deal d on p.owner=d.deal_id \n" +
"\twhere p.end_time is null \n" +
"\t\tand p.owner <> 0 and d.ad_account is not null and (d.org_2_id='109704' \n" +
"\t\t\tor d.org_2_id='111059' or d.org_2_id='110989' or d.org_2_id='110694' or d.org_2_id='110301' or d.org_2_id='110296') \n" +
"\t\tand p.gmt_create >= '2018-02-26 13:11:00' and p.gmt_create <= '2018-02-26 13:12:58' ) q \n" +
"\t\twhere q.case_id in ((select case_id from tpp_action_case where action_code=999990 ) MINUS (select case_id from tpp_action_case where action_code=999997))";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT q.case_id AS case_id\n" +
"FROM (\n" +
"\tSELECT p.id AS case_id, p.gmt_create AS create_time, p.member_id AS member_id, b.biz_2_name AS biz_2_name, b.biz_3_name AS biz_3_name\n" +
"\t\t, b.biz_4_name AS biz_4_name, d.deal_nick AS deal_nick, d.ad_account AS ad_account, d.org_name AS dept_name\n" +
"\t\t, CASE\n" +
"\t\t\tWHEN d.org_level = 7 THEN org_6_name\n" +
"\t\t\tWHEN d.org_level = 6 THEN org_5_name\n" +
"\t\t\tWHEN d.org_level = 5 THEN org_4_name\n" +
"\t\t\tWHEN d.org_level = 4 THEN org_3_name\n" +
"\t\t\tWHEN d.org_level = 3 THEN org_2_name\n" +
"\t\t\tWHEN d.org_level = 2 THEN org_1_name\n" +
"\t\t\tELSE ''\n" +
"\t\tEND AS group_name, d.company_id AS company_id, d.company_type_id AS company_type_id, p.biz_type AS biz_type\n" +
"\tFROM tpp_case_phone p\n" +
"\t\tJOIN dim_tb_crm_call_biz b ON p.biz_type = b.biz_id\n" +
"\t\tJOIN dim_tb_crm_deal d ON p.owner = d.deal_id\n" +
"\tWHERE p.end_time IS NULL\n" +
"\t\tAND p.owner <> 0\n" +
"\t\tAND d.ad_account IS NOT NULL\n" +
"\t\tAND (d.org_2_id = '109704'\n" +
"\t\t\tOR d.org_2_id = '111059'\n" +
"\t\t\tOR d.org_2_id = '110989'\n" +
"\t\t\tOR d.org_2_id = '110694'\n" +
"\t\t\tOR d.org_2_id = '110301'\n" +
"\t\t\tOR d.org_2_id = '110296')\n" +
"\t\tAND p.gmt_create >= '2018-02-26 13:11:00'\n" +
"\t\tAND p.gmt_create <= '2018-02-26 13:12:58'\n" +
") q\n" +
"WHERE q.case_id IN (\n" +
"\tSELECT case_id\n" +
"\tFROM tpp_action_case\n" +
"\tWHERE action_code = 999990\n" +
"\tMINUS\n" +
"\t(SELECT case_id\n" +
"\tFROM tpp_action_case\n" +
"\tWHERE action_code = 999997)\n" +
")", stmt.toString());
}
}
| MySqlSelectTest_126_ads_minus |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/test/java/org/apache/flink/state/forst/ForStListStateTest.java | {
"start": 1218,
"end": 3632
} | class ____ extends ForStStateTestBase {
@Test
public void testMergeNamespace() throws Exception {
ListStateDescriptor<Integer> descriptor =
new ListStateDescriptor<>("testState", IntSerializer.INSTANCE);
InternalListState<String, Integer, Integer> listState =
keyedBackend.createState(1, IntSerializer.INSTANCE, descriptor);
setCurrentContext("test", "test");
for (int i = 0; i < 10; i++) {
listState.setCurrentNamespace(i);
listState.asyncAdd(i);
}
drain();
setCurrentContext("test", "test");
for (int i = 0; i < 10; i++) {
listState.setCurrentNamespace(i);
Iterable<Integer> list = listState.get();
assertThat(list).containsExactly(i);
}
drain();
// 1~20
ArrayList<Integer> namespaces = new ArrayList<>();
for (int i = 1; i < 20; i++) {
namespaces.add(i);
}
setCurrentContext("test", "test");
listState
.asyncMergeNamespaces(0, namespaces)
.thenAccept(
(e) -> {
listState.setCurrentNamespace(0);
Iterable<Integer> list = listState.get();
assertThat(list).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
for (int i = 1; i < 10; i++) {
listState.setCurrentNamespace(i);
assertThat(listState.get()).isNullOrEmpty();
}
});
drain();
// test sync method
setCurrentContext("test", "test");
for (int i = 10; i < 20; i++) {
listState.setCurrentNamespace(i);
listState.add(i);
}
drain();
setCurrentContext("test", "test");
listState.mergeNamespaces(0, namespaces);
listState.setCurrentNamespace(0);
Iterable<Integer> list = listState.get();
assertThat(list)
.containsExactly(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19);
for (int i = 1; i < 20; i++) {
listState.setCurrentNamespace(i);
assertThat(listState.get()).isNullOrEmpty();
}
drain();
}
}
| ForStListStateTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/OrderedProgrammaticExtensionRegistrationTests.java | {
"start": 10367,
"end": 10543
} | class ____
extends DefaultOrderAndExplicitOrderClassLevelExtensionRegistrationTestCase {
}
static | InheritedDefaultOrderAndExplicitOrderClassLevelExtensionRegistrationTestCase |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeAggregatorFactory.java | {
"start": 1239,
"end": 2757
} | class ____<R extends Range> extends ValuesSourceAggregatorFactory {
private final InternalRange.Factory<?, ?> rangeFactory;
private final R[] ranges;
private final boolean keyed;
private final RangeAggregatorSupplier aggregatorSupplier;
public AbstractRangeAggregatorFactory(
String name,
ValuesSourceConfig config,
R[] ranges,
boolean keyed,
InternalRange.Factory<?, ?> rangeFactory,
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metadata,
RangeAggregatorSupplier aggregatorSupplier
) throws IOException {
super(name, config, context, parent, subFactoriesBuilder, metadata);
this.ranges = ranges;
this.keyed = keyed;
this.rangeFactory = rangeFactory;
this.aggregatorSupplier = aggregatorSupplier;
}
@Override
protected Aggregator createUnmapped(Aggregator parent, Map<String, Object> metadata) throws IOException {
return new Unmapped<>(name, factories, ranges, keyed, config.format(), context, parent, rangeFactory, metadata);
}
@Override
protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
return aggregatorSupplier.build(name, factories, config, rangeFactory, ranges, keyed, context, parent, cardinality, metadata);
}
}
| AbstractRangeAggregatorFactory |
java | spring-projects__spring-boot | module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/autoconfigure/MissingR2dbcPoolDependencyException.java | {
"start": 1052,
"end": 1092
} | class ____ not "
+ "present.");
}
}
| is |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-links/deployment/src/test/java/io/quarkus/resteasy/links/deployment/TestResource.java | {
"start": 369,
"end": 1175
} | class ____ {
private static final AtomicInteger ID_COUNTER = new AtomicInteger(0);
private static final List<TestRecord> RECORDS = new LinkedList<>(Arrays.asList(
new TestRecord(ID_COUNTER.incrementAndGet(), "first", "First value"),
new TestRecord(ID_COUNTER.incrementAndGet(), "second", "Second value")));
@GET
@Produces({ MediaType.APPLICATION_JSON, "application/hal+json" })
@LinkResource(entityClassName = "io.quarkus.resteasy.links.deployment.TestRecord", rel = "list")
public List<TestRecord> getAll() {
return RECORDS;
}
@GET
@Path("/first")
@Produces({ MediaType.APPLICATION_JSON, "application/hal+json" })
@LinkResource(rel = "first")
public TestRecord getFirst() {
return RECORDS.get(0);
}
}
| TestResource |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/FactoryFinderResolver.java | {
"start": 1777,
"end": 2108
} | class ____ to use
* @param resourcePath the resource path as base to lookup files within
* @return a factory finder.
*/
FactoryFinder resolveFactoryFinder(ClassResolver classResolver, String resourcePath);
/**
* Creates a new factory finder.
*
* @param classResolver the | resolver |
java | quarkusio__quarkus | extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/asynchronous/types/mutiny/MutinyHelloService.java | {
"start": 572,
"end": 2302
} | class ____ {
static final AtomicInteger COUNTER = new AtomicInteger(0);
@NonBlocking
@Retry(jitter = 50)
@Fallback(fallbackMethod = "fallback")
public Uni<String> helloNonblocking() {
COUNTER.incrementAndGet();
return Uni.createFrom().failure(IllegalArgumentException::new);
}
@Blocking
@Retry(jitter = 50)
@Fallback(fallbackMethod = "fallback")
public Uni<String> helloBlocking() {
COUNTER.incrementAndGet();
return Uni.createFrom().failure(IllegalArgumentException::new);
}
@Asynchronous
@Retry(jitter = 50)
@Fallback(fallbackMethod = "fallback")
public Uni<String> helloAsynchronous() {
COUNTER.incrementAndGet();
return Uni.createFrom().failure(IllegalArgumentException::new);
}
@AsynchronousNonBlocking
@Retry(jitter = 50)
@Fallback(fallbackMethod = "fallback")
public Uni<String> helloAsynchronousNonBlocking() {
COUNTER.incrementAndGet();
return Uni.createFrom().failure(IllegalArgumentException::new);
}
@Asynchronous
@NonBlocking
@Retry(jitter = 50)
@Fallback(fallbackMethod = "fallback")
public Uni<String> helloAsynchronousNonblockingCombined() {
COUNTER.incrementAndGet();
return Uni.createFrom().failure(IllegalArgumentException::new);
}
@Asynchronous
@Blocking
@Retry(jitter = 50)
@Fallback(fallbackMethod = "fallback")
public Uni<String> helloAsynchronousBlockingCombined() {
COUNTER.incrementAndGet();
return Uni.createFrom().failure(IllegalArgumentException::new);
}
public Uni<String> fallback() {
return Uni.createFrom().item(() -> "hello");
}
}
| MutinyHelloService |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/config/AnnotationDrivenTests.java | {
"start": 1527,
"end": 4533
} | class ____ {
@Test
void withProxyTargetClass() {
ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("annotationDrivenProxyTargetClassTests.xml", getClass());
doTestWithMultipleTransactionManagers(context);
}
@Test
void withConfigurationClass() {
ApplicationContext parent = new AnnotationConfigApplicationContext(TransactionManagerConfiguration.class);
ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(new String[] {"annotationDrivenConfigurationClassTests.xml"}, getClass(), parent);
doTestWithMultipleTransactionManagers(context);
}
@Test
void withAnnotatedTransactionManagers() {
AnnotationConfigApplicationContext parent = new AnnotationConfigApplicationContext();
parent.registerBeanDefinition("transactionManager1", new RootBeanDefinition(SynchTransactionManager.class));
parent.registerBeanDefinition("transactionManager2", new RootBeanDefinition(NoSynchTransactionManager.class));
parent.refresh();
ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(new String[] {"annotationDrivenConfigurationClassTests.xml"}, getClass(), parent);
doTestWithMultipleTransactionManagers(context);
}
private void doTestWithMultipleTransactionManagers(ApplicationContext context) {
CallCountingTransactionManager tm1 = context.getBean("transactionManager1", CallCountingTransactionManager.class);
CallCountingTransactionManager tm2 = context.getBean("transactionManager2", CallCountingTransactionManager.class);
TransactionalService service = context.getBean("service", TransactionalService.class);
assertThat(AopUtils.isCglibProxy(service)).isTrue();
service.setSomething("someName");
assertThat(tm1.commits).isEqualTo(1);
assertThat(tm2.commits).isEqualTo(0);
service.doSomething();
assertThat(tm1.commits).isEqualTo(1);
assertThat(tm2.commits).isEqualTo(1);
service.setSomething("someName");
assertThat(tm1.commits).isEqualTo(2);
assertThat(tm2.commits).isEqualTo(1);
service.doSomething();
assertThat(tm1.commits).isEqualTo(2);
assertThat(tm2.commits).isEqualTo(2);
}
@Test
void serializableWithPreviousUsage() throws Exception {
ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("annotationDrivenProxyTargetClassTests.xml", getClass());
TransactionalService service = context.getBean("service", TransactionalService.class);
service.setSomething("someName");
service = SerializationTestUtils.serializeAndDeserialize(service);
service.setSomething("someName");
}
@Test
void serializableWithoutPreviousUsage() throws Exception {
ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("annotationDrivenProxyTargetClassTests.xml", getClass());
TransactionalService service = context.getBean("service", TransactionalService.class);
service = SerializationTestUtils.serializeAndDeserialize(service);
service.setSomething("someName");
}
@SuppressWarnings("serial")
public static | AnnotationDrivenTests |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/ai/model/mcp/registry/McpRegistryServerListTest.java | {
"start": 1064,
"end": 7379
} | class ____ extends BasicRequestTest {
@Test
void testSerialize() throws JsonProcessingException {
McpRegistryServerList mcpRegistryServerList = new McpRegistryServerList();
// Use detail type to match List<ServerResponse> in production code
mcpRegistryServerList.setServers(Collections.singletonList(new ServerResponse()));
// Set metadata with count and nextCursor (now camelCase)
mcpRegistryServerList.setMetadata(new McpRegistryServerList.Metadata("next", 1));
String json = mapper.writeValueAsString(mcpRegistryServerList);
assertTrue(json.contains("\"servers\":["));
assertTrue(json.contains("\"metadata\":"));
// Primary format: camelCase
assertTrue(json.contains("\"nextCursor\":\"next\""));
assertTrue(json.contains("\"count\":1"));
}
@Test
void testDeserialize() throws JsonProcessingException {
// Test with new camelCase format (primary)
String jsonCamelCase = "{\"servers\":[],\"metadata\":{\"nextCursor\":\"next\",\"count\":1}}";
McpRegistryServerList result1 = mapper.readValue(jsonCamelCase, McpRegistryServerList.class);
assertEquals(0, result1.getServers().size());
assertEquals(1, result1.getMetadata().getCount());
assertEquals("next", result1.getMetadata().getNextCursor());
// Test with old snake_case format (backward compatibility)
String jsonSnakeCase = "{\"servers\":[],\"metadata\":{\"next_cursor\":\"next\",\"count\":1}}";
McpRegistryServerList result2 = mapper.readValue(jsonSnakeCase, McpRegistryServerList.class);
assertEquals(0, result2.getServers().size());
assertEquals(1, result2.getMetadata().getCount());
assertEquals("next", result2.getMetadata().getNextCursor());
}
@Test
void testSerializeWithMultipleServers() throws JsonProcessingException {
McpRegistryServerList list = new McpRegistryServerList();
ServerResponse sr1 = new ServerResponse();
McpRegistryServerDetail server1 = new McpRegistryServerDetail();
server1.setName("Server1");
sr1.setServer(server1);
ServerResponse sr2 = new ServerResponse();
McpRegistryServerDetail server2 = new McpRegistryServerDetail();
server2.setName("Server2");
sr2.setServer(server2);
list.setServers(Arrays.asList(sr1, sr2));
list.setMetadata(new McpRegistryServerList.Metadata("cursor2", 2));
String json = mapper.writeValueAsString(list);
assertTrue(json.contains("\"servers\":["));
assertTrue(json.contains("\"name\":\"Server1\""));
assertTrue(json.contains("\"name\":\"Server2\""));
assertTrue(json.contains("\"count\":2"));
}
@Test
void testDeserializeWithMultipleServers() throws JsonProcessingException {
String json = "{\"servers\":["
+ "{\"server\":{\"name\":\"Server1\",\"version\":\"1.0.0\"}},"
+ "{\"server\":{\"name\":\"Server2\",\"version\":\"2.0.0\"}}"
+ "],\"metadata\":{\"nextCursor\":\"cursor2\",\"count\":2}}";
McpRegistryServerList list = mapper.readValue(json, McpRegistryServerList.class);
assertEquals(2, list.getServers().size());
assertEquals("Server1", list.getServers().get(0).getServer().getName());
assertEquals("Server2", list.getServers().get(1).getServer().getName());
assertEquals("cursor2", list.getMetadata().getNextCursor());
assertEquals(2, list.getMetadata().getCount());
}
@Test
void testMetadataConstructor() throws JsonProcessingException {
McpRegistryServerList.Metadata metadata = new McpRegistryServerList.Metadata("test_cursor", 5);
assertEquals("test_cursor", metadata.getNextCursor());
assertEquals(5, metadata.getCount());
String json = mapper.writeValueAsString(metadata);
assertTrue(json.contains("\"nextCursor\":\"test_cursor\""));
assertTrue(json.contains("\"count\":5"));
}
@Test
void testEmptyServerList() throws JsonProcessingException {
McpRegistryServerList list = new McpRegistryServerList();
list.setServers(Collections.emptyList());
list.setMetadata(new McpRegistryServerList.Metadata(null, 0));
String json = mapper.writeValueAsString(list);
assertTrue(json.contains("\"servers\":[]"));
assertTrue(json.contains("\"metadata\":{"));
}
@Test
void testNullNextCursorHandling() throws JsonProcessingException {
McpRegistryServerList list = new McpRegistryServerList();
list.setServers(Collections.emptyList());
list.setMetadata(new McpRegistryServerList.Metadata(null, 0));
String json = mapper.writeValueAsString(list);
McpRegistryServerList parsed = mapper.readValue(json, McpRegistryServerList.class);
assertNull(parsed.getMetadata().getNextCursor());
assertEquals(0, parsed.getMetadata().getCount());
}
@Test
void testBackwardCompatibilitySnakeCaseAlias() throws JsonProcessingException {
// Ensure @JsonAlias works for next_cursor -> nextCursor
String jsonSnakeCase = "{\"servers\":[],\"metadata\":{\"next_cursor\":\"pagination_cursor\",\"count\":10}}";
McpRegistryServerList list = mapper.readValue(jsonSnakeCase, McpRegistryServerList.class);
assertEquals("pagination_cursor", list.getMetadata().getNextCursor());
assertEquals(10, list.getMetadata().getCount());
}
@Test
void testPrimaryFormatCamelCase() throws JsonProcessingException {
// Ensure camelCase is the primary serialization format
McpRegistryServerList list = new McpRegistryServerList();
list.setServers(Collections.emptyList());
list.setMetadata(new McpRegistryServerList.Metadata("cursor", 5));
String json = mapper.writeValueAsString(list);
// Primary format should be camelCase
assertTrue(json.contains("\"nextCursor\":\"cursor\""));
// Should NOT use snake_case in serialization
assertTrue(!json.contains("\"next_cursor\""));
}
} | McpRegistryServerListTest |
java | apache__dubbo | dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/model/Operation.java | {
"start": 1427,
"end": 8994
} | class ____ extends Node<Operation> {
private Set<String> tags;
private String summary;
private String description;
private ExternalDocs externalDocs;
private String operationId;
private List<Parameter> parameters;
private RequestBody requestBody;
private Map<String, ApiResponse> responses;
private Boolean deprecated;
private List<SecurityRequirement> security;
private List<Server> servers;
private String group;
private String version;
private HttpMethods httpMethod;
private transient MethodMeta meta;
public Set<String> getTags() {
return tags;
}
public Operation setTags(Set<String> tags) {
this.tags = tags;
return this;
}
public Operation addTag(String tag) {
if (tags == null) {
tags = new LinkedHashSet<>();
}
tags.add(tag);
return this;
}
public Operation removeTag(String tag) {
if (tags != null) {
tags.remove(tag);
}
return this;
}
public String getSummary() {
return summary;
}
public Operation setSummary(String summary) {
this.summary = summary;
return this;
}
public String getDescription() {
return description;
}
public Operation setDescription(String description) {
this.description = description;
return this;
}
public ExternalDocs getExternalDocs() {
return externalDocs;
}
public Operation setExternalDocs(ExternalDocs externalDocs) {
this.externalDocs = externalDocs;
return this;
}
public String getOperationId() {
return operationId;
}
public Operation setOperationId(String operationId) {
this.operationId = operationId;
return this;
}
public List<Parameter> getParameters() {
return parameters;
}
public Parameter getParameter(String name, In in) {
if (parameters == null || name == null || in == null) {
return null;
}
for (int i = 0, size = parameters.size(); i < size; i++) {
Parameter parameter = parameters.get(i);
if (name.equals(parameter.getName()) && in == parameter.getIn()) {
return parameter;
}
}
return null;
}
public Operation setParameters(List<Parameter> parameters) {
this.parameters = parameters;
return this;
}
public Operation addParameter(Parameter parameter) {
if (parameters == null) {
parameters = new ArrayList<>();
}
parameters.add(parameter);
return this;
}
public Operation removeParameter(Parameter parameter) {
if (parameters != null) {
parameters.remove(parameter);
}
return this;
}
public RequestBody getRequestBody() {
return requestBody;
}
public Operation setRequestBody(RequestBody requestBody) {
this.requestBody = requestBody;
return this;
}
public Map<String, ApiResponse> getResponses() {
return responses;
}
public ApiResponse getResponse(String httpStatusCode) {
return responses == null ? null : responses.get(httpStatusCode);
}
public ApiResponse getOrAddResponse(String httpStatusCode) {
if (responses == null) {
responses = new LinkedHashMap<>();
}
return responses.computeIfAbsent(httpStatusCode, k -> new ApiResponse());
}
public Operation setResponses(Map<String, ApiResponse> responses) {
this.responses = responses;
return this;
}
public Operation addResponse(String name, ApiResponse response) {
if (responses == null) {
responses = new LinkedHashMap<>();
}
responses.put(name, response);
return this;
}
public Operation removeResponse(String name) {
if (responses != null) {
responses.remove(name);
}
return this;
}
public Boolean getDeprecated() {
return deprecated;
}
public Operation setDeprecated(Boolean deprecated) {
this.deprecated = deprecated;
return this;
}
public List<SecurityRequirement> getSecurity() {
return security;
}
public Operation setSecurity(List<SecurityRequirement> security) {
this.security = security;
return this;
}
public Operation addSecurity(SecurityRequirement security) {
if (this.security == null) {
this.security = new ArrayList<>();
}
this.security.add(security);
return this;
}
public Operation removeSecurity(SecurityRequirement security) {
if (this.security != null) {
this.security.remove(security);
}
return this;
}
public List<Server> getServers() {
return servers;
}
public Operation setServers(List<Server> servers) {
this.servers = servers;
return this;
}
public Operation addServer(Server server) {
if (servers == null) {
servers = new ArrayList<>();
}
servers.add(server);
return this;
}
public Operation removeServer(Server server) {
if (servers != null) {
servers.remove(server);
}
return this;
}
public String getGroup() {
return group;
}
public Operation setGroup(String group) {
this.group = group;
return this;
}
public String getVersion() {
return version;
}
public Operation setVersion(String version) {
this.version = version;
return this;
}
public HttpMethods getHttpMethod() {
return httpMethod;
}
public Operation setHttpMethod(HttpMethods httpMethod) {
this.httpMethod = httpMethod;
return this;
}
public MethodMeta getMeta() {
return meta;
}
public Operation setMeta(MethodMeta meta) {
this.meta = meta;
return this;
}
@Override
public Operation clone() {
Operation clone = super.clone();
if (tags != null) {
clone.tags = new LinkedHashSet<>(tags);
}
clone.externalDocs = clone(externalDocs);
clone.parameters = clone(parameters);
clone.requestBody = clone(requestBody);
clone.responses = clone(responses);
clone.security = clone(security);
clone.servers = clone(servers);
return clone;
}
@Override
public Map<String, Object> writeTo(Map<String, Object> node, Context context) {
write(node, "tags", tags);
write(node, "summary", summary);
write(node, "description", description);
write(node, "externalDocs", externalDocs, context);
write(node, "operationId", operationId);
write(node, "parameters", parameters, context);
write(node, "requestBody", requestBody, context);
write(node, "responses", responses, context);
write(node, "deprecated", deprecated);
write(node, "security", security, context);
write(node, "servers", servers, context);
writeExtensions(node);
write(node, Constants.X_JAVA_CLASS, meta.getServiceMeta().getServiceInterface());
write(node, Constants.X_JAVA_METHOD, meta.getMethod().getName());
write(node, Constants.X_JAVA_METHOD_DESCRIPTOR, TypeUtils.getMethodDescriptor(meta));
return node;
}
}
| Operation |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SimpleRequestSlotMatchingStrategy.java | {
"start": 1308,
"end": 2680
} | enum ____ implements RequestSlotMatchingStrategy {
INSTANCE;
@Override
public Collection<RequestSlotMatch> matchRequestsAndSlots(
Collection<? extends PhysicalSlot> slots,
Collection<PendingRequest> pendingRequests,
Map<ResourceID, LoadingWeight> taskExecutorsLoadingWeight) {
final Collection<RequestSlotMatch> resultingMatches = new ArrayList<>();
// if pendingRequests has a special order, then let's preserve it
final LinkedList<PendingRequest> pendingRequestsIndex = new LinkedList<>(pendingRequests);
for (PhysicalSlot slot : slots) {
final Iterator<PendingRequest> pendingRequestIterator = pendingRequestsIndex.iterator();
while (pendingRequestIterator.hasNext()) {
final PendingRequest pendingRequest = pendingRequestIterator.next();
if (slot.getResourceProfile().isMatching(pendingRequest.getResourceProfile())) {
resultingMatches.add(RequestSlotMatch.createFor(pendingRequest, slot));
pendingRequestIterator.remove();
break;
}
}
}
return resultingMatches;
}
@Override
public String toString() {
return SimpleRequestSlotMatchingStrategy.class.getSimpleName();
}
}
| SimpleRequestSlotMatchingStrategy |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/Generic1128Test.java | {
"start": 462,
"end": 735
} | class ____<M extends HObj> {
public long id;
// important: do not serialize as subtype, but only as type that
// is statically recognizable here.
@JsonSerialize(typing=JsonSerialize.Typing.STATIC)
public M parent;
}
static | HObj |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/web/server/ServerHttpSecurity.java | {
"start": 112954,
"end": 113600
} | class ____ {
private ReferrerPolicySpec() {
}
private ReferrerPolicySpec(ReferrerPolicy referrerPolicy) {
HeaderSpec.this.referrerPolicy.setPolicy(referrerPolicy);
}
/**
* Sets the policy to be used in the response header.
* @param referrerPolicy a referrer policy
* @return the {@link ReferrerPolicySpec} to continue configuring
*/
public ReferrerPolicySpec policy(ReferrerPolicy referrerPolicy) {
HeaderSpec.this.referrerPolicy.setPolicy(referrerPolicy);
return this;
}
}
/**
* Configures the Cross-Origin-Opener-Policy header
*
* @since 5.7
*/
public final | ReferrerPolicySpec |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/request/target/ImageViewTargetTest.java | {
"start": 4730,
"end": 5022
} | class ____ extends ImageViewTarget<Drawable> {
public Drawable resource;
TestTarget(ImageView view) {
super(view);
}
@Override
protected void setResource(Drawable resource) {
this.resource = resource;
view.setImageDrawable(resource);
}
}
}
| TestTarget |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java | {
"start": 34075,
"end": 35449
} | class ____ extends CompositeService {
private final CompositeService parent;
private final Service serviceToAdd;
private STATE triggerState;
public AddSiblingService(CompositeService parent,
Service serviceToAdd,
STATE triggerState) {
super("ParentStateManipulatorService");
this.parent = parent;
this.serviceToAdd = serviceToAdd;
this.triggerState = triggerState;
}
/**
* Add the serviceToAdd to the parent if this service
* is in the state requested
*/
private void maybeAddSibling() {
if (getServiceState() == triggerState) {
parent.addService(serviceToAdd);
}
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
maybeAddSibling();
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
maybeAddSibling();
super.serviceStart();
}
@Override
protected void serviceStop() throws Exception {
maybeAddSibling();
super.serviceStop();
}
/**
* Expose addService method
* @param parent parent service
* @param child child to add
*/
public static void addChildToService(CompositeService parent, Service child) {
parent.addService(child);
}
}
}
| AddSiblingService |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/key/InstantAsKeyTest.java | {
"start": 389,
"end": 1911
} | class ____ extends DateTimeTestBase
{
private static final Instant INSTANT_0 = Instant.ofEpochMilli(0);
private static final String INSTANT_0_STRING = "1970-01-01T00:00:00Z";
private static final Instant INSTANT = Instant.ofEpochSecond(1426325213l, 590000000l);
private static final String INSTANT_STRING = "2015-03-14T09:26:53.590Z";
private final ObjectMapper MAPPER = newMapper();
private final ObjectReader READER = MAPPER.readerFor(new TypeReference<Map<Instant, String>>() { });
@Test
public void testSerialization0() throws Exception {
String value = MAPPER.writeValueAsString(asMap(INSTANT_0, "test"));
assertEquals(mapAsString(INSTANT_0_STRING, "test"), value);
}
@Test
public void testSerialization1() throws Exception {
String value = MAPPER.writeValueAsString(asMap(INSTANT, "test"));
assertEquals(mapAsString(INSTANT_STRING, "test"), value);
}
@Test
public void testDeserialization0() throws Exception {
Map<Instant, String> value = READER.readValue(mapAsString(INSTANT_0_STRING, "test"));
Map<Instant, String> EXP = asMap(INSTANT_0, "test");
assertEquals(EXP, value, "Value is incorrect");
}
@Test
public void testDeserialization1() throws Exception {
Map<Instant, String> value = READER.readValue(mapAsString(INSTANT_STRING, "test"));
Map<Instant, String> EXP = asMap(INSTANT, "test");
assertEquals(EXP, value, "Value is incorrect");
}
}
| InstantAsKeyTest |
java | google__guava | android/guava-tests/test/com/google/common/base/FinalizableReferenceQueueTest.java | {
"start": 4582,
"end": 5100
} | class ____, not its parent.
if (name.equals(Finalizer.class.getName())) {
Class<?> clazz = findClass(name);
if (resolve) {
resolveClass(clazz);
}
return clazz;
}
return super.loadClass(name, resolve);
}
}
@Test
public void testGetFinalizerUrl() {
assertThat(getClass().getResource("internal/Finalizer.class")).isNotNull();
}
@Test
public void testFinalizeClassHasNoNestedClasses() throws Exception {
// Ensure that the Finalizer | loader |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/event/WatermarkEvent.java | {
"start": 1423,
"end": 2107
} | class ____ extends RuntimeEvent {
private static final int TAG_LONG_GENERALIZED_WATERMARK = 0;
private static final int TAG_BOOL_GENERALIZED_WATERMARK = 1;
private Watermark watermark;
private boolean isAligned = false;
public WatermarkEvent() {}
public WatermarkEvent(Watermark watermark, boolean isAligned) {
this.watermark = watermark;
this.isAligned = isAligned;
}
@Override
public void write(DataOutputView out) throws IOException {
// write watermark identifier
out.writeUTF(watermark.getIdentifier());
// write align flag
out.writeBoolean(isAligned);
// write watermark | WatermarkEvent |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryParser.java | {
"start": 1058,
"end": 1315
} | interface ____ extends Closeable {
/**
* Get the next {@link HistoryEvent}
* @return the next {@link HistoryEvent}. If no more events left, return null.
* @throws IOException
*/
HistoryEvent nextEvent() throws IOException;
}
| JobHistoryParser |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java | {
"start": 1848,
"end": 6468
} | class ____ extends TransportAction<QueryUserRequest, QueryUserResponse> {
private final NativeUsersStore usersStore;
private final ProfileService profileService;
private final Authentication.RealmRef nativeRealmRef;
@Inject
public TransportQueryUserAction(
TransportService transportService,
ActionFilters actionFilters,
NativeUsersStore usersStore,
ProfileService profileService,
Realms realms
) {
super(ActionTypes.QUERY_USER_ACTION.name(), actionFilters, transportService.getTaskManager(), EsExecutors.DIRECT_EXECUTOR_SERVICE);
this.usersStore = usersStore;
this.profileService = profileService;
this.nativeRealmRef = realms.getNativeRealmRef();
}
@Override
protected void doExecute(Task task, QueryUserRequest request, ActionListener<QueryUserResponse> listener) {
final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource()
.version(false)
.fetchSource(true)
.trackTotalHits(true);
if (request.getFrom() != null) {
searchSourceBuilder.from(request.getFrom());
}
if (request.getSize() != null) {
searchSourceBuilder.size(request.getSize());
}
searchSourceBuilder.query(UserBoolQueryBuilder.build(request.getQueryBuilder()));
if (request.getFieldSortBuilders() != null) {
USER_FIELD_NAME_TRANSLATORS.translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder, null);
}
if (request.getSearchAfterBuilder() != null) {
searchSourceBuilder.searchAfter(request.getSearchAfterBuilder().getSortValues());
}
final SearchRequest searchRequest = new SearchRequest(new String[] { SECURITY_MAIN_ALIAS }, searchSourceBuilder);
usersStore.queryUsers(searchRequest, ActionListener.wrap(queryUserResults -> {
if (request.isWithProfileUid()) {
resolveProfileUids(queryUserResults, listener);
} else {
List<QueryUserResponse.Item> queryUserResponseResults = queryUserResults.userQueryResult()
.stream()
.map(queryUserResult -> new QueryUserResponse.Item(queryUserResult.user(), queryUserResult.sortValues(), null))
.toList();
listener.onResponse(new QueryUserResponse(queryUserResults.total(), queryUserResponseResults));
}
}, listener::onFailure));
}
private void resolveProfileUids(NativeUsersStore.QueryUserResults queryUserResults, ActionListener<QueryUserResponse> listener) {
final List<Subject> subjects = queryUserResults.userQueryResult()
.stream()
.map(item -> new Subject(item.user(), nativeRealmRef))
.toList();
profileService.searchProfilesForSubjects(subjects, ActionListener.wrap(resultsAndErrors -> {
if (resultsAndErrors == null || resultsAndErrors.errors().isEmpty()) {
final Map<String, String> profileUidLookup = resultsAndErrors == null
? Map.of()
: resultsAndErrors.results()
.stream()
.filter(t -> Objects.nonNull(t.v2()))
.map(t -> new Tuple<>(t.v1().getUser().principal(), t.v2().uid()))
.collect(Collectors.toUnmodifiableMap(Tuple::v1, Tuple::v2));
List<QueryUserResponse.Item> queryUserResponseResults = queryUserResults.userQueryResult()
.stream()
.map(
userResult -> new QueryUserResponse.Item(
userResult.user(),
userResult.sortValues(),
profileUidLookup.getOrDefault(userResult.user().principal(), null)
)
)
.toList();
listener.onResponse(new QueryUserResponse(queryUserResults.total(), queryUserResponseResults));
} else {
final ElasticsearchStatusException exception = new ElasticsearchStatusException(
"failed to retrieve profile for users. please retry without fetching profile uid (with_profile_uid=false)",
RestStatus.INTERNAL_SERVER_ERROR
);
resultsAndErrors.errors().values().forEach(exception::addSuppressed);
listener.onFailure(exception);
}
}, listener::onFailure));
}
}
| TransportQueryUserAction |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java | {
"start": 475,
"end": 657
} | class ____ implements an aggregation function with grouping.
* See {@link Aggregator} for more information.
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.SOURCE)
public @ | that |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/TestAttributeProvider.java | {
"start": 1096,
"end": 3317
} | class ____ extends AttributeProvider {
private static final ImmutableSet<String> ATTRIBUTES = ImmutableSet.of("foo", "bar", "baz");
@Override
public String name() {
return "test";
}
@Override
public ImmutableSet<String> inherits() {
return ImmutableSet.of("basic");
}
@Override
public ImmutableSet<String> fixedAttributes() {
return ATTRIBUTES;
}
@Override
public ImmutableMap<String, ?> defaultValues(Map<String, ?> userDefaults) {
Map<String, Object> result = new HashMap<>();
Long bar = 0L;
Integer baz = 1;
if (userDefaults.containsKey("test:bar")) {
bar = checkType("test", "bar", userDefaults.get("test:bar"), Number.class).longValue();
}
if (userDefaults.containsKey("test:baz")) {
baz = checkType("test", "baz", userDefaults.get("test:baz"), Integer.class);
}
result.put("test:bar", bar);
result.put("test:baz", baz);
return ImmutableMap.copyOf(result);
}
@Override
public void set(File file, String view, String attribute, Object value, boolean create) {
switch (attribute) {
case "bar":
checkNotCreate(view, attribute, create);
file.setAttribute(
"test", "bar", checkType(view, attribute, value, Number.class).longValue());
break;
case "baz":
file.setAttribute("test", "baz", checkType(view, attribute, value, Integer.class));
break;
default:
throw unsettable(view, attribute, create);
}
}
@Override
public Object get(File file, String attribute) {
if (attribute.equals("foo")) {
return "hello";
}
return file.getAttribute("test", attribute);
}
@Override
public Class<TestAttributeView> viewType() {
return TestAttributeView.class;
}
@Override
public TestAttributeView view(
FileLookup lookup, ImmutableMap<String, FileAttributeView> inheritedViews) {
return new View(lookup, (BasicFileAttributeView) inheritedViews.get("basic"));
}
@Override
public Class<TestAttributes> attributesType() {
return TestAttributes.class;
}
@Override
public TestAttributes readAttributes(File file) {
return new Attributes(file);
}
static final | TestAttributeProvider |
java | apache__avro | lang/java/idl/src/main/java/org/apache/avro/idl/IdlReader.java | {
"start": 20946,
"end": 36265
} | enum ____.
propertiesStack.pop();
enumSymbols.add(identifier(ctx.name));
}
@Override
public void exitEnumDefault(IdlParser.EnumDefaultContext ctx) {
enumDefaultSymbol = identifier(ctx.defaultSymbolName);
}
@Override
public void enterRecordDeclaration(RecordDeclarationContext ctx) {
assert schema == null;
assert fields.isEmpty();
propertiesStack.push(new SchemaProperties(currentNamespace(), true, true, false));
}
@Override
public void enterRecordBody(RecordBodyContext ctx) {
assert fields.isEmpty();
RecordDeclarationContext recordCtx = (RecordDeclarationContext) ctx.parent;
SchemaProperties properties = propertiesStack.pop();
String doc = getDocComment(recordCtx);
String identifier = identifier(recordCtx.name);
String name = name(identifier);
pushNamespace(namespace(identifier, properties.namespace()));
boolean isError = recordCtx.recordType.getType() == IdlParser.Error;
schema = Schema.createRecord(name, doc, currentNamespace(), isError);
properties.copyAliases(schema::addAlias);
properties.copyProperties(schema);
}
@Override
public void exitRecordDeclaration(RecordDeclarationContext ctx) {
schema.setFields(fields);
fields.clear();
parseContext.put(schema);
schema = null;
popNamespace();
}
@Override
public void enterFieldDeclaration(FieldDeclarationContext ctx) {
assert typeStack.isEmpty();
defaultVariableDocComment = getDocComment(ctx);
}
@Override
public void exitFieldDeclaration(FieldDeclarationContext ctx) {
typeStack.pop();
defaultVariableDocComment = null;
}
@Override
public void enterVariableDeclaration(VariableDeclarationContext ctx) {
assert jsonValues.isEmpty();
propertiesStack.push(new SchemaProperties(currentNamespace(), false, true, true));
}
@Override
public void exitVariableDeclaration(VariableDeclarationContext ctx) {
String doc = Optional.ofNullable(getDocComment(ctx)).orElse(defaultVariableDocComment);
String fieldName = identifier(ctx.fieldName);
JsonNode defaultValue = jsonValues.poll();
Schema type = typeStack.element();
JsonNode fieldDefault = fixDefaultValue(defaultValue, type);
Schema fieldType = fixOptionalSchema(type, fieldDefault);
SchemaProperties properties = propertiesStack.pop();
boolean validate = SchemaResolver.isFullyResolvedSchema(fieldType);
Schema.Field field = Accessor.createField(fieldName, fieldType, doc, fieldDefault, validate, properties.order());
properties.copyAliases(field::addAlias);
properties.copyProperties(field);
fields.add(field);
}
/**
* When parsing JSON, the parser generates a LongNode or IntNode based on the
* size of the number it encounters. But this may not be expected based on the
* schema. This method fixes that.
*
* @param defaultValue the parsed default value
* @param fieldType the field schema
* @return the default value, now matching the schema
*/
private JsonNode fixDefaultValue(JsonNode defaultValue, Schema fieldType) {
if (!(defaultValue instanceof IntNode)) {
return defaultValue;
}
if (fieldType.getType() == Schema.Type.UNION) {
for (Schema unionedType : fieldType.getTypes()) {
if (unionedType.getType() == Schema.Type.INT) {
break;
} else if (unionedType.getType() == Schema.Type.LONG) {
return new LongNode(defaultValue.longValue());
}
}
return defaultValue;
}
if (fieldType.getType() == Schema.Type.LONG) {
return new LongNode(defaultValue.longValue());
}
return defaultValue;
}
/**
* For "optional schemas" (recognized by the marker property the NullableType
* production adds), ensure the null schema is in the right place.
*
* @param schema a schema
* @param defaultValue the intended default value
* @return the schema, or an optional schema with null in the right place
*/
private Schema fixOptionalSchema(Schema schema, JsonNode defaultValue) {
Object optionalType = schema.getObjectProp(OPTIONAL_NULLABLE_TYPE_PROPERTY);
if (optionalType == null) {
return schema;
}
// The schema is a union schema with 2 types: "null" and a non-"null"
// schema. The result of this method must not have the property
// OPTIONAL_NULLABLE_TYPE_PROPERTY.
Schema nullSchema = schema.getTypes().get(0);
Schema nonNullSchema = schema.getTypes().get(1);
boolean nonNullDefault = defaultValue != null && !defaultValue.isNull();
if (nonNullDefault) {
return Schema.createUnion(nonNullSchema, nullSchema);
} else {
return Schema.createUnion(nullSchema, nonNullSchema);
}
}
@Override
public void enterMessageDeclaration(MessageDeclarationContext ctx) {
assert typeStack.isEmpty();
assert fields.isEmpty();
assert messageDocComment == null;
propertiesStack.push(new SchemaProperties(currentNamespace(), false, false, false));
messageDocComment = getDocComment(ctx);
}
@Override
public void exitMessageDeclaration(MessageDeclarationContext ctx) {
Schema resultType = typeStack.pop();
Map<String, JsonNode> properties = propertiesStack.pop().properties;
String name = identifier(ctx.name);
Schema request = Schema.createRecord(null, null, null, false, fields);
fields.clear();
Protocol.Message message;
if (ctx.oneway != null) {
if (resultType.getType() == Schema.Type.NULL) {
message = protocol.createMessage(name, messageDocComment, properties, request);
} else {
throw error("One-way message'" + name + "' must return void", ctx.returnType.start);
}
} else {
List<Schema> errorSchemas = new ArrayList<>();
errorSchemas.add(Protocol.SYSTEM_ERROR);
for (IdentifierContext errorContext : ctx.errors) {
errorSchemas.add(namedSchemaOrUnresolved(fullName(currentNamespace(), identifier(errorContext))));
}
message = protocol.createMessage(name, messageDocComment, properties, request, resultType,
Schema.createUnion(errorSchemas));
}
messageDocComment = null;
protocol.getMessages().put(message.getName(), message);
}
@Override
public void enterFormalParameter(FormalParameterContext ctx) {
assert typeStack.size() == 1; // The message return type is on the stack; nothing else.
defaultVariableDocComment = getDocComment(ctx);
}
@Override
public void exitFormalParameter(FormalParameterContext ctx) {
typeStack.pop();
defaultVariableDocComment = null;
}
@Override
public void exitResultType(ResultTypeContext ctx) {
if (typeStack.isEmpty()) {
// if there's no type, we've parsed 'void': use the null type
typeStack.push(Schema.create(Schema.Type.NULL));
}
}
@Override
public void enterFullType(FullTypeContext ctx) {
propertiesStack.push(new SchemaProperties(currentNamespace(), false, false, false));
}
@Override
public void exitFullType(FullTypeContext ctx) {
SchemaProperties properties = propertiesStack.pop();
Schema type = typeStack.element();
if (type.getObjectProp(OPTIONAL_NULLABLE_TYPE_PROPERTY) != null) {
// Optional type: put the properties on the non-null content
properties.copyProperties(type.getTypes().get(1));
} else {
properties.copyProperties(type);
}
}
@Override
public void exitNullableType(NullableTypeContext ctx) {
Schema type;
if (ctx.referenceName == null) {
type = typeStack.pop();
} else {
// propertiesStack is empty within resultType->plainType->nullableType, and
// holds our properties otherwise
if (propertiesStack.isEmpty() || propertiesStack.peek().hasProperties()) {
throw error("Type references may not be annotated", ctx.getParent().getStart());
}
type = namedSchemaOrUnresolved(fullName(currentNamespace(), identifier(ctx.referenceName)));
}
if (ctx.optional != null) {
type = Schema.createUnion(Schema.create(Schema.Type.NULL), type);
// Add a marker property to the union (it will be removed when creating fields)
type.addProp(OPTIONAL_NULLABLE_TYPE_PROPERTY, BooleanNode.TRUE);
}
typeStack.push(type);
}
@Override
public void exitPrimitiveType(PrimitiveTypeContext ctx) {
switch (ctx.typeName.getType()) {
case IdlParser.Boolean:
typeStack.push(Schema.create(Schema.Type.BOOLEAN));
break;
case IdlParser.Int:
typeStack.push(Schema.create(Schema.Type.INT));
break;
case IdlParser.Long:
typeStack.push(Schema.create(Schema.Type.LONG));
break;
case IdlParser.Float:
typeStack.push(Schema.create(Schema.Type.FLOAT));
break;
case IdlParser.Double:
typeStack.push(Schema.create(Schema.Type.DOUBLE));
break;
case IdlParser.Bytes:
typeStack.push(Schema.create(Schema.Type.BYTES));
break;
case IdlParser.String:
typeStack.push(Schema.create(Schema.Type.STRING));
break;
case IdlParser.Null:
typeStack.push(Schema.create(Schema.Type.NULL));
break;
case IdlParser.Date:
typeStack.push(LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT)));
break;
case IdlParser.Time:
typeStack.push(LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT)));
break;
case IdlParser.Timestamp:
typeStack.push(LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG)));
break;
case IdlParser.LocalTimestamp:
typeStack.push(LogicalTypes.localTimestampMillis().addToSchema(Schema.create(Schema.Type.LONG)));
break;
case IdlParser.UUID:
typeStack.push(LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING)));
break;
default: // Only option left: decimal
int precision = Integer.decode(ctx.precision.getText());
int scale = ctx.scale == null ? 0 : Integer.decode(ctx.scale.getText());
typeStack.push(LogicalTypes.decimal(precision, scale).addToSchema(Schema.create(Schema.Type.BYTES)));
break;
}
}
@Override
public void exitArrayType(ArrayTypeContext ctx) {
typeStack.push(Schema.createArray(typeStack.pop()));
}
@Override
public void exitMapType(MapTypeContext ctx) {
typeStack.push(Schema.createMap(typeStack.pop()));
}
@Override
public void enterUnionType(UnionTypeContext ctx) {
// push an empty marker union; we'll replace it with the real union upon exit
typeStack.push(Schema.createUnion());
}
@Override
public void exitUnionType(UnionTypeContext ctx) {
List<Schema> types = new ArrayList<>();
Schema type;
while ((type = typeStack.pop()).getType() != Schema.Type.UNION) {
types.add(type);
}
Collections.reverse(types); // Popping the stack works in reverse order
// type is an empty marker union; ignore (drop) it
typeStack.push(Schema.createUnion(types));
}
@Override
public void exitJsonValue(JsonValueContext ctx) {
if (ctx.parent instanceof JsonArrayContext) {
JsonNode value = jsonValues.pop();
assert jsonValues.peek() instanceof ArrayNode;
((ArrayNode) jsonValues.element()).add(value);
}
}
@Override
public void exitJsonLiteral(JsonLiteralContext ctx) {
Token literal = ctx.literal;
switch (literal.getType()) {
case IdlParser.Null:
jsonValues.push(NullNode.getInstance());
break;
case IdlParser.BTrue:
jsonValues.push(BooleanNode.TRUE);
break;
case IdlParser.BFalse:
jsonValues.push(BooleanNode.FALSE);
break;
case IdlParser.IntegerLiteral:
String number = literal.getText().replace("_", "");
char lastChar = number.charAt(number.length() - 1);
boolean coerceToLong = false;
if (lastChar == 'l' || lastChar == 'L') {
coerceToLong = true;
number = number.substring(0, number.length() - 1);
}
long longNumber = Long.decode(number);
int intNumber = (int) longNumber; // Narrowing cast: if too large a number, the two are different
jsonValues.push(coerceToLong || intNumber != longNumber ? new LongNode(longNumber) : new IntNode(intNumber));
break;
case IdlParser.FloatingPointLiteral:
jsonValues.push(new DoubleNode(Double.parseDouble(literal.getText())));
break;
default: // StringLiteral:
jsonValues.push(new TextNode(getString(literal)));
break;
}
}
@Override
public void enterJsonArray(JsonArrayContext ctx) {
jsonValues.push(new ArrayNode(null));
}
@Override
public void enterJsonObject(JsonObjectContext ctx) {
jsonValues.push(new ObjectNode(null));
}
@Override
public void exitJsonPair(JsonPairContext ctx) {
String name = getString(ctx.name);
JsonNode value = jsonValues.pop();
assert jsonValues.peek() instanceof ObjectNode;
((ObjectNode) jsonValues.element()).set(name, value);
}
private String identifier(IdentifierContext ctx) {
return ctx.word.getText().replace("`", "");
}
private String name(String identifier) {
int dotPos = identifier.lastIndexOf('.');
String name = identifier.substring(dotPos + 1);
return validateName(name, true);
}
private String namespace(String identifier, String namespace) {
int dotPos = identifier.lastIndexOf('.');
String ns = dotPos < 0 ? namespace : identifier.substring(0, dotPos);
if (ns == null) {
return null;
}
for (int s = 0, e = ns.indexOf('.'); e > 0; s = e + 1, e = ns.indexOf('.', s)) {
validateName(ns.substring(s, e), false);
}
return ns;
}
private String validateName(String name, boolean isTypeName) {
if (name == null) {
throw new SchemaParseException("Null name");
} else if (!VALID_NAME.test(name)) {
throw new SchemaParseException("Illegal name: " + name);
}
if (isTypeName && INVALID_TYPE_NAMES.contains(name)) {
throw new SchemaParseException("Illegal name: " + name);
}
return name;
}
private String fullName(String namespace, String typeName) {
int dotPos = typeName.lastIndexOf('.');
if (dotPos > -1) {
return typeName;
}
return namespace != null ? namespace + "." + typeName : typeName;
}
private String getString(Token stringToken) {
String stringLiteral = stringToken.getText();
String betweenQuotes = stringLiteral.substring(1, stringLiteral.length() - 1);
return StringEscapeUtils.unescapeJava(betweenQuotes);
}
}
private static | symbols |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/RedundantNullCheckTest.java | {
"start": 2841,
"end": 3528
} | class ____ {
void foo(String s) {
if (s == null) {
/* This is fine */
}
if (s != null) {
/* This is fine */
}
}
}
""")
.doTest();
}
@Test
public void negative_classMarkedAndUnmarked_effectivelyUnmarked() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.NullUnmarked;
@NullMarked
@NullUnmarked // This should prevent @NullMarked from applying
| Test |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpProducerHttpPathTest.java | {
"start": 1060,
"end": 3951
} | class ____ extends BaseNettyTest {
@Test
public void testEmptyPathDoesNotEndsWithSlash() throws Exception {
MockEndpoint input = getMockEndpoint("mock:input");
input.expectedBodiesReceived("Hello World");
input.expectedHeaderReceived(Exchange.HTTP_PATH, "");
input.expectedHeaderReceived(Exchange.HTTP_URI, "/foo");
template.request("netty-http:http://localhost:{{port}}/foo", exchange -> {
Message in = exchange.getIn();
in.setBody("Hello World");
in.setHeader(Exchange.HTTP_PATH, "");
});
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testNonEmptyPathDoesNotEndsWithSlash() throws Exception {
MockEndpoint input = getMockEndpoint("mock:input");
input.expectedBodiesReceived("Hello World");
input.expectedHeaderReceived(Exchange.HTTP_PATH, "/bar");
input.expectedHeaderReceived(Exchange.HTTP_URI, "/foo/bar");
template.request("netty-http:http://localhost:{{port}}/foo", exchange -> {
Message in = exchange.getIn();
in.setBody("Hello World");
in.setHeader(Exchange.HTTP_PATH, "/bar");
});
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testNullPathDoesNotEndsWithSlash() throws Exception {
MockEndpoint input = getMockEndpoint("mock:input");
input.expectedBodiesReceived("Hello World");
input.expectedHeaderReceived(Exchange.HTTP_PATH, "");
input.expectedHeaderReceived(Exchange.HTTP_URI, "/foo");
template.request("netty-http:http://localhost:{{port}}/foo", exchange -> {
Message in = exchange.getIn();
in.setBody("Hello World");
in.setHeader(Exchange.HTTP_PATH, null);
});
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testPathMustPreserveSlash() throws Exception {
MockEndpoint input = getMockEndpoint("mock:input");
input.expectedBodiesReceived("Hello World");
input.expectedHeaderReceived(Exchange.HTTP_PATH, "/bar/");
input.expectedHeaderReceived(Exchange.HTTP_URI, "/foo/bar/");
template.request("netty-http:http://localhost:{{port}}/foo", exchange -> {
Message in = exchange.getIn();
in.setBody("Hello World");
in.setHeader(Exchange.HTTP_PATH, "/bar/");
});
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("netty-http:http://localhost:{{port}}/foo?matchOnUriPrefix=true")
.to("mock:input")
.transform().constant("Bye World");
}
};
}
}
| NettyHttpProducerHttpPathTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/state/StateDescriptor.java | {
"start": 2431,
"end": 2875
} | class ____<S extends State, T> implements Serializable {
private static final Logger LOG = LoggerFactory.getLogger(StateDescriptor.class);
/**
* An enumeration of the types of supported states. Used to identify the state type when writing
* and restoring checkpoints and savepoints.
*/
// IMPORTANT: Do not change the order of the elements in this enum, ordinal is used in
// serialization
public | StateDescriptor |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scheduling/concurrent/ThreadPoolTaskScheduler.java | {
"start": 3156,
"end": 16290
} | class ____ extends ExecutorConfigurationSupport
implements AsyncTaskExecutor, SchedulingTaskExecutor, TaskScheduler {
private static final TimeUnit NANO = TimeUnit.NANOSECONDS;
private volatile int poolSize = 1;
private volatile boolean removeOnCancelPolicy;
private volatile boolean continueExistingPeriodicTasksAfterShutdownPolicy;
private volatile boolean executeExistingDelayedTasksAfterShutdownPolicy = true;
private @Nullable TaskDecorator taskDecorator;
private volatile @Nullable ErrorHandler errorHandler;
private Clock clock = Clock.systemDefaultZone();
private @Nullable ScheduledExecutorService scheduledExecutor;
/**
* Set the ScheduledExecutorService's pool size.
* Default is 1.
* <p><b>This setting can be modified at runtime, for example through JMX.</b>
*/
public void setPoolSize(int poolSize) {
Assert.isTrue(poolSize > 0, "'poolSize' must be 1 or higher");
if (this.scheduledExecutor instanceof ScheduledThreadPoolExecutor threadPoolExecutor) {
threadPoolExecutor.setCorePoolSize(poolSize);
}
this.poolSize = poolSize;
}
/**
* Set the remove-on-cancel mode on {@link ScheduledThreadPoolExecutor}.
* <p>Default is {@code false}. If set to {@code true}, the target executor will be
* switched into remove-on-cancel mode (if possible).
* <p><b>This setting can be modified at runtime, for example through JMX.</b>
* @see ScheduledThreadPoolExecutor#setRemoveOnCancelPolicy
*/
public void setRemoveOnCancelPolicy(boolean flag) {
if (this.scheduledExecutor instanceof ScheduledThreadPoolExecutor threadPoolExecutor) {
threadPoolExecutor.setRemoveOnCancelPolicy(flag);
}
this.removeOnCancelPolicy = flag;
}
/**
* Set whether to continue existing periodic tasks even when this executor has been shutdown.
* <p>Default is {@code false}. If set to {@code true}, the target executor will be
* switched into continuing periodic tasks (if possible).
* <p><b>This setting can be modified at runtime, for example through JMX.</b>
* @since 5.3.9
* @see ScheduledThreadPoolExecutor#setContinueExistingPeriodicTasksAfterShutdownPolicy
*/
public void setContinueExistingPeriodicTasksAfterShutdownPolicy(boolean flag) {
if (this.scheduledExecutor instanceof ScheduledThreadPoolExecutor threadPoolExecutor) {
threadPoolExecutor.setContinueExistingPeriodicTasksAfterShutdownPolicy(flag);
}
this.continueExistingPeriodicTasksAfterShutdownPolicy = flag;
}
/**
* Set whether to execute existing delayed tasks even when this executor has been shutdown.
* <p>Default is {@code true}. If set to {@code false}, the target executor will be
* switched into dropping remaining tasks (if possible).
* <p><b>This setting can be modified at runtime, for example through JMX.</b>
* @since 5.3.9
* @see ScheduledThreadPoolExecutor#setExecuteExistingDelayedTasksAfterShutdownPolicy
*/
public void setExecuteExistingDelayedTasksAfterShutdownPolicy(boolean flag) {
if (this.scheduledExecutor instanceof ScheduledThreadPoolExecutor threadPoolExecutor) {
threadPoolExecutor.setExecuteExistingDelayedTasksAfterShutdownPolicy(flag);
}
this.executeExistingDelayedTasksAfterShutdownPolicy = flag;
}
/**
* Specify a custom {@link TaskDecorator} to be applied to any {@link Runnable}
* about to be executed.
* <p>Note that such a decorator is not being applied to the user-supplied
* {@code Runnable}/{@code Callable} but rather to the scheduled execution
* callback (a wrapper around the user-supplied task).
* <p>The primary use case is to set some execution context around the task's
* invocation, or to provide some monitoring/statistics for task execution.
* @since 6.2
*/
public void setTaskDecorator(TaskDecorator taskDecorator) {
this.taskDecorator = taskDecorator;
}
/**
* Set a custom {@link ErrorHandler} strategy.
*/
public void setErrorHandler(ErrorHandler errorHandler) {
this.errorHandler = errorHandler;
}
/**
* Set the clock to use for scheduling purposes.
* <p>The default clock is the system clock for the default time zone.
* @since 5.3
* @see Clock#systemDefaultZone()
*/
public void setClock(Clock clock) {
Assert.notNull(clock, "Clock must not be null");
this.clock = clock;
}
@Override
public Clock getClock() {
return this.clock;
}
@Override
protected ExecutorService initializeExecutor(
ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {
this.scheduledExecutor = createExecutor(this.poolSize, threadFactory, rejectedExecutionHandler);
if (this.scheduledExecutor instanceof ScheduledThreadPoolExecutor threadPoolExecutor) {
if (this.removeOnCancelPolicy) {
threadPoolExecutor.setRemoveOnCancelPolicy(true);
}
if (this.continueExistingPeriodicTasksAfterShutdownPolicy) {
threadPoolExecutor.setContinueExistingPeriodicTasksAfterShutdownPolicy(true);
}
if (!this.executeExistingDelayedTasksAfterShutdownPolicy) {
threadPoolExecutor.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
}
}
return this.scheduledExecutor;
}
/**
* Create a new {@link ScheduledExecutorService} instance.
* <p>The default implementation creates a {@link ScheduledThreadPoolExecutor}.
* Can be overridden in subclasses to provide custom {@link ScheduledExecutorService} instances.
* @param poolSize the specified pool size
* @param threadFactory the ThreadFactory to use
* @param rejectedExecutionHandler the RejectedExecutionHandler to use
* @return a new ScheduledExecutorService instance
* @see #afterPropertiesSet()
* @see java.util.concurrent.ScheduledThreadPoolExecutor
*/
protected ScheduledExecutorService createExecutor(
int poolSize, ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {
return new ScheduledThreadPoolExecutor(poolSize, threadFactory, rejectedExecutionHandler) {
@Override
protected void beforeExecute(Thread thread, Runnable task) {
ThreadPoolTaskScheduler.this.beforeExecute(thread, task);
}
@Override
protected void afterExecute(Runnable task, Throwable ex) {
ThreadPoolTaskScheduler.this.afterExecute(task, ex);
}
@Override
protected <V> RunnableScheduledFuture<V> decorateTask(Runnable runnable, RunnableScheduledFuture<V> task) {
return decorateTaskIfNecessary(task);
}
@Override
protected <V> RunnableScheduledFuture<V> decorateTask(Callable<V> callable, RunnableScheduledFuture<V> task) {
return decorateTaskIfNecessary(task);
}
};
}
/**
* Return the underlying ScheduledExecutorService for native access.
* @return the underlying ScheduledExecutorService (never {@code null})
* @throws IllegalStateException if the ThreadPoolTaskScheduler hasn't been initialized yet
*/
public ScheduledExecutorService getScheduledExecutor() throws IllegalStateException {
Assert.state(this.scheduledExecutor != null, "ThreadPoolTaskScheduler not initialized");
return this.scheduledExecutor;
}
/**
* Return the underlying ScheduledThreadPoolExecutor, if available.
* @return the underlying ScheduledExecutorService (never {@code null})
* @throws IllegalStateException if the ThreadPoolTaskScheduler hasn't been initialized yet
* or if the underlying ScheduledExecutorService isn't a ScheduledThreadPoolExecutor
* @see #getScheduledExecutor()
*/
public ScheduledThreadPoolExecutor getScheduledThreadPoolExecutor() throws IllegalStateException {
Assert.state(this.scheduledExecutor instanceof ScheduledThreadPoolExecutor,
"No ScheduledThreadPoolExecutor available");
return (ScheduledThreadPoolExecutor) this.scheduledExecutor;
}
/**
* Return the current pool size.
* <p>Requires an underlying {@link ScheduledThreadPoolExecutor}.
* @see #getScheduledThreadPoolExecutor()
* @see java.util.concurrent.ScheduledThreadPoolExecutor#getPoolSize()
*/
public int getPoolSize() {
if (this.scheduledExecutor == null) {
// Not initialized yet: assume initial pool size.
return this.poolSize;
}
return getScheduledThreadPoolExecutor().getPoolSize();
}
/**
* Return the number of currently active threads.
* <p>Requires an underlying {@link ScheduledThreadPoolExecutor}.
* @see #getScheduledThreadPoolExecutor()
* @see java.util.concurrent.ScheduledThreadPoolExecutor#getActiveCount()
*/
public int getActiveCount() {
if (this.scheduledExecutor == null) {
// Not initialized yet: assume no active threads.
return 0;
}
return getScheduledThreadPoolExecutor().getActiveCount();
}
/**
 * Return the current setting for the remove-on-cancel mode.
 * <p>Requires an underlying {@link ScheduledThreadPoolExecutor}.
 * @deprecated in favor of direct {@link #getScheduledThreadPoolExecutor()} access
 */
@Deprecated(since = "5.3.9")
public boolean isRemoveOnCancelPolicy() {
	// Before initialization, fall back to the locally configured flag.
	return (this.scheduledExecutor == null ? this.removeOnCancelPolicy :
			getScheduledThreadPoolExecutor().getRemoveOnCancelPolicy());
}
// SchedulingTaskExecutor implementation

@Override
public void execute(Runnable task) {
	Executor executor = getScheduledExecutor();
	// Wrap the task so that uncaught exceptions go through the error handler.
	Runnable decorated = errorHandlingTask(task, false);
	try {
		executor.execute(decorated);
	}
	catch (RejectedExecutionException ex) {
		// Translate into Spring's dedicated rejection exception, preserving task context.
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public Future<?> submit(Runnable task) {
	ExecutorService executor = getScheduledExecutor();
	// Wrap the task so that uncaught exceptions go through the error handler.
	Runnable decorated = errorHandlingTask(task, false);
	try {
		return executor.submit(decorated);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public <T> Future<T> submit(Callable<T> task) {
	ExecutorService executor = getScheduledExecutor();
	// Wrap the callable so that failures are routed to the configured error handler.
	Callable<T> decorated = new DelegatingErrorHandlingCallable<>(task, this.errorHandler);
	try {
		return executor.submit(decorated);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
// TaskScheduler implementation

@Override
public @Nullable ScheduledFuture<?> schedule(Runnable task, Trigger trigger) {
	ScheduledExecutorService executor = getScheduledExecutor();
	try {
		// Fall back to the default error handler (for repeating tasks) if none configured.
		ErrorHandler handlerToUse = (this.errorHandler != null ?
				this.errorHandler : TaskUtils.getDefaultErrorHandler(true));
		return new ReschedulingRunnable(task, trigger, this.clock, executor, handlerToUse).schedule();
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public ScheduledFuture<?> schedule(Runnable task, Instant startTime) {
	ScheduledExecutorService executor = getScheduledExecutor();
	// Convert the absolute start time into a relative delay for the JDK executor.
	long delayNanos = NANO.convert(Duration.between(this.clock.instant(), startTime));
	try {
		return executor.schedule(errorHandlingTask(task, false), delayNanos, NANO);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(Runnable task, Instant startTime, Duration period) {
	ScheduledExecutorService executor = getScheduledExecutor();
	// Convert the absolute start time into a relative initial delay.
	long initialDelayNanos = NANO.convert(Duration.between(this.clock.instant(), startTime));
	try {
		return executor.scheduleAtFixedRate(errorHandlingTask(task, true),
				initialDelayNanos, NANO.convert(period), NANO);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(Runnable task, Duration period) {
	ScheduledExecutorService executor = getScheduledExecutor();
	Runnable decorated = errorHandlingTask(task, true);
	try {
		// No start time given: begin immediately (zero initial delay).
		return executor.scheduleAtFixedRate(decorated, 0, NANO.convert(period), NANO);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(Runnable task, Instant startTime, Duration delay) {
	ScheduledExecutorService executor = getScheduledExecutor();
	// Convert the absolute start time into a relative initial delay.
	long initialDelayNanos = NANO.convert(Duration.between(this.clock.instant(), startTime));
	try {
		return executor.scheduleWithFixedDelay(errorHandlingTask(task, true),
				initialDelayNanos, NANO.convert(delay), NANO);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(Runnable task, Duration delay) {
	ScheduledExecutorService executor = getScheduledExecutor();
	Runnable decorated = errorHandlingTask(task, true);
	try {
		// No start time given: begin immediately (zero initial delay).
		return executor.scheduleWithFixedDelay(decorated, 0, NANO.convert(delay), NANO);
	}
	catch (RejectedExecutionException ex) {
		throw new TaskRejectedException(executor, task, ex);
	}
}
// Applies the configured TaskDecorator to the given future, if one is set.
private <V> RunnableScheduledFuture<V> decorateTaskIfNecessary(RunnableScheduledFuture<V> future) {
	if (this.taskDecorator == null) {
		return future;
	}
	return new DelegatingRunnableScheduledFuture<>(future, this.taskDecorator);
}
// Wraps the given task so that uncaught exceptions are routed to the configured
// error handler; TaskUtils picks a default handler based on whether the task is
// a repeating one (log-and-continue) or a one-shot (propagate) — see its javadoc.
private Runnable errorHandlingTask(Runnable task, boolean isRepeatingTask) {
	return TaskUtils.decorateTaskWithErrorHandler(task, this.errorHandler, isRepeatingTask);
}
private static | ThreadPoolTaskScheduler |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java | {
"start": 771,
"end": 1865
} | class ____ implements Releasable {
private final ExtractedFields fields;
private final XContentBuilder jsonBuilder;
SearchHitToJsonProcessor(ExtractedFields fields, OutputStream outputStream) throws IOException {
this.fields = Objects.requireNonNull(fields);
this.jsonBuilder = new XContentBuilder(JsonXContent.jsonXContent, outputStream);
}
public void process(SearchHit hit, SourceSupplier sourceSupplier) throws IOException {
jsonBuilder.startObject();
for (ExtractedField field : fields.getAllFields()) {
writeKeyValue(field.getName(), field.value(hit, sourceSupplier));
}
jsonBuilder.endObject();
}
private void writeKeyValue(String key, Object... values) throws IOException {
if (values.length == 0) {
return;
}
if (values.length == 1) {
jsonBuilder.field(key, values[0]);
} else {
jsonBuilder.array(key, values);
}
}
@Override
public void close() {
jsonBuilder.close();
}
}
| SearchHitToJsonProcessor |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 28447,
"end": 28667
} | class ____");
} else {
subject.hasErrorCount(1);
subject.hasErrorContaining(errorMessage)
.onSource(aComponent)
.onLineContaining(" | AModule |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/RecursiveToStringStyle.java | {
"start": 1135,
"end": 1549
} | class ____ {
* String name;
* int age;
* boolean smoker;
* Job job;
*
* ...
*
* public String toString() {
* return new ReflectionToStringBuilder(this, new RecursiveToStringStyle()).toString();
* }
* }
* </pre>
*
* <p>This will produce a toString of the format:
* {@code Person@7f54[name=Stephen,age=29,smoker=false,job=Job@43cd2[title=Manager]]}</p>
*
* @since 3.2
*/
public | Person |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/dataaccess/jdbc/jdbcdatasource/BasicDataSourceConfiguration.java | {
"start": 871,
"end": 1266
} | class ____ {
// tag::snippet[]
@Bean(destroyMethod = "close")
BasicDataSource dataSource() {
BasicDataSource dataSource = new BasicDataSource();
dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
dataSource.setUrl("jdbc:hsqldb:hsql://localhost:");
dataSource.setUsername("sa");
dataSource.setPassword("");
return dataSource;
}
// end::snippet[]
}
| BasicDataSourceConfiguration |
java | apache__camel | components/camel-pgevent/src/main/java/org/apache/camel/component/pgevent/PgEventConsumer.java | {
"start": 1587,
"end": 3915
} | class ____ extends DefaultConsumer {
private static final Logger LOG = LoggerFactory.getLogger(PgEventConsumer.class);
private final PgEventListener listener = new PgEventListener();
private final PgEventEndpoint endpoint;
private PGConnection dbConnection;
private ScheduledExecutorService reconnectPool;
private BackgroundTask reconnectTask;
private ExecutorService workerPool;
private boolean shutdownWorkerPool;
public PgEventConsumer(PgEventEndpoint endpoint, Processor processor) {
super(endpoint, processor);
this.endpoint = endpoint;
}
public PgEventListener getPgEventListener() {
return listener;
}
@Override
protected void doInit() throws Exception {
if (endpoint.getWorkerPool() != null) {
workerPool = endpoint.getWorkerPool();
} else {
workerPool = endpoint.createWorkerPool(this);
shutdownWorkerPool = true;
}
// used for re-connecting to the database
reconnectPool = getEndpoint().getCamelContext().getExecutorServiceManager()
.newSingleThreadScheduledExecutor(this, "PgEventReconnect");
reconnectTask = Tasks.backgroundTask()
.withScheduledExecutor(reconnectPool)
.withBudget(Budgets.iterationTimeBudget()
.withInterval(Duration.ofMillis(endpoint.getReconnectDelay()))
.withInitialDelay(Duration.ofSeconds(1))
.withUnlimitedDuration()
.build())
.withName("PgEventReconnect")
.build();
}
@Override
protected void doStart() throws Exception {
listener.initConnection();
super.doStart();
}
@Override
protected void doStop() throws Exception {
super.doStop();
listener.closeConnection();
getEndpoint().getCamelContext().getExecutorServiceManager().shutdown(reconnectPool);
if (shutdownWorkerPool && workerPool != null) {
LOG.debug("Shutting down PgEventConsumer worker threads with timeout {} millis", 10000);
endpoint.getCamelContext().getExecutorServiceManager().shutdownGraceful(workerPool, 10000);
workerPool = null;
}
}
public | PgEventConsumer |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java | {
"start": 2809,
"end": 4508
} | class ____ extends Plugin implements AnalysisPlugin {
@Override
public Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return Collections.singletonMap("mock_payload_filter", (indexSettings, environment, name, settings) -> {
return new TokenFilterFactory() {
@Override
public String name() {
return "mock_payload_filter";
}
@Override
public TokenStream create(TokenStream tokenStream) {
String delimiter = settings.get("delimiter");
PayloadEncoder encoder = null;
if (settings.get("encoding").equals("float")) {
encoder = new FloatEncoder();
} else if (settings.get("encoding").equals("int")) {
encoder = new IntegerEncoder();
} else if (settings.get("encoding").equals("identity")) {
encoder = new IdentityEncoder();
}
return new MockPayloadTokenFilter(tokenStream, delimiter.charAt(0), encoder);
}
};
});
}
@Override
public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
return Collections.singletonList(
PreConfiguredTokenizer.singleton("mock-whitespace", () -> new MockTokenizer(MockTokenizer.WHITESPACE, false))
);
}
// Based on DelimitedPayloadTokenFilter:
final | MockPayloadAnalyzerPlugin |
java | bumptech__glide | annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/GlideAnnotationProcessor.java | {
"start": 1227,
"end": 1886
} | class ____ a specific package that will
* later be used by the processor to discover all {@code LibraryGlideModule} classes.
* <li>For {@code AppGlideModule}s - A single {@code AppGlideModule} implementation ({@code
* com.bumptech.glide.GeneratedAppGlideModule}) that calls all {@code LibraryGlideModule}s and
* the original {@code AppGlideModule} in the correct order when Glide is initialized.
* <li>{@link com.bumptech.glide.annotation.GlideExtension}s -
* <ul>
* <li>A {@code com.bumptech.glide.request.RequestOptions} implementation that contains
* static versions of all builder methods in the base | in |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/launcher/listeners/discovery/LoggingLauncherDiscoveryListenerTests.java | {
"start": 1663,
"end": 5928
} | class ____ {
@Test
void logsWarningOnUnresolvedUniqueIdSelectorWithEnginePrefix(LogRecordListener log) {
var engine = createEngineThatCannotResolveAnything("some-engine");
var request = request() //
.configurationParameter(DISCOVERY_ISSUE_FAILURE_PHASE_PROPERTY_NAME, "execution") //
.configurationParameter(DEFAULT_DISCOVERY_LISTENER_CONFIGURATION_PROPERTY_NAME, "logging") //
.selectors(selectUniqueId(UniqueId.forEngine(engine.getId()))) //
.enableImplicitConfigurationParameters(false) //
.build();
var launcher = createLauncher(engine);
launcher.discover(request);
assertThat(log.stream(LoggingLauncherDiscoveryListener.class, Level.WARNING)) //
.extracting(LogRecord::getMessage) //
.containsExactly(
"UniqueIdSelector [uniqueId = [engine:some-engine]] could not be resolved by [engine:some-engine]");
}
@Test
void logsDebugMessageOnUnresolvedUniqueIdSelectorWithoutEnginePrefix(LogRecordListener log) {
var engine = createEngineThatCannotResolveAnything("some-engine");
var request = request() //
.configurationParameter(DEFAULT_DISCOVERY_LISTENER_CONFIGURATION_PROPERTY_NAME, "logging") //
.selectors(selectUniqueId(UniqueId.forEngine("some-other-engine"))) //
.enableImplicitConfigurationParameters(false) //
.build();
var launcher = createLauncher(engine);
launcher.discover(request);
assertThat(log.stream(LoggingLauncherDiscoveryListener.class, Level.FINE)) //
.extracting(LogRecord::getMessage) //
.containsExactly(
"UniqueIdSelector [uniqueId = [engine:some-other-engine]] could not be resolved by [engine:some-engine]");
}
@Test
void logsErrorOnSelectorResolutionFailure(LogRecordListener log) {
var rootCause = new RuntimeException();
var engine = createEngineThatFailsToResolveAnything("some-engine", rootCause);
var request = request() //
.configurationParameter(DISCOVERY_ISSUE_FAILURE_PHASE_PROPERTY_NAME, "execution") //
.configurationParameter(DEFAULT_DISCOVERY_LISTENER_CONFIGURATION_PROPERTY_NAME, "logging") //
.selectors(selectClass(Object.class)) //
.enableImplicitConfigurationParameters(false) //
.build();
var launcher = createLauncher(engine);
launcher.discover(request);
assertThat(log.stream(LoggingLauncherDiscoveryListener.class, Level.SEVERE)) //
.extracting(LogRecord::getMessage) //
.containsExactly(
"Resolution of ClassSelector [className = 'java.lang.Object', classLoader = null] by [engine:some-engine] failed");
}
@Test
void logsErrorOnEngineDiscoveryFailure(LogRecordListener log) {
var rootCause = new RuntimeException();
var engine = new TestEngineStub("some-engine") {
@Override
public TestDescriptor discover(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId) {
throw rootCause;
}
};
var request = request() //
.configurationParameter(DEFAULT_DISCOVERY_LISTENER_CONFIGURATION_PROPERTY_NAME, "logging") //
.selectors(selectUniqueId(UniqueId.forEngine(engine.getId()))) //
.enableImplicitConfigurationParameters(false) //
.build();
var launcher = createLauncher(engine);
launcher.discover(request);
var logRecord = log.stream(LoggingLauncherDiscoveryListener.class, Level.SEVERE).findFirst().get();
assertThat(logRecord.getMessage()).isEqualTo("TestEngine with ID 'some-engine' failed to discover tests");
assertThat(logRecord.getThrown()).isSameAs(rootCause);
}
@Test
void logsTraceMessageOnStartAndEnd(LogRecordListener log) {
var engine = new TestEngineStub("some-engine");
var request = request() //
.configurationParameter(DEFAULT_DISCOVERY_LISTENER_CONFIGURATION_PROPERTY_NAME, "logging") //
.selectors(selectUniqueId(UniqueId.forEngine(engine.getId()))) //
.enableImplicitConfigurationParameters(false) //
.build();
var launcher = createLauncher(engine);
launcher.discover(request);
assertThat(log.stream(LoggingLauncherDiscoveryListener.class, Level.FINER)) //
.extracting(LogRecord::getMessage) //
.containsExactly( //
"Test discovery started", //
"Engine [engine:some-engine] has started discovering tests", //
"Engine [engine:some-engine] has finished discovering tests", //
"Test discovery finished");
}
}
| LoggingLauncherDiscoveryListenerTests |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/partial/ThriftMetadata.java | {
"start": 1752,
"end": 2333
} | enum ____ implements TFieldIdEnum {
ROOT((short) 0, "root"),
ENUM((short) 1, "enum"),
LIST_ELEMENT((short) 2, "listElement"),
MAP_KEY((short) 3, "mapKey"),
MAP_VALUE((short) 4, "mapValue"),
SET_ELEMENT((short) 5, "setElement");
private final short id;
private final String name;
FieldTypeEnum(short id, String name) {
this.id = id;
this.name = name;
}
@Override
public short getThriftFieldId() {
return id;
}
@Override
public String getFieldName() {
return name;
}
}
private | FieldTypeEnum |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/CovariantTypes.java | {
"start": 531,
"end": 672
} | class ____ on all the possible Type subtypes: Class, ParameterizedType, TypeVariable, WildcardType,
* GenericArrayType.
* To make this | operates |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/repositories/Repository.java | {
"start": 2421,
"end": 15991
} | interface ____ {
/**
* Constructs a repository.
*
* @param projectId the project-id for the repository or {@code null} if the repository is at the cluster level.
* @param metadata metadata for the repository including name and settings
*/
Repository create(@Nullable ProjectId projectId, RepositoryMetadata metadata) throws Exception;
/**
* Constructs a repository.
* @param projectId the project-id for the repository or {@code null} if the repository is at the cluster level.
* @param metadata metadata for the repository including name and settings
* @param typeLookup a function that returns the repository factory for the given repository type.
*/
default Repository create(
@Nullable ProjectId projectId,
RepositoryMetadata metadata,
Function<String, Repository.Factory> typeLookup
) throws Exception {
return create(projectId, metadata);
}
}
/**
* Get the project-id for the repository.
*
* @return the project-id, or {@code null} if the repository is at the cluster level.
*/
@Nullable
ProjectId getProjectId();
/**
* Get the project qualified repository
*/
default ProjectRepo getProjectRepo() {
return new ProjectRepo(getProjectId(), getMetadata().name());
}
/**
* Returns metadata about this repository.
*/
RepositoryMetadata getMetadata();
/**
* Reads a collection of {@link SnapshotInfo} instances from the repository.
*
* @param snapshotIds The IDs of the snapshots whose {@link SnapshotInfo} instances should be retrieved.
* @param abortOnFailure Whether to stop fetching further {@link SnapshotInfo} instances if a single fetch fails.
* @param isCancelled Supplies whether the enclosing task is cancelled, which should stop fetching {@link SnapshotInfo} instances.
* @param consumer A consumer for each {@link SnapshotInfo} retrieved. Called concurrently from multiple threads. If the consumer
* throws an exception and {@code abortOnFailure} is {@code true} then the fetching will stop.
* @param listener If {@code abortOnFailure} is {@code true} and any operation fails then the failure is passed to this listener.
* Also completed exceptionally on cancellation. Otherwise, completed once all requested {@link SnapshotInfo}
* instances have been processed by the {@code consumer}.
*/
void getSnapshotInfo(
Collection<SnapshotId> snapshotIds,
boolean abortOnFailure,
BooleanSupplier isCancelled,
CheckedConsumer<SnapshotInfo, Exception> consumer,
ActionListener<Void> listener
);
/**
* Reads a single snapshot description from the repository
*
* @param snapshotId snapshot id to read description for
* @param listener listener to resolve with snapshot description (is resolved on the {@link ThreadPool.Names#SNAPSHOT_META} pool)
*/
default void getSnapshotInfo(SnapshotId snapshotId, ActionListener<SnapshotInfo> listener) {
getSnapshotInfo(List.of(snapshotId), true, () -> false, snapshotInfo -> {
assert Repository.assertSnapshotMetaThread();
listener.onResponse(snapshotInfo);
}, new ActionListener<>() {
@Override
public void onResponse(Void o) {
// ignored
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
}
/**
* Returns global metadata associated with the snapshot.
*
* @param snapshotId the snapshot id to load the global metadata from
* @param fromProjectMetadata The metadata may need to be constructed by first reading the project metadata
* @return the global metadata about the snapshot
*/
Metadata getSnapshotGlobalMetadata(SnapshotId snapshotId, boolean fromProjectMetadata);
/**
* Returns the index metadata associated with the snapshot.
*
* @param repositoryData current {@link RepositoryData}
* @param snapshotId the snapshot id to load the index metadata from
* @param index the {@link IndexId} to load the metadata from
* @return the index metadata about the given index for the given snapshot
*/
IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId index) throws IOException;
/**
* Fetches the {@link RepositoryData} and passes it into the listener. May completes the listener with a {@link RepositoryException} if
* there is an error in reading the repository data.
*
* @param responseExecutor Executor to use to complete the listener if not using the calling thread. Using {@link
* org.elasticsearch.common.util.concurrent.EsExecutors#DIRECT_EXECUTOR_SERVICE} means to complete the listener
* on the thread which ultimately resolved the {@link RepositoryData}, which might be a low-latency transport or
* cluster applier thread so make sure not to do anything slow or expensive in that case.
* @param listener Listener which is either completed on the calling thread (if the {@link RepositoryData} is immediately
* available, e.g. from an in-memory cache), otherwise it is completed using {@code responseExecutor}.
*/
void getRepositoryData(Executor responseExecutor, ActionListener<RepositoryData> listener);
/**
* Finalizes snapshotting process
* <p>
* This method is called on master after all shards are snapshotted.
*
* @param finalizeSnapshotContext finalization context
*/
void finalizeSnapshot(FinalizeSnapshotContext finalizeSnapshotContext);
/**
* Deletes snapshots
*
* @param snapshotIds snapshot ids to delete
* @param repositoryDataGeneration the generation of the {@link RepositoryData} in the repository at the start of the deletion
* @param minimumNodeVersion the minimum {@link IndexVersion} across the nodes in the cluster, with which the repository
* format must remain compatible
* @param repositoryDataUpdateListener listener completed when the {@link RepositoryData} is updated, or when the process fails
* without changing the repository contents - in either case, it is now safe for the next operation
* on this repository to proceed.
* @param onCompletion action executed on completion of the cleanup actions that follow a successful
* {@link RepositoryData} update; not called if {@code repositoryDataUpdateListener} completes
* exceptionally.
*/
void deleteSnapshots(
Collection<SnapshotId> snapshotIds,
long repositoryDataGeneration,
IndexVersion minimumNodeVersion,
ActionListener<RepositoryData> repositoryDataUpdateListener,
Runnable onCompletion
);
/**
* Returns stats on the repository usage
*/
default RepositoryStats stats() {
return RepositoryStats.EMPTY_STATS;
}
/**
* Verifies repository on the master node and returns the verification token.
* <p>
* If the verification token is not null, it's passed to all data nodes for verification. If it's null - no
* additional verification is required
*
* @return verification token that should be passed to all Index Shard Repositories for additional verification or null
*/
String startVerification();
/**
* Called at the end of repository verification process.
* <p>
* This method should perform all necessary cleanup of the temporary files created in the repository
*
* @param verificationToken verification request generated by {@link #startVerification} command
*/
void endVerification(String verificationToken);
/**
* Verifies repository settings on data node.
* @param verificationToken value returned by {@link org.elasticsearch.repositories.Repository#startVerification()}
* @param localNode the local node information, for inclusion in verification errors
*/
void verify(String verificationToken, DiscoveryNode localNode);
/**
* Returns true if the repository supports only read operations
* @return true if the repository is read/only
*/
boolean isReadOnly();
/**
* Creates a snapshot of the shard referenced by the given {@link SnapshotShardContext}.
* <p>
* As snapshot process progresses, implementation of this method should update {@link IndexShardSnapshotStatus} object returned by
* {@link SnapshotShardContext#status()} and call {@link IndexShardSnapshotStatus#ensureNotAborted()} to see if the snapshot process
* should be aborted.
*
* @param snapshotShardContext snapshot shard context that must be completed via {@link SnapshotShardContext#onResponse} or
* {@link SnapshotShardContext#onFailure}
*/
void snapshotShard(SnapshotShardContext snapshotShardContext);
/**
* Restores snapshot of the shard.
* <p>
* The index can be renamed on restore, hence different {@code shardId} and {@code snapshotShardId} are supplied.
* @param store the store to restore the index into
* @param snapshotId snapshot id
* @param indexId id of the index in the repository from which the restore is occurring
* @param snapshotShardId shard id (in the snapshot)
* @param recoveryState recovery state
* @param listener listener to invoke once done
*/
void restoreShard(
Store store,
SnapshotId snapshotId,
IndexId indexId,
ShardId snapshotShardId,
RecoveryState recoveryState,
ActionListener<Void> listener
);
/**
* Retrieve shard snapshot status for the stored snapshot
*
* @param snapshotId snapshot id
* @param indexId the snapshotted index id for the shard to get status for
* @param shardId shard id
* @return snapshot status
*/
IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId);
/**
* Check if this instances {@link Settings} can be changed to the provided updated settings without recreating the repository.
*
* @param updatedSettings new repository settings
* @param ignoredSettings setting names to ignore even if changed
* @return true if the repository can be updated in place
*/
default boolean canUpdateInPlace(Settings updatedSettings, Set<String> ignoredSettings) {
return getMetadata().settings().equals(updatedSettings);
}
/**
* Update the repository with the incoming cluster state. This method is invoked from {@link RepositoriesService#applyClusterState} and
* thus the same semantics as with {@link org.elasticsearch.cluster.ClusterStateApplier#applyClusterState} apply for the
* {@link ClusterState} that is passed here.
*
* @param state new cluster state
*/
void updateState(ClusterState state);
/**
* Clones a shard snapshot.
*
* @param source source snapshot
* @param target target snapshot
* @param shardId shard id
* @param shardGeneration shard generation in repo
* @param listener listener to complete with new shard generation once clone has completed
*/
void cloneShardSnapshot(
SnapshotId source,
SnapshotId target,
RepositoryShardId shardId,
@Nullable ShardGeneration shardGeneration,
ActionListener<ShardSnapshotResult> listener
);
/**
* Block until all in-flight operations for this repository have completed. Must only be called after this instance has been closed
* by a call to stop {@link #close()}.
* Waiting for ongoing operations should be implemented here instead of in {@link #stop()} or {@link #close()} hooks of this interface
* as these are expected to be called on the cluster state applier thread (which must not block) if a repository is removed from the
* cluster. This method is intended to be called on node shutdown instead as a means to ensure no repository operations are leaked.
*/
void awaitIdle();
/**
* @return a set of the names of the features that this repository instance uses, for reporting in the cluster stats for telemetry
* collection.
*/
default Set<String> getUsageFeatures() {
return Set.of();
}
static boolean assertSnapshotMetaThread() {
return ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SNAPSHOT_META);
}
/**
* Get the current count of snapshots in progress
*
* @return The current number of shard snapshots in progress metric value, or null if this repository doesn't track that
*/
@Nullable
LongWithAttributes getShardSnapshotsInProgress();
RepositoriesStats.SnapshotStats getSnapshotStats();
}
| Factory |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/HttpClient.java | {
"start": 1260,
"end": 1379
} | interface ____ around the Micronaut API and Reactive Streams.
*
* @author Graeme Rocher
* @since 1.0
*/
public | designed |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java | {
"start": 22629,
"end": 23765
} | class ____<R> extends NamespacedBuilder<R, FixedBuilder<R>> {
private FixedBuilder(Completion<R> context, NameContext names, String name) {
super(context, names, name);
}
private static <R> FixedBuilder<R> create(Completion<R> context, NameContext names, String name) {
return new FixedBuilder<>(context, names, name);
}
@Override
protected FixedBuilder<R> self() {
return this;
}
/** Configure this fixed type's size, and end its configuration. **/
public R size(int size) {
Schema schema = Schema.createFixed(name(), super.doc(), space(), size);
completeSchema(schema);
return context().complete(schema);
}
}
/**
* Builds an Avro Enum type with optional properties, namespace, doc, and
* aliases.
* <p/>
* Set properties with {@link #prop(String, String)}, namespace with
* {@link #namespace(String)}, doc with {@link #doc(String)}, and aliases with
* {@link #aliases(String[])}.
* <p/>
* The Enum schema is finalized when its required symbols are set via
* {@link #symbols(String[])}.
**/
public static final | FixedBuilder |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/snapshots/SnapshotInfoBlobSerializationTests.java | {
"start": 877,
"end": 1774
} | class ____ extends AbstractWireTestCase<SnapshotInfo> {
@Override
protected SnapshotInfo createTestInstance() {
return SnapshotInfoTestUtils.createRandomSnapshotInfo();
}
@Override
protected SnapshotInfo mutateInstance(SnapshotInfo instance) {
return SnapshotInfoTestUtils.mutateSnapshotInfo(instance);
}
@Override
protected SnapshotInfo copyInstance(SnapshotInfo instance, TransportVersion version) throws IOException {
final BytesStreamOutput out = new BytesStreamOutput();
BlobStoreRepository.SNAPSHOT_FORMAT.serialize(instance, "test", randomBoolean(), out);
return BlobStoreRepository.SNAPSHOT_FORMAT.deserialize(
new ProjectRepo(instance.projectId(), instance.repository()),
NamedXContentRegistry.EMPTY,
out.bytes().streamInput()
);
}
}
| SnapshotInfoBlobSerializationTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/PrivateBinder.java | {
"start": 816,
"end": 992
} | interface ____ extends Binder {
@Override
PrivateBinder withSource(Object source);
@Override
PrivateBinder skipSources(Class<?>... classesToSkip);
}
| PrivateBinder |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/StringUtilsTests.java | {
"start": 1269,
"end": 38695
} | class ____ {
@ParameterizedTest
@ValueSource(strings = {"text", " text ", " ", "\t", "\n text"})
void hasLengthForValidValues(String value) {
assertThat(StringUtils.hasLength(value)).isTrue();
}
@ParameterizedTest
@NullAndEmptySource
void hasLengthForInvalidValues(String value) {
assertThat(StringUtils.hasLength(value)).isFalse();
}
@ParameterizedTest
@ValueSource(strings = {"text", " text ", "\n text"})
void hasTextForValidValues(String value) {
assertThat(StringUtils.hasText(value)).isTrue();
}
@ParameterizedTest
@NullAndEmptySource
@ValueSource(strings = {" ", "\t"})
void hasTextForInvalidValues(String value) {
assertThat(StringUtils.hasText(value)).isFalse();
}
@Test
void containsWhitespace() {
assertThat(StringUtils.containsWhitespace(null)).isFalse();
assertThat(StringUtils.containsWhitespace("")).isFalse();
assertThat(StringUtils.containsWhitespace("a")).isFalse();
assertThat(StringUtils.containsWhitespace("abc")).isFalse();
assertThat(StringUtils.containsWhitespace(" ")).isTrue();
assertThat(StringUtils.containsWhitespace("\t")).isTrue();
assertThat(StringUtils.containsWhitespace("\n")).isTrue();
assertThat(StringUtils.containsWhitespace(" a")).isTrue();
assertThat(StringUtils.containsWhitespace("abc ")).isTrue();
assertThat(StringUtils.containsWhitespace("a b")).isTrue();
assertThat(StringUtils.containsWhitespace("a b")).isTrue();
}
@Test
@Deprecated
void trimWhitespace() {
assertThat(StringUtils.trimWhitespace(null)).isNull();
assertThat(StringUtils.trimWhitespace("")).isEmpty();
assertThat(StringUtils.trimWhitespace(" ")).isEmpty();
assertThat(StringUtils.trimWhitespace("\t")).isEmpty();
assertThat(StringUtils.trimWhitespace("\n")).isEmpty();
assertThat(StringUtils.trimWhitespace(" \t\n")).isEmpty();
assertThat(StringUtils.trimWhitespace(" a")).isEqualTo("a");
assertThat(StringUtils.trimWhitespace("a ")).isEqualTo("a");
assertThat(StringUtils.trimWhitespace(" a ")).isEqualTo("a");
assertThat(StringUtils.trimWhitespace(" a b ")).isEqualTo("a b");
assertThat(StringUtils.trimWhitespace(" a b c ")).isEqualTo("a b c");
}
	@Test
	void trimAllWhitespace() {
		// Unlike trimWhitespace, this removes every whitespace character,
		// including those between words.
		assertThat(StringUtils.trimAllWhitespace(null)).isNull();
		assertThat(StringUtils.trimAllWhitespace("")).isEmpty();
		assertThat(StringUtils.trimAllWhitespace(" ")).isEmpty();
		assertThat(StringUtils.trimAllWhitespace("\t")).isEmpty();
		assertThat(StringUtils.trimAllWhitespace("\n")).isEmpty();
		assertThat(StringUtils.trimAllWhitespace(" \t\n")).isEmpty();
		assertThat(StringUtils.trimAllWhitespace(" a")).isEqualTo("a");
		assertThat(StringUtils.trimAllWhitespace("a ")).isEqualTo("a");
		assertThat(StringUtils.trimAllWhitespace(" a ")).isEqualTo("a");
		assertThat(StringUtils.trimAllWhitespace(" a b ")).isEqualTo("ab");
		assertThat(StringUtils.trimAllWhitespace(" a b  c ")).isEqualTo("abc");
	}
	@Test
	@SuppressWarnings("deprecation")
	void trimLeadingWhitespace() {
		// Deprecated API: only leading whitespace is removed; trailing stays intact.
		assertThat(StringUtils.trimLeadingWhitespace(null)).isNull();
		assertThat(StringUtils.trimLeadingWhitespace("")).isEmpty();
		assertThat(StringUtils.trimLeadingWhitespace(" ")).isEmpty();
		assertThat(StringUtils.trimLeadingWhitespace("\t")).isEmpty();
		assertThat(StringUtils.trimLeadingWhitespace("\n")).isEmpty();
		assertThat(StringUtils.trimLeadingWhitespace(" \t\n")).isEmpty();
		assertThat(StringUtils.trimLeadingWhitespace(" a")).isEqualTo("a");
		assertThat(StringUtils.trimLeadingWhitespace("a ")).isEqualTo("a ");
		assertThat(StringUtils.trimLeadingWhitespace(" a ")).isEqualTo("a ");
		assertThat(StringUtils.trimLeadingWhitespace(" a b ")).isEqualTo("a b ");
		assertThat(StringUtils.trimLeadingWhitespace(" a b  c ")).isEqualTo("a b  c ");
	}
	@Test
	@SuppressWarnings("deprecation")
	void trimTrailingWhitespace() {
		// Deprecated API: only trailing whitespace is removed; leading stays intact.
		assertThat(StringUtils.trimTrailingWhitespace(null)).isNull();
		assertThat(StringUtils.trimTrailingWhitespace("")).isEmpty();
		assertThat(StringUtils.trimTrailingWhitespace(" ")).isEmpty();
		assertThat(StringUtils.trimTrailingWhitespace("\t")).isEmpty();
		assertThat(StringUtils.trimTrailingWhitespace("\n")).isEmpty();
		assertThat(StringUtils.trimTrailingWhitespace(" \t\n")).isEmpty();
		assertThat(StringUtils.trimTrailingWhitespace("a ")).isEqualTo("a");
		assertThat(StringUtils.trimTrailingWhitespace(" a")).isEqualTo(" a");
		assertThat(StringUtils.trimTrailingWhitespace(" a ")).isEqualTo(" a");
		assertThat(StringUtils.trimTrailingWhitespace(" a b ")).isEqualTo(" a b");
		assertThat(StringUtils.trimTrailingWhitespace(" a b  c ")).isEqualTo(" a b  c");
	}
	@Test
	void trimLeadingCharacter() {
		// Trims only the given character from the front; other whitespace
		// (e.g. tab when trimming ' ') is left alone.
		assertThat(StringUtils.trimLeadingCharacter(null, ' ')).isNull();
		assertThat(StringUtils.trimLeadingCharacter("", ' ')).isEmpty();
		assertThat(StringUtils.trimLeadingCharacter(" ", ' ')).isEmpty();
		assertThat(StringUtils.trimLeadingCharacter("\t", ' ')).isEqualTo("\t");
		assertThat(StringUtils.trimLeadingCharacter(" a", ' ')).isEqualTo("a");
		assertThat(StringUtils.trimLeadingCharacter("a ", ' ')).isEqualTo("a ");
		assertThat(StringUtils.trimLeadingCharacter(" a ", ' ')).isEqualTo("a ");
		assertThat(StringUtils.trimLeadingCharacter(" a b ", ' ')).isEqualTo("a b ");
		assertThat(StringUtils.trimLeadingCharacter(" a b  c ", ' ')).isEqualTo("a b  c ");
	}
	@Test
	void trimTrailingCharacter() {
		// Mirror of trimLeadingCharacter: only the given character is trimmed
		// from the end of the string.
		assertThat(StringUtils.trimTrailingCharacter(null, ' ')).isNull();
		assertThat(StringUtils.trimTrailingCharacter("", ' ')).isEmpty();
		assertThat(StringUtils.trimTrailingCharacter(" ", ' ')).isEmpty();
		assertThat(StringUtils.trimTrailingCharacter("\t", ' ')).isEqualTo("\t");
		assertThat(StringUtils.trimTrailingCharacter("a ", ' ')).isEqualTo("a");
		assertThat(StringUtils.trimTrailingCharacter(" a", ' ')).isEqualTo(" a");
		assertThat(StringUtils.trimTrailingCharacter(" a ", ' ')).isEqualTo(" a");
		assertThat(StringUtils.trimTrailingCharacter(" a b ", ' ')).isEqualTo(" a b");
		assertThat(StringUtils.trimTrailingCharacter(" a b  c ", ' ')).isEqualTo(" a b  c");
	}
	@Test
	void matchesCharacter() {
		// True only for a one-character string equal to the given character.
		assertThat(StringUtils.matchesCharacter(null, '/')).isFalse();
		assertThat(StringUtils.matchesCharacter("/a", '/')).isFalse();
		assertThat(StringUtils.matchesCharacter("a", '/')).isFalse();
		assertThat(StringUtils.matchesCharacter("/", '/')).isTrue();
	}
@Test
void startsWithIgnoreCase() {
String prefix = "fOo";
assertThat(StringUtils.startsWithIgnoreCase("foo", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("Foo", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("foobar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("foobarbar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("Foobar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("FoobarBar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("foObar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("FOObar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase("fOobar", prefix)).isTrue();
assertThat(StringUtils.startsWithIgnoreCase(null, prefix)).isFalse();
assertThat(StringUtils.startsWithIgnoreCase("fOobar", null)).isFalse();
assertThat(StringUtils.startsWithIgnoreCase("b", prefix)).isFalse();
assertThat(StringUtils.startsWithIgnoreCase("barfoo", prefix)).isFalse();
assertThat(StringUtils.startsWithIgnoreCase("barfoobar", prefix)).isFalse();
}
@Test
void endsWithIgnoreCase() {
String suffix = "fOo";
assertThat(StringUtils.endsWithIgnoreCase("foo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("Foo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barfoo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barbarfoo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barFoo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barBarFoo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barfoO", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barFOO", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase("barfOo", suffix)).isTrue();
assertThat(StringUtils.endsWithIgnoreCase(null, suffix)).isFalse();
assertThat(StringUtils.endsWithIgnoreCase("barfOo", null)).isFalse();
assertThat(StringUtils.endsWithIgnoreCase("b", suffix)).isFalse();
assertThat(StringUtils.endsWithIgnoreCase("foobar", suffix)).isFalse();
assertThat(StringUtils.endsWithIgnoreCase("barfoobar", suffix)).isFalse();
}
	@Test
	void substringMatch() {
		// Case-sensitive match of a substring at a given index; indexes past the
		// end of the string must simply report false.
		assertThat(StringUtils.substringMatch("foo", 0, "foo")).isTrue();
		assertThat(StringUtils.substringMatch("foo", 1, "oo")).isTrue();
		assertThat(StringUtils.substringMatch("foo", 2, "o")).isTrue();
		assertThat(StringUtils.substringMatch("foo", 0, "fOo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 1, "fOo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 2, "fOo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 3, "fOo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 1, "Oo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 2, "Oo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 3, "Oo")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 2, "O")).isFalse();
		assertThat(StringUtils.substringMatch("foo", 3, "O")).isFalse();
	}
@Test
void countOccurrencesOf() {
assertThat(StringUtils.countOccurrencesOf(null, null)).as("nullx2 = 0").isEqualTo(0);
assertThat(StringUtils.countOccurrencesOf("s", null)).as("null string = 0").isEqualTo(0);
assertThat(StringUtils.countOccurrencesOf(null, "s")).as("null substring = 0").isEqualTo(0);
String s = "erowoiueoiur";
assertThat(StringUtils.countOccurrencesOf(s, "WERWER")).as("not found = 0").isEqualTo(0);
assertThat(StringUtils.countOccurrencesOf(s, "x")).as("not found char = 0").isEqualTo(0);
assertThat(StringUtils.countOccurrencesOf(s, " ")).as("not found ws = 0").isEqualTo(0);
assertThat(StringUtils.countOccurrencesOf(s, "")).as("not found empty string = 0").isEqualTo(0);
assertThat(StringUtils.countOccurrencesOf(s, "e")).as("found char=2").isEqualTo(2);
assertThat(StringUtils.countOccurrencesOf(s, "oi")).as("found substring=2").isEqualTo(2);
assertThat(StringUtils.countOccurrencesOf(s, "oiu")).as("found substring=2").isEqualTo(2);
assertThat(StringUtils.countOccurrencesOf(s, "oiur")).as("found substring=3").isEqualTo(1);
assertThat(StringUtils.countOccurrencesOf(s, "r")).as("test last").isEqualTo(2);
}
@Test
void replace() {
String inString = "a6AazAaa77abaa";
String oldPattern = "aa";
String newPattern = "foo";
// Simple replace
String s = StringUtils.replace(inString, oldPattern, newPattern);
assertThat(s).as("Replace 1 worked").isEqualTo("a6AazAfoo77abfoo");
// Non match: no change
s = StringUtils.replace(inString, "qwoeiruqopwieurpoqwieur", newPattern);
assertThat(s).as("Replace non-matched is returned as-is").isSameAs(inString);
// Null new pattern: should ignore
s = StringUtils.replace(inString, oldPattern, null);
assertThat(s).as("Replace non-matched is returned as-is").isSameAs(inString);
// Null old pattern: should ignore
s = StringUtils.replace(inString, null, newPattern);
assertThat(s).as("Replace non-matched is returned as-is").isSameAs(inString);
}
	@Test
	void delete() {
		// delete removes every occurrence of the pattern (case-sensitive);
		// an empty or non-matching pattern leaves the input unchanged.
		String inString = "The quick brown fox jumped over the lazy dog";
		String noThe = StringUtils.delete(inString, "the");
		assertThat(noThe).as("Result has no the [" + noThe + "]")
				.isEqualTo("The quick brown fox jumped over lazy dog");
		String nohe = StringUtils.delete(inString, "he");
		assertThat(nohe).as("Result has no he [" + nohe + "]").isEqualTo("T quick brown fox jumped over t lazy dog");
		String nosp = StringUtils.delete(inString, " ");
		assertThat(nosp).as("Result has no spaces").isEqualTo("Thequickbrownfoxjumpedoverthelazydog");
		String killEnd = StringUtils.delete(inString, "dog");
		assertThat(killEnd).as("Result has no dog").isEqualTo("The quick brown fox jumped over the lazy ");
		String mismatch = StringUtils.delete(inString, "dxxcxcxog");
		assertThat(mismatch).as("Result is unchanged").isEqualTo(inString);
		String nochange = StringUtils.delete(inString, "");
		assertThat(nochange).as("Result is unchanged").isEqualTo(inString);
	}
@Test
void deleteAny() {
String inString = "Able was I ere I saw Elba";
String res = StringUtils.deleteAny(inString, "I");
assertThat(res).as("Result has no 'I'").isEqualTo("Able was ere saw Elba");
res = StringUtils.deleteAny(inString, "AeEba!");
assertThat(res).as("Result has no 'AeEba!'").isEqualTo("l ws I r I sw l");
res = StringUtils.deleteAny(inString, "#@$#$^");
assertThat(res).as("Result is unchanged").isEqualTo(inString);
}
	@Test
	void deleteAnyWhitespace() {
		// Sanity-checks the fixture actually contains each whitespace kind,
		// then verifies deleteAny strips all of them while keeping the text.
		String whitespace = "This is\n\n\n  \t   a messagy string with whitespace\n";
		assertThat(whitespace).as("Has CR").contains("\n");
		assertThat(whitespace).as("Has tab").contains("\t");
		assertThat(whitespace).as("Has space").contains(" ");
		String cleaned = StringUtils.deleteAny(whitespace, "\n\t ");
		assertThat(cleaned).as("Has no CR").doesNotContain("\n");
		assertThat(cleaned).as("Has no tab").doesNotContain("\t");
		assertThat(cleaned).as("Has no space").doesNotContain(" ");
		assertThat(cleaned.length()).as("Still has chars").isGreaterThan(10);
	}
	@Test
	void quote() {
		// Wraps in single quotes; null passes through as null.
		assertThat(StringUtils.quote("myString")).isEqualTo("'myString'");
		assertThat(StringUtils.quote("")).isEqualTo("''");
		assertThat(StringUtils.quote(null)).isNull();
	}
	@Test
	void quoteIfString() {
		// Only String values are quoted; non-String objects are returned as-is.
		assertThat(StringUtils.quoteIfString("myString")).isEqualTo("'myString'");
		assertThat(StringUtils.quoteIfString("")).isEqualTo("''");
		assertThat(StringUtils.quoteIfString(5)).isEqualTo(5);
		assertThat(StringUtils.quoteIfString(null)).isNull();
	}
@Test
void unqualify() {
String qualified = "i.am.not.unqualified";
assertThat(StringUtils.unqualify(qualified)).isEqualTo("unqualified");
}
@Test
void capitalize() {
String capitalized = "i am not capitalized";
assertThat(StringUtils.capitalize(capitalized)).isEqualTo("I am not capitalized");
}
@Test
void uncapitalize() {
String capitalized = "I am capitalized";
assertThat(StringUtils.uncapitalize(capitalized)).isEqualTo("i am capitalized");
}
	@Test
	void getFilename() {
		// Extracts the component after the last '/'; a trailing '.' is kept.
		assertThat(StringUtils.getFilename(null)).isNull();
		assertThat(StringUtils.getFilename("")).isEmpty();
		assertThat(StringUtils.getFilename("myfile")).isEqualTo("myfile");
		assertThat(StringUtils.getFilename("my/path/myfile")).isEqualTo("myfile");
		assertThat(StringUtils.getFilename("myfile.")).isEqualTo("myfile.");
		assertThat(StringUtils.getFilename("mypath/myfile.")).isEqualTo("myfile.");
		assertThat(StringUtils.getFilename("myfile.txt")).isEqualTo("myfile.txt");
		assertThat(StringUtils.getFilename("my/path/myfile.txt")).isEqualTo("myfile.txt");
	}
	@Test
	void getFilenameExtension() {
		// No '.' in the filename component -> null; a dot in a directory name
		// (e.g. ".m2") must not be mistaken for an extension separator.
		assertThat(StringUtils.getFilenameExtension(null)).isNull();
		assertThat(StringUtils.getFilenameExtension("")).isNull();
		assertThat(StringUtils.getFilenameExtension("myfile")).isNull();
		assertThat(StringUtils.getFilenameExtension("myPath/myfile")).isNull();
		assertThat(StringUtils.getFilenameExtension("/home/user/.m2/settings/myfile")).isNull();
		assertThat(StringUtils.getFilenameExtension("myfile.")).isEmpty();
		assertThat(StringUtils.getFilenameExtension("myPath/myfile.")).isEmpty();
		assertThat(StringUtils.getFilenameExtension("myfile.txt")).isEqualTo("txt");
		assertThat(StringUtils.getFilenameExtension("mypath/myfile.txt")).isEqualTo("txt");
		assertThat(StringUtils.getFilenameExtension("/home/user/.m2/settings/myfile.txt")).isEqualTo("txt");
	}
	@Test
	void stripFilenameExtension() {
		// Removes the extension (including a bare trailing '.') from the last
		// path component only; dotted directory names are preserved.
		assertThat(StringUtils.stripFilenameExtension("")).isEmpty();
		assertThat(StringUtils.stripFilenameExtension("myfile")).isEqualTo("myfile");
		assertThat(StringUtils.stripFilenameExtension("myfile.")).isEqualTo("myfile");
		assertThat(StringUtils.stripFilenameExtension("myfile.txt")).isEqualTo("myfile");
		assertThat(StringUtils.stripFilenameExtension("mypath/myfile")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.stripFilenameExtension("mypath/myfile.")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.stripFilenameExtension("mypath/myfile.txt")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.stripFilenameExtension("/home/user/.m2/settings/myfile")).isEqualTo("/home/user/.m2/settings/myfile");
		assertThat(StringUtils.stripFilenameExtension("/home/user/.m2/settings/myfile.")).isEqualTo("/home/user/.m2/settings/myfile");
		assertThat(StringUtils.stripFilenameExtension("/home/user/.m2/settings/myfile.txt")).isEqualTo("/home/user/.m2/settings/myfile");
	}
	@Test
	void cleanPath() {
		// Normalizes a path: backslashes become '/', inner "." segments drop,
		// ".." folds the preceding segment. Leading ".." that cannot be folded
		// is preserved, as is a URL-style prefix such as "file:" or "jar:file:".
		// Expected values are exact and intentionally asymmetric (e.g. "mypath/.."
		// yields "" but "mypath/../" yields "./"), so each case is pinned literally.
		assertThat(StringUtils.cleanPath("mypath/myfile")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.cleanPath("mypath\\myfile")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.cleanPath("mypath/../mypath/myfile")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.cleanPath("mypath/myfile/../../mypath/myfile")).isEqualTo("mypath/myfile");
		assertThat(StringUtils.cleanPath("../mypath/myfile")).isEqualTo("../mypath/myfile");
		assertThat(StringUtils.cleanPath("../mypath/../mypath/myfile")).isEqualTo("../mypath/myfile");
		assertThat(StringUtils.cleanPath("mypath/../../mypath/myfile")).isEqualTo("../mypath/myfile");
		assertThat(StringUtils.cleanPath("/../mypath/myfile")).isEqualTo("/../mypath/myfile");
		assertThat(StringUtils.cleanPath("/a/:b/../../mypath/myfile")).isEqualTo("/mypath/myfile");
		assertThat(StringUtils.cleanPath("/")).isEqualTo("/");
		assertThat(StringUtils.cleanPath("/mypath/../")).isEqualTo("/");
		assertThat(StringUtils.cleanPath("mypath/..")).isEmpty();
		assertThat(StringUtils.cleanPath("mypath/../.")).isEmpty();
		assertThat(StringUtils.cleanPath("mypath/../")).isEqualTo("./");
		assertThat(StringUtils.cleanPath("././")).isEqualTo("./");
		assertThat(StringUtils.cleanPath("./")).isEqualTo("./");
		assertThat(StringUtils.cleanPath("../")).isEqualTo("../");
		assertThat(StringUtils.cleanPath("./../")).isEqualTo("../");
		assertThat(StringUtils.cleanPath(".././")).isEqualTo("../");
		assertThat(StringUtils.cleanPath("file:/")).isEqualTo("file:/");
		assertThat(StringUtils.cleanPath("file:/mypath/../")).isEqualTo("file:/");
		assertThat(StringUtils.cleanPath("file:mypath/..")).isEqualTo("file:");
		assertThat(StringUtils.cleanPath("file:mypath/../.")).isEqualTo("file:");
		assertThat(StringUtils.cleanPath("file:mypath/../")).isEqualTo("file:./");
		assertThat(StringUtils.cleanPath("file:././")).isEqualTo("file:./");
		assertThat(StringUtils.cleanPath("file:./")).isEqualTo("file:./");
		assertThat(StringUtils.cleanPath("file:../")).isEqualTo("file:../");
		assertThat(StringUtils.cleanPath("file:./../")).isEqualTo("file:../");
		assertThat(StringUtils.cleanPath("file:.././")).isEqualTo("file:../");
		assertThat(StringUtils.cleanPath("file:/mypath/spring.factories")).isEqualTo("file:/mypath/spring.factories");
		assertThat(StringUtils.cleanPath("file:///c:/some/../path/the%20file.txt")).isEqualTo("file:///c:/path/the%20file.txt");
		assertThat(StringUtils.cleanPath("jar:file:///c:\\some\\..\\path\\.\\the%20file.txt")).isEqualTo("jar:file:///c:/path/the%20file.txt");
		assertThat(StringUtils.cleanPath("jar:file:///c:/some/../path/./the%20file.txt")).isEqualTo("jar:file:///c:/path/the%20file.txt");
		assertThat(StringUtils.cleanPath("jar:file:///c:\\\\some\\\\..\\\\path\\\\.\\\\the%20file.txt")).isEqualTo("jar:file:///c:/path/the%20file.txt");
	}
	@Test
	void pathEquals() {
		// Two paths are equal after normalization (".." folding, "\" vs "/"),
		// but relative vs absolute paths, and unfoldable differences, are not.
		assertThat(StringUtils.pathEquals("/dummy1/dummy2/dummy3", "/dummy1/dummy2/dummy3")).as("Must be true for the same strings").isTrue();
		assertThat(StringUtils.pathEquals("C:\\dummy1\\dummy2\\dummy3", "C:\\dummy1\\dummy2\\dummy3")).as("Must be true for the same win strings").isTrue();
		assertThat(StringUtils.pathEquals("/dummy1/bin/../dummy2/dummy3", "/dummy1/dummy2/dummy3")).as("Must be true for one top path on 1").isTrue();
		assertThat(StringUtils.pathEquals("C:\\dummy1\\dummy2\\dummy3", "C:\\dummy1\\bin\\..\\dummy2\\dummy3")).as("Must be true for one win top path on 2").isTrue();
		assertThat(StringUtils.pathEquals("/dummy1/bin/../dummy2/bin/../dummy3", "/dummy1/dummy2/dummy3")).as("Must be true for two top paths on 1").isTrue();
		assertThat(StringUtils.pathEquals("C:\\dummy1\\dummy2\\dummy3", "C:\\dummy1\\bin\\..\\dummy2\\bin\\..\\dummy3")).as("Must be true for two win top paths on 2").isTrue();
		assertThat(StringUtils.pathEquals("/dummy1/bin/tmp/../../dummy2/dummy3", "/dummy1/dummy2/dummy3")).as("Must be true for double top paths on 1").isTrue();
		assertThat(StringUtils.pathEquals("/dummy1/dummy2/dummy3", "/dummy1/dum/dum/../../dummy2/dummy3")).as("Must be true for double top paths on 2 with similarity").isTrue();
		assertThat(StringUtils.pathEquals("./dummy1/dummy2/dummy3", "dummy1/dum/./dum/../../dummy2/dummy3")).as("Must be true for current paths").isTrue();
		assertThat(StringUtils.pathEquals("./dummy1/dummy2/dummy3", "/dummy1/dum/./dum/../../dummy2/dummy3")).as("Must be false for relative/absolute paths").isFalse();
		assertThat(StringUtils.pathEquals("/dummy1/dummy2/dummy3", "/dummy1/dummy4/dummy3")).as("Must be false for different strings").isFalse();
		assertThat(StringUtils.pathEquals("/dummy1/bin/tmp/../dummy2/dummy3", "/dummy1/dummy2/dummy3")).as("Must be false for one false path on 1").isFalse();
		assertThat(StringUtils.pathEquals("C:\\dummy1\\dummy2\\dummy3", "C:\\dummy1\\bin\\tmp\\..\\dummy2\\dummy3")).as("Must be false for one false win top path on 2").isFalse();
		assertThat(StringUtils.pathEquals("/dummy1/bin/../dummy2/dummy3", "/dummy1/dummy2/dummy4")).as("Must be false for top path on 1 + difference").isFalse();
	}
@Test
void concatenateStringArrays() {
String[] input1 = new String[] {"myString2"};
String[] input2 = new String[] {"myString1", "myString2"};
String[] result = StringUtils.concatenateStringArrays(input1, input2);
assertThat(result).hasSize(3);
assertThat(result[0]).isEqualTo("myString2");
assertThat(result[1]).isEqualTo("myString1");
assertThat(result[2]).isEqualTo("myString2");
assertThat(StringUtils.concatenateStringArrays(input1, null)).isEqualTo(input1);
assertThat(StringUtils.concatenateStringArrays(null, input2)).isEqualTo(input2);
assertThat(StringUtils.concatenateStringArrays(null, null)).isNull();
}
	@Test
	void sortStringArray() {
		// addStringToArray appends (order checked first); sortStringArray then
		// sorts the array in place into natural order.
		String[] input = new String[] {"myString2"};
		input = StringUtils.addStringToArray(input, "myString1");
		assertThat(input[0]).isEqualTo("myString2");
		assertThat(input[1]).isEqualTo("myString1");
		StringUtils.sortStringArray(input);
		assertThat(input[0]).isEqualTo("myString1");
		assertThat(input[1]).isEqualTo("myString2");
	}
	@Test
	void trimArrayElements() {
		// Each element is trimmed individually; null elements stay null and a
		// null array passes through as null.
		assertThat(StringUtils.trimArrayElements(null)).isNull();
		assertThat(StringUtils.trimArrayElements(new String[] {})).isEmpty();
		assertThat(StringUtils.trimArrayElements(new String[] { "", " ", "  ", "   " })).containsExactly("", "", "", "");
		assertThat(StringUtils.trimArrayElements(new String[] { "\n", "\t ", "\n\t" })).containsExactly("", "", "");
		assertThat(StringUtils.trimArrayElements(new String[] { "a", "b", "c" })).containsExactly("a", "b", "c");
		assertThat(StringUtils.trimArrayElements(new String[] { "  a  ", "  b b ", "  cc  " })).containsExactly("a", "b b", "cc");
		assertThat(StringUtils.trimArrayElements(new String[] { "  a", "b  ", " c " })).containsExactly("a", "b", "c");
		assertThat(StringUtils.trimArrayElements(new String[] { null, "  a ", null })).containsExactly(null, "a", null);
	}
@Test
void removeDuplicateStrings() {
String[] input = new String[] {"myString2", "myString1", "myString2"};
input = StringUtils.removeDuplicateStrings(input);
assertThat(input[0]).isEqualTo("myString2");
assertThat(input[1]).isEqualTo("myString1");
}
	@Test
	void splitArrayElementsIntoProperties() {
		// Keys and values are trimmed around the delimiter; quotes are kept
		// when no charsToDelete argument is supplied.
		String[] input = new String[] {"key1=value1 ", "key2 =\"value2\""};
		Properties result = StringUtils.splitArrayElementsIntoProperties(input, "=");
		assertThat(result.getProperty("key1")).isEqualTo("value1");
		assertThat(result.getProperty("key2")).isEqualTo("\"value2\"");
	}
	@Test
	void splitArrayElementsIntoPropertiesAndDeletedChars() {
		// Same as above, but the '"' characters are deleted via charsToDelete.
		String[] input = new String[] {"key1=value1 ", "key2 =\"value2\""};
		Properties result = StringUtils.splitArrayElementsIntoProperties(input, "=", "\"");
		assertThat(result.getProperty("key1")).isEqualTo("value1");
		assertThat(result.getProperty("key2")).isEqualTo("value2");
	}
@Test
void tokenizeToStringArray() {
String[] sa = StringUtils.tokenizeToStringArray("a,b , ,c", ",");
assertThat(sa).hasSize(3);
assertThat(sa[0].equals("a") && sa[1].equals("b") && sa[2].equals("c")).as("components are correct").isTrue();
}
@Test
void tokenizeToStringArrayWithNotIgnoreEmptyTokens() {
String[] sa = StringUtils.tokenizeToStringArray("a,b , ,c", ",", true, false);
assertThat(sa).hasSize(4);
assertThat(sa[0].equals("a") && sa[1].equals("b") && sa[2].isEmpty() && sa[3].equals("c")).as("components are correct").isTrue();
}
@Test
void tokenizeToStringArrayWithNotTrimTokens() {
String[] sa = StringUtils.tokenizeToStringArray("a,b ,c", ",", false, true);
assertThat(sa).hasSize(3);
assertThat(sa[0].equals("a") && sa[1].equals("b ") && sa[2].equals("c")).as("components are correct").isTrue();
}
	@Test
	void commaDelimitedListToStringArrayWithNullProducesEmptyArray() {
		// A null input must yield an empty (never null) array.
		String[] sa = StringUtils.commaDelimitedListToStringArray(null);
		assertThat(sa).as("String array isn't null with null input").isNotNull();
		assertThat(sa.length).as("String array length == 0 with null input").isEqualTo(0);
	}
@Test
void commaDelimitedListToStringArrayWithEmptyStringProducesEmptyArray() {
String[] sa = StringUtils.commaDelimitedListToStringArray("");
assertThat(sa).as("String array isn't null with null input").isNotNull();
assertThat(sa.length).as("String array length == 0 with null input").isEqualTo(0);
}
@Test
void delimitedListToStringArrayWithComma() {
String[] sa = StringUtils.delimitedListToStringArray("a,b", ",");
assertThat(sa).hasSize(2);
assertThat(sa[0]).isEqualTo("a");
assertThat(sa[1]).isEqualTo("b");
}
@Test
void delimitedListToStringArrayWithSemicolon() {
String[] sa = StringUtils.delimitedListToStringArray("a;b", ";");
assertThat(sa).hasSize(2);
assertThat(sa[0]).isEqualTo("a");
assertThat(sa[1]).isEqualTo("b");
}
@Test
void delimitedListToStringArrayWithEmptyDelimiter() {
String[] sa = StringUtils.delimitedListToStringArray("a,b", "");
assertThat(sa).hasSize(3);
assertThat(sa[0]).isEqualTo("a");
assertThat(sa[1]).isEqualTo(",");
assertThat(sa[2]).isEqualTo("b");
}
@Test
void delimitedListToStringArrayWithNullDelimiter() {
String[] sa = StringUtils.delimitedListToStringArray("a,b", null);
assertThat(sa).hasSize(1);
assertThat(sa[0]).isEqualTo("a,b");
}
	@Test
	void delimitedListToStringArrayWithCharacterToDelete() {
		// The charsToDelete argument strips 'a' from each token, leaving "".
		String[] sa = StringUtils.delimitedListToStringArray("a,b,c", ",", "a");
		assertThat(sa).containsExactly("", "b", "c");
	}
	@Test
	void delimitedListToStringArrayWithCharacterToDeleteEqualsToDelimiter() {
		// Deleting the delimiter character itself must not disturb the split.
		String[] sa = StringUtils.delimitedListToStringArray("a,b,c", ",", ",");
		assertThat(sa).containsExactly("a", "b", "c");
	}
	@Test
	void commaDelimitedListToStringArrayMatchWords() {
		// Round-trips several word sets through join/split helpers, including
		// a set where every word is identical.
		// Could read these from files
		String[] sa = new String[] {"foo", "bar", "big"};
		doTestCommaDelimitedListToStringArrayLegalMatch(sa);
		doTestStringArrayReverseTransformationMatches(sa);
		sa = new String[] {"a", "b", "c"};
		doTestCommaDelimitedListToStringArrayLegalMatch(sa);
		doTestStringArrayReverseTransformationMatches(sa);
		// Test same words
		sa = new String[] {"AA", "AA", "AA", "AA", "AA"};
		doTestCommaDelimitedListToStringArrayLegalMatch(sa);
		doTestStringArrayReverseTransformationMatches(sa);
	}
	// Helper: array -> comma-delimited string -> array must reproduce the input.
	private void doTestStringArrayReverseTransformationMatches(String[] sa) {
		String[] reverse =
				StringUtils.commaDelimitedListToStringArray(StringUtils.arrayToCommaDelimitedString(sa));
		assertThat(Arrays.asList(reverse)).as("Reverse transformation is equal").isEqualTo(Arrays.asList(sa));
	}
@Test
void commaDelimitedListToStringArraySingleString() {
// Could read these from files
String s = "woeirqupoiewuropqiewuorpqiwueopriquwopeiurqopwieur";
String[] sa = StringUtils.commaDelimitedListToStringArray(s);
assertThat(sa.length).as("Found one String with no delimiters").isEqualTo(1);
assertThat(sa[0]).as("Single array entry matches input String with no delimiters").isEqualTo(s);
}
	@Test
	void commaDelimitedListToStringArrayWithOtherPunctuation() {
		// Non-comma punctuation inside the elements must survive the round trip.
		// Could read these from files
		String[] sa = new String[] {"xcvwert4456346&*.", "///", ".!", ".", ";"};
		doTestCommaDelimitedListToStringArrayLegalMatch(sa);
	}
	/**
	 * We expect to see the empty Strings in the output.
	 */
	@Test
	void commaDelimitedListToStringArrayEmptyStrings() {
		// Adjacent commas produce empty elements that must not be discarded.
		// Could read these from files
		String[] sa = StringUtils.commaDelimitedListToStringArray("a,,b");
		assertThat(sa.length).as("a,,b produces array length 3").isEqualTo(3);
		assertThat(sa[0].equals("a") && sa[1].isEmpty() && sa[2].equals("b")).as("components are correct").isTrue();
		sa = new String[] {"", "", "a", ""};
		doTestCommaDelimitedListToStringArrayLegalMatch(sa);
	}
private void doTestCommaDelimitedListToStringArrayLegalMatch(String[] components) {
String sb = String.join(String.valueOf(','), components);
String[] sa = StringUtils.commaDelimitedListToStringArray(sb);
assertThat(sa).as("String array isn't null with legal match").isNotNull();
assertThat(sa.length).as("String array length is correct with legal match").isEqualTo(components.length);
assertThat(Arrays.equals(sa, components)).as("Output equals input").isTrue();
}
@Test
void parseLocaleStringSunnyDay() {
Locale expectedLocale = Locale.UK;
Locale locale = StringUtils.parseLocaleString(expectedLocale.toString());
assertThat(locale).as("When given a bona-fide Locale string, must not return null.").isNotNull();
assertThat(locale).isEqualTo(expectedLocale);
}
	@Test
	void parseLocaleStringWithEmptyLocaleStringYieldsNullLocale() {
		// The empty string maps to null rather than an empty Locale.
		Locale locale = StringUtils.parseLocaleString("");
		assertThat(locale).as("When given an empty Locale string, must return null.").isNull();
	}
	@Test  // SPR-8637
	void parseLocaleWithMultiSpecialCharactersInVariant() {
		// A hyphenated variant ("proper-northern") must be kept verbatim.
		String variant = "proper-northern";
		String localeString = "en_GB_" + variant;
		Locale locale = StringUtils.parseLocaleString(localeString);
		assertThat(locale.getVariant()).as("Multi-valued variant portion of the Locale not extracted correctly.").isEqualTo(variant);
	}
	@Test  // SPR-3671
	void parseLocaleWithMultiValuedVariant() {
		// An underscore-separated variant must be preserved in full.
		String variant = "proper_northern";
		String localeString = "en_GB_" + variant;
		Locale locale = StringUtils.parseLocaleString(localeString);
		assertThat(locale.getVariant()).as("Multi-valued variant portion of the Locale not extracted correctly.").isEqualTo(variant);
	}
	@Test  // SPR-3671
	void parseLocaleWithMultiValuedVariantUsingSpacesAsSeparators() {
		// Spaces are also accepted as Locale-part separators.
		String variant = "proper northern";
		String localeString = "en GB " + variant;
		Locale locale = StringUtils.parseLocaleString(localeString);
		assertThat(locale.getVariant()).as("Multi-valued variant portion of the Locale not extracted correctly.").isEqualTo(variant);
	}
	@Test  // SPR-3671
	void parseLocaleWithMultiValuedVariantUsingMixtureOfUnderscoresAndSpacesAsSeparators() {
		// Underscore separators with a space inside the variant itself.
		String variant = "proper northern";
		String localeString = "en_GB_" + variant;
		Locale locale = StringUtils.parseLocaleString(localeString);
		assertThat(locale.getVariant()).as("Multi-valued variant portion of the Locale not extracted correctly.").isEqualTo(variant);
	}
	@Test  // SPR-7779
	void parseLocaleWithInvalidCharacters() {
		// Header-injection style input must be rejected, not parsed.
		assertThatIllegalArgumentException().isThrownBy(() ->
				StringUtils.parseLocaleString("%0D%0AContent-length:30%0D%0A%0D%0A%3Cscript%3Ealert%28123%29%3C/script%3E"));
	}
	@Test  // SPR-9420
	void parseLocaleWithSameLowercaseTokenForLanguageAndCountry() {
		// Country codes are normalized to upper case even when they duplicate
		// the language token.
		assertThat(StringUtils.parseLocaleString("tr_tr").toString()).isEqualTo("tr_TR");
		assertThat(StringUtils.parseLocaleString("bg_bg_vnt").toString()).isEqualTo("bg_BG_vnt");
	}
	@Test  // SPR-11806
	void parseLocaleWithVariantContainingCountryCode() {
		// A variant beginning with a country code ("GBtest") must not be
		// confused with the country portion.
		String variant = "GBtest";
		String localeString = "en_GB_" + variant;
		Locale locale = StringUtils.parseLocaleString(localeString);
		assertThat(locale.getVariant()).as("Variant containing country code not extracted correctly").isEqualTo(variant);
	}
	@Test  // SPR-14718, SPR-7598
	void parseJava7Variant() {
		// Java 7 script-extension syntax ("sr__#LATN") must round-trip intact.
		assertThat(StringUtils.parseLocaleString("sr__#LATN").toString()).isEqualTo("sr__#LATN");
	}
	@Test  // SPR-16651
	void availableLocalesWithLocaleString() {
		// Every JVM-available Locale must round-trip via toString/parseLocaleString;
		// a null parse result is acceptable only for the language-less root Locale.
		for (Locale locale : Locale.getAvailableLocales()) {
			Locale parsedLocale = StringUtils.parseLocaleString(locale.toString());
			if (parsedLocale == null) {
				assertThat(locale.getLanguage()).isEmpty();
			}
			else {
				assertThat(locale.toString()).isEqualTo(parsedLocale.toString());
			}
		}
	}
	@Test  // SPR-16651
	void availableLocalesWithLanguageTag() {
		// Same round-trip guarantee, but via BCP 47 language tags and parseLocale.
		for (Locale locale : Locale.getAvailableLocales()) {
			Locale parsedLocale = StringUtils.parseLocale(locale.toLanguageTag());
			if (parsedLocale == null) {
				assertThat(locale.getLanguage()).isEmpty();
			}
			else {
				assertThat(locale.toLanguageTag()).isEqualTo(parsedLocale.toLanguageTag());
			}
		}
	}
	@Test
	void invalidLocaleWithLocaleString() {
		// Unknown language/country tokens are accepted verbatim rather than
		// rejected; only the empty string yields null.
		assertThat(StringUtils.parseLocaleString("invalid")).isEqualTo(new Locale("invalid"));
		assertThat(StringUtils.parseLocaleString("invalidvalue")).isEqualTo(new Locale("invalidvalue"));
		assertThat(StringUtils.parseLocaleString("invalidvalue_foo")).isEqualTo(new Locale("invalidvalue", "foo"));
		assertThat(StringUtils.parseLocaleString("")).isNull();
	}
	@Test
	void invalidLocaleWithLanguageTag() {
		// Mirror of the previous test for the language-tag entry point.
		assertThat(StringUtils.parseLocale("invalid")).isEqualTo(new Locale("invalid"));
		assertThat(StringUtils.parseLocale("invalidvalue")).isEqualTo(new Locale("invalidvalue"));
		assertThat(StringUtils.parseLocale("invalidvalue_foo")).isEqualTo(new Locale("invalidvalue", "foo"));
		assertThat(StringUtils.parseLocale("")).isNull();
	}
	@Test
	void parseLocaleStringWithEmptyCountryAndVariant() {
		// "be__TARASK" has an empty country but a variant; both must survive.
		assertThat(StringUtils.parseLocale("be__TARASK").toString()).isEqualTo("be__TARASK");
	}
	@Test
	void split() {
		// split divides at the FIRST occurrence only; a leading or trailing
		// delimiter yields an empty first/second part.
		assertThat(StringUtils.split("Hello, world", ",")).containsExactly("Hello", " world");
		assertThat(StringUtils.split(",Hello world", ",")).containsExactly("", "Hello world");
		assertThat(StringUtils.split("Hello world,", ",")).containsExactly("Hello world", "");
		assertThat(StringUtils.split("Hello, world,", ",")).containsExactly("Hello", " world,");
	}
	@Test
	void splitWithEmptyStringOrNull() {
		// Null or empty input/delimiter means "cannot split" -> null result.
		assertThat(StringUtils.split("Hello, world", "")).isNull();
		assertThat(StringUtils.split("", ",")).isNull();
		assertThat(StringUtils.split(null, ",")).isNull();
		assertThat(StringUtils.split("Hello, world", null)).isNull();
		assertThat(StringUtils.split(null, null)).isNull();
	}
@Test
void collectionToDelimitedStringWithNullValuesShouldNotFail() {
assertThat(StringUtils.collectionToCommaDelimitedString(Collections.singletonList(null))).isEqualTo("null");
}
@Test
void applyRelativePath() {
// Basic combination
assertThat(StringUtils.applyRelativePath("mypath/myfile", "otherfile")).isEqualTo("mypath/otherfile");
// Relative path starts with slash
assertThat(StringUtils.applyRelativePath("mypath/myfile", "/otherfile")).isEqualTo("mypath/otherfile");
// Includes root path
assertThat(StringUtils.applyRelativePath("/mypath/myfile", "otherfile")).isEqualTo("/mypath/otherfile");
assertThat(StringUtils.applyRelativePath("/mypath/myfile", "/otherfile")).isEqualTo("/mypath/otherfile");
// When base path has no slash
assertThat(StringUtils.applyRelativePath("myfile", "otherfile")).isEqualTo("otherfile");
// Keep parent directory token as-is
assertThat(StringUtils.applyRelativePath("mypath/myfile", "../otherfile")).isEqualTo("mypath/../otherfile");
// Base path ends with slash
assertThat(StringUtils.applyRelativePath("mypath/", "otherfile")).isEqualTo("mypath/otherfile");
// Empty relative path
assertThat(StringUtils.applyRelativePath("mypath/myfile", "")).isEqualTo("mypath/");
}
@Test
void truncatePreconditions() {
assertThatIllegalArgumentException()
.isThrownBy(() -> StringUtils.truncate("foo", 0))
.withMessage("Truncation threshold must be a positive number: 0");
assertThatIllegalArgumentException()
.isThrownBy(() -> StringUtils.truncate("foo", -99))
.withMessage("Truncation threshold must be a positive number: -99");
}
@ParameterizedTest
@CsvSource(delimiterString = "-->", textBlock = """
'' --> ''
aardvark --> aardvark
aardvark12 --> aardvark12
aardvark123 --> aardvark12 (truncated)...
aardvark, bird, cat --> aardvark, (truncated)...
"""
)
void truncate(String text, String truncated) {
assertThat(StringUtils.truncate(text, 10)).isEqualTo(truncated);
}
}
| StringUtilsTests |
java | apache__kafka | group-coordinator/src/main/java/org/apache/kafka/coordinator/group/assignor/SimpleHeterogeneousAssignmentBuilder.java | {
"start": 10362,
"end": 23002
} | class ____ {
/**
* The topic ID.
*/
private final Uuid topicId;
/**
* The list of assignable topic-partitions for this topic.
*/
private final List<TopicIdPartition> targetPartitions;
/**
* The list of member indices subscribed to this topic.
*/
private final List<Integer> subscribedMembers;
/**
* The final assignment, grouped by partition index, progressively computed during the assignment building.
*/
private final Map<Integer, Set<Integer>> finalAssignmentByPartition;
/**
* The final assignment, grouped by member index, progressively computed during the assignment building.
*/
private final Map<Integer, Set<Integer>> finalAssignmentByMember;
/**
* The desired sharing for each target partition.
* For entirely balanced assignment, we would expect (numTargetPartitions / numGroupMembers) partitions per member, rounded upwards.
* That can be expressed as: Math.ceil(numTargetPartitions / (double) numGroupMembers)
*/
private final Integer desiredSharing;
/**
* The desired number of assignments for each share group member.
* <p>
* Members are stored as integer indices into the memberIds array.
*/
private final int[] desiredAssignmentCounts;
public TopicAssignmentPartialBuilder(Uuid topicId, int numGroupMembers, List<TopicIdPartition> targetPartitions, List<Integer> subscribedMembers) {
this.topicId = topicId;
this.targetPartitions = targetPartitions;
this.subscribedMembers = subscribedMembers;
this.finalAssignmentByPartition = AssignorHelpers.newHashMap(targetPartitions.size());
this.finalAssignmentByMember = AssignorHelpers.newHashMap(subscribedMembers.size());
int numTargetPartitions = targetPartitions.size();
int numSubscribedMembers = subscribedMembers.size();
if (numTargetPartitions == 0) {
this.desiredSharing = 0;
} else {
this.desiredSharing = (numSubscribedMembers + numTargetPartitions - 1) / numTargetPartitions;
}
// Calculate the desired number of assignments for each member.
// The precise desired assignment count per target partition. This can be a fractional number.
// We would expect (numSubscribedMembers / numTargetPartitions) assignments per partition, rounded upwards.
// Using integer arithmetic: (numSubscribedMembers + numTargetPartitions - 1) / numTargetPartitions
this.desiredAssignmentCounts = new int[numGroupMembers];
double preciseDesiredAssignmentCount = desiredSharing * numTargetPartitions / (double) numSubscribedMembers;
for (int memberIndex = 0; memberIndex < numSubscribedMembers; memberIndex++) {
desiredAssignmentCounts[subscribedMembers.get(memberIndex)] =
(int) Math.ceil(preciseDesiredAssignmentCount * (double) (memberIndex + 1)) -
(int) Math.ceil(preciseDesiredAssignmentCount * (double) memberIndex);
}
}
public void build() {
// The order of steps here is not that significant, but assignRemainingPartitions must go last.
revokeUnassignablePartitions();
revokeOverfilledMembers();
revokeOversharedPartitions();
// Add in any partitions which are currently not in the assignment.
targetPartitions.forEach(topicPartition ->
finalAssignmentByPartition.computeIfAbsent(topicPartition.partitionId(), k -> AssignorHelpers.newHashSet(subscribedMembers.size())));
assignRemainingPartitions();
}
/**
* Examine the members from the current assignment, making sure that no member has too many assigned partitions.
* When looking at the current assignment, we need to only consider the topics in the current assignment that are
* also being subscribed in the new assignment.
*/
private void revokeUnassignablePartitions() {
for (Map.Entry<Integer, Map<Uuid, Set<Integer>>> entry : oldGroupAssignment.entrySet()) {
Integer memberIndex = entry.getKey();
Map<Uuid, Set<Integer>> oldMemberAssignment = entry.getValue();
Map<Uuid, Set<Integer>> newMemberAssignment = null;
if (oldMemberAssignment.isEmpty()) {
continue;
}
Set<Integer> assignedPartitions = oldMemberAssignment.get(topicId);
if (assignedPartitions != null) {
if (subscribedTopicIds.contains(topicId)) {
for (int partition : assignedPartitions) {
finalAssignmentByPartition.computeIfAbsent(partition, k -> new HashSet<>()).add(memberIndex);
finalAssignmentByMember.computeIfAbsent(memberIndex, k -> new HashSet<>()).add(partition);
}
} else {
// We create a deep copy of the original assignment so we can alter it.
newMemberAssignment = AssignorHelpers.deepCopyAssignment(oldMemberAssignment);
// Remove the entire topic.
newMemberAssignment.remove(topicId);
}
if (newMemberAssignment != null) {
newGroupAssignment.put(memberIndex, newMemberAssignment);
}
}
}
}
/**
* Revoke partitions from members which are overfilled.
*/
private void revokeOverfilledMembers() {
finalAssignmentByMember.forEach((memberIndex, assignedPartitions) -> {
int memberDesiredAssignmentCount = desiredAssignmentCounts[memberIndex];
if (assignedPartitions.size() > memberDesiredAssignmentCount) {
Map<Uuid, Set<Integer>> newMemberAssignment = newGroupAssignment.get(memberIndex);
Iterator<Integer> partitionIterator = assignedPartitions.iterator();
while (partitionIterator.hasNext() && (assignedPartitions.size() > memberDesiredAssignmentCount)) {
int partitionIndex = partitionIterator.next();
finalAssignmentByPartition.get(partitionIndex).remove(memberIndex);
partitionIterator.remove();
if (newMemberAssignment == null) {
newMemberAssignment = AssignorHelpers.deepCopyAssignment(oldGroupAssignment.get(memberIndex));
newGroupAssignment.put(memberIndex, newMemberAssignment);
}
newMemberAssignment.get(topicId).remove(partitionIndex);
}
}
});
}
/**
* Revoke any over-shared partitions.
*/
private void revokeOversharedPartitions() {
finalAssignmentByPartition.forEach((partitionIndex, assignedMembers) -> {
int assignedMemberCount = assignedMembers.size();
if (assignedMemberCount > desiredSharing) {
Iterator<Integer> assignedMemberIterator = assignedMembers.iterator();
while (assignedMemberIterator.hasNext()) {
Integer memberIndex = assignedMemberIterator.next();
Map<Uuid, Set<Integer>> newMemberAssignment = newGroupAssignment.get(memberIndex);
if (newMemberAssignment == null) {
newMemberAssignment = AssignorHelpers.deepCopyAssignment(oldGroupAssignment.get(memberIndex));
newGroupAssignment.put(memberIndex, newMemberAssignment);
}
Set<Integer> partitions = newMemberAssignment.get(topicId);
if (partitions != null) {
if (partitions.remove(partitionIndex)) {
assignedMemberCount--;
assignedMemberIterator.remove();
finalAssignmentByMember.get(memberIndex).remove(partitionIndex);
}
}
if (assignedMemberCount <= desiredSharing) {
break;
}
}
}
});
}
/**
* Assign partitions to unfilled members. It repeatedly iterates through the unfilled members while running
* once through the set of partitions. When a partition is found that has insufficient sharing, it attempts to assign
* to one of the members.
* <p>
* There is one tricky case here and that's where a partition wants another assignment, but none of the unfilled
* members are able to take it (because they already have that partition). In this situation, we just accept that
* no additional assignments for this partition could be made and carry on. In theory, a different shuffling of the
* partitions would be able to achieve better balance, but it's harmless tolerating a slight imbalance in this case.
* <p>
* Note that finalAssignmentByMember is not maintained by this method which is expected to be the final step in the
* computation.
*/
private void assignRemainingPartitions() {
Set<Integer> unfilledMembers = AssignorHelpers.newHashSet(numGroupMembers);
subscribedMembersByTopic.get(topicId).forEach(memberIndex -> {
Set<Integer> assignedPartitions = finalAssignmentByMember.get(memberIndex);
int numberOfAssignedPartitions = (assignedPartitions == null) ? 0 : assignedPartitions.size();
if (numberOfAssignedPartitions < desiredAssignmentCounts[memberIndex]) {
unfilledMembers.add(memberIndex);
}
});
Iterator<Integer> memberIterator = unfilledMembers.iterator();
boolean partitionAssignedForThisIterator = false;
for (Map.Entry<Integer, Set<Integer>> partitionAssignment : finalAssignmentByPartition.entrySet()) {
int partitionIndex = partitionAssignment.getKey();
Set<Integer> membersAssigned = partitionAssignment.getValue();
if (membersAssigned.size() < desiredSharing) {
int assignmentsToMake = desiredSharing - membersAssigned.size();
while (assignmentsToMake > 0) {
if (!memberIterator.hasNext()) {
if (!partitionAssignedForThisIterator) {
break;
}
memberIterator = unfilledMembers.iterator();
partitionAssignedForThisIterator = false;
}
int memberIndex = memberIterator.next();
if (!membersAssigned.contains(memberIndex)) {
Map<Uuid, Set<Integer>> newMemberAssignment = newGroupAssignment.get(memberIndex);
if (newMemberAssignment == null) {
newMemberAssignment = AssignorHelpers.deepCopyAssignment(oldGroupAssignment.get(memberIndex));
newGroupAssignment.put(memberIndex, newMemberAssignment);
}
newMemberAssignment.computeIfAbsent(topicId, k -> new HashSet<>()).add(partitionIndex);
finalAssignmentByMember.computeIfAbsent(memberIndex, k -> new HashSet<>()).add(partitionIndex);
assignmentsToMake--;
partitionAssignedForThisIterator = true;
if (finalAssignmentByMember.get(memberIndex).size() >= desiredAssignmentCounts[memberIndex]) {
memberIterator.remove();
}
}
}
}
if (unfilledMembers.isEmpty()) {
break;
}
}
}
}
}
| TopicAssignmentPartialBuilder |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/scheduling/ScheduledTasksEndpoint.java | {
"start": 5959,
"end": 6581
} | class ____ {
private final TaskExecutionOutcome lastExecutionOutcome;
private LastExecution(TaskExecutionOutcome lastExecutionOutcome) {
this.lastExecutionOutcome = lastExecutionOutcome;
}
public Status getStatus() {
return this.lastExecutionOutcome.status();
}
public @Nullable Instant getTime() {
return this.lastExecutionOutcome.executionTime();
}
public @Nullable ExceptionInfo getException() {
Throwable throwable = this.lastExecutionOutcome.throwable();
if (throwable != null) {
return new ExceptionInfo(throwable);
}
return null;
}
}
public static final | LastExecution |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/ProcessInheritIODisabledBuildItem.java | {
"start": 1025,
"end": 1586
} | class ____ implements Function<Map<String, Object>, List<Consumer<BuildChainBuilder>>> {
@Override
public List<Consumer<BuildChainBuilder>> apply(final Map<String, Object> props) {
return Collections.singletonList((builder) -> {
final BuildStepBuilder stepBuilder = builder.addBuildStep((ctx) -> {
ctx.produce(new ProcessInheritIODisabledBuildItem());
});
stepBuilder.produces(ProcessInheritIODisabledBuildItem.class).build();
});
}
}
}
| Factory |
java | elastic__elasticsearch | x-pack/plugin/sql/qa/jdbc/security/src/test/java/org/elasticsearch/xpack/sql/qa/jdbc/security/JdbcConnectionIT.java | {
"start": 801,
"end": 2304
} | class ____ extends ConnectionTestCase {
static final boolean SSL_ENABLED = Booleans.parseBoolean(System.getProperty("tests.ssl.enabled"), false);
static Settings securitySettings() {
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
Settings.Builder builder = Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token);
if (SSL_ENABLED) {
Path keyStore;
try {
keyStore = PathUtils.get(getTestClass().getResource("/test-node.jks").toURI());
} catch (URISyntaxException e) {
throw new RuntimeException("exception while reading the store", e);
}
if (Files.exists(keyStore) == false) {
throw new IllegalStateException("Keystore file [" + keyStore + "] does not exist.");
}
builder.put(ESRestTestCase.TRUSTSTORE_PATH, keyStore).put(ESRestTestCase.TRUSTSTORE_PASSWORD, "keypass");
}
return builder.build();
}
@Override
protected Settings restClientSettings() {
return securitySettings();
}
@Override
protected String getProtocol() {
return SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties properties = super.connectionProperties();
properties.putAll(JdbcSecurityUtils.adminProperties());
return properties;
}
}
| JdbcConnectionIT |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 90936,
"end": 91461
} | class ____
extends AggregateFunction<String, Row> {
@FunctionHint(accumulator = @DataTypeHint("ROW<b BOOLEAN>"))
public void accumulate(Row accumulator, @DataTypeHint("ROW<i INT, b BOOLEAN>") Row r) {
// nothing to do
}
@Override
public String getValue(Row accumulator) {
return null;
}
@Override
public Row createAccumulator() {
return null;
}
}
private static | AggregateFunctionWithManyAnnotations |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/tests/StreamsStandByReplicaTest.java | {
"start": 1913,
"end": 7833
} | class ____ {
public static void main(final String[] args) throws IOException {
if (args.length < 2) {
System.err.println("StreamsStandByReplicaTest are expecting two parameters: " +
"propFile, additionalConfigs; but only see " + args.length + " parameter");
Exit.exit(1);
}
System.out.println("StreamsTest instance started");
final String propFileName = args[0];
final String additionalConfigs = args[1];
final Properties streamsProperties = Utils.loadProps(propFileName);
final String kafka = streamsProperties.getProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
if (kafka == null) {
System.err.println("No bootstrap kafka servers specified in " + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
Exit.exit(1);
}
streamsProperties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100L);
streamsProperties.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
streamsProperties.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0);
streamsProperties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
streamsProperties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
streamsProperties.put(StreamsConfig.producerPrefix(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG), true);
if (additionalConfigs == null) {
System.err.println("additional configs are not provided");
System.err.flush();
Exit.exit(1);
}
final Map<String, String> updated = SystemTestUtil.parseConfigs(additionalConfigs);
System.out.println("Updating configs with " + updated);
final String sourceTopic = updated.remove("sourceTopic");
final String sinkTopic1 = updated.remove("sinkTopic1");
final String sinkTopic2 = updated.remove("sinkTopic2");
if (sourceTopic == null || sinkTopic1 == null || sinkTopic2 == null) {
System.err.printf(
"one or more required topics null sourceTopic[%s], sinkTopic1[%s], sinkTopic2[%s]%n",
sourceTopic,
sinkTopic1,
sinkTopic2);
System.err.flush();
Exit.exit(1);
}
streamsProperties.putAll(updated);
if (!confirmCorrectConfigs(streamsProperties)) {
System.err.printf(
"ERROR: Did not have all required configs expected to contain %s, %s, %s, %s%n",
StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG),
StreamsConfig.producerPrefix(ProducerConfig.RETRIES_CONFIG),
StreamsConfig.producerPrefix(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG),
StreamsConfig.producerPrefix(ProducerConfig.MAX_BLOCK_MS_CONFIG)
);
Exit.exit(1);
}
final StreamsBuilder builder = new StreamsBuilder();
final String inMemoryStoreName = "in-memory-store";
final String persistentMemoryStoreName = "persistent-memory-store";
final KeyValueBytesStoreSupplier inMemoryStoreSupplier = Stores.inMemoryKeyValueStore(inMemoryStoreName);
final KeyValueBytesStoreSupplier persistentStoreSupplier = Stores.persistentKeyValueStore(persistentMemoryStoreName);
final Serde<String> stringSerde = Serdes.String();
final ValueMapper<Long, String> countMapper = Object::toString;
final KStream<String, String> inputStream = builder.stream(sourceTopic, Consumed.with(stringSerde, stringSerde));
inputStream.groupByKey().count(Materialized.as(inMemoryStoreSupplier)).toStream().mapValues(countMapper)
.to(sinkTopic1, Produced.with(stringSerde, stringSerde));
inputStream.groupByKey().count(Materialized.as(persistentStoreSupplier)).toStream().mapValues(countMapper)
.to(sinkTopic2, Produced.with(stringSerde, stringSerde));
final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProperties);
streams.setUncaughtExceptionHandler(e -> {
System.err.println("FATAL: An unexpected exception " + e);
e.printStackTrace(System.err);
System.err.flush();
return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
});
streams.setStateListener((newState, oldState) -> {
if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
final Set<ThreadMetadata> threadMetadata = streams.metadataForLocalThreads();
for (final ThreadMetadata threadMetadatum : threadMetadata) {
System.out.println(
"ACTIVE_TASKS:" + threadMetadatum.activeTasks().size()
+ " STANDBY_TASKS:" + threadMetadatum.standbyTasks().size());
}
}
});
System.out.println("Start Kafka Streams");
streams.start();
Exit.addShutdownHook("streams-shutdown-hook", () -> {
shutdown(streams);
System.out.println("Shut down streams now");
});
}
private static void shutdown(final KafkaStreams streams) {
streams.close(Duration.ofSeconds(10));
}
private static boolean confirmCorrectConfigs(final Properties properties) {
return properties.containsKey(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG)) &&
properties.containsKey(StreamsConfig.producerPrefix(ProducerConfig.RETRIES_CONFIG)) &&
properties.containsKey(StreamsConfig.producerPrefix(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG)) &&
properties.containsKey(StreamsConfig.producerPrefix(ProducerConfig.MAX_BLOCK_MS_CONFIG));
}
}
| StreamsStandByReplicaTest |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/command/ShowJarsOperation.java | {
"start": 1238,
"end": 1703
} | class ____ implements ShowOperation {
@Override
public String asSummaryString() {
return "SHOW JARS";
}
@Override
public TableResultInternal execute(Context ctx) {
String[] jars =
ctx.getResourceManager().getResources().keySet().stream()
.map(ResourceUri::getUri)
.toArray(String[]::new);
return buildStringArrayResult("jars", jars);
}
}
| ShowJarsOperation |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/component/StructComponentOneToManyTest.java | {
"start": 3455,
"end": 3923
} | class ____ {
private String name;
@OneToMany
private Set<Book> books;
public Author() {
}
public Author(String name, Book book) {
this.name = name;
this.books = book == null ? null : Set.of( book );
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Set<Book> getBooks() {
return books;
}
public void setBooks(Set<Book> books) {
this.books = books;
}
}
}
| Author |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/event/ApplicationReadyEvent.java | {
"start": 1303,
"end": 2472
} | class ____ extends SpringApplicationEvent {
private final ConfigurableApplicationContext context;
private final @Nullable Duration timeTaken;
/**
* Create a new {@link ApplicationReadyEvent} instance.
* @param application the current application
* @param args the arguments the application is running with
* @param context the context that was being created
* @param timeTaken the time taken to get the application ready to service requests
* @since 2.6.0
*/
public ApplicationReadyEvent(SpringApplication application, String[] args, ConfigurableApplicationContext context,
@Nullable Duration timeTaken) {
super(application, args);
this.context = context;
this.timeTaken = timeTaken;
}
/**
* Return the application context.
* @return the context
*/
public ConfigurableApplicationContext getApplicationContext() {
return this.context;
}
/**
* Return the time taken for the application to be ready to service requests, or
* {@code null} if unknown.
* @return the time taken to be ready to service requests
* @since 2.6.0
*/
public @Nullable Duration getTimeTaken() {
return this.timeTaken;
}
}
| ApplicationReadyEvent |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/serialization/Serializer.java | {
"start": 1606,
"end": 1674
} | class ____.
*
* @param <T> Type to be serialized from.
*/
public | name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.