language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
extensions/vertx/deployment/src/test/java/io/quarkus/vertx/deployment/VertxLoggingTest.java
|
{
"start": 394,
"end": 762
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(BeanThatLog.class));
@Inject
BeanThatLog bean;
@Test
public void test() {
bean.info();
bean.trace();
}
@ApplicationScoped
static
|
VertxLoggingTest
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/impl/verticle/PackageHelper.java
|
{
"start": 846,
"end": 1485
}
|
class ____ {
private final static String CLASS_FILE = ".class";
private final ClassLoader classLoader;
public PackageHelper(ClassLoader classLoader) {
this.classLoader = classLoader;
}
public List<JavaFileObject> find(String packageName) throws IOException {
String javaPackageName = packageName.replaceAll("\\.", "/");
List<JavaFileObject> result = new ArrayList<>();
Enumeration<URL> urlEnumeration = classLoader.getResources(javaPackageName);
while (urlEnumeration.hasMoreElements()) {
URL resource = urlEnumeration.nextElement();
//Need to urldecode it too, since bug in JDK URL
|
PackageHelper
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/test/java/org/springframework/boot/test/context/runner/AbstractApplicationContextRunnerTests.java
|
{
"start": 13561,
"end": 13682
}
|
interface ____ {
void configure(Example example);
}
@Configuration(proxyBeanMethods = false)
static
|
ExampleConfigurer
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/proxy/filter/StatFilterConcurrentTest.java
|
{
"start": 941,
"end": 2834
}
|
class ____ extends TestCase {
private DruidDataSource dataSource;
private StatFilter statFilter;
private int LOOP_COUNT = 1000 * 1;
public void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setMaxActive(100);
statFilter = new StatFilter();
dataSource.getProxyFilters().add(statFilter);
dataSource.setConnectionProperties("executeSleep=1");
}
public void tearDown() throws Exception {
dataSource.close();
}
public void test_stat() throws Exception {
concurrent(100);
for (JdbcSqlStat sqlStat : dataSource.getDataSourceStat().getSqlStatMap().values()) {
System.out.println(sqlStat.getConcurrentMax());
}
}
public void concurrent(int threadCount) throws Exception {
Thread[] threads = new Thread[threadCount];
final CountDownLatch endLatch = new CountDownLatch(threadCount);
for (int i = 0; i < threadCount; ++i) {
threads[i] = new Thread() {
public void run() {
try {
for (int i = 0; i < LOOP_COUNT; ++i) {
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
stmt.executeUpdate("select 1");
stmt.close();
conn.close();
}
} catch (Exception e) {
e.printStackTrace();
} finally {
endLatch.countDown();
}
}
};
}
for (int i = 0; i < threadCount; ++i) {
threads[i].start();
}
endLatch.await();
}
}
|
StatFilterConcurrentTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/access/EmbeddableDefaultAccessTests.java
|
{
"start": 1206,
"end": 4663
}
|
class ____ {
@Test
public void verifyBootModel(DomainModelScope scope) {
scope.withHierarchy( Person2.class, (personDescriptor) -> {
final Property nameProperty = personDescriptor.getProperty( "name" );
final Component nameMapping = (Component) nameProperty.getValue();
assertThat( nameMapping.getPropertySpan() ).isEqualTo( 2 );
final Property nameFirst = nameMapping.getProperty( 0 );
final Property nameLast = nameMapping.getProperty( 1 );
assertThat( nameFirst.getName() ).isEqualTo( "first" );
assertThat( nameLast.getName() ).isEqualTo( "last" );
assertThat( ( (BasicValue) nameFirst.getValue() ).getColumn().getText() ).isEqualTo( "first_name" );
assertThat( ( (BasicValue) nameLast.getValue() ).getColumn().getText() ).isEqualTo( "last_name" );
assertThat( ( (BasicValue) nameFirst.getValue() ).getJpaAttributeConverterDescriptor() ).isNotNull();
final Property aliasesProperty = personDescriptor.getProperty( "aliases" );
final Component aliasMapping = (Component) ( (Collection) aliasesProperty.getValue() ).getElement();
assertThat( aliasMapping.getPropertySpan() ).isEqualTo( 2 );
final Property aliasFirst = aliasMapping.getProperty( 0 );
final Property aliasLast = aliasMapping.getProperty( 1 );
assertThat( aliasFirst.getName() ).isEqualTo( "first" );
assertThat( aliasLast.getName() ).isEqualTo( "last" );
assertThat( ( (BasicValue) aliasFirst.getValue() ).getColumn().getText() ).isEqualTo( "first_name" );
assertThat( ( (BasicValue) aliasLast.getValue() ).getColumn().getText() ).isEqualTo( "last_name" );
assertThat( ( (BasicValue) aliasFirst.getValue() ).getJpaAttributeConverterDescriptor() ).isNotNull();
} );
}
@Test
public void verifyRuntimeModel(SessionFactoryScope scope) {
final RuntimeMetamodels runtimeMetamodels = scope.getSessionFactory().getRuntimeMetamodels();
final EntityMappingType personDescriptor = runtimeMetamodels.getEntityMappingType( Person2.class );
// Person defines FIELD access, while Name uses PROPERTY
// - if we find the property annotations, the attribute names will be
// `firstName` and `lastName`, and the columns `first_name` and `last_name`
// - otherwise, we have property and column names being `first` and `last`
final EmbeddableMappingType nameEmbeddable = ( (EmbeddedAttributeMapping) personDescriptor.findAttributeMapping( "name" ) ).getEmbeddableTypeDescriptor();
assertThat( nameEmbeddable.getNumberOfAttributeMappings() ).isEqualTo( 2 );
final AttributeMapping nameFirst = nameEmbeddable.getAttributeMapping( 0 );
final AttributeMapping nameLast = nameEmbeddable.getAttributeMapping( 1 );
assertThat( nameFirst.getAttributeName() ).isEqualTo( "first" );
assertThat( nameLast.getAttributeName() ).isEqualTo( "last" );
final PluralAttributeMapping aliasesAttribute = (PluralAttributeMapping) personDescriptor.findAttributeMapping( "aliases" );
final EmbeddableMappingType aliasEmbeddable = ( (EmbeddedCollectionPart) aliasesAttribute.getElementDescriptor() ).getEmbeddableTypeDescriptor();
assertThat( aliasEmbeddable.getNumberOfAttributeMappings() ).isEqualTo( 2 );
final AttributeMapping aliasFirst = nameEmbeddable.getAttributeMapping( 0 );
final AttributeMapping aliasLast = nameEmbeddable.getAttributeMapping( 1 );
assertThat( aliasFirst.getAttributeName() ).isEqualTo( "first" );
assertThat( aliasLast.getAttributeName() ).isEqualTo( "last" );
}
}
|
EmbeddableDefaultAccessTests
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/JpaQueryMethodUnitTests.java
|
{
"start": 2958,
"end": 19310
}
|
class ____ {
private static final String METHOD_NAME = "findByFirstname";
@Mock QueryExtractor extractor;
@Mock RepositoryMetadata metadata;
private ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
private Method invalidReturnType;
private Method pageableAndSort;
private Method pageableTwice;
private Method sortableTwice;
private Method findWithLockMethod;
private Method findsProjections;
private Method findsProjection;
private Method queryMethodWithCustomEntityFetchGraph;
/**
* @throws Exception
*/
@BeforeEach
void setUp() throws Exception {
invalidReturnType = InvalidRepository.class.getMethod(METHOD_NAME, String.class, Pageable.class);
pageableAndSort = InvalidRepository.class.getMethod(METHOD_NAME, String.class, Pageable.class, Sort.class);
pageableTwice = InvalidRepository.class.getMethod(METHOD_NAME, String.class, Pageable.class, Pageable.class);
sortableTwice = InvalidRepository.class.getMethod(METHOD_NAME, String.class, Sort.class, Sort.class);
findWithLockMethod = ValidRepository.class.getMethod("findOneLocked", Integer.class);
findsProjections = ValidRepository.class.getMethod("findsProjections");
findsProjection = ValidRepository.class.getMethod("findsProjection");
queryMethodWithCustomEntityFetchGraph = ValidRepository.class.getMethod("queryMethodWithCustomEntityFetchGraph",
Integer.class);
when(metadata.getReturnType(any(Method.class)))
.thenAnswer(invocation -> TypeInformation.fromReturnTypeOf(invocation.getArgument(0)));
}
@Test
void testname() throws Exception {
JpaQueryMethod method = getQueryMethod(UserRepository.class, "findByLastname", String.class);
assertThat(method.getNamedQueryName()).isEqualTo("User.findByLastname");
assertThat(method.isCollectionQuery()).isTrue();
assertThat(method.getAnnotatedQuery()).isNull();
assertThat(method.isNativeQuery()).isFalse();
}
@Test
void preventsNullRepositoryMethod() {
assertThatIllegalArgumentException().isThrownBy(() -> new JpaQueryMethod(null, metadata, factory, extractor));
}
@Test
void preventsNullQueryExtractor() throws Exception {
Method method = UserRepository.class.getMethod("findByLastname", String.class);
assertThatIllegalArgumentException().isThrownBy(() -> new JpaQueryMethod(method, metadata, factory, null));
}
@Test
void returnsCorrectName() throws Exception {
JpaQueryMethod method = getQueryMethod(UserRepository.class, "findByLastname", String.class);
assertThat(method.getName()).isEqualTo("findByLastname");
}
@Test
void returnsQueryIfAvailable() throws Exception {
JpaQueryMethod method = getQueryMethod(UserRepository.class, "findByLastname", String.class);
assertThat(method.getAnnotatedQuery()).isNull();
method = getQueryMethod(UserRepository.class, "findByAnnotatedQuery", String.class);
assertThat(method.getAnnotatedQuery()).isNotNull();
}
@Test
void rejectsInvalidReturntypeOnPagebleFinder() {
when(metadata.getReturnedDomainClass(any())).thenReturn((Class) User.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(User.class));
when(metadata.getRepositoryInterface()).thenReturn((Class) InvalidRepository.class);
assertThatIllegalStateException()
.isThrownBy(() -> new JpaQueryMethod(invalidReturnType, metadata, factory, extractor));
}
@Test
void rejectsPageableAndSortInFinderMethod() {
when(metadata.getReturnedDomainClass(any())).thenReturn((Class) User.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(User.class));
when(metadata.getRepositoryInterface()).thenReturn((Class) InvalidRepository.class);
assertThatIllegalStateException()
.isThrownBy(() -> new JpaQueryMethod(pageableAndSort, metadata, factory, extractor));
}
@Test
void rejectsTwoPageableParameters() {
assertThatIllegalStateException().isThrownBy(() -> new JpaQueryMethod(pageableTwice, metadata, factory, extractor));
}
@Test
void rejectsTwoSortableParameters() {
assertThatIllegalStateException().isThrownBy(() -> new JpaQueryMethod(sortableTwice, metadata, factory, extractor));
}
@Test
void recognizesModifyingMethod() throws Exception {
JpaQueryMethod method = getQueryMethod(UserRepository.class, "renameAllUsersTo", String.class);
assertThat(method.isModifyingQuery()).isTrue();
}
@Test
void rejectsModifyingMethodWithPageable() throws Exception {
Method method = InvalidRepository.class.getMethod("updateMethod", String.class, Pageable.class);
assertThatIllegalArgumentException().isThrownBy(() -> new JpaQueryMethod(method, metadata, factory, extractor));
}
@Test
void rejectsModifyingMethodWithSort() throws Exception {
Method method = InvalidRepository.class.getMethod("updateMethod", String.class, Sort.class);
assertThatIllegalArgumentException().isThrownBy(() -> new JpaQueryMethod(method, metadata, factory, extractor));
}
@Test
void discoversHintsCorrectly() throws Exception {
JpaQueryMethod method = getQueryMethod(UserRepository.class, "findByLastname", String.class);
List<QueryHint> hints = method.getHints();
assertThat(hints).isNotNull();
assertThat(hints.get(0).name()).isEqualTo("foo");
assertThat(hints.get(0).value()).isEqualTo("bar");
}
private JpaQueryMethod getQueryMethod(Class<?> repositoryInterface, String methodName, Class<?>... parameterTypes)
throws Exception {
Method method = repositoryInterface.getMethod(methodName, parameterTypes);
DefaultRepositoryMetadata repositoryMetadata = new DefaultRepositoryMetadata(repositoryInterface);
return new JpaQueryMethod(method, repositoryMetadata, factory, extractor);
}
@Test
void calculatesNamedQueryNamesCorrectly() throws Exception {
RepositoryMetadata metadata = new DefaultRepositoryMetadata(UserRepository.class);
JpaQueryMethod queryMethod = getQueryMethod(UserRepository.class, "findByLastname", String.class);
assertThat(queryMethod.getNamedQueryName()).isEqualTo("User.findByLastname");
Method method = UserRepository.class.getMethod("renameAllUsersTo", String.class);
queryMethod = new JpaQueryMethod(method, metadata, factory, extractor);
assertThat(queryMethod.getNamedQueryName()).isEqualTo("User.renameAllUsersTo");
method = UserRepository.class.getMethod("findSpecialUsersByLastname", String.class);
queryMethod = new JpaQueryMethod(method, metadata, factory, extractor);
assertThat(queryMethod.getNamedQueryName()).isEqualTo("SpecialUser.findSpecialUsersByLastname");
}
@Test // DATAJPA-117
void discoversNativeQuery() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "findByLastname", String.class);
assertThat(method.isNativeQuery()).isTrue();
}
@Test // DATAJPA-129
void considersAnnotatedNamedQueryName() throws Exception {
JpaQueryMethod queryMethod = getQueryMethod(ValidRepository.class, "findByNamedQuery");
assertThat(queryMethod.getNamedQueryName()).isEqualTo("HateoasAwareSpringDataWebConfiguration.bar");
}
@Test // DATAJPA-73
void discoversLockModeCorrectly() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "findOneLocked", Integer.class);
LockModeType lockMode = method.getLockModeType();
assertThat(lockMode).isEqualTo(LockModeType.PESSIMISTIC_WRITE);
}
@Test // DATAJPA-142
void returnsDefaultCountQueryName() throws Exception {
JpaQueryMethod method = getQueryMethod(UserRepository.class, "findByLastname", String.class);
assertThat(method.getNamedCountQueryName()).isEqualTo("User.findByLastname.count");
}
@Test // DATAJPA-142
void returnsDefaultCountQueryNameBasedOnConfiguredNamedQueryName() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "findByNamedQuery");
assertThat(method.getNamedCountQueryName()).isEqualTo("HateoasAwareSpringDataWebConfiguration.bar.count");
}
@Test // DATAJPA-207
@SuppressWarnings({ "rawtypes", "unchecked" })
void returnsTrueIfReturnTypeIsEntity() {
when(metadata.getDomainType()).thenReturn((Class) User.class);
when(metadata.getReturnedDomainClass(findsProjections)).thenReturn((Class) Integer.class);
when(metadata.getReturnedDomainClass(findsProjection)).thenReturn((Class) Integer.class);
assertThat(new JpaQueryMethod(findsProjections, metadata, factory, extractor).isQueryForEntity()).isFalse();
assertThat(new JpaQueryMethod(findsProjection, metadata, factory, extractor).isQueryForEntity()).isFalse();
}
@Test // DATAJPA-345
void detectsLockAndQueryHintsOnIfUsedAsMetaAnnotation() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotation");
assertThat(method.getLockModeType()).isEqualTo(LockModeType.OPTIMISTIC_FORCE_INCREMENT);
assertThat(method.getHints()).hasSize(1);
assertThat(method.getHints().get(0).name()).isEqualTo("foo");
assertThat(method.getHints().get(0).value()).isEqualTo("bar");
}
@Test // DATAJPA-466
void shouldStoreJpa21FetchGraphInformationAsHint() {
when(metadata.getDomainType()).thenReturn((Class) User.class);
when(metadata.getReturnedDomainClass(queryMethodWithCustomEntityFetchGraph)).thenReturn((Class) User.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(User.class));
when(metadata.getRepositoryInterface()).thenReturn((Class) InvalidRepository.class);
JpaQueryMethod method = new JpaQueryMethod(queryMethodWithCustomEntityFetchGraph, metadata, factory, extractor);
assertThat(method.getEntityGraph()).isNotNull();
assertThat(method.getEntityGraph().getName()).isEqualTo("User.propertyLoadPath");
assertThat(method.getEntityGraph().getType()).isEqualTo(EntityGraphType.LOAD);
}
@Test // DATAJPA-612
void shouldFindEntityGraphAnnotationOnOverriddenSimpleJpaRepositoryMethod() throws Exception {
when(metadata.getDomainType()).thenReturn((Class) User.class);
when(metadata.getReturnedDomainClass(any())).thenReturn((Class) User.class);
when(metadata.getReturnedDomainClass(queryMethodWithCustomEntityFetchGraph)).thenReturn((Class) User.class);
when(metadata.getRepositoryInterface()).thenReturn((Class) JpaRepositoryOverride.class);
JpaQueryMethod method = new JpaQueryMethod(JpaRepositoryOverride.class.getMethod("findAll"), metadata, factory,
extractor);
assertThat(method.getEntityGraph()).isNotNull();
assertThat(method.getEntityGraph().getName()).isEqualTo("User.detail");
assertThat(method.getEntityGraph().getType()).isEqualTo(EntityGraphType.FETCH);
}
@Test // DATAJPA-689
void shouldFindEntityGraphAnnotationOnOverriddenSimpleJpaRepositoryMethodFindOne() throws Exception {
when(metadata.getDomainType()).thenReturn((Class) User.class);
when(metadata.getReturnedDomainClass(any())).thenReturn((Class) User.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(User.class));
when(metadata.getRepositoryInterface()).thenReturn((Class) InvalidRepository.class);
JpaQueryMethod method = new JpaQueryMethod(JpaRepositoryOverride.class.getMethod("findOne", Integer.class),
metadata, factory, extractor);
assertThat(method.getEntityGraph()).isNotNull();
assertThat(method.getEntityGraph().getName()).isEqualTo("User.detail");
assertThat(method.getEntityGraph().getType()).isEqualTo(EntityGraphType.FETCH);
}
/**
* DATAJPA-696
*/
@Test
void shouldFindEntityGraphAnnotationOnQueryMethodGetOneByWithDerivedName() throws Exception {
when(metadata.getDomainType()).thenReturn((Class) User.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(User.class));
when(metadata.getReturnedDomainClass(any())).thenReturn((Class) User.class);
when(metadata.getRepositoryInterface()).thenReturn((Class) JpaRepositoryOverride.class);
JpaQueryMethod method = new JpaQueryMethod(JpaRepositoryOverride.class.getMethod("getOneById", Integer.class),
metadata, factory, extractor);
assertThat(method.getEntityGraph()).isNotNull();
assertThat(method.getEntityGraph().getName()).isEqualTo("User.getOneById");
assertThat(method.getEntityGraph().getType()).isEqualTo(EntityGraphType.FETCH);
}
@Test // DATAJPA-758
void allowsPositionalBindingEvenIfParametersAreNamed() throws Exception {
getQueryMethod(ValidRepository.class, "queryWithPositionalBinding", String.class);
}
@Test // DATAJPA-871
void usesAliasedValueForLockLockMode() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getLockModeType()).isEqualTo(LockModeType.PESSIMISTIC_FORCE_INCREMENT);
}
@Test // DATAJPA-871
void usesAliasedValueForQueryHints() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getHints()).hasSize(1);
assertThat(method.getHints().get(0).name()).isEqualTo("foo");
assertThat(method.getHints().get(0).value()).isEqualTo("bar");
}
@Test // DATAJPA-871
void usesAliasedValueForQueryHintsCounting() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.applyHintsToCountQuery()).isTrue();
}
@Test // DATAJPA-871
void usesAliasedValueForModifyingClearAutomatically() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.isModifyingQuery()).isTrue();
assertThat(method.getClearAutomatically()).isTrue();
}
@Test // DATAJPA-871
void usesAliasedValueForHintsApplyToCountQuery() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.applyHintsToCountQuery()).isTrue();
}
@Test // DATAJPA-871
void usesAliasedValueForQueryValue() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getAnnotatedQuery()).isEqualTo("select u from User u where u.firstname = ?1");
}
@Test // DATAJPA-871
void usesAliasedValueForQueryCountQuery() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getCountQuery()).isEqualTo("select u from User u where u.lastname = ?1");
}
@Test // DATAJPA-871
void usesAliasedValueForQueryCountQueryProjection() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getCountQueryProjection()).isEqualTo("foo-bar");
}
@Test // DATAJPA-871
void usesAliasedValueForQueryNamedQueryName() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getNamedQueryName()).isEqualTo("namedQueryName");
}
@Test // DATAJPA-871
void usesAliasedValueForQueryNamedCountQueryName() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.getNamedCountQueryName()).isEqualTo("namedCountQueryName");
}
@Test // DATAJPA-871
void usesAliasedValueForQueryNativeQuery() throws Exception {
JpaQueryMethod method = getQueryMethod(ValidRepository.class, "withMetaAnnotationUsingAliasFor");
assertThat(method.isNativeQuery()).isTrue();
}
@Test // DATAJPA-871
void usesAliasedValueForEntityGraph() throws Exception {
when(metadata.getDomainType()).thenReturn((Class) User.class);
when(metadata.getDomainTypeInformation()).thenReturn((TypeInformation) TypeInformation.of(User.class));
when(metadata.getReturnedDomainClass(any())).thenReturn((Class) User.class);
when(metadata.getRepositoryInterface()).thenReturn((Class) JpaRepositoryOverride.class);
JpaQueryMethod method = new JpaQueryMethod(
JpaRepositoryOverride.class.getMethod("getOneWithCustomEntityGraphAnnotation"), metadata, factory, extractor);
assertThat(method.getEntityGraph()).isNotNull();
assertThat(method.getEntityGraph().getName()).isEqualTo("User.detail");
assertThat(method.getEntityGraph().getType()).isEqualTo(EntityGraphType.LOAD);
}
/**
* Interface to define invalid repository methods for testing.
*
* @author Oliver Gierke
*/
|
JpaQueryMethodUnitTests
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/ArrayDeserializationTest.java
|
{
"start": 3559,
"end": 3651
}
|
class ____ {
public String height;
public String width;
}
static
|
Things
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/util/WithDefaultConstructor.java
|
{
"start": 687,
"end": 759
}
|
class ____ a default constructor.
*
* @author Phillip Webb
*/
public
|
with
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/EnumerableToLogicalTableScan.java
|
{
"start": 1537,
"end": 2332
}
|
class ____
extends RelRule<EnumerableToLogicalTableScan.EnumerableToLogicalTableScanConfig> {
public static final EnumerableToLogicalTableScan INSTANCE =
EnumerableToLogicalTableScan.EnumerableToLogicalTableScanConfig.DEFAULT.toRule();
private EnumerableToLogicalTableScan(EnumerableToLogicalTableScanConfig config) {
super(config);
}
@Override
public void onMatch(RelOptRuleCall call) {
TableScan oldRel = call.rel(0);
RelOptTable table = oldRel.getTable();
LogicalTableScan newRel =
LogicalTableScan.create(oldRel.getCluster(), table, oldRel.getHints());
call.transformTo(newRel);
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public
|
EnumerableToLogicalTableScan
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/KeyGroupRangeOffsets.java
|
{
"start": 1138,
"end": 6354
}
|
class ____ implements Iterable<Tuple2<Integer, Long>>, Serializable {
public static IllegalArgumentException newIllegalKeyGroupException(
int keyGroup, KeyGroupRange keyGroupRange) {
return new IllegalArgumentException(
String.format(
"Key group %d is not in %s. Unless you're directly using low level state access APIs, this"
+ " is most likely caused by non-deterministic shuffle key (hashCode and equals implementation).",
keyGroup, keyGroupRange));
}
private static final long serialVersionUID = 6595415219136429696L;
/** the range of key-groups */
private final KeyGroupRange keyGroupRange;
/** the aligned array of offsets for the key-groups */
private final long[] offsets;
/**
* Creates key-group range with offsets from the given key-group range. The order of given
* offsets must be aligned with respect to the key-groups in the range.
*
* @param keyGroupRange The range of key-groups.
* @param offsets The aligned array of offsets for the given key-groups.
*/
public KeyGroupRangeOffsets(KeyGroupRange keyGroupRange, long[] offsets) {
this.keyGroupRange = Preconditions.checkNotNull(keyGroupRange);
this.offsets = Preconditions.checkNotNull(offsets);
Preconditions.checkArgument(offsets.length == keyGroupRange.getNumberOfKeyGroups());
}
/**
* Creates key-group range with offsets from the given start key-group to end key-group. The
* order of given offsets must be aligned with respect to the key-groups in the range.
*
* @param rangeStart Start key-group of the range (inclusive)
* @param rangeEnd End key-group of the range (inclusive)
* @param offsets The aligned array of offsets for the given key-groups.
*/
public KeyGroupRangeOffsets(int rangeStart, int rangeEnd, long[] offsets) {
this(KeyGroupRange.of(rangeStart, rangeEnd), offsets);
}
/**
* Creates key-group range with offsets from the given start key-group to end key-group. All
* offsets are initially zero.
*
* @param rangeStart Start key-group of the range (inclusive)
* @param rangeEnd End key-group of the range (inclusive)
*/
public KeyGroupRangeOffsets(int rangeStart, int rangeEnd) {
this(KeyGroupRange.of(rangeStart, rangeEnd));
}
/**
* Creates key-group range with offsets for the given key-group range, where all offsets are
* initially zero.
*
* @param keyGroupRange The range of key-groups.
*/
public KeyGroupRangeOffsets(KeyGroupRange keyGroupRange) {
this(keyGroupRange, new long[keyGroupRange.getNumberOfKeyGroups()]);
}
/**
* Returns the offset for the given key-group. The key-group must be contained in the range.
*
* @param keyGroup Key-group for which we query the offset. Key-group must be contained in the
* range.
* @return The offset for the given key-group which must be contained in the range.
*/
public long getKeyGroupOffset(int keyGroup) {
return offsets[computeKeyGroupIndex(keyGroup)];
}
/**
* Sets the offset for the given key-group. The key-group must be contained in the range.
*
* @param keyGroup Key-group for which we set the offset. Must be contained in the range.
* @param offset Offset for the key-group.
*/
public void setKeyGroupOffset(int keyGroup, long offset) {
offsets[computeKeyGroupIndex(keyGroup)] = offset;
}
/**
* Returns a key-group range with offsets which is the intersection of the internal key-group
* range with the given key-group range.
*
* @param keyGroupRange Key-group range to intersect with the internal key-group range.
* @return The key-group range with offsets for the intersection of the internal key-group range
* with the given key-group range.
*/
public KeyGroupRangeOffsets getIntersection(KeyGroupRange keyGroupRange) {
Preconditions.checkNotNull(keyGroupRange);
KeyGroupRange intersection = this.keyGroupRange.getIntersection(keyGroupRange);
long[] subOffsets = new long[intersection.getNumberOfKeyGroups()];
if (subOffsets.length > 0) {
System.arraycopy(
offsets,
computeKeyGroupIndex(intersection.getStartKeyGroup()),
subOffsets,
0,
subOffsets.length);
}
return new KeyGroupRangeOffsets(intersection, subOffsets);
}
public KeyGroupRange getKeyGroupRange() {
return keyGroupRange;
}
@Override
public Iterator<Tuple2<Integer, Long>> iterator() {
return new KeyGroupOffsetsIterator();
}
private int computeKeyGroupIndex(int keyGroup) {
int idx = keyGroup - keyGroupRange.getStartKeyGroup();
if (idx < 0 || idx >= offsets.length) {
throw newIllegalKeyGroupException(keyGroup, keyGroupRange);
}
return idx;
}
/** Iterator for the Key-group/Offset pairs. */
private final
|
KeyGroupRangeOffsets
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/LogManager.java
|
{
"start": 1559,
"end": 1725
}
|
class ____ to obtain a named
* {@link Logger}. The method {@link #getLogger()} is provided as the most convenient way to obtain a named Logger based
* on the calling
|
is
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_ignoringFieldsOfTypesMatchingRegexes_Test.java
|
{
"start": 5503,
"end": 6868
}
|
class ____ {
private final Collection<NumberHolder> holders;
WithNumberHolderCollection(Collection<NumberHolder> holders) {
this.holders = holders;
}
Collection<NumberHolder> getNumberHolders() {
return holders;
}
}
// GIVEN
final Number intValue = 12;
final Double doubleValueA = 12.34;
final Double doubleValueB = 56.78;
final List<NumberHolder> holdersA = list(new NumberHolder(intValue), new NumberHolder(doubleValueA));
final List<NumberHolder> holdersB = list(new NumberHolder(intValue), new NumberHolder(doubleValueB));
WithNumberHolderCollection actual = new WithNumberHolderCollection(newHashSet(holdersA));
recursiveComparisonConfiguration.ignoreFieldsOfTypesMatchingRegexes(".*NumberHolder");
// WHEN/THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(new WithNumberHolderCollection(newHashSet(holdersB)));
// bonus check also ordered collection
actual = new WithNumberHolderCollection(new ArrayList<>(holdersA));
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(new WithNumberHolderCollection(new ArrayList<>(holdersB)));
}
@Test
void should_pass_when_fields_with_given_types_are_ignored_on_unordered_maps() {
|
WithNumberHolderCollection
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/uuid/rfc9562/EntitySix.java
|
{
"start": 522,
"end": 923
}
|
class ____ {
@Id
@GeneratedValue
@UuidGenerator(algorithm = UuidVersion6Strategy.class)
private UUID id;
@Basic
private String name;
protected EntitySix() {
// for Hibernate use
}
public EntitySix(String name) {
this.name = name;
}
public UUID getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
EntitySix
|
java
|
elastic__elasticsearch
|
x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TimeoutChecker.java
|
{
"start": 1274,
"end": 1845
}
|
class ____ not use the {@link Thread#interrupt} mechanism because some other
* methods already convert interruptions to other types of exceptions (for example
* {@link Grok#captures}) and this would lead to non-uniform exception types and
* misleading error messages in the event that the interrupt was handled by one of
* these methods. The code in the long running operation would still have to
* periodically call {@link Thread#interrupt}, so it is not much more of an
* inconvenience to have to periodically call this class's {@link #check} method.
*/
public
|
does
|
java
|
apache__flink
|
flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/flink/FlinkResourceFactory.java
|
{
"start": 939,
"end": 1426
}
|
interface ____ {
/**
* Returns a {@link FlinkResource} instance. If the instance could not be instantiated (for
* example, because a mandatory parameter was missing), then an exception should be thrown.
*
* @param setup setup instructions for the FlinkResource
* @return FlinkResource instance,
* @throws Exception if the instance could not be instantiated
*/
FlinkResource create(FlinkResourceSetup setup) throws Exception;
}
|
FlinkResourceFactory
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/util/CronExpressionTest.java
|
{
"start": 1140,
"end": 7470
}
|
class ____ {
@Test
void testDayOfMonth() throws Exception {
final CronExpression parser = new CronExpression("0 */15,12 7-11,13-17 * * ?");
final Date date = new GregorianCalendar(2015, 11, 2).getTime();
final Date fireDate = parser.getNextValidTimeAfter(date);
final Date expected = new GregorianCalendar(2015, 11, 2, 7, 0, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
@Test
void testDayOfWeek() throws Exception {
final CronExpression parser = new CronExpression("0 */15,12 7-11,13-17 ? * Fri");
final Date date = new GregorianCalendar(2015, 11, 2).getTime();
final Date fireDate = parser.getNextValidTimeAfter(date);
final Date expected = new GregorianCalendar(2015, 11, 4, 7, 0, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
@Test
void testNextMonth() throws Exception {
final CronExpression parser = new CronExpression("0 */15,12 7-11,13-17 1 * ?");
final Date date = new GregorianCalendar(2015, 11, 2).getTime();
final Date fireDate = parser.getNextValidTimeAfter(date);
final Date expected = new GregorianCalendar(2016, 0, 1, 7, 0, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
@Test
void testLastDayOfMonth() throws Exception {
final CronExpression parser = new CronExpression("0 */15,12 7-11,13-17 L * ?");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getNextValidTimeAfter(date);
final Date expected = new GregorianCalendar(2015, 10, 30, 7, 0, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
@Test
void testNextDay() throws Exception {
final CronExpression parser = new CronExpression("0 0 0 * * ?");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getNextValidTimeAfter(date);
final Date expected = new GregorianCalendar(2015, 10, 3, 0, 0, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
@Test
void testPrevFireTime1() throws Exception {
final CronExpression parser = new CronExpression("0 */15,12 7-11,13-17 L * ?");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 9, 31, 17, 45, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
@Test
void testPrevFireTime2() throws Exception {
final CronExpression parser = new CronExpression("0 0/5 14,18 * * ?");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 10, 1, 18, 55, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
/**
* 35,45, and 55 minutes past the hour every hour.
*/
@Test
void testPrevFireTime3() throws Exception {
final CronExpression parser = new CronExpression("0 35/10 * * * ?");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 10, 1, 23, 55, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
/**
*
* 10:15 every day.
*/
@Test
void testPrevFireTimeTenFifteen() throws Exception {
final CronExpression parser = new CronExpression("0 15 10 * * ? *");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 10, 1, 10, 15, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
/**
* Every day from 2 pm to 2:59 pm
*/
@Test
void testPrevFireTimeTwoPM() throws Exception {
final CronExpression parser = new CronExpression("0 * 14 * * ?");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 10, 1, 14, 59, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
/**
* 2:10pm and at 2:44pm every Wednesday in the month of March.
*/
@Test
void testPrevFireTimeMarch() throws Exception {
final CronExpression parser = new CronExpression("0 10,44 14 ? 3 WED");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 2, 25, 14, 44, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
/**
* Fire at 10:15am on the third Friday of every month.
*/
@Test
void testPrevFireTimeThirdFriday() throws Exception {
final CronExpression parser = new CronExpression("0 15 10 ? * 6#3");
final Date date = new GregorianCalendar(2015, 10, 2).getTime();
final Date fireDate = parser.getPrevFireTime(date);
final Date expected = new GregorianCalendar(2015, 9, 16, 10, 15, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
/*
* Input time with milliseconds will correctly return the next
* scheduled time.
*/
@Test
void testTimeBeforeMilliseconds() throws Exception {
final CronExpression parser = new CronExpression("0 0 0 * * ?");
final GregorianCalendar cal = new GregorianCalendar(2015, 10, 2, 0, 0, 0);
cal.set(Calendar.MILLISECOND, 100);
final Date date = cal.getTime();
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
System.err.println(sdf.format(date));
final Date fireDate = parser.getTimeBefore(date);
System.err.println(sdf.format(fireDate));
final Date expected = new GregorianCalendar(2015, 10, 1, 0, 0, 0).getTime();
assertEquals(expected, fireDate, "Dates not equal.");
}
}
|
CronExpressionTest
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/function/FailableLongFunction.java
|
{
"start": 1111,
"end": 1805
}
|
interface ____<R, E extends Throwable> {
/** NOP singleton */
@SuppressWarnings("rawtypes")
FailableLongFunction NOP = t -> null;
/**
* Gets the NOP singleton.
*
* @param <R> Return type.
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <R, E extends Throwable> FailableLongFunction<R, E> nop() {
return NOP;
}
/**
* Applies this function.
*
* @param input the input for the function
* @return the result of the function
* @throws E Thrown when the function fails.
*/
R apply(long input) throws E;
}
|
FailableLongFunction
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/ChangeFieldThroughClassTest.java
|
{
"start": 1317,
"end": 1803
}
|
class ____ implements BuildCompatibleExtension {
@Enhancement(types = MyServiceConsumer.class)
public void service(ClassConfig clazz) {
clazz.fields()
.stream()
.filter(it -> "myService".equals(it.info().name()))
.forEach(field -> field.addAnnotation(AnnotationBuilder.of(MyQualifier.class).build()));
}
}
// ---
@Qualifier
@Retention(RetentionPolicy.RUNTIME)
@
|
MyExtension
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/naming/ImplicitCollectionTableNameSource.java
|
{
"start": 355,
"end": 943
}
|
interface ____ extends ImplicitNameSource {
/**
* Access to the physical name of the owning entity's table.
*
* @return Owning entity's table name.
*/
Identifier getOwningPhysicalTableName();
/**
* Access to entity naming information for the owning side.
*
* @return Owning entity naming information
*/
EntityNaming getOwningEntityNaming();
/**
* Access to the name of the attribute, from the owning side, that defines the association.
*
* @return The owning side's attribute name.
*/
AttributePath getOwningAttributePath();
}
|
ImplicitCollectionTableNameSource
|
java
|
elastic__elasticsearch
|
libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/ESUTF8StreamJsonParser.java
|
{
"start": 1514,
"end": 7077
}
|
class ____ extends UTF8StreamJsonParser implements OptimizedTextCapable {
protected int stringEnd = -1;
protected int stringLength;
protected byte[] lastOptimisedValue;
private final List<Integer> backslashes = new ArrayList<>();
public ESUTF8StreamJsonParser(
IOContext ctxt,
int features,
InputStream in,
ObjectCodec codec,
ByteQuadsCanonicalizer sym,
byte[] inputBuffer,
int start,
int end,
int bytesPreProcessed,
boolean bufferRecyclable
) {
super(ctxt, features, in, codec, sym, inputBuffer, start, end, bytesPreProcessed, bufferRecyclable);
}
/**
* Method that will try to get underlying UTF-8 encoded bytes of the current string token.
* This is only a best-effort attempt; if there is some reason the bytes cannot be retrieved, this method will return null.
*/
@Override
public Text getValueAsText() throws IOException {
// _tokenIncomplete is true when UTF8StreamJsonParser has already processed this value.
if (_currToken == JsonToken.VALUE_STRING && _tokenIncomplete) {
if (lastOptimisedValue != null) {
return new Text(new XContentString.UTF8Bytes(lastOptimisedValue), stringLength);
}
if (stringEnd > 0) {
final int len = stringEnd - 1 - _inputPtr;
return new Text(new XContentString.UTF8Bytes(_inputBuffer, _inputPtr, len), stringLength);
}
return _finishAndReturnText();
}
return null;
}
protected Text _finishAndReturnText() throws IOException {
int ptr = _inputPtr;
if (ptr >= _inputEnd) {
_loadMoreGuaranteed();
ptr = _inputPtr;
}
int startPtr = ptr;
final int[] codes = INPUT_CODES_UTF8;
final int max = _inputEnd;
final byte[] inputBuffer = _inputBuffer;
stringLength = 0;
backslashes.clear();
loop: while (true) {
if (ptr >= max) {
return null;
}
int c = inputBuffer[ptr] & 0xFF;
switch (codes[c]) {
case 0 -> {
++ptr;
++stringLength;
}
case 1 -> {
if (c == INT_QUOTE) {
// End of the string
break loop;
}
assert c == INT_BACKSLASH;
backslashes.add(ptr);
++ptr;
if (ptr >= max) {
// Backslash at end of file
return null;
}
c = inputBuffer[ptr] & 0xFF;
if (c == '"' || c == '/' || c == '\\') {
ptr += 1;
stringLength += 1;
} else {
// Any other escaped sequence requires replacing the sequence with
// a new character, which we don't support in the optimized path
return null;
}
}
case 2, 3, 4 -> {
int bytesToSkip = codes[c];
if (ptr + bytesToSkip > max) {
return null;
}
ptr += bytesToSkip;
// Code points that require 4 bytes in UTF-8 will use 2 chars in UTF-16.
stringLength += (bytesToSkip == 4 ? 2 : 1);
}
default -> {
return null;
}
}
}
stringEnd = ptr + 1;
if (backslashes.isEmpty()) {
return new Text(new XContentString.UTF8Bytes(inputBuffer, startPtr, ptr - startPtr), stringLength);
} else {
byte[] buff = new byte[ptr - startPtr - backslashes.size()];
int copyPtr = startPtr;
int destPtr = 0;
for (Integer backslash : backslashes) {
int length = backslash - copyPtr;
System.arraycopy(inputBuffer, copyPtr, buff, destPtr, length);
destPtr += length;
copyPtr = backslash + 1;
}
System.arraycopy(inputBuffer, copyPtr, buff, destPtr, ptr - copyPtr);
lastOptimisedValue = buff;
return new Text(new XContentString.UTF8Bytes(buff), stringLength);
}
}
@Override
public JsonToken nextToken() throws IOException {
resetCurrentTokenState();
return super.nextToken();
}
@Override
public boolean nextFieldName(SerializableString str) throws IOException {
resetCurrentTokenState();
return super.nextFieldName(str);
}
@Override
public String nextFieldName() throws IOException {
resetCurrentTokenState();
return super.nextFieldName();
}
/**
* Resets the current token state before moving to the next. It resets the _inputPtr and the
* _tokenIncomplete only if {@link UTF8StreamJsonParser#getText()} or {@link UTF8StreamJsonParser#getValueAsString()}
* hasn't run yet.
*/
private void resetCurrentTokenState() {
if (_currToken == JsonToken.VALUE_STRING && _tokenIncomplete && stringEnd > 0) {
_inputPtr = stringEnd;
_tokenIncomplete = false;
}
lastOptimisedValue = null;
stringEnd = -1;
}
}
|
ESUTF8StreamJsonParser
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/hybrid/HybridContextLoaderTests.java
|
{
"start": 2156,
"end": 2296
}
|
class ____ {
@Bean
String fooFromJava() {
return "Java";
}
@Bean
String enigma() {
return "enigma from Java";
}
}
}
|
Config
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/MessageHelper.java
|
{
"start": 3628,
"end": 50763
}
|
class ____ name as a String.
* <p/>
* Will skip java.lang. for the build in Java types.
*
* @param message the message with the body
* @return the body type name as String, can return <tt>null</null> if no body
*/
public static String getBodyTypeName(Message message) {
if (message == null) {
return null;
}
String answer = ObjectHelper.classCanonicalName(message.getBody());
if (answer != null && answer.startsWith("java.lang.")) {
return answer.substring(10);
}
return answer;
}
/**
* If the message body contains a {@link StreamCache} instance, reset the cache to enable reading from it again.
*
* @param message the message for which to reset the body
*/
public static void resetStreamCache(Message message) {
if (message == null) {
return;
}
Object body = null;
try {
body = message.getBody();
} catch (Exception e) {
// ignore
}
if (body instanceof StreamCache streamCache) {
streamCache.reset();
}
}
/**
* Returns the MIME content type on the message or <tt>null</tt> if none defined
*/
public static String getContentType(Message message) {
return message.getHeader(Exchange.CONTENT_TYPE, String.class);
}
/**
* Returns the MIME content encoding on the message or <tt>null</tt> if none defined
*/
public static String getContentEncoding(Message message) {
return message.getHeader(Exchange.CONTENT_ENCODING, String.class);
}
/**
* Extracts the body for logging purpose.
* <p/>
* Will clip the body if its too big for logging. Will prepend the message with <tt>Message: </tt>
*
* @param message the message
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_STREAMS
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractBodyForLogging(Message message) {
return extractBodyForLogging(message, "Message: ");
}
/**
* Extracts the value for logging purpose.
* <p/>
* Will clip the value if its too big for logging.
*
* @param value the value
* @param message the message
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_STREAMS
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractValueForLogging(Object value, Message message) {
boolean streams = isStreams(message);
// default to 1000 chars
int maxChars = 1000;
if (message.getExchange() != null) {
String property = message.getExchange().getContext().getGlobalOption(Exchange.LOG_DEBUG_BODY_MAX_CHARS);
if (property != null) {
maxChars = message.getExchange().getContext().getTypeConverter().convertTo(Integer.class, property);
}
}
return extractValueForLogging(value, message, streams, false, maxChars);
}
private static boolean isStreams(Message message) {
boolean streams = false;
if (message.getExchange() != null) {
String globalOption = message.getExchange().getContext().getGlobalOption(Exchange.LOG_DEBUG_BODY_STREAMS);
if (globalOption != null) {
streams = message.getExchange().getContext().getTypeConverter().convertTo(Boolean.class, message.getExchange(),
globalOption);
}
}
return streams;
}
/**
* Extracts the body for logging purpose.
* <p/>
* Will clip the body if its too big for logging.
*
* @param message the message
* @param prepend a message to prepend
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_STREAMS
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractBodyForLogging(Message message, String prepend) {
boolean streams = isStreams(message);
return extractBodyForLogging(message, prepend, streams, false);
}
/**
* Extracts the body for logging purpose.
* <p/>
* Will clip the body if its too big for logging.
*
* @param message the message
* @param prepend a message to prepend
* @param allowStreams whether or not streams is allowed
* @param allowFiles whether or not files is allowed (currently not in use)
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_STREAMS
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractBodyForLogging(Message message, String prepend, boolean allowStreams, boolean allowFiles) {
// default to 1000 chars
int maxChars = 1000;
if (message.getExchange() != null) {
String globalOption = message.getExchange().getContext().getGlobalOption(Exchange.LOG_DEBUG_BODY_MAX_CHARS);
if (globalOption != null) {
maxChars = message.getExchange().getContext().getTypeConverter().convertTo(Integer.class, globalOption);
}
}
return extractBodyForLogging(message, prepend, allowStreams, allowFiles, maxChars);
}
/**
* Extracts the body for logging purpose.
* <p/>
* Will clip the body if its too big for logging.
*
* @param message the message
* @param prepend a message to prepend (optional)
* @param allowStreams whether or not streams is allowed
* @param allowFiles whether or not files is allowed (currently not in use)
* @param maxChars limit to maximum number of chars. Use 0 for not limit, and -1 for turning logging message
* body off.
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractBodyForLogging(
Message message, String prepend, boolean allowStreams, boolean allowFiles, int maxChars) {
String value = extractValueForLogging(message.getBody(), message, allowStreams, allowFiles, maxChars);
if (prepend != null) {
return prepend + value;
} else {
return value;
}
}
/**
* Extracts the body for logging purpose.
* <p/>
* Will clip the body if its too big for logging.
*
* @param message the message
* @param prepend a message to prepend (optional)
* @param allowCachedStreams whether or not cached streams is allowed
* @param allowStreams whether or not streams is allowed
* @param allowFiles whether or not files is allowed (currently not in use)
* @param maxChars limit to maximum number of chars. Use 0 for not limit, and -1 for turning logging
* message body off.
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractBodyForLogging(
Message message, String prepend, boolean allowCachedStreams, boolean allowStreams, boolean allowFiles,
int maxChars) {
String value
= extractValueForLogging(message.getBody(), message, allowCachedStreams, allowStreams, allowFiles, maxChars);
if (prepend != null) {
return prepend + value;
} else {
return value;
}
}
/**
* Extracts the value for logging purpose.
* <p/>
* Will clip the value if its too big for logging.
*
* @param obj the value
* @param message the message
* @param allowStreams whether or not streams is allowed
* @param allowFiles whether or not files is allowed (currently not in use)
* @param maxChars limit to maximum number of chars. Use 0 for not limit, and -1 for turning logging message
* body off.
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractValueForLogging(
Object obj, Message message, boolean allowStreams, boolean allowFiles, int maxChars) {
return extractValueForLogging(obj, message, allowStreams, allowStreams, allowFiles, maxChars);
}
/**
* Extracts the value for logging purpose.
* <p/>
* Will clip the value if its too big for logging.
*
* @param obj the value
* @param message the message
* @param allowCachedStreams whether or not cached streams is allowed
* @param allowStreams whether or not streams is allowed
* @param allowFiles whether or not files is allowed (currently not in use)
* @param maxChars limit to maximum number of chars. Use 0 for not limit, and -1 for turning logging
* message body off.
* @return the logging message
* @see org.apache.camel.Exchange#LOG_DEBUG_BODY_MAX_CHARS
*/
public static String extractValueForLogging(
Object obj, Message message, boolean allowCachedStreams, boolean allowStreams, boolean allowFiles, int maxChars) {
if (maxChars < 0) {
return "[Body is not logged]";
}
if (obj == null) {
return "[Body is null]";
}
if (!allowFiles) {
if (obj instanceof WrappedFile || obj instanceof File || obj instanceof Path) {
return "[Body is file based: " + obj + "]";
}
}
if (!allowStreams) {
boolean allow = allowCachedStreams && obj instanceof StreamCache;
if (!allow) {
if (obj instanceof StreamCache) {
return "[Body is instance of org.apache.camel.StreamCache]";
} else if (obj instanceof InputStream) {
return "[Body is instance of java.io.InputStream]";
} else if (obj instanceof OutputStream) {
return "[Body is instance of java.io.OutputStream]";
} else if (obj instanceof Reader) {
return "[Body is instance of java.io.Reader]";
} else if (obj instanceof Writer) {
return "[Body is instance of java.io.Writer]";
} else if (obj.getClass().getName().equals("javax.xml.transform.stax.StAXSource")) {
// StAX source is streaming based
return "[Body is instance of javax.xml.transform.Source]";
}
}
}
// is the body a stream cache or input stream
StreamCache cache = null;
InputStream is = null;
if (obj instanceof StreamCache streamCache) {
cache = streamCache;
} else if (obj instanceof InputStream inputStream) {
is = inputStream;
}
// grab the message body as a string
String body = null;
if (message.getExchange() != null) {
try {
body = message.getExchange().getContext().getTypeConverter().tryConvertTo(String.class, message.getExchange(),
obj);
} catch (Exception e) {
// ignore as the body is for logging purpose
}
}
if (body == null) {
try {
body = obj.toString();
} catch (Exception e) {
// ignore as the body is for logging purpose
}
}
// reset stream cache after use
if (cache != null) {
cache.reset();
} else if (is != null && is.markSupported()) {
try {
is.reset();
} catch (IOException e) {
// ignore
}
}
if (body == null) {
return "[Body is null]";
}
// clip body if length enabled and the body is too big
if (maxChars > 0 && body.length() > maxChars) {
body = body.substring(0, maxChars) + "... [Body clipped after " + maxChars + " chars, total length is "
+ body.length() + "]";
}
return body;
}
/**
* Dumps the message as a generic XML structure.
*
* @param message the message
* @return the XML
*/
public static String dumpAsXml(Message message) {
return dumpAsXml(message, true);
}
/**
* Dumps the message as a generic XML structure.
*
* @param message the message
* @param includeBody whether or not to include the message body
* @return the XML
*/
public static String dumpAsXml(Message message, boolean includeBody) {
return dumpAsXml(message, includeBody, 0);
}
/**
* Dumps the message as a generic XML structure.
*
* @param message the message
* @param includeBody whether or not to include the message body
* @param indent number of spaces to indent
* @return the XML
*/
public static String dumpAsXml(Message message, boolean includeBody, int indent) {
return dumpAsXml(message, includeBody, indent, false, true, 128 * 1024);
}
/**
* Dumps the message as a generic XML structure.
*
* @param message the message
* @param includeBody whether or not to include the message body
* @param indent number of spaces to indent
* @param allowStreams whether to include message body if they are stream based
* @param allowFiles whether to include message body if they are file based
* @param maxChars clip body after maximum chars (to avoid very big messages). Use 0 or negative value to not
* limit at all.
* @return the XML
*/
public static String dumpAsXml(
Message message, boolean includeBody, int indent, boolean allowStreams, boolean allowFiles, int maxChars) {
return dumpAsXml(message, false, false, includeBody, indent, allowStreams, allowStreams, allowFiles, maxChars);
}
/**
* Dumps the message as a generic XML structure.
*
* @param message the message
* @param includeExchangeProperties whether or not to include exchange properties
* @param includeExchangeVariables whether or not to include exchange variables
* @param includeBody whether or not to include the message body
* @param indent number of spaces to indent
* @param allowCachedStreams whether to include message body if they are stream cache based
* @param allowStreams whether to include message body if they are stream based
* @param allowFiles whether to include message body if they are file based
* @param maxChars clip body after maximum chars (to avoid very big messages). Use 0 or negative
* value to not limit at all.
* @return the XML
*/
public static String dumpAsXml(
Message message, boolean includeExchangeProperties, boolean includeExchangeVariables,
boolean includeBody, int indent, boolean allowCachedStreams, boolean allowStreams,
boolean allowFiles, int maxChars) {
StringBuilder sb = new StringBuilder(1024);
final String prefix = " ".repeat(indent);
// include exchangeId/exchangePattern/type as attribute on the <message> tag
sb.append(prefix);
String messageType = ObjectHelper.classCanonicalName(message);
String exchangeType = ObjectHelper.classCanonicalName(message.getExchange());
sb.append("<message exchangeId=\"").append(message.getExchange().getExchangeId())
.append("\" exchangePattern=\"").append(message.getExchange().getPattern().name())
.append("\" exchangeType=\"").append(exchangeType)
.append("\" messageType=\"").append(messageType).append("\">\n");
// exchange variables
if (includeExchangeVariables && message.getExchange().hasVariables()) {
sb.append(prefix);
sb.append(" <exchangeVariables>\n");
// sort the exchange variables so they are listed A..Z
Map<String, Object> variables = new TreeMap<>(message.getExchange().getVariables());
for (Map.Entry<String, Object> entry : variables.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
String type = ObjectHelper.classCanonicalName(value);
sb.append(prefix);
sb.append(" <exchangeVariable key=\"").append(key).append("\"");
if (type != null) {
sb.append(" type=\"").append(type).append("\"");
}
sb.append(">");
// dump value as XML, use Camel type converter to convert to String
if (value != null) {
try {
String xml = extractValueForLogging(value, message, allowCachedStreams, allowStreams, allowFiles,
maxChars);
if (xml != null) {
// must always xml encode
sb.append(StringHelper.xmlEncode(xml));
}
} catch (Exception e) {
// ignore as the body is for logging purpose
}
}
sb.append("</exchangeVariable>\n");
}
sb.append(prefix);
sb.append(" </exchangeVariables>\n");
}
// exchange properties
if (includeExchangeProperties) {
sb.append(prefix);
sb.append(" <exchangeProperties>\n");
// sort the exchange properties so they are listed A..Z
Map<String, Object> properties = new TreeMap<>(message.getExchange().getAllProperties());
for (Map.Entry<String, Object> entry : properties.entrySet()) {
String key = entry.getKey();
// skip some special that are too big
if (Exchange.MESSAGE_HISTORY.equals(key) || Exchange.GROUPED_EXCHANGE.equals(key)
|| Exchange.FILE_EXCHANGE_FILE.equals(key)) {
continue;
}
Object value = entry.getValue();
String type = ObjectHelper.classCanonicalName(value);
sb.append(prefix);
sb.append(" <exchangeProperty key=\"").append(key).append("\"");
if (type != null) {
sb.append(" type=\"").append(type).append("\"");
}
sb.append(">");
// dump value as XML, use Camel type converter to convert to String
if (value != null) {
try {
String xml = extractValueForLogging(value, message, allowCachedStreams, allowStreams, allowFiles,
maxChars);
if (xml != null) {
// must always xml encode
sb.append(StringHelper.xmlEncode(xml));
}
} catch (Exception e) {
// ignore as the body is for logging purpose
}
}
sb.append("</exchangeProperty>\n");
}
sb.append(prefix);
sb.append(" </exchangeProperties>\n");
}
// headers
if (message.hasHeaders()) {
sb.append(prefix);
sb.append(" <headers>\n");
// sort the headers so they are listed A..Z
Map<String, Object> headers = new TreeMap<>(message.getHeaders());
for (Map.Entry<String, Object> entry : headers.entrySet()) {
Object value = entry.getValue();
String type = ObjectHelper.classCanonicalName(value);
sb.append(prefix);
sb.append(" <header key=\"").append(entry.getKey()).append("\"");
if (type != null) {
sb.append(" type=\"").append(type).append("\"");
}
sb.append(">");
// dump value as XML, use Camel type converter to convert to String
if (value != null) {
try {
String xml = extractValueForLogging(value, message, allowCachedStreams, allowStreams, allowFiles,
maxChars);
if (xml != null) {
// must always xml encode
sb.append(StringHelper.xmlEncode(xml));
}
} catch (Exception e) {
// ignore as the body is for logging purpose
}
}
sb.append("</header>\n");
}
sb.append(prefix);
sb.append(" </headers>\n");
}
if (includeBody) {
sb.append(prefix);
sb.append(" <body");
Object body = message.getBody();
String type = ObjectHelper.classCanonicalName(body);
if (type != null) {
sb.append(" type=\"").append(type).append("\"");
}
if (body instanceof Collection) {
long size = ((Collection<?>) body).size();
sb.append(" size=\"").append(size).append("\"");
}
if (body != null && body.getClass().isArray()) {
int size = Array.getLength(body);
sb.append(" size=\"").append(size).append("\"");
}
if (body instanceof StreamCache streamCache) {
long pos = streamCache.position();
if (pos != -1) {
sb.append(" position=\"").append(pos).append("\"");
}
}
sb.append(">");
String xml = extractBodyForLogging(message, null, allowCachedStreams, allowStreams, allowFiles, maxChars);
if (xml != null) {
// must always xml encode
sb.append(StringHelper.xmlEncode(xml));
}
sb.append("</body>\n");
}
sb.append(prefix);
sb.append("</message>");
return sb.toString();
}
/**
* Copies the body of the source message to the body of the target message while preserving the data type if the
* messages are both of type {@link DataTypeAware}. .
*
* @param source the source message from which the body must be extracted.
* @param target the target message that will receive the body.
*/
public static void copyBody(Message source, Message target) {
// Preserve the DataType if both messages are DataTypeAware
if (source.hasTrait(MessageTrait.DATA_AWARE)) {
target.setBody(source.getBody());
target.setPayloadForTrait(MessageTrait.DATA_AWARE,
source.getPayloadForTrait(MessageTrait.DATA_AWARE));
return;
}
target.setBody(source.getBody());
}
/**
* Copies the headers from the source to the target message.
*
* @param source the source message
* @param target the target message
* @param override whether to override existing headers
*/
public static void copyHeaders(Message source, Message target, boolean override) {
copyHeaders(source, target, null, override);
}
/**
* Copies the headers from the source to the target message.
*
* @param source the source message
* @param target the target message
* @param strategy the header filter strategy which could help us to filter the protocol message headers
* @param override whether to override existing headers
*/
public static void copyHeaders(Message source, Message target, HeaderFilterStrategy strategy, boolean override) {
if (!source.hasHeaders()) {
return;
}
for (Map.Entry<String, Object> entry : source.getHeaders().entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (target.getHeader(key) == null || override) {
if (strategy == null) {
target.setHeader(key, value);
} else if (!strategy.applyFilterToExternalHeaders(key, value, target.getExchange())) {
// Just make sure we don't copy the protocol headers to
// target
target.setHeader(key, value);
}
}
}
}
/**
 * Dumps the {@link MessageHistory} from the {@link Exchange} in a human-readable format.
 *
 * @param  exchange          the exchange
 * @param  exchangeFormatter if provided then information about the exchange is included in the dump
 * @param  logStackTrace     whether to include a header for the stacktrace, to be added (not included in this
 *                           dump).
 * @return                   a human-readable message history as a table, never {@code null}
 */
public static String dumpMessageHistoryStacktrace(
        Exchange exchange, ExchangeFormatter exchangeFormatter, boolean logStackTrace) {
    try {
        return doDumpMessageHistoryStacktrace(exchange, exchangeFormatter, logStackTrace);
    } catch (Exception e) {
        // the dump is for logging purposes only, so it must never throw — fall back to an empty dump
        return "";
    }
}
// Builds the message-history table: a header, one row for the incoming origin, then one row
// per recorded MessageHistory entry (or the last processed node when history is disabled),
// optionally followed by the formatted exchange and a stacktrace section header.
@SuppressWarnings("unchecked")
private static String doDumpMessageHistoryStacktrace(
        Exchange exchange, ExchangeFormatter exchangeFormatter, boolean logStackTrace) {
    // add incoming origin of message on the top
    String routeId = exchange.getFromRouteId();
    Route route = exchange.getContext().getRoute(routeId);
    String loc = route != null ? route.getSourceLocationShort() : null;
    if (loc == null) {
        loc = "";
    }
    String id = routeId;
    String label = "";
    if (exchange.getFromEndpoint() != null) {
        // sanitize the endpoint uri so sensitive details are masked; clip first so sanitizing stays fast
        label = "from[" + URISupport.sanitizeUri(StringHelper.limitLength(exchange.getFromEndpoint().getEndpointUri(), 100))
                + "]";
    }
    final long elapsed = exchange.getClock().elapsed();
    List<MessageHistory> list = exchange.getProperty(ExchangePropertyKey.MESSAGE_HISTORY, List.class);
    boolean enabled = list != null;
    boolean source = !loc.isEmpty();
    StringBuilder sb = new StringBuilder(2048);
    sb.append("\n");
    sb.append("Message History");
    // tell the reader which parts of the dump are unavailable and why
    if (!source && !enabled) {
        sb.append(" (source location and message history is disabled)");
    } else if (!source) {
        sb.append(" (source location is disabled)");
    } else if (!enabled) {
        sb.append(" (complete message history is disabled)");
    }
    sb.append("\n");
    sb.append(
            "---------------------------------------------------------------------------------------------------------------------------------------\n");
    // header/row formats can be customized via global options, otherwise use the built-in formats
    String goMessageHistoryHeader = exchange.getContext().getGlobalOption(Exchange.MESSAGE_HISTORY_HEADER_FORMAT);
    sb.append(String.format(goMessageHistoryHeader == null ? MESSAGE_HISTORY_HEADER : goMessageHistoryHeader,
            "Source", "ID", "Processor", "Elapsed (ms)"));
    sb.append("\n");
    String goMessageHistoryOutput = exchange.getContext().getGlobalOption(Exchange.MESSAGE_HISTORY_OUTPUT_FORMAT);
    goMessageHistoryOutput = goMessageHistoryOutput == null ? MESSAGE_HISTORY_OUTPUT : goMessageHistoryOutput;
    sb.append(String.format(goMessageHistoryOutput, loc, routeId + "/" + id, label, elapsed));
    sb.append("\n");
    if (list == null || list.isEmpty()) {
        // message history is not enabled but we can show the last processed
        // instead
        id = exchange.getExchangeExtension().getHistoryNodeId();
        if (id != null) {
            loc = exchange.getExchangeExtension().getHistoryNodeSource();
            if (loc == null) {
                loc = "";
            }
            String rid = ExchangeHelper.getAtRouteId(exchange);
            if (rid != null) {
                routeId = rid;
            }
            label = exchange.getExchangeExtension().getHistoryNodeLabel();
            // we need to avoid leak the sensible information here
            // the sanitizeUri takes a very long time for very long string
            // and the format cuts this to
            // 78 characters, anyway. Cut this to 100 characters. This will
            // give enough space for removing
            // characters in the sanitizeUri method and will be reasonably
            // fast
            label = URISupport.sanitizeUri(StringHelper.limitLength(label, 100));
            // we do not have elapsed time
            sb.append("\t...\n");
            sb.append(String.format(goMessageHistoryOutput, loc, routeId + "/" + id, label, 0));
            sb.append("\n");
        }
    } else {
        for (MessageHistory history : list) {
            // and then each history
            loc = LoggerHelper.getLineNumberLoggerName(history.getNode());
            if (loc == null) {
                loc = "";
            }
            routeId = history.getRouteId() != null ? history.getRouteId() : "";
            id = history.getNode().getId();
            // we need to avoid leak the sensible information here
            // the sanitizeUri takes a very long time for very long string
            // and the format cuts this to
            // 78 characters, anyway. Cut this to 100 characters. This will
            // give enough space for removing
            // characters in the sanitizeUri method and will be reasonably
            // fast
            label = URISupport.sanitizeUri(StringHelper.limitLength(history.getNode().getLabel(), 100));
            sb.append(String.format(goMessageHistoryOutput, loc, routeId + "/" + id, label, history.getElapsed()));
            sb.append("\n");
        }
    }
    // optionally append details about the exchange itself
    if (exchangeFormatter != null) {
        sb.append("\nExchange\n");
        sb.append(
                "---------------------------------------------------------------------------------------------------------------------------------------\n");
        sb.append(exchangeFormatter.format(exchange));
        sb.append("\n");
    }
    // only the section header is emitted here; the caller appends the actual stacktrace
    if (logStackTrace) {
        sb.append("\nStacktrace\n");
        sb.append(
                "---------------------------------------------------------------------------------------------------------------------------------------");
    }
    return sb.toString();
}
/**
 * Dumps the message as a generic JSon structure as text (body included, pretty printed).
 *
 * @param  message the message
 * @return         the JSon
 */
public static String dumpAsJSon(Message message) {
    return dumpAsJSon(message, true);
}

/**
 * Dumps the message as a generic JSon structure as text (pretty printed with default indentation).
 *
 * @param  message     the message
 * @param  includeBody whether or not to include the message body
 * @return             the JSon
 */
public static String dumpAsJSon(Message message, boolean includeBody) {
    return dumpAsJSon(message, includeBody, 0);
}

/**
 * Dumps the message as a generic JSon structure as text (pretty printed, streams excluded, files
 * included, body clipped at 128kb).
 *
 * @param  message     the message
 * @param  includeBody whether or not to include the message body
 * @param  indent      number of spaces to indent
 * @return             the JSon
 */
public static String dumpAsJSon(Message message, boolean includeBody, int indent) {
    return dumpAsJSon(message, includeBody, indent, false, true, 128 * 1024, true);
}

/**
 * Dumps the message as a generic JSon structure as text.
 *
 * @param  message      the message
 * @param  includeBody  whether or not to include the message body
 * @param  indent       number of spaces to indent
 * @param  allowStreams whether to include message body if they are stream based
 * @param  allowFiles   whether to include message body if they are file based
 * @param  maxChars     clip body after maximum chars (to avoid very big messages). Use 0 or negative value to not
 *                      limit at all.
 * @param  pretty       whether to pretty print JSon
 * @return              the JSon
 */
public static String dumpAsJSon(
        Message message, boolean includeBody, int indent, boolean allowStreams, boolean allowFiles, int maxChars,
        boolean pretty) {
    return dumpAsJSon(message, false, false, includeBody, indent, false, allowStreams, allowFiles, maxChars, pretty);
}
/**
 * Dumps the message as a generic JSon structure as text.
 *
 * @param  message                   the message
 * @param  includeExchangeProperties whether or not to include exchange properties
 * @param  includeExchangeVariables  whether or not to include exchange variables
 * @param  includeBody               whether or not to include the message body
 * @param  indent                    number of spaces to indent
 * @param  allowCachedStreams        whether to include message body if they are stream cached based
 * @param  allowStreams              whether to include message body if they are stream based
 * @param  allowFiles                whether to include message body if they are file based
 * @param  maxChars                  clip body after maximum chars (to avoid very big messages). Use 0 or negative
 *                                   value to not limit at all.
 * @param  pretty                    whether to pretty print JSon
 * @return                           the JSon
 */
public static String dumpAsJSon(
        Message message, boolean includeExchangeProperties, boolean includeExchangeVariables, boolean includeBody,
        int indent,
        boolean allowCachedStreams, boolean allowStreams, boolean allowFiles, int maxChars, boolean pretty) {
    String json = dumpAsJSonObject(message, includeExchangeProperties, includeExchangeVariables, includeBody,
            allowCachedStreams, allowStreams, allowFiles, maxChars).toJson();
    if (!pretty) {
        return json;
    }
    // an explicit indent wins over the default pretty-print indentation
    return indent > 0 ? Jsoner.prettyPrint(json, indent) : Jsoner.prettyPrint(json);
}
/**
 * Dumps the message as a generic JSon Object.
 *
 * @param  message                   the message
 * @param  includeExchangeProperties whether or not to include exchange properties
 * @param  includeExchangeVariables  whether or not to include exchange variables
 * @param  includeBody               whether or not to include the message body
 * @param  allowCachedStreams        whether to include message body if they are stream cached based
 * @param  allowStreams              whether to include message body if they are stream based
 * @param  allowFiles                whether to include message body if they are file based
 * @param  maxChars                  clip body after maximum chars (to avoid very big messages). Use 0 or negative
 *                                   value to not limit at all.
 * @return                           the JSon Object
 */
public static JsonObject dumpAsJSonObject(
        Message message, boolean includeExchangeProperties, boolean includeExchangeVariables, boolean includeBody,
        boolean allowCachedStreams, boolean allowStreams, boolean allowFiles, int maxChars) {
    JsonObject root = new JsonObject();
    JsonObject jo = new JsonObject();
    root.put("message", jo);
    jo.put("exchangeId", message.getExchange().getExchangeId());
    jo.put("exchangePattern", message.getExchange().getPattern().name());
    jo.put("exchangeType", ObjectHelper.classCanonicalName(message.getExchange()));
    jo.put("messageType", ObjectHelper.classCanonicalName(message));
    // exchange variables
    if (includeExchangeVariables && message.getExchange().hasVariables()) {
        JsonArray arr = new JsonArray();
        // sort the exchange variables so they are listed A..Z
        Map<String, Object> variables = new TreeMap<>(message.getExchange().getVariables());
        for (Map.Entry<String, Object> entry : variables.entrySet()) {
            Object value = entry.getValue();
            String type = ObjectHelper.classCanonicalName(value);
            JsonObject jh = new JsonObject();
            jh.put("key", entry.getKey());
            if (type != null) {
                jh.put("type", type);
            }
            if (value != null) {
                jh.put("value", asJSonValue(value, message, allowCachedStreams, allowStreams, allowFiles, maxChars));
            }
            arr.add(jh);
        }
        if (!arr.isEmpty()) {
            jo.put("exchangeVariables", arr);
        }
    }
    // exchange properties
    if (includeExchangeProperties) {
        JsonArray arr = new JsonArray();
        // sort the exchange properties so they are listed A..Z
        Map<String, Object> properties = new TreeMap<>(message.getExchange().getAllProperties());
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            String key = entry.getKey();
            // skip some special that are too big
            if (Exchange.MESSAGE_HISTORY.equals(key) || Exchange.GROUPED_EXCHANGE.equals(key)
                    || Exchange.FILE_EXCHANGE_FILE.equals(key)) {
                continue;
            }
            Object value = entry.getValue();
            String type = ObjectHelper.classCanonicalName(value);
            JsonObject jh = new JsonObject();
            jh.put("key", key);
            if (type != null) {
                jh.put("type", type);
            }
            if (ImportantHeaderUtils.isImportantHeader(key)) {
                jh.put("important", true);
            }
            if (value != null) {
                jh.put("value", asJSonValue(value, message, allowCachedStreams, allowStreams, allowFiles, maxChars));
            }
            arr.add(jh);
        }
        if (!arr.isEmpty()) {
            jo.put("exchangeProperties", arr);
        }
    }
    // headers
    if (message.hasHeaders()) {
        JsonArray arr = new JsonArray();
        // sort the headers so they are listed A..Z
        Map<String, Object> headers = new TreeMap<>(message.getHeaders());
        for (Map.Entry<String, Object> entry : headers.entrySet()) {
            Object value = entry.getValue();
            String type = ObjectHelper.classCanonicalName(value);
            JsonObject jh = new JsonObject();
            String key = entry.getKey();
            jh.put("key", key);
            if (type != null) {
                jh.put("type", type);
            }
            if (ImportantHeaderUtils.isImportantHeader(key)) {
                jh.put("important", true);
            }
            // dump header value as JSon, use Camel type converter to convert to String
            if (value != null) {
                jh.put("value", asJSonValue(value, message, allowCachedStreams, allowStreams, allowFiles, maxChars));
            }
            arr.add(jh);
        }
        if (!arr.isEmpty()) {
            jo.put("headers", arr);
        }
    }
    if (includeBody) {
        JsonObject jb = new JsonObject();
        jo.put("body", jb);
        Object body = message.getBody();
        String type = ObjectHelper.classCanonicalName(body);
        if (type != null) {
            jb.put("type", type);
        }
        if (body instanceof Collection) {
            long size = ((Collection<?>) body).size();
            jb.put("size", size);
        }
        if (body != null && body.getClass().isArray()) {
            int size = Array.getLength(body);
            jb.put("size", size);
        }
        if (body instanceof WrappedFile<?> wf) {
            if (wf.getFile() instanceof File f) {
                jb.put("size", f.length());
            }
        } else if (body instanceof File f) {
            jb.put("size", f.length());
        } else if (body instanceof Path p) {
            jb.put("size", p.toFile().length());
        }
        if (body instanceof StreamCache streamCache) {
            long pos = streamCache.position();
            if (pos != -1) {
                jb.put("position", pos);
            }
            long size = streamCache.length();
            if (size > 0) {
                jb.put("size", size);
            }
        }
        String data = extractBodyForLogging(message, null, allowCachedStreams, allowStreams, allowFiles, maxChars);
        if (data != null) {
            if ("[Body is null]".equals(data)) {
                jb.put("value", null);
            } else {
                jb.put("value", Jsoner.escape(data));
            }
        }
    }
    return root;
}

/**
 * Returns a JSon friendly representation of the given value: the value as-is when it can be
 * serialized to JSon out of the box, otherwise a logging friendly String representation
 * (or {@code null} if even that extraction fails).
 */
private static Object asJSonValue(
        Object value, Message message, boolean allowCachedStreams, boolean allowStreams, boolean allowFiles,
        int maxChars) {
    if (Jsoner.trySerialize(value) != null) {
        // use the value as-is because it can be serialized in json
        return value;
    }
    // cannot JSon serialize out of the box, so we need to use string value
    try {
        return extractValueForLogging(value, message, allowCachedStreams, allowStreams, allowFiles, maxChars);
    } catch (Exception e) {
        // ignore as the dump is for logging purpose
        return null;
    }
}
/**
 * Dumps the exception as a generic XML structure.
 *
 * @param  indent number of spaces to indent
 * @return        the XML
 */
public static String dumpExceptionAsXML(Throwable exception, int indent) {
    final String pad = " ".repeat(indent);
    final StringBuilder xml = new StringBuilder(512);
    try {
        xml.append(pad).append("<exception");
        final String className = ObjectHelper.classCanonicalName(exception);
        if (className != null) {
            xml.append(" type=\"").append(className).append("\"");
        }
        final String message = exception.getMessage();
        if (message != null) {
            // attribute values must be xml encoded
            xml.append(" message=\"").append(StringHelper.xmlEncode(message)).append("\"");
        }
        xml.append(">\n");
        // must always xml encode the stacktrace
        xml.append(StringHelper.xmlEncode(ExceptionHelper.stackTraceToString(exception)));
        xml.append(pad).append("</exception>");
    } catch (Exception e) {
        // best effort dump for logging purposes: return whatever was built so far
    }
    return xml.toString();
}
/**
 * Dumps the exception as a generic JSon structure as text.
 *
 * @param  indent number of spaces to indent
 * @param  pretty whether to pretty print JSon
 * @return        the JSon
 */
public static String dumpExceptionAsJSon(Throwable exception, int indent, boolean pretty) {
    String json = dumpExceptionAsJSonObject(exception).toJson();
    if (!pretty) {
        return json;
    }
    // an explicit indent wins over the default pretty-print indentation
    return indent > 0 ? Jsoner.prettyPrint(json, indent) : Jsoner.prettyPrint(json);
}
/**
 * Dumps the exception as a generic JSon object.
 *
 * @return the JSon object
 */
public static JsonObject dumpExceptionAsJSonObject(Throwable exception) {
    JsonObject details = new JsonObject();
    String type = ObjectHelper.classCanonicalName(exception);
    if (type != null) {
        details.put("type", type);
    }
    // the message is included even when null so the key is always present
    details.put("message", exception.getMessage());
    final String trace = ExceptionHelper.stackTraceToString(exception);
    try {
        details.put("stackTrace", Jsoner.escape(trace));
    } catch (Exception e) {
        // ignore as the body is for logging purpose
    }
    JsonObject root = new JsonObject();
    root.put("exception", details);
    return root;
}
}
|
type
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/support/ReflectionSupport.java
|
{
"start": 1920,
"end": 2386
}
|
class ____ its <em>primitive name</em> or <em>fully qualified
* name</em>, using the default {@link ClassLoader}.
*
* <p>Class names for arrays may be specified using either the JVM's internal
* String representation (e.g., {@code [[I} for {@code int[][]},
* {@code [Lava.lang.String;} for {@code java.lang.String[]}, etc.) or
* <em>source code syntax</em> (e.g., {@code int[][]}, {@code java.lang.String[]},
* etc.).
*
* @param name the name of the
|
by
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/SecondarySort.java
|
{
"start": 4604,
"end": 5232
}
|
class ____
implements RawComparator<IntPair> {
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return WritableComparator.compareBytes(b1, s1, Integer.SIZE/8,
b2, s2, Integer.SIZE/8);
}
@Override
public int compare(IntPair o1, IntPair o2) {
int l = o1.getFirst();
int r = o2.getFirst();
return l == r ? 0 : (l < r ? -1 : 1);
}
}
/**
* Read two integers from each line and generate a key, value pair
* as ((left, right), right).
*/
public static
|
FirstGroupingComparator
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RMapCacheAsync.java
|
{
"start": 1779,
"end": 16818
}
|
interface ____<K, V> extends RMapAsync<K, V> {
/**
* Sets max size of the map and overrides current value.
* Superfluous elements are evicted using LRU algorithm by default.
*
* @param maxSize - max size
* @return void
*/
RFuture<Void> setMaxSizeAsync(int maxSize);
/**
* Sets max size of the map and overrides current value.
* Superfluous elements are evicted using defined algorithm.
*
* @param maxSize - max size
* @param mode - eviction mode
* @return void
*/
RFuture<Void> setMaxSizeAsync(int maxSize, EvictionMode mode);
/**
* Tries to set max size of the map.
* Superfluous elements are evicted using LRU algorithm by default.
*
* @param maxSize - max size
* @return <code>true</code> if max size has been successfully set, otherwise <code>false</code>.
*/
RFuture<Boolean> trySetMaxSizeAsync(int maxSize);
/**
* Tries to set max size of the map.
* Superfluous elements are evicted using defined algorithm.
*
* @param maxSize - max size
* @param mode - eviction mode
* @return <code>true</code> if max size has been successfully set, otherwise <code>false</code>.
*/
RFuture<Boolean> trySetMaxSizeAsync(int maxSize, EvictionMode mode);
/**
* If the specified key is not already associated
* with a value, associate it with the given value.
* <p>
* Stores value mapped by key with specified time to live.
* Entry expires after specified time to live.
* If the map previously contained a mapping for
* the key, the old value is replaced by the specified value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then stores infinitely.
* @param unit - time unit
* @return previous associated value
*/
RFuture<V> putIfAbsentAsync(K key, V value, long ttl, TimeUnit unit);
/**
* If the specified key is not already associated
* with a value, associate it with the given value.
* <p>
* Stores value mapped by key with specified time to live and max idle time.
* Entry expires when specified time to live or max idle time has expired.
* <p>
* If the map previously contained a mapping for
* the key, the old value is replaced by the specified value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param ttlUnit - time unit
* @param maxIdleTime - max idle time for key\value entry.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* @param maxIdleUnit - time unit
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entry stores infinitely.
*
* @return previous associated value
*/
RFuture<V> putIfAbsentAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit);
/**
* Stores value mapped by key with specified time to live.
* Entry expires after specified time to live.
* If the map previously contained a mapping for
* the key, the old value is replaced by the specified value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then stores infinitely.
* @param unit - time unit
* @return previous associated value
*/
RFuture<V> putAsync(K key, V value, long ttl, TimeUnit unit);
/**
* Stores value mapped by key with specified time to live and max idle time.
* Entry expires when specified time to live or max idle time has expired.
* <p>
* If the map previously contained a mapping for
* the key, the old value is replaced by the specified value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param ttlUnit - time unit
* @param maxIdleTime - max idle time for key\value entry.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* @param maxIdleUnit - time unit
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entry stores infinitely.
*
* @return previous associated value
*/
RFuture<V> putAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit);
/**
* Associates the specified <code>value</code> with the specified <code>key</code>
* in batch.
* <p>
* If {@link MapWriter} is defined then new map entries are stored in write-through mode.
*
* @param map - mappings to be stored in this map
* @param ttl - time to live for all key\value entries.
* If <code>0</code> then stores infinitely.
* @param ttlUnit - time unit
* @return void
*/
RFuture<Void> putAllAsync(Map<? extends K, ? extends V> map, long ttl, TimeUnit ttlUnit);
/**
* Stores value mapped by key with specified time to live.
* Entry expires after specified time to live.
* <p>
* If the map previously contained a mapping for
* the key, the old value is replaced by the specified value.
* <p>
* Works faster than usual {@link #putAsync(Object, Object, long, TimeUnit)}
* as it not returns previous value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then stores infinitely.
* @param unit - time unit
*
* @return <code>true</code> if key is a new key in the hash and value was set.
* <code>false</code> if key already exists in the hash and the value was updated.
*/
RFuture<Boolean> fastPutAsync(K key, V value, long ttl, TimeUnit unit);
/**
* Stores value mapped by key with specified time to live and max idle time.
* Entry expires when specified time to live or max idle time has expired.
* <p>
* If the map previously contained a mapping for
* the key, the old value is replaced by the specified value.
* <p>
* Works faster than usual {@link #putAsync(Object, Object, long, TimeUnit, long, TimeUnit)}
* as it not returns previous value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param ttlUnit - time unit
* @param maxIdleTime - max idle time for key\value entry.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* @param maxIdleUnit - time unit
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entry stores infinitely.
* @return <code>true</code> if key is a new key in the hash and value was set.
* <code>false</code> if key already exists in the hash and the value was updated.
*/
RFuture<Boolean> fastPutAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit);
/**
* If the specified key is not already associated
* with a value, associate it with the given value.
* <p>
* Stores value mapped by key with specified time to live and max idle time.
* Entry expires when specified time to live or max idle time has expired.
* <p>
* Works faster than usual {@link #putIfAbsentAsync(Object, Object, long, TimeUnit, long, TimeUnit)}
* as it not returns previous value.
*
* @param key - map key
* @param value - map value
* @param ttl - time to live for key\value entry.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param ttlUnit - time unit
* @param maxIdleTime - max idle time for key\value entry.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* @param maxIdleUnit - time unit
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entry stores infinitely.
*
* @return <code>true</code> if key is a new key in the hash and value was set.
* <code>false</code> if key already exists in the hash
*/
RFuture<Boolean> fastPutIfAbsentAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit);
/**
* If the specified key is not already associated
* with a value, attempts to compute its value using the given mapping function and enters it into this map .
* <p>
* Stores value mapped by key with specified time to live.
* Entry expires after specified time to live.
*
* @param key - map key
* @param ttl - time to live for key\value entry.
* If <code>0</code> then stores infinitely.
* @param mappingFunction the mapping function to compute a value
* @return current associated value
*/
RFuture<V> computeIfAbsentAsync(K key, Duration ttl, Function<? super K, ? extends V> mappingFunction);
/**
* Computes a new mapping for the specified key and its current mapped value.
* <p>
* Stores value mapped by key with specified time to live.
* Entry expires after specified time to live.
*
* @param key - map key
* @param ttl - time to live for key\value entry.
* If <code>0</code> then stores infinitely.
* @param remappingFunction - function to compute a value
* @return the new value associated with the specified key, or {@code null} if none
*/
RFuture<V> computeAsync(K key, Duration ttl, BiFunction<? super K, ? super V, ? extends V> remappingFunction);
/**
* Use {@link #expireEntryAsync(Object, Duration, Duration)} instead.
*
* @param key - map key
* @param ttl - time to live for key\value entry.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param ttlUnit - time unit
* @param maxIdleTime - max idle time for key\value entry.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* @param maxIdleUnit - time unit
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entry stores infinitely.
*
* @return returns <code>false</code> if entry already expired or doesn't exist,
* otherwise returns <code>true</code>.
*/
@Deprecated
RFuture<Boolean> updateEntryExpirationAsync(K key, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit);
/**
* Updates time to live and max idle time of specified entry by key.
* Entry expires when specified time to live or max idle time was reached.
* <p>
* Returns <code>false</code> if entry already expired or doesn't exist,
* otherwise returns <code>true</code>.
*
* @param key - map key
* @param ttl - time to live for key\value entry.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param maxIdleTime - max idle time for key\value entry.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entry stores infinitely.
*
* @return returns <code>false</code> if entry already expired or doesn't exist,
* otherwise returns <code>true</code>.
*/
RFuture<Boolean> expireEntryAsync(K key, Duration ttl, Duration maxIdleTime);
/**
* Updates time to live and max idle time of specified entries by keys.
* Entries expires when specified time to live or max idle time was reached.
* <p>
* Returns amount of updated entries.
*
* @param keys map keys
* @param ttl time to live for key\value entries.
* If <code>0</code> then time to live doesn't affect entry expiration.
* @param maxIdleTime max idle time for key\value entries.
* If <code>0</code> then max idle time doesn't affect entry expiration.
* <p>
* if <code>maxIdleTime</code> and <code>ttl</code> params are equal to <code>0</code>
* then entries are stored infinitely.
*
* @return amount of updated entries.
*/
RFuture<Integer> expireEntriesAsync(Set<K> keys, Duration ttl, Duration maxIdleTime);
/**
* Returns the value mapped by defined <code>key</code> or {@code null} if value is absent.
* <p>
* If map doesn't contain value for specified key and {@link MapLoader} is defined
* then value will be loaded in read-through mode.
* <p>
* NOTE: Idle time of entry is not taken into account.
* Entry last access time isn't modified if map limited by size.
*
* @param key the key
* @return the value mapped by defined <code>key</code> or {@code null} if value is absent
*/
RFuture<V> getWithTTLOnlyAsync(K key);
/**
* Returns map slice contained the mappings with defined <code>keys</code>.
* <p>
* If map doesn't contain value/values for specified key/keys and {@link MapLoader} is defined
* then value/values will be loaded in read-through mode.
* <p>
* NOTE: Idle time of entry is not taken into account.
* Entry last access time isn't modified if map limited by size.
*
* @param keys map keys
* @return Map slice
*/
RFuture<Map<K, V>> getAllWithTTLOnlyAsync(Set<K> keys);
/**
* Returns the number of entries in cache.
* This number can reflects expired entries too
* due to non realtime cleanup process.
*
*/
@Override
RFuture<Integer> sizeAsync();
/**
* Remaining time to live of map entry associated with a <code>key</code>.
*
* @param key - map key
* @return time in milliseconds
* -2 if the key does not exist.
* -1 if the key exists but has no associated expire.
*/
RFuture<Long> remainTimeToLiveAsync(K key);
/**
* Adds map entry listener
*
* @see org.redisson.api.map.event.EntryCreatedListener
* @see org.redisson.api.map.event.EntryUpdatedListener
* @see org.redisson.api.map.event.EntryRemovedListener
* @see org.redisson.api.map.event.EntryExpiredListener
*
* @param listener - entry listener
* @return listener id
*/
RFuture<Integer> addListenerAsync(MapEntryListener listener);
}
|
RMapCacheAsync
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
|
{
"start": 2222,
"end": 2321
}
|
class ____ transport actions that are interacting with currently running tasks.
*/
public abstract
|
for
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-ws/src/main/java/org/apache/camel/component/spring/ws/type/EndpointMappingType.java
|
{
"start": 940,
"end": 1482
}
|
enum ____ {
ROOT_QNAME("rootqname:"),
ACTION("action:"),
TO("to:"),
SOAP_ACTION("soapaction:"),
XPATHRESULT("xpathresult:"),
URI("uri:"),
URI_PATH("uripath:"),
BEANNAME("beanname:");
private final String prefix;
EndpointMappingType(String prefix) {
this.prefix = prefix;
}
public String getPrefix() {
return prefix;
}
/**
* Find {@link EndpointMappingType} that corresponds with the prefix of the given uri. Matching of uri prefix
* against
|
EndpointMappingType
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbstractAbfsStatisticsSource.java
|
{
"start": 1103,
"end": 4542
}
|
class ____ implements IOStatisticsSource {
private IOStatisticsStore ioStatisticsStore;
/**
* Default constructor.
*/
protected AbstractAbfsStatisticsSource() {
}
/**
* Returns the IOStatisticsStore instance.
*
* @return the IOStatisticsStore instance
*/
@Override
public IOStatistics getIOStatistics() {
return ioStatisticsStore;
}
/**
* Sets the IOStatisticsStore instance.
*
* @param ioStatisticsStore the IOStatisticsStore instance to set
*/
protected void setIOStatistics(final IOStatisticsStore ioStatisticsStore) {
this.ioStatisticsStore = ioStatisticsStore;
}
/**
* Increments the counter value by 1 for the given name.
*
* @param name the name of the counter
*/
protected void incCounterValue(String name) {
incCounterValue(name, 1);
}
/**
* Increments the counter value by the specified value for the given name.
*
* @param name the name of the counter
* @param value the value to increment by
*/
protected void incCounterValue(String name, long value) {
ioStatisticsStore.incrementCounter(name, value);
}
/**
* Looks up the counter value for the given name.
*
* @param name the name of the counter
* @return the counter value
*/
protected Long lookupCounterValue(String name) {
return ioStatisticsStore.counters().getOrDefault(name, 0L);
}
/**
* Sets the counter value for the given name.
*
* @param name the name of the counter
* @param value the value to set
*/
protected void setCounterValue(String name, long value) {
ioStatisticsStore.setCounter(name, value);
}
/**
* Increments the gauge value by 1 for the given name.
*
* @param name the name of the gauge
*/
protected void incGaugeValue(String name) {
incCounterValue(name, 1);
}
/**
* Looks up the gauge value for the given name.
*
* @param name the name of the gauge
* @return the gauge value
*/
protected Long lookupGaugeValue(String name) {
return ioStatisticsStore.gauges().getOrDefault(name, 0L);
}
/**
* Sets the gauge value for the given name.
*
* @param name the name of the gauge
* @param value the value to set
*/
protected void setGaugeValue(String name, long value) {
ioStatisticsStore.setGauge(name, value);
}
/**
* Add sample to mean statistics for the given name.
*
* @param name the name of the mean statistic
* @param value the value to set
*/
protected void addMeanStatistic(String name, long value) {
ioStatisticsStore.addMeanStatisticSample(name, value);
}
/**
* Looks up the mean statistics value for the given name.
*
* @param name the name of the mean statistic
* @return the mean value
*/
protected double lookupMeanStatistic(String name) {
return ioStatisticsStore.meanStatistics().get(name).mean();
}
/**
* Returns a string representation of the AbstractAbfsStatisticsSource.
*
* @return a string representation of the AbstractAbfsStatisticsSource
*/
@Override
public String toString() {
return "AbstractAbfsStatisticsStore{" + ioStatisticsStore + '}';
}
}
|
AbstractAbfsStatisticsSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/CachedQueryShallowCollectionNestedJoinFetchTest.java
|
{
"start": 1977,
"end": 9944
}
|
class ____ {
public final static String HQL = "select e from Manager e left join fetch e.associates a left join fetch a.car";
@BeforeEach
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction(
em -> {
for ( int i = 0; i < 10; i++ ) {
Manager manager = new Manager( i, "Manager" + i );
for ( int j = 0; j < 1; j++ ) {
manager.addAssociate( new Employee( i * 10 + j, "John" + ( i * 10 + j ) ) );
}
em.persist( manager );
}
}
);
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testCacheableQuery(EntityManagerFactoryScope scope) {
Statistics stats = getStatistics( scope );
CacheImplementor cache = scope.getEntityManagerFactory().unwrap( SessionFactoryImplementor.class ).getCache();
stats.clear();
// First time the query is executed, query and results are cached.
scope.inTransaction(
em -> {
List<Manager> managers = getManagers( em );
assertThatAnSQLQueryHasBeenExecuted( stats );
assertEquals( 0, stats.getQueryCacheHitCount() );
assertEquals( 1, stats.getQueryCacheMissCount() );
assertEquals( 1, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 10, stats.getSecondLevelCachePutCount() );
}
);
stats.clear();
// Second time the query is executed, list of entities are read from query cache
scope.inTransaction(
em -> {
List<Manager> managers = getManagers( em );
assertThatNoSQLQueryHasBeenExecuted( stats );
assertEquals( 1, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 10, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getSecondLevelCachePutCount() );
final CollectionStatistics collectionStats = stats.getCollectionStatistics( Manager.class.getName() + ".associates" );
assertEquals( 10, collectionStats.getCacheHitCount() );
assertEquals( 0, collectionStats.getCacheMissCount() );
assertEquals( 0, collectionStats.getCachePutCount() );
}
);
// NOTE: JPACache.evictAll() only evicts entity regions;
// it does not evict the collection regions or query cache region
cache.evictCollectionData();
stats.clear();
scope.inTransaction(
em -> {
List<Manager> managers = getManagers( em );
// query is still found in the cache
assertThatNoSQLQueryHasBeenExecuted( stats );
assertEquals( 1, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 10, stats.getSecondLevelCacheMissCount() );
assertEquals( 10, stats.getSecondLevelCachePutCount() );
final CollectionStatistics collectionStats = stats.getCollectionStatistics( Manager.class.getName() + ".associates" );
assertEquals( 0, collectionStats.getCacheHitCount() );
assertEquals( 10, collectionStats.getCacheMissCount() );
assertEquals( 10, collectionStats.getCachePutCount() );
}
);
stats.clear();
// this time call clear the entity regions and the query cache region
scope.inTransaction(
em -> {
cache.evictCollectionData();
em.unwrap( SessionImplementor.class )
.getFactory()
.getCache()
.evictQueryRegions();
List<Manager> managers = getManagers( em );
// query is no longer found in the cache
assertThatAnSQLQueryHasBeenExecuted( stats );
assertEquals( 0, stats.getQueryCacheHitCount() );
assertEquals( 1, stats.getQueryCacheMissCount() );
assertEquals( 1, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 10, stats.getSecondLevelCachePutCount() );
final CollectionStatistics collectionStats = stats.getCollectionStatistics( Manager.class.getName() + ".associates" );
assertEquals( 0, collectionStats.getCacheHitCount() );
assertEquals( 0, collectionStats.getCacheMissCount() );
assertEquals( 10, collectionStats.getCachePutCount() );
}
);
stats.clear();
// Check that the join fetched to-one association is initialized even if managers are already part of the PC
scope.inTransaction(
em -> {
em.createQuery( "select m from Manager m" ).getResultList();
List<Manager> managers = getManagers( em );
assertThatNoSQLQueryHasBeenExecuted( stats );
assertEquals( 1, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 10, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getSecondLevelCachePutCount() );
final CollectionStatistics collectionStats = stats.getCollectionStatistics( Manager.class.getName() + ".associates" );
assertEquals( 10, collectionStats.getCacheHitCount() );
assertEquals( 0, collectionStats.getCacheMissCount() );
assertEquals( 0, collectionStats.getCachePutCount() );
}
);
stats.clear();
// Check that the join fetched to-one association is initialized even if managers are already part of the PC
scope.inTransaction(
em -> {
em.createQuery( "select m from Manager m join fetch m.associates" ).getResultList();
List<Manager> managers = getManagers( em );
assertThatNoSQLQueryHasBeenExecuted( stats );
assertEquals( 1, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getSecondLevelCachePutCount() );
final CollectionStatistics collectionStats = stats.getCollectionStatistics( Manager.class.getName() + ".associates" );
assertEquals( 0, collectionStats.getCacheHitCount() );
assertEquals( 0, collectionStats.getCacheMissCount() );
assertEquals( 0, collectionStats.getCachePutCount() );
}
);
}
private static Statistics getStatistics(EntityManagerFactoryScope scope) {
return ( (SessionFactoryImplementor) scope.getEntityManagerFactory() ).getStatistics();
}
private static List<Manager> getManagers(EntityManager em) {
TypedQuery<Manager> query = em.createQuery(
HQL,
Manager.class
)
.setHint( HINT_CACHEABLE, true );
List<Manager> managers = query.getResultList();
assertEquals( 10, managers.size() );
for ( Manager manager : managers ) {
assertTrue( Hibernate.isInitialized( manager ) );
assertEquals( "Manager" + manager.getId(), manager.getName() );
assertTrue( Hibernate.isInitialized( manager.getAssociates() ) );
assertEquals( 1, manager.getAssociates().size() );
for ( Employee associate : manager.getAssociates() ) {
assertEquals( "John" + associate.getId(), associate.getName() );
assertTrue( Hibernate.isInitialized( associate.getCar() ) );
assertEquals( "John" + associate.getId() + "'s car", associate.getCar().getName() );
}
}
return managers;
}
private static void assertThatAnSQLQueryHasBeenExecuted(Statistics stats) {
assertEquals( 1, stats.getQueryStatistics( HQL ).getExecutionCount() );
}
private static void assertThatNoSQLQueryHasBeenExecuted(Statistics stats) {
assertEquals( 0, stats.getQueryStatistics( HQL ).getExecutionCount() );
}
@Entity(name = "Manager")
public static
|
CachedQueryShallowCollectionNestedJoinFetchTest
|
java
|
apache__maven
|
compat/maven-embedder/src/test/java/org/apache/maven/cli/MavenCliTest.java
|
{
"start": 3684,
"end": 31688
}
|
class ____ {
private MavenCli cli;
private String origBasedir;
@BeforeEach
void setUp() {
cli = new MavenCli();
origBasedir = System.getProperty(MavenCli.MULTIMODULE_PROJECT_DIRECTORY);
}
@AfterEach
void tearDown() throws Exception {
if (origBasedir != null) {
System.setProperty(MavenCli.MULTIMODULE_PROJECT_DIRECTORY, origBasedir);
} else {
System.getProperties().remove(MavenCli.MULTIMODULE_PROJECT_DIRECTORY);
}
}
// Helper method for containsExactlyInAnyOrder assertion
private static <T> void assertContainsExactlyInAnyOrder(Collection<T> actual, T... expected) {
assertEquals(expected.length, actual.size(), "Collection size mismatch");
for (T item : expected) {
assertTrue(actual.contains(item), "Collection should contain: " + item);
}
}
@Test
void testPerformProfileActivation() throws ParseException {
final CommandLineParser parser = new DefaultParser();
final Options options = new Options();
options.addOption(Option.builder(Character.toString(CLIManager.ACTIVATE_PROFILES))
.hasArg()
.build());
ProfileActivation activation;
activation = new ProfileActivation();
performProfileActivation(parser.parse(options, new String[] {"-P", "test1,+test2,?test3,+?test4"}), activation);
assertContainsExactlyInAnyOrder(activation.getRequiredActiveProfileIds(), "test1", "test2");
assertContainsExactlyInAnyOrder(activation.getOptionalActiveProfileIds(), "test3", "test4");
activation = new ProfileActivation();
performProfileActivation(
parser.parse(options, new String[] {"-P", "!test1,-test2,-?test3,!?test4"}), activation);
assertContainsExactlyInAnyOrder(activation.getRequiredInactiveProfileIds(), "test1", "test2");
assertContainsExactlyInAnyOrder(activation.getOptionalInactiveProfileIds(), "test3", "test4");
activation = new ProfileActivation();
performProfileActivation(parser.parse(options, new String[] {"-P", "-test1,+test2"}), activation);
assertContainsExactlyInAnyOrder(activation.getRequiredActiveProfileIds(), "test2");
assertContainsExactlyInAnyOrder(activation.getRequiredInactiveProfileIds(), "test1");
}
@Test
void testDetermineProjectActivation() throws ParseException {
final CommandLineParser parser = new DefaultParser();
final Options options = new Options();
options.addOption(Option.builder(CLIManager.PROJECT_LIST).hasArg().build());
ProjectActivation activation;
activation = new ProjectActivation();
performProjectActivation(
parser.parse(options, new String[] {"-pl", "test1,+test2,?test3,+?test4"}), activation);
assertContainsExactlyInAnyOrder(activation.getRequiredActiveProjectSelectors(), "test1", "test2");
assertContainsExactlyInAnyOrder(activation.getOptionalActiveProjectSelectors(), "test3", "test4");
activation = new ProjectActivation();
performProjectActivation(
parser.parse(options, new String[] {"-pl", "!test1,-test2,-?test3,!?test4"}), activation);
assertContainsExactlyInAnyOrder(activation.getRequiredInactiveProjectSelectors(), "test1", "test2");
assertContainsExactlyInAnyOrder(activation.getOptionalInactiveProjectSelectors(), "test3", "test4");
activation = new ProjectActivation();
performProjectActivation(parser.parse(options, new String[] {"-pl", "-test1,+test2"}), activation);
assertContainsExactlyInAnyOrder(activation.getRequiredActiveProjectSelectors(), "test2");
assertContainsExactlyInAnyOrder(activation.getRequiredInactiveProjectSelectors(), "test1");
}
@Test
void testCalculateDegreeOfConcurrency() {
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("0"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("-1"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("0x4"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("1.0"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("1."));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("AA"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("C"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("C2.2C"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("C2.2"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("2C2"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("CXXX"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("XXXC"));
int cpus = Runtime.getRuntime().availableProcessors();
assertEquals((int) (cpus * 2.2), cli.calculateDegreeOfConcurrency("2.2C"));
assertEquals(1, cli.calculateDegreeOfConcurrency("0.0001C"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("-2.2C"));
assertThrows(IllegalArgumentException.class, () -> cli.calculateDegreeOfConcurrency("0C"));
}
@Test
void testMavenConfig() throws Exception {
System.setProperty(
MavenCli.MULTIMODULE_PROJECT_DIRECTORY, new File("src/test/projects/config").getCanonicalPath());
CliRequest request = new CliRequest(new String[0], null);
// read .mvn/maven.config
cli.initialize(request);
cli.cli(request);
assertEquals("multithreaded", request.commandLine.getOptionValue(CLIManager.BUILDER));
assertEquals("8", request.commandLine.getOptionValue(CLIManager.THREADS));
// override from command line
request = new CliRequest(new String[] {"--builder", "foobar"}, null);
cli.cli(request);
assertEquals("foobar", request.commandLine.getOptionValue("builder"));
}
@Test
void testMavenConfigInvalid() throws Exception {
System.setProperty(
MavenCli.MULTIMODULE_PROJECT_DIRECTORY,
new File("src/test/projects/config-illegal").getCanonicalPath());
CliRequest request = new CliRequest(new String[0], null);
cli.initialize(request);
assertThrows(ParseException.class, () -> cli.cli(request));
}
/**
* Read .mvn/maven.config with the following definitions:
* <pre>
* -T
* 3
* -Drevision=1.3.0
* "-Dlabel=Apache Maven"
* </pre>
* and check if the {@code -T 3} option can be overwritten via command line
* argument.
*
* @throws Exception in case of failure.
*/
@Test
void testMVNConfigurationThreadCanBeOverwrittenViaCommandLine() throws Exception {
System.setProperty(
MavenCli.MULTIMODULE_PROJECT_DIRECTORY,
new File("src/test/projects/mavenConfigProperties").getCanonicalPath());
CliRequest request = new CliRequest(new String[] {"-T", "5"}, null);
cli.initialize(request);
// read .mvn/maven.config
cli.cli(request);
assertEquals("5", request.commandLine.getOptionValue(CLIManager.THREADS));
}
/**
* Read .mvn/maven.config with the following definitions:
* <pre>
* -T
* 3
* -Drevision=1.3.0
* "-Dlabel=Apache Maven"
* </pre>
* and check if the {@code -Drevision-1.3.0} option can be overwritten via command line
* argument.
*
* @throws Exception
*/
@Test
void testMVNConfigurationDefinedPropertiesCanBeOverwrittenViaCommandLine() throws Exception {
System.setProperty(
MavenCli.MULTIMODULE_PROJECT_DIRECTORY,
new File("src/test/projects/mavenConfigProperties").getCanonicalPath());
CliRequest request = new CliRequest(new String[] {"-Drevision=8.1.0"}, null);
cli.initialize(request);
// read .mvn/maven.config
cli.cli(request);
cli.properties(request);
String revision = request.getUserProperties().getProperty("revision");
assertEquals("8.1.0", revision);
}
/**
* Read .mvn/maven.config with the following definitions:
* <pre>
* -T
* 3
* -Drevision=1.3.0
* "-Dlabel=Apache Maven"
* </pre>
* and check if the {@code -Drevision-1.3.0} option can be overwritten via command line
* argument.
*
* @throws Exception
*/
@Test
void testMVNConfigurationCLIRepeatedPropertiesLastWins() throws Exception {
System.setProperty(
MavenCli.MULTIMODULE_PROJECT_DIRECTORY,
new File("src/test/projects/mavenConfigProperties").getCanonicalPath());
CliRequest request = new CliRequest(new String[] {"-Drevision=8.1.0", "-Drevision=8.2.0"}, null);
cli.initialize(request);
// read .mvn/maven.config
cli.cli(request);
cli.properties(request);
String revision = request.getUserProperties().getProperty("revision");
assertEquals("8.2.0", revision);
}
/**
* Read .mvn/maven.config with the following definitions:
* <pre>
* -T
* 3
* -Drevision=1.3.0
* "-Dlabel=Apache Maven"
* </pre>
* and check if the {@code -Drevision-1.3.0} option can be overwritten via command line argument when there are
* funky arguments present.
*
* @throws Exception
*/
@Test
void testMVNConfigurationFunkyArguments() throws Exception {
System.setProperty(
MavenCli.MULTIMODULE_PROJECT_DIRECTORY,
new File("src/test/projects/mavenConfigProperties").getCanonicalPath());
CliRequest request = new CliRequest(
new String[] {
"-Drevision=8.1.0", "--file=-Dpom.xml", "\"-Dfoo=bar ", "\"-Dfoo2=bar two\"", "-Drevision=8.2.0"
},
null);
cli.initialize(request);
// read .mvn/maven.config
cli.cli(request);
cli.properties(request);
assertEquals("3", request.commandLine.getOptionValue(CLIManager.THREADS));
String revision = request.getUserProperties().getProperty("revision");
assertEquals("8.2.0", revision);
assertEquals("bar ", request.getUserProperties().getProperty("foo"));
assertEquals("bar two", request.getUserProperties().getProperty("foo2"));
assertEquals("Apache Maven", request.getUserProperties().getProperty("label"));
assertEquals("-Dpom.xml", request.getCommandLine().getOptionValue(CLIManager.ALTERNATE_POM_FILE));
}
@Test
void testStyleColors() throws Exception {
assumeTrue(MessageUtils.isColorEnabled(), "ANSI not supported");
CliRequest request;
MessageUtils.setColorEnabled(true);
request = new CliRequest(new String[] {"-B"}, null);
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertFalse(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return false");
MessageUtils.setColorEnabled(true);
request = new CliRequest(new String[] {"--non-interactive"}, null);
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertFalse(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return false");
MessageUtils.setColorEnabled(true);
request = new CliRequest(new String[] {"--force-interactive", "--non-interactive"}, null);
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertTrue(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return true");
MessageUtils.setColorEnabled(true);
request = new CliRequest(new String[] {"-l", "target/temp/mvn.log"}, null);
request.workingDirectory = "target/temp";
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertFalse(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return false");
MessageUtils.setColorEnabled(false);
request = new CliRequest(new String[] {"-Dstyle.color=always"}, null);
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertTrue(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return true");
MessageUtils.setColorEnabled(true);
request = new CliRequest(new String[] {"-Dstyle.color=never"}, null);
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertFalse(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return false");
MessageUtils.setColorEnabled(false);
request = new CliRequest(new String[] {"-Dstyle.color=always", "-B", "-l", "target/temp/mvn.log"}, null);
request.workingDirectory = "target/temp";
cli.cli(request);
cli.properties(request);
cli.logging(request);
assertTrue(MessageUtils.isColorEnabled(), "Expected MessageUtils.isColorEnabled() to return true");
MessageUtils.setColorEnabled(false);
CliRequest maybeColorRequest =
new CliRequest(new String[] {"-Dstyle.color=maybe", "-B", "-l", "target/temp/mvn.log"}, null);
request.workingDirectory = "target/temp";
cli.cli(maybeColorRequest);
cli.properties(maybeColorRequest);
assertThrows(
IllegalArgumentException.class, () -> cli.logging(maybeColorRequest), "maybe is not a valid option");
}
/**
* Verifies MNG-6558
*/
@Test
void testToolchainsBuildingEvents() throws Exception {
final EventSpyDispatcher eventSpyDispatcherMock = mock(EventSpyDispatcher.class);
MavenCli customizedMavenCli = new MavenCli() {
@Override
protected void customizeContainer(PlexusContainer container) {
super.customizeContainer(container);
container.addComponent(mock(Maven.class), "org.apache.maven.Maven");
((DefaultPlexusContainer) container)
.addPlexusInjector(Collections.emptyList(), binder -> binder.bind(EventSpyDispatcher.class)
.toInstance(eventSpyDispatcherMock));
}
};
CliRequest cliRequest = new CliRequest(new String[] {}, null);
customizedMavenCli.cli(cliRequest);
customizedMavenCli.logging(cliRequest);
customizedMavenCli.container(cliRequest);
customizedMavenCli.toolchains(cliRequest);
InOrder orderedEventSpyDispatcherMock = inOrder(eventSpyDispatcherMock);
orderedEventSpyDispatcherMock
.verify(eventSpyDispatcherMock, times(1))
.onEvent(any(ToolchainsBuildingRequest.class));
orderedEventSpyDispatcherMock
.verify(eventSpyDispatcherMock, times(1))
.onEvent(any(ToolchainsBuildingResult.class));
}
@Test
void resumeFromSelectorIsSuggestedWithoutGroupId() {
List<MavenProject> allProjects =
asList(createMavenProject("group", "module-a"), createMavenProject("group", "module-b"));
MavenProject failedProject = allProjects.get(0);
String selector = cli.getResumeFromSelector(allProjects, failedProject);
assertEquals(":module-a", selector);
}
@Test
void resumeFromSelectorContainsGroupIdWhenArtifactIdIsNotUnique() {
List<MavenProject> allProjects =
asList(createMavenProject("group-a", "module"), createMavenProject("group-b", "module"));
MavenProject failedProject = allProjects.get(0);
String selector = cli.getResumeFromSelector(allProjects, failedProject);
assertEquals("group-a:module", selector);
}
@Test
void verifyLocalRepositoryPath() throws Exception {
MavenCli cli = new MavenCli();
CliRequest request = new CliRequest(new String[] {}, null);
request.commandLine = new CommandLine.Builder().build();
MavenExecutionRequest executionRequest;
// Use default
cli.cli(request);
executionRequest = cli.populateRequest(request);
assertNull(executionRequest.getLocalRepositoryPath());
// System-properties override default
request.getSystemProperties().setProperty(Constants.MAVEN_REPO_LOCAL, "." + File.separatorChar + "custom1");
executionRequest = cli.populateRequest(request);
assertNotNull(executionRequest.getLocalRepositoryPath());
assertEquals(
"." + File.separatorChar + "custom1",
executionRequest.getLocalRepositoryPath().toString());
// User-properties override system properties
request.getUserProperties().setProperty(Constants.MAVEN_REPO_LOCAL, "." + File.separatorChar + "custom2");
executionRequest = cli.populateRequest(request);
assertNotNull(executionRequest.getLocalRepositoryPath());
assertEquals(
"." + File.separatorChar + "custom2",
executionRequest.getLocalRepositoryPath().toString());
}
/**
* MNG-7032: Disable colours for {@code --version} if {@code --batch-mode} is also given.
* @throws Exception cli invocation.
*/
@Test
void testVersionStringWithoutAnsi() throws Exception {
// given
// - request with version and batch mode
CliRequest cliRequest = new CliRequest(new String[] {"--version", "--batch-mode"}, null);
ByteArrayOutputStream systemOut = new ByteArrayOutputStream();
PrintStream oldOut = System.out;
System.setOut(new PrintStream(systemOut));
// when
try {
cli.cli(cliRequest);
} catch (MavenCli.ExitException exitException) {
// expected
} finally {
// restore sysout
System.setOut(oldOut);
}
String versionOut = new String(systemOut.toByteArray(), StandardCharsets.UTF_8);
// then
assertEquals(stripAnsiCodes(versionOut), versionOut);
}
@Test
void populatePropertiesCanContainEqualsSign() throws Exception {
// Arrange
CliRequest request = new CliRequest(new String[] {"-Dw=x=y", "validate"}, null);
// Act
cli.cli(request);
cli.properties(request);
// Assert
assertEquals("x=y", request.getUserProperties().getProperty("w"));
}
@Test
void populatePropertiesSpace() throws Exception {
// Arrange
CliRequest request = new CliRequest(new String[] {"-D", "z=2", "validate"}, null);
// Act
cli.cli(request);
cli.properties(request);
// Assert
assertEquals("2", request.getUserProperties().getProperty("z"));
}
@Test
void populatePropertiesShorthand() throws Exception {
// Arrange
CliRequest request = new CliRequest(new String[] {"-Dx", "validate"}, null);
// Act
cli.cli(request);
cli.properties(request);
// Assert
assertEquals("true", request.getUserProperties().getProperty("x"));
}
@Test
void populatePropertiesMultiple() throws Exception {
// Arrange
CliRequest request = new CliRequest(new String[] {"-Dx=1", "-Dy", "validate"}, null);
// Act
cli.cli(request);
cli.properties(request);
// Assert
assertEquals("1", request.getUserProperties().getProperty("x"));
assertEquals("true", request.getUserProperties().getProperty("y"));
}
@Test
void populatePropertiesOverwrite() throws Exception {
// Arrange
CliRequest request = new CliRequest(new String[] {"-Dx", "-Dx=false", "validate"}, null);
// Act
cli.cli(request);
cli.properties(request);
// Assert
assertEquals("false", request.getUserProperties().getProperty("x"));
}
@Test
public void findRootProjectWithAttribute() {
Path test = Paths.get("src/test/projects/root-attribute");
assertEquals(test, new DefaultRootLocator().findRoot(test.resolve("child")));
}
@Test
public void testPropertiesInterpolation() throws Exception {
FileSystem fs = Jimfs.newFileSystem(Configuration.windows());
Path mavenHome = fs.getPath("C:\\maven");
Files.createDirectories(mavenHome);
Path mavenConf = mavenHome.resolve("conf");
Files.createDirectories(mavenConf);
Path mavenUserProps = mavenConf.resolve("maven-user.properties");
Files.writeString(mavenUserProps, "${includes} = ?${session.rootDirectory}/.mvn/maven-user.properties\n");
Path rootDirectory = fs.getPath("C:\\myRootDirectory");
Path topDirectory = rootDirectory.resolve("myTopDirectory");
Path mvn = rootDirectory.resolve(".mvn");
Files.createDirectories(mvn);
Files.writeString(
mvn.resolve("maven-user.properties"),
"${includes} = env-${envName}.properties\nfro = ${bar}z\n" + "bar = chti${java.version}\n");
Files.writeString(mvn.resolve("env-test.properties"), "\n");
// Arrange
CliRequest request = new CliRequest(
new String[] {
"-DenvName=test",
"-Dfoo=bar",
"-DvalFound=s${foo}i",
"-DvalNotFound=s${foz}i",
"-DvalRootDirectory=${session.rootDirectory}/.mvn/foo",
"-DvalTopDirectory=${session.topDirectory}/pom.xml",
"-f",
"${session.rootDirectory}/my-child",
"prefix:3.0.0:${foo}",
"validate"
},
null);
request.rootDirectory = rootDirectory;
request.topDirectory = topDirectory;
System.setProperty("maven.installation.conf", mavenConf.toString());
// Act
cli.setFileSystem(fs);
cli.cli(request);
cli.properties(request);
// Assert
assertTrue(request.getUserProperties().getProperty("fro").startsWith("chti"));
assertEquals("sbari", request.getUserProperties().getProperty("valFound"));
assertEquals("s${foz}i", request.getUserProperties().getProperty("valNotFound"));
assertEquals("C:\\myRootDirectory/.mvn/foo", request.getUserProperties().getProperty("valRootDirectory"));
assertEquals(
"C:\\myRootDirectory\\myTopDirectory/pom.xml",
request.getUserProperties().getProperty("valTopDirectory"));
assertEquals("C:\\myRootDirectory/my-child", request.getCommandLine().getOptionValue('f'));
assertArrayEquals(
new String[] {"prefix:3.0.0:bar", "validate"},
request.getCommandLine().getArgs());
Path p = fs.getPath(request.getUserProperties().getProperty("valTopDirectory"));
assertEquals("C:\\myRootDirectory\\myTopDirectory\\pom.xml", p.toString());
}
@Test
public void testEmptyProfile() throws Exception {
CliRequest request = new CliRequest(new String[] {"-P", ""}, null);
cli.cli(request);
cli.populateRequest(request);
}
@Test
public void testEmptyProject() throws Exception {
CliRequest request = new CliRequest(new String[] {"-pl", ""}, null);
cli.cli(request);
cli.populateRequest(request);
}
@ParameterizedTest
@MethodSource("activateBatchModeArguments")
public void activateBatchMode(boolean ciEnv, String[] cliArgs, boolean isBatchMode) throws Exception {
CliRequest request = new CliRequest(cliArgs, null);
if (ciEnv) {
request.getSystemProperties().put("env.CI", "true");
}
cli.cli(request);
boolean batchMode = !cli.populateRequest(request).isInteractiveMode();
assertEquals(isBatchMode, batchMode);
}
public static Stream<Arguments> activateBatchModeArguments() {
return Stream.of(
Arguments.of(false, new String[] {}, false),
Arguments.of(true, new String[] {}, true),
Arguments.of(true, new String[] {"--force-interactive"}, false),
Arguments.of(true, new String[] {"--force-interactive", "--non-interactive"}, false),
Arguments.of(true, new String[] {"--force-interactive", "--batch-mode"}, false),
Arguments.of(true, new String[] {"--force-interactive", "--non-interactive", "--batch-mode"}, false),
Arguments.of(false, new String[] {"--non-interactive"}, true),
Arguments.of(false, new String[] {"--batch-mode"}, true),
Arguments.of(false, new String[] {"--non-interactive", "--batch-mode"}, true));
}
@ParameterizedTest
@MethodSource("calculateTransferListenerArguments")
public void calculateTransferListener(boolean ciEnv, String[] cliArgs, Class<TransferListener> expectedSubClass)
throws Exception {
CliRequest request = new CliRequest(cliArgs, null);
if (ciEnv) {
request.getSystemProperties().put("env.CI", "true");
}
cli.cli(request);
cli.logging(request);
TransferListener transferListener = cli.populateRequest(request).getTransferListener();
if (transferListener instanceof SimplexTransferListener simplexTransferListener) {
transferListener = simplexTransferListener.getDelegate();
}
assertEquals(expectedSubClass, transferListener.getClass());
}
public static Stream<Arguments> calculateTransferListenerArguments() {
return Stream.of(
Arguments.of(false, new String[] {}, ConsoleMavenTransferListener.class),
Arguments.of(true, new String[] {}, QuietMavenTransferListener.class),
Arguments.of(false, new String[] {"-ntp"}, QuietMavenTransferListener.class),
Arguments.of(false, new String[] {"--quiet"}, QuietMavenTransferListener.class),
Arguments.of(true, new String[] {"--force-interactive"}, ConsoleMavenTransferListener.class),
Arguments.of(
true,
new String[] {"--force-interactive", "--non-interactive"},
ConsoleMavenTransferListener.class),
Arguments.of(
true, new String[] {"--force-interactive", "--batch-mode"}, ConsoleMavenTransferListener.class),
Arguments.of(
true,
new String[] {"--force-interactive", "--non-interactive", "--batch-mode"},
ConsoleMavenTransferListener.class),
Arguments.of(false, new String[] {"--non-interactive"}, Slf4jMavenTransferListener.class),
Arguments.of(false, new String[] {"--batch-mode"}, Slf4jMavenTransferListener.class),
Arguments.of(
false, new String[] {"--non-interactive", "--batch-mode"}, Slf4jMavenTransferListener.class));
}
private MavenProject createMavenProject(String groupId, String artifactId) {
MavenProject project = new MavenProject();
project.setGroupId(groupId);
project.setArtifactId(artifactId);
return project;
}
static String stripAnsiCodes(String msg) {
return msg.replaceAll("\u001b\\[[;\\d]*[ -/]*[@-~]", "");
}
}
|
MavenCliTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java
|
{
"start": 6094,
"end": 6664
}
|
class ____ extends LocalStateCompositeXPackPlugin {
public P(final Settings settings, final Path configPath) {
super(settings, configPath);
plugins.add(new LogsDBPlugin(settings) {
@Override
protected XPackLicenseState getLicenseState() {
return P.this.getLicenseState();
}
@Override
protected LicenseService getLicenseService() {
return P.this.getLicenseService();
}
});
}
}
}
|
P
|
java
|
apache__camel
|
components/camel-dynamic-router/src/main/java/org/apache/camel/component/dynamicrouter/control/DynamicRouterControlProducer.java
|
{
"start": 3158,
"end": 13390
}
|
class ____ extends HeaderSelectorProducer {
/**
* The {@link DynamicRouterControlService} for the Dynamic Router.
*/
private final DynamicRouterControlService dynamicRouterControlService;
/**
* The configuration for the Dynamic Router.
*/
private final DynamicRouterControlConfiguration configuration;
/**
* Create the {@link org.apache.camel.Producer} for the Dynamic Router with the supplied {@link Endpoint} URI.
*
* @param endpoint the {@link DynamicRouterEndpoint}
* @param dynamicRouterControlService the {@link DynamicRouterControlService}
* @param configuration the configuration for the Dynamic Router
*/
public DynamicRouterControlProducer(final DynamicRouterControlEndpoint endpoint,
final DynamicRouterControlService dynamicRouterControlService,
final DynamicRouterControlConfiguration configuration) {
super(endpoint, CONTROL_ACTION_HEADER, configuration::getControlActionOrDefault);
this.dynamicRouterControlService = dynamicRouterControlService;
this.configuration = configuration;
}
/**
* Create a filter from parameters in the message body.
*
* @param dynamicRouterControlService the {@link DynamicRouterControlService}
* @param message the message, where the body contains a control message
* @param update whether to update an existing filter (true) or add a new one (false)
* @return the ID of the added filter
*/
static String subscribeFromMessage(
final DynamicRouterControlService dynamicRouterControlService,
final Message message, final boolean update) {
DynamicRouterControlMessage messageBody = message.getBody(DynamicRouterControlMessage.class);
String subscriptionId = messageBody.getSubscriptionId();
String subscribeChannel = messageBody.getSubscribeChannel();
String destinationUri = messageBody.getDestinationUri();
String priority = String.valueOf(messageBody.getPriority());
String predicate = messageBody.getPredicate();
String predicateBean = messageBody.getPredicateBean();
String expressionLanguage = messageBody.getExpressionLanguage();
if (ObjectHelper.isNotEmpty(predicateBean)) {
return dynamicRouterControlService.subscribeWithPredicateBean(subscribeChannel, subscriptionId,
destinationUri, Integer.parseInt(priority), predicateBean, update);
} else if (ObjectHelper.isNotEmpty(predicate) && ObjectHelper.isNotEmpty(expressionLanguage)) {
return dynamicRouterControlService.subscribeWithPredicateExpression(subscribeChannel, subscriptionId,
destinationUri, Integer.parseInt(priority), predicate, expressionLanguage, update);
} else {
throw new IllegalStateException(ERROR_NO_PREDICATE_BEAN_FOUND);
}
}
/**
* Create a filter from parameters in message headers.
*
* @param dynamicRouterControlService the {@link DynamicRouterControlService}
* @param message the message, where the headers contain subscription params
* @param update whether to update an existing filter (true) or add a new one (false)
* @return the ID of the added filter
*/
static String subscribeFromHeaders(
final DynamicRouterControlService dynamicRouterControlService,
final Message message, final boolean update) {
Map<String, Object> headers = message.getHeaders();
String subscriptionId = (String) headers.get(CONTROL_SUBSCRIPTION_ID);
String subscribeChannel = (String) headers.get(CONTROL_SUBSCRIBE_CHANNEL);
String destinationUri = (String) headers.get(CONTROL_DESTINATION_URI);
String priority = String.valueOf(headers.get(CONTROL_PRIORITY));
String predicate = (String) headers.get(CONTROL_PREDICATE);
String predicateBean = (String) headers.get(CONTROL_PREDICATE_BEAN);
String expressionLanguage = Optional.ofNullable((String) headers.get(CONTROL_EXPRESSION_LANGUAGE))
.orElse(SIMPLE_LANGUAGE);
if (ObjectHelper.isNotEmpty(predicateBean)) {
return dynamicRouterControlService.subscribeWithPredicateBean(subscribeChannel, subscriptionId,
destinationUri, Integer.parseInt(priority), predicateBean, update);
} else if (ObjectHelper.isNotEmpty(predicate) && ObjectHelper.isNotEmpty(expressionLanguage)) {
return dynamicRouterControlService.subscribeWithPredicateExpression(subscribeChannel, subscriptionId,
destinationUri, Integer.parseInt(priority), predicate, expressionLanguage, update);
} else {
return dynamicRouterControlService.subscribeWithPredicateInstance(subscribeChannel, subscriptionId,
destinationUri, Integer.parseInt(priority), message.getBody(), update);
}
}
/**
* Performs "subscribe" if the {@link DynamicRouterControlConstants#CONTROL_ACTION_HEADER} header has a value of
* {@link DynamicRouterControlConstants#CONTROL_ACTION_SUBSCRIBE}.
*
* @param message the incoming message from the exchange
* @param callback the async callback
*/
@InvokeOnHeader(CONTROL_ACTION_SUBSCRIBE)
public void performSubscribe(final Message message, AsyncCallback callback) {
String filterId;
if (message.getBody() instanceof DynamicRouterControlMessage) {
filterId = subscribeFromMessage(dynamicRouterControlService, message, false);
} else {
filterId = subscribeFromHeaders(dynamicRouterControlService, message, false);
}
message.setBody(filterId);
callback.done(false);
}
/**
* Performs "unsubscribe" if the {@link DynamicRouterControlConstants#CONTROL_ACTION_HEADER} header has a value of
* {@link DynamicRouterControlConstants#CONTROL_ACTION_UNSUBSCRIBE}.
*
* @param message the incoming message from the exchange
* @param callback the async callback
*/
@InvokeOnHeader(CONTROL_ACTION_UNSUBSCRIBE)
public void performUnsubscribe(final Message message, AsyncCallback callback) {
String subscriptionId;
String subscribeChannel;
if (message.getBody() instanceof DynamicRouterControlMessage) {
DynamicRouterControlMessage controlMessage = message.getBody(DynamicRouterControlMessage.class);
subscriptionId = controlMessage.getSubscriptionId();
subscribeChannel = controlMessage.getSubscribeChannel();
} else {
subscriptionId = message.getHeader(CONTROL_SUBSCRIPTION_ID, configuration.getSubscriptionId(), String.class);
subscribeChannel = message.getHeader(CONTROL_SUBSCRIBE_CHANNEL, configuration.getSubscribeChannel(), String.class);
}
boolean result = dynamicRouterControlService.removeSubscription(subscribeChannel, subscriptionId);
message.setBody(result, boolean.class);
callback.done(false);
}
/**
* Performs "update" if the {@link DynamicRouterControlConstants#CONTROL_ACTION_HEADER} header has a value of
* {@link DynamicRouterControlConstants#CONTROL_ACTION_UPDATE}.
*
* @param message the incoming message from the exchange
* @param callback the async callback
*/
@InvokeOnHeader(CONTROL_ACTION_UPDATE)
public void performUpdate(final Message message, AsyncCallback callback) {
String filterId;
if (message.getBody() instanceof DynamicRouterControlMessage) {
filterId = subscribeFromMessage(dynamicRouterControlService, message, true);
} else {
filterId = subscribeFromHeaders(dynamicRouterControlService, message, true);
}
message.setBody(filterId);
callback.done(false);
}
/**
* Performs a "list" of the subscriptions of the channel if the
* {@link DynamicRouterControlConstants#CONTROL_ACTION_HEADER} header has a value of
* {@link DynamicRouterControlConstants#CONTROL_ACTION_LIST}.
*
* @param exchange the incoming exchange
* @param callback the async callback
*/
@InvokeOnHeader(CONTROL_ACTION_LIST)
public void performList(final Exchange exchange, AsyncCallback callback) {
Message message = exchange.getMessage();
Map<String, Object> headers = message.getHeaders();
String subscribeChannel = (String) headers.getOrDefault(CONTROL_SUBSCRIBE_CHANNEL, configuration.getSubscribeChannel());
try {
String filters = dynamicRouterControlService.getSubscriptionsForChannel(subscribeChannel);
message.setBody(filters, String.class);
} catch (Exception e) {
exchange.setException(e);
} finally {
callback.done(false);
}
}
/**
* Performs the retrieval of routing "statistics" of the channel if the
* {@link DynamicRouterControlConstants#CONTROL_ACTION_HEADER} header has a value of
* {@link DynamicRouterControlConstants#CONTROL_ACTION_STATS}.
*
* @param exchange the incoming exchange
* @param callback the async callback
*/
@InvokeOnHeader(CONTROL_ACTION_STATS)
public void performStats(final Exchange exchange, AsyncCallback callback) {
Message message = exchange.getMessage();
Map<String, Object> headers = message.getHeaders();
String subscribeChannel = (String) headers.getOrDefault(CONTROL_SUBSCRIBE_CHANNEL, configuration.getSubscribeChannel());
try {
String stats = dynamicRouterControlService.getStatisticsForChannel(subscribeChannel);
message.setBody(stats, String.class);
} catch (Exception e) {
exchange.setException(e);
} finally {
callback.done(false);
}
}
/**
* Create a {@link DynamicRouterControlProducer} instance.
*/
public static
|
DynamicRouterControlProducer
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/config/BlockingExecutionConfigurer.java
|
{
"start": 1000,
"end": 2339
}
|
class ____ {
private @Nullable AsyncTaskExecutor executor;
private @Nullable Predicate<HandlerMethod> blockingControllerMethodPredicate;
/**
* Configure an executor to invoke blocking controller methods with.
* <p>By default, this is not set in which case controller methods are
* invoked without the use of an Executor.
* @param executor the task executor to use
*/
public BlockingExecutionConfigurer setExecutor(AsyncTaskExecutor executor) {
this.executor = executor;
return this;
}
/**
* Configure a predicate to decide if a controller method is blocking and
* should be called on a separate thread if an executor is
* {@linkplain #setExecutor configured}.
* <p>The default predicate matches controller methods whose return type is
* not recognized by the configured
* {@link org.springframework.core.ReactiveAdapterRegistry}.
* @param predicate the predicate to use
*/
public BlockingExecutionConfigurer setControllerMethodPredicate(Predicate<HandlerMethod> predicate) {
this.blockingControllerMethodPredicate = predicate;
return this;
}
protected @Nullable AsyncTaskExecutor getExecutor() {
return this.executor;
}
protected @Nullable Predicate<HandlerMethod> getBlockingControllerMethodPredicate() {
return this.blockingControllerMethodPredicate;
}
}
|
BlockingExecutionConfigurer
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/annotatewith/AnnotateWithTest.java
|
{
"start": 1048,
"end": 25841
}
|
class ____ {
@RegisterExtension
final GeneratedSource generatedSource = new GeneratedSource();
@ProcessorTest
@WithClasses({ DeprecateAndCustomMapper.class, CustomAnnotation.class })
public void mapperBecomesDeprecatedAndGetsCustomAnnotation() {
DeprecateAndCustomMapper mapper = Mappers.getMapper( DeprecateAndCustomMapper.class );
assertThat( mapper.getClass() ).hasAnnotations( Deprecated.class, CustomAnnotation.class );
}
@ProcessorTest
@WithClasses( {
CustomNamedMapper.class,
CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class,
CustomClassOnlyAnnotation.class,
CustomMethodOnlyAnnotation.class,
} )
public void annotationWithValue() {
generatedSource.addComparisonToFixtureFor( CustomNamedMapper.class );
}
@ProcessorTest
@WithClasses( { MultipleArrayValuesMapper.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void annotationWithMultipleValues() {
generatedSource.addComparisonToFixtureFor( MultipleArrayValuesMapper.class );
}
@ProcessorTest
@WithClasses( { CustomNamedGenericClassMapper.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void annotationWithCorrectGenericClassValue() {
CustomNamedGenericClassMapper mapper = Mappers.getMapper( CustomNamedGenericClassMapper.class );
CustomAnnotationWithParams annotation = mapper.getClass().getAnnotation( CustomAnnotationWithParams.class );
assertThat( annotation ).isNotNull();
assertThat( annotation.stringParam() ).isEqualTo( "test" );
assertThat( annotation.genericTypedClass() ).isEqualTo( Mapper.class );
}
@ProcessorTest
@WithClasses( { AnnotationWithoutElementNameMapper.class, CustomAnnotation.class } )
public void annotateWithoutElementName() {
generatedSource
.forMapper( AnnotationWithoutElementNameMapper.class )
.content()
.contains( "@CustomAnnotation(value = \"value\")" );
}
@ProcessorTest
@WithClasses({ MetaAnnotatedMapper.class, ClassMetaAnnotation.class, CustomClassOnlyAnnotation.class })
public void metaAnnotationWorks() {
MetaAnnotatedMapper mapper = Mappers.getMapper( MetaAnnotatedMapper.class );
assertThat( mapper.getClass() ).hasAnnotation( CustomClassOnlyAnnotation.class );
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithMissingParameter.class,
line = 15,
message = "Parameter \"required\" is required for annotation \"AnnotationWithRequiredParameter\"."
)
}
)
@WithClasses({ ErroneousMapperWithMissingParameter.class, AnnotationWithRequiredParameter.class })
public void erroneousMapperWithMissingParameter() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithMethodOnInterface.class,
line = 15,
message = "Annotation \"CustomMethodOnlyAnnotation\" is not allowed on classes."
)
}
)
@WithClasses({ ErroneousMapperWithMethodOnInterface.class, CustomMethodOnlyAnnotation.class })
public void erroneousMapperWithMethodOnInterface() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithMethodOnClass.class,
line = 15,
message = "Annotation \"CustomMethodOnlyAnnotation\" is not allowed on classes."
)
}
)
@WithClasses({ ErroneousMapperWithMethodOnClass.class, CustomMethodOnlyAnnotation.class })
public void erroneousMapperWithMethodOnClass() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithAnnotationOnlyOnInterface.class,
line = 15,
message = "Annotation \"CustomAnnotationOnlyAnnotation\" is not allowed on classes."
)
}
)
@WithClasses({ ErroneousMapperWithAnnotationOnlyOnInterface.class, CustomAnnotationOnlyAnnotation.class })
public void erroneousMapperWithAnnotationOnlyOnInterface() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithAnnotationOnlyOnClass.class,
line = 15,
message = "Annotation \"CustomAnnotationOnlyAnnotation\" is not allowed on classes."
)
}
)
@WithClasses({ ErroneousMapperWithAnnotationOnlyOnClass.class, CustomAnnotationOnlyAnnotation.class })
public void erroneousMapperWithAnnotationOnlyOnClass() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithClassOnMethod.class,
line = 18,
message = "Annotation \"CustomClassOnlyAnnotation\" is not allowed on methods."
)
}
)
@WithClasses({ ErroneousMapperWithClassOnMethod.class, CustomClassOnlyAnnotation.class, WithProperties.class })
public void erroneousMapperWithClassOnMethod() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithUnknownParameter.class,
line = 17,
message = "Unknown parameter \"unknownParameter\" for annotation \"CustomAnnotation\"." +
" Did you mean \"value\"?"
)
}
)
@WithClasses({ ErroneousMapperWithUnknownParameter.class, CustomAnnotation.class })
public void erroneousMapperWithUnknownParameter() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithNonExistantEnum.class,
line = 17,
message = "Enum \"AnnotateWithEnum\" does not have value \"NON_EXISTANT\"."
)
}
)
@WithClasses( { ErroneousMapperWithNonExistantEnum.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void erroneousMapperWithNonExistantEnum() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithTooManyParameterValues.class,
line = 17,
message = "Parameter \"stringParam\" has too many value types supplied, type \"String\" is expected"
+ " for annotation \"CustomAnnotationWithParams\"."
)
}
)
@WithClasses( { ErroneousMapperWithTooManyParameterValues.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void erroneousMapperWithTooManyParameterValues() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 16,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"boolean\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 18,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"byte\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 20,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"char\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 22,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"CustomAnnotationWithParams\""
+ " but of type \"String\" for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 24,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"double\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 26,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"float\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 28,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"int\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 30,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"long\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 32,
alternativeLine = 43,
message = "Parameter \"stringParam\" is not of type \"short\" but of type \"String\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 35,
alternativeLine = 43,
message = "Parameter \"genericTypedClass\" is not of type \"String\" "
+ "but of type \"Class<? extends java.lang.annotation.Annotation>\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 36,
alternativeLine = 43,
message = "Parameter \"enumParam\" is not of type \"WrongAnnotateWithEnum\" "
+ "but of type \"AnnotateWithEnum\" for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 40,
alternativeLine = 43,
message = "Parameter \"genericTypedClass\" is not of type \"ErroneousMapperWithWrongParameter\" "
+ "but of type \"Class<? extends java.lang.annotation.Annotation>\" "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithWrongParameter.class,
line = 42,
alternativeLine = 43,
message = "Parameter \"value\" is not of type \"boolean\" "
+ "but of type \"String\" for annotation \"CustomAnnotation\"."
)
}
)
@WithClasses({
ErroneousMapperWithWrongParameter.class, CustomAnnotationWithParams.class,
CustomAnnotationWithParamsContainer.class, WrongAnnotateWithEnum.class, CustomAnnotation.class
})
public void erroneousMapperWithWrongParameter() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 17,
alternativeLine = 43,
message = "Parameter \"stringParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 18,
alternativeLine = 43,
message = "Parameter \"booleanParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 19,
alternativeLine = 32,
message = "Parameter \"byteParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 20,
alternativeLine = 32,
message = "Parameter \"charParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 21,
alternativeLine = 32,
message = "Parameter \"doubleParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 22,
alternativeLine = 32,
message = "Parameter \"floatParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 23,
alternativeLine = 32,
message = "Parameter \"intParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 24,
alternativeLine = 32,
message = "Parameter \"longParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 25,
alternativeLine = 32,
message = "Parameter \"shortParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 26,
alternativeLine = 32,
message = "Parameter \"genericTypedClass\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
),
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMultipleArrayValuesMapper.class,
line = 27,
alternativeLine = 32,
message = "Parameter \"enumParam\" does not accept multiple values "
+ "for annotation \"CustomAnnotationWithParams\"."
)
}
)
@WithClasses( { ErroneousMultipleArrayValuesMapper.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void erroneousMapperUsingMultipleValuesInsteadOfSingle() {
}
@ProcessorTest
@WithClasses( { MapperWithMissingAnnotationElementName.class,
CustomAnnotationWithTwoAnnotationElements.class } )
public void mapperWithMissingAnnotationElementNameShouldCompile() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithMissingEnumClass.class,
line = 17,
message = "enumClass needs to be defined when using enums."
)
}
)
@WithClasses( { ErroneousMapperWithMissingEnumClass.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void erroneousMapperWithMissingEnumClass() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithMissingEnums.class,
line = 17,
message = "enums needs to be defined when using enumClass."
)
}
)
@WithClasses( { ErroneousMapperWithMissingEnums.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void erroneousMapperWithMissingEnums() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithRepeatOfNotRepeatableAnnotation.class,
line = 16,
alternativeLine = 17,
message = "Annotation \"CustomAnnotation\" is not repeatable."
)
}
)
@WithClasses( { ErroneousMapperWithRepeatOfNotRepeatableAnnotation.class, CustomAnnotation.class } )
public void erroneousMapperWithRepeatOfNotRepeatableAnnotation() {
}
@ProcessorTest
@WithClasses( { MapperWithRepeatableAnnotation.class, CustomRepeatableAnnotation.class,
CustomRepeatableAnnotationContainer.class } )
public void mapperWithRepeatableAnnotationShouldCompile() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.ERROR,
type = ErroneousMapperWithParameterRepeat.class,
line = 18,
message = "Parameter \"stringParam\" must not be defined more than once."
)
}
)
@WithClasses( { ErroneousMapperWithParameterRepeat.class, CustomAnnotationWithParamsContainer.class,
CustomAnnotationWithParams.class } )
public void erroneousMapperWithParameterRepeat() {
}
@ProcessorTest
@ExpectedCompilationOutcome(
value = CompilationResult.SUCCEEDED,
diagnostics = {
@Diagnostic(
kind = javax.tools.Diagnostic.Kind.WARNING,
type = MapperWithIdenticalAnnotationRepeated.class,
line = 16,
alternativeLine = 17,
message = "Annotation \"CustomRepeatableAnnotation\" is already present "
+ "with the same elements configuration."
)
}
)
@WithClasses( { MapperWithIdenticalAnnotationRepeated.class, CustomRepeatableAnnotation.class,
CustomRepeatableAnnotationContainer.class } )
public void mapperWithIdenticalAnnotationRepeated() {
}
@ProcessorTest
@WithClasses( {AnnotateBeanMappingMethodMapper.class, CustomMethodOnlyAnnotation.class} )
public void beanMappingMethodWithCorrectCustomAnnotation() throws NoSuchMethodException {
AnnotateBeanMappingMethodMapper mapper = Mappers.getMapper( AnnotateBeanMappingMethodMapper.class );
Method method = mapper.getClass().getMethod( "map", AnnotateBeanMappingMethodMapper.Source.class );
assertThat( method.getAnnotation( CustomMethodOnlyAnnotation.class ) ).isNotNull();
}
@ProcessorTest
@WithClasses( {AnnotateIterableMappingMethodMapper.class, CustomMethodOnlyAnnotation.class} )
public void iterableMappingMethodWithCorrectCustomAnnotation() throws NoSuchMethodException {
AnnotateIterableMappingMethodMapper mapper = Mappers.getMapper( AnnotateIterableMappingMethodMapper.class );
Method method = mapper.getClass().getMethod( "toStringList", List.class );
assertThat( method.getAnnotation( CustomMethodOnlyAnnotation.class ) ).isNotNull();
}
@ProcessorTest
@WithClasses( {AnnotateMapMappingMethodMapper.class, CustomMethodOnlyAnnotation.class} )
public void mapMappingMethodWithCorrectCustomAnnotation() throws NoSuchMethodException {
AnnotateMapMappingMethodMapper mapper = Mappers.getMapper( AnnotateMapMappingMethodMapper.class );
Method method = mapper.getClass().getMethod( "longDateMapToStringStringMap", Map.class );
assertThat( method.getAnnotation( CustomMethodOnlyAnnotation.class ) ).isNotNull();
}
@ProcessorTest
@WithClasses( {AnnotateStreamMappingMethodMapper.class, CustomMethodOnlyAnnotation.class} )
public void streamMappingMethodWithCorrectCustomAnnotation() throws NoSuchMethodException {
AnnotateStreamMappingMethodMapper mapper = Mappers.getMapper( AnnotateStreamMappingMethodMapper.class );
Method method = mapper.getClass().getMethod( "toStringStream", Stream.class );
assertThat( method.getAnnotation( CustomMethodOnlyAnnotation.class ) ).isNotNull();
}
@ProcessorTest
@WithClasses( {AnnotateValueMappingMethodMapper.class, AnnotateWithEnum.class, CustomMethodOnlyAnnotation.class} )
public void valueMappingMethodWithCorrectCustomAnnotation() throws NoSuchMethodException {
AnnotateValueMappingMethodMapper mapper = Mappers.getMapper( AnnotateValueMappingMethodMapper.class );
Method method = mapper.getClass().getMethod( "map", String.class );
assertThat( method.getAnnotation( CustomMethodOnlyAnnotation.class ) ).isNotNull();
}
}
|
AnnotateWithTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Issue963.java
|
{
"start": 1051,
"end": 1376
}
|
class ____ {
@JSONField(serializeUsing = EnumTypeCodec.class, deserializeUsing = EnumTypeCodec.class)
private EnumType type;
public EnumType getType() {
return type;
}
public void setType(EnumType type) {
this.type = type;
}
}
public static
|
Mock
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
|
{
"start": 124256,
"end": 124730
}
|
class ____ extends FullTextFunctionTestCase {
QueryStringFunctionTestCase() {
super(QueryString.class);
}
@Override
public QueryBuilder queryBuilder() {
return new QueryStringQueryBuilder(fieldName() + ": " + queryString());
}
@Override
public String esqlQuery() {
return "qstr(\"" + fieldName() + ": " + queryString() + "\")";
}
}
private
|
QueryStringFunctionTestCase
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskManagerRunner.java
|
{
"start": 5378,
"end": 33448
}
|
class ____ implements FatalErrorHandler {
private static final Logger LOG = LoggerFactory.getLogger(TaskManagerRunner.class);
private static final long FATAL_ERROR_SHUTDOWN_TIMEOUT_MS = 10000L;
private static final int SUCCESS_EXIT_CODE = 0;
@VisibleForTesting public static final int FAILURE_EXIT_CODE = 1;
private final Thread shutdownHook;
private final Object lock = new Object();
private final Configuration configuration;
private final Duration timeout;
private final PluginManager pluginManager;
private final TaskExecutorServiceFactory taskExecutorServiceFactory;
private final CompletableFuture<Result> terminationFuture;
@GuardedBy("lock")
private DeterminismEnvelope<ResourceID> resourceId;
/** Executor used to run future callbacks. */
@GuardedBy("lock")
private ExecutorService executor;
@GuardedBy("lock")
private RpcSystem rpcSystem;
@GuardedBy("lock")
private RpcService rpcService;
@GuardedBy("lock")
private HighAvailabilityServices highAvailabilityServices;
@GuardedBy("lock")
private MetricRegistryImpl metricRegistry;
@GuardedBy("lock")
private BlobCacheService blobCacheService;
@GuardedBy("lock")
private DeterminismEnvelope<WorkingDirectory> workingDirectory;
@GuardedBy("lock")
private TaskExecutorService taskExecutorService;
@GuardedBy("lock")
private boolean shutdown;
public TaskManagerRunner(
Configuration configuration,
PluginManager pluginManager,
TaskExecutorServiceFactory taskExecutorServiceFactory)
throws Exception {
this.configuration = checkNotNull(configuration);
this.pluginManager = checkNotNull(pluginManager);
this.taskExecutorServiceFactory = checkNotNull(taskExecutorServiceFactory);
timeout = configuration.get(RpcOptions.ASK_TIMEOUT_DURATION);
this.terminationFuture = new CompletableFuture<>();
this.shutdown = false;
this.shutdownHook =
ShutdownHookUtil.addShutdownHook(
() -> this.closeAsync(Result.JVM_SHUTDOWN).join(),
getClass().getSimpleName(),
LOG);
}
private void startTaskManagerRunnerServices() throws Exception {
synchronized (lock) {
rpcSystem = RpcSystem.load(configuration);
this.executor =
Executors.newScheduledThreadPool(
Hardware.getNumberCPUCores(),
new ExecutorThreadFactory("taskmanager-future"));
highAvailabilityServices =
HighAvailabilityServicesUtils.createHighAvailabilityServices(
configuration,
executor,
AddressResolution.NO_ADDRESS_RESOLUTION,
rpcSystem,
this);
JMXService.startInstance(configuration.get(JMXServerOptions.JMX_SERVER_PORT));
rpcService = createRpcService(configuration, highAvailabilityServices, rpcSystem);
this.resourceId =
getTaskManagerResourceID(
configuration, rpcService.getAddress(), rpcService.getPort());
this.workingDirectory =
ClusterEntrypointUtils.createTaskManagerWorkingDirectory(
configuration, resourceId);
LOG.info("Using working directory: {}", workingDirectory);
HeartbeatServices heartbeatServices =
HeartbeatServices.fromConfiguration(configuration);
metricRegistry =
new MetricRegistryImpl(
MetricRegistryConfiguration.fromConfiguration(
configuration,
rpcSystem.getMaximumMessageSizeInBytes(configuration)),
ReporterSetupBuilder.METRIC_SETUP_BUILDER.fromConfiguration(
configuration,
DefaultReporterFilters::metricsFromConfiguration,
pluginManager),
ReporterSetupBuilder.TRACE_SETUP_BUILDER.fromConfiguration(
configuration,
DefaultReporterFilters::tracesFromConfiguration,
pluginManager),
ReporterSetupBuilder.EVENT_SETUP_BUILDER.fromConfiguration(
configuration,
DefaultReporterFilters::eventsFromConfiguration,
pluginManager));
final RpcService metricQueryServiceRpcService =
MetricUtils.startRemoteMetricsRpcService(
configuration,
rpcService.getAddress(),
configuration.get(TaskManagerOptions.BIND_HOST),
rpcSystem);
metricRegistry.startQueryService(metricQueryServiceRpcService, resourceId.unwrap());
blobCacheService =
BlobUtils.createBlobCacheService(
configuration,
Reference.borrowed(workingDirectory.unwrap().getBlobStorageDirectory()),
highAvailabilityServices.createBlobStore(),
null);
final ExternalResourceInfoProvider externalResourceInfoProvider =
ExternalResourceUtils.createStaticExternalResourceInfoProviderFromConfig(
configuration, pluginManager);
final DelegationTokenReceiverRepository delegationTokenReceiverRepository =
new DelegationTokenReceiverRepository(configuration, pluginManager);
taskExecutorService =
taskExecutorServiceFactory.createTaskExecutor(
this.configuration,
this.resourceId.unwrap(),
rpcService,
highAvailabilityServices,
heartbeatServices,
metricRegistry,
blobCacheService,
false,
externalResourceInfoProvider,
workingDirectory.unwrap(),
this,
delegationTokenReceiverRepository);
handleUnexpectedTaskExecutorServiceTermination();
MemoryLogger.startIfConfigured(
LOG, configuration, terminationFuture.thenAccept(ignored -> {}));
}
}
@GuardedBy("lock")
private void handleUnexpectedTaskExecutorServiceTermination() {
taskExecutorService
.getTerminationFuture()
.whenComplete(
(unused, throwable) -> {
synchronized (lock) {
if (!shutdown) {
onFatalError(
new FlinkException(
"Unexpected termination of the TaskExecutor.",
throwable));
}
}
});
}
// --------------------------------------------------------------------------------------------
// Lifecycle management
// --------------------------------------------------------------------------------------------
public void start() throws Exception {
synchronized (lock) {
startTaskManagerRunnerServices();
taskExecutorService.start();
}
}
public void close() throws Exception {
try {
closeAsync().get();
} catch (ExecutionException e) {
ExceptionUtils.rethrowException(ExceptionUtils.stripExecutionException(e));
}
}
public CompletableFuture<Result> closeAsync() {
return closeAsync(Result.SUCCESS);
}
private CompletableFuture<Result> closeAsync(Result terminationResult) {
synchronized (lock) {
// remove shutdown hook to prevent resource leaks
ShutdownHookUtil.removeShutdownHook(shutdownHook, this.getClass().getSimpleName(), LOG);
if (shutdown) {
return terminationFuture;
}
final CompletableFuture<Void> taskManagerTerminationFuture;
if (taskExecutorService != null) {
taskManagerTerminationFuture = taskExecutorService.closeAsync();
} else {
taskManagerTerminationFuture = FutureUtils.completedVoidFuture();
}
final CompletableFuture<Void> serviceTerminationFuture =
FutureUtils.composeAfterwards(
taskManagerTerminationFuture, this::shutDownServices);
final CompletableFuture<Void> workingDirCleanupFuture =
FutureUtils.runAfterwards(
serviceTerminationFuture, () -> deleteWorkingDir(terminationResult));
final CompletableFuture<Void> rpcSystemClassLoaderCloseFuture;
if (rpcSystem != null) {
rpcSystemClassLoaderCloseFuture =
FutureUtils.runAfterwards(workingDirCleanupFuture, rpcSystem::close);
} else {
rpcSystemClassLoaderCloseFuture = FutureUtils.completedVoidFuture();
}
rpcSystemClassLoaderCloseFuture.whenComplete(
(Void ignored, Throwable throwable) -> {
if (throwable != null) {
terminationFuture.completeExceptionally(throwable);
} else {
terminationFuture.complete(terminationResult);
}
});
shutdown = true;
return terminationFuture;
}
}
private void deleteWorkingDir(Result terminationResult) throws IOException {
synchronized (lock) {
if (workingDirectory != null) {
if (!workingDirectory.isDeterministic() || terminationResult == Result.SUCCESS) {
workingDirectory.unwrap().delete();
}
}
}
}
private CompletableFuture<Void> shutDownServices() {
synchronized (lock) {
Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(3);
Exception exception = null;
try {
JMXService.stopInstance();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
if (blobCacheService != null) {
try {
blobCacheService.close();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
}
if (metricRegistry != null) {
try {
terminationFutures.add(metricRegistry.closeAsync());
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
}
if (highAvailabilityServices != null) {
try {
highAvailabilityServices.close();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
}
if (rpcService != null) {
terminationFutures.add(rpcService.closeAsync());
}
if (executor != null) {
terminationFutures.add(
ExecutorUtils.nonBlockingShutdown(
timeout.toMillis(), TimeUnit.MILLISECONDS, executor));
}
if (exception != null) {
terminationFutures.add(FutureUtils.completedExceptionally(exception));
}
return FutureUtils.completeAll(terminationFutures);
}
}
// export the termination future for caller to know it is terminated
public CompletableFuture<Result> getTerminationFuture() {
return terminationFuture;
}
// --------------------------------------------------------------------------------------------
// FatalErrorHandler methods
// --------------------------------------------------------------------------------------------
@Override
public void onFatalError(Throwable exception) {
TaskManagerExceptionUtils.tryEnrichTaskManagerError(exception);
LOG.error(
"Fatal error occurred while executing the TaskManager. Shutting it down...",
exception);
if (ExceptionUtils.isJvmFatalOrOutOfMemoryError(exception)) {
terminateJVM();
} else {
closeAsync(Result.FAILURE);
FutureUtils.orTimeout(
terminationFuture,
FATAL_ERROR_SHUTDOWN_TIMEOUT_MS,
TimeUnit.MILLISECONDS,
String.format(
"Waiting for TaskManager shutting down timed out after %s ms.",
FATAL_ERROR_SHUTDOWN_TIMEOUT_MS));
}
}
private void terminateJVM() {
FlinkSecurityManager.forceProcessExit(FAILURE_EXIT_CODE);
}
// --------------------------------------------------------------------------------------------
// Static entry point
// --------------------------------------------------------------------------------------------
public static void main(String[] args) throws Exception {
// startup checks and logging
EnvironmentInformation.logEnvironmentInfo(LOG, "TaskManager", args);
SignalHandler.register(LOG);
JvmShutdownSafeguard.installAsShutdownHook(LOG);
long maxOpenFileHandles = EnvironmentInformation.getOpenFileHandlesLimit();
if (maxOpenFileHandles != -1L) {
LOG.info("Maximum number of open file descriptors is {}.", maxOpenFileHandles);
} else {
LOG.info("Cannot determine the maximum number of open file descriptors");
}
runTaskManagerProcessSecurely(args);
}
public static Configuration loadConfiguration(String[] args) throws FlinkParseException {
return ConfigurationParserUtils.loadCommonConfiguration(
args, TaskManagerRunner.class.getSimpleName());
}
public static int runTaskManager(Configuration configuration, PluginManager pluginManager)
throws Exception {
final TaskManagerRunner taskManagerRunner;
try {
taskManagerRunner =
new TaskManagerRunner(
configuration,
pluginManager,
TaskManagerRunner::createTaskExecutorService);
taskManagerRunner.start();
} catch (Exception exception) {
throw new FlinkException("Failed to start the TaskManagerRunner.", exception);
}
try {
return taskManagerRunner.getTerminationFuture().get().getExitCode();
} catch (Throwable t) {
throw new FlinkException(
"Unexpected failure during runtime of TaskManagerRunner.",
ExceptionUtils.stripExecutionException(t));
}
}
public static void runTaskManagerProcessSecurely(String[] args) {
Configuration configuration = null;
try {
configuration = loadConfiguration(args);
} catch (FlinkParseException fpe) {
LOG.error("Could not load the configuration.", fpe);
System.exit(FAILURE_EXIT_CODE);
}
runTaskManagerProcessSecurely(checkNotNull(configuration));
}
public static void runTaskManagerProcessSecurely(Configuration configuration) {
FlinkSecurityManager.setFromConfiguration(configuration);
final PluginManager pluginManager =
PluginUtils.createPluginManagerFromRootFolder(configuration);
FileSystem.initialize(configuration, pluginManager);
StateChangelogStorageLoader.initialize(pluginManager);
int exitCode;
Throwable throwable = null;
ClusterEntrypointUtils.configureUncaughtExceptionHandler(configuration);
try {
SecurityUtils.install(new SecurityConfiguration(configuration));
exitCode =
SecurityUtils.getInstalledContext()
.runSecured(() -> runTaskManager(configuration, pluginManager));
} catch (Throwable t) {
throwable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
exitCode = FAILURE_EXIT_CODE;
}
if (throwable != null) {
LOG.error("Terminating TaskManagerRunner with exit code {}.", exitCode, throwable);
} else {
LOG.info("Terminating TaskManagerRunner with exit code {}.", exitCode);
}
System.exit(exitCode);
}
// --------------------------------------------------------------------------------------------
// Static utilities
// --------------------------------------------------------------------------------------------
public static TaskExecutorService createTaskExecutorService(
Configuration configuration,
ResourceID resourceID,
RpcService rpcService,
HighAvailabilityServices highAvailabilityServices,
HeartbeatServices heartbeatServices,
MetricRegistry metricRegistry,
BlobCacheService blobCacheService,
boolean localCommunicationOnly,
ExternalResourceInfoProvider externalResourceInfoProvider,
WorkingDirectory workingDirectory,
FatalErrorHandler fatalErrorHandler,
DelegationTokenReceiverRepository delegationTokenReceiverRepository)
throws Exception {
final TaskExecutor taskExecutor =
startTaskManager(
configuration,
resourceID,
rpcService,
highAvailabilityServices,
heartbeatServices,
metricRegistry,
blobCacheService,
localCommunicationOnly,
externalResourceInfoProvider,
workingDirectory,
fatalErrorHandler,
delegationTokenReceiverRepository);
return TaskExecutorToServiceAdapter.createFor(taskExecutor);
}
public static TaskExecutor startTaskManager(
Configuration configuration,
ResourceID resourceID,
RpcService rpcService,
HighAvailabilityServices highAvailabilityServices,
HeartbeatServices heartbeatServices,
MetricRegistry metricRegistry,
TaskExecutorBlobService taskExecutorBlobService,
boolean localCommunicationOnly,
ExternalResourceInfoProvider externalResourceInfoProvider,
WorkingDirectory workingDirectory,
FatalErrorHandler fatalErrorHandler,
DelegationTokenReceiverRepository delegationTokenReceiverRepository)
throws Exception {
checkNotNull(configuration);
checkNotNull(resourceID);
checkNotNull(rpcService);
checkNotNull(highAvailabilityServices);
LOG.info("Starting TaskManager with ResourceID: {}", resourceID.getStringWithMetadata());
SystemOutRedirectionUtils.redirectSystemOutAndError(configuration);
String externalAddress = rpcService.getAddress();
final TaskExecutorResourceSpec taskExecutorResourceSpec =
TaskExecutorResourceUtils.resourceSpecFromConfig(configuration);
TaskManagerServicesConfiguration taskManagerServicesConfiguration =
TaskManagerServicesConfiguration.fromConfiguration(
configuration,
resourceID,
externalAddress,
localCommunicationOnly,
taskExecutorResourceSpec,
workingDirectory);
Tuple2<TaskManagerMetricGroup, MetricGroup> taskManagerMetricGroup =
MetricUtils.instantiateTaskManagerMetricGroup(
metricRegistry,
externalAddress,
resourceID,
taskManagerServicesConfiguration.getSystemResourceMetricsProbingInterval());
final ExecutorService ioExecutor =
Executors.newFixedThreadPool(
taskManagerServicesConfiguration.getNumIoThreads(),
new ExecutorThreadFactory("flink-taskexecutor-io"));
TaskManagerServices taskManagerServices =
TaskManagerServices.fromConfiguration(
taskManagerServicesConfiguration,
taskExecutorBlobService.getPermanentBlobService(),
taskManagerMetricGroup.f1,
ioExecutor,
rpcService.getScheduledExecutor(),
fatalErrorHandler,
workingDirectory);
MetricUtils.instantiateFlinkMemoryMetricGroup(
taskManagerMetricGroup.f1,
taskManagerServices.getTaskSlotTable(),
taskManagerServices::getManagedMemorySize);
TaskManagerConfiguration taskManagerConfiguration =
TaskManagerConfiguration.fromConfiguration(
configuration,
taskExecutorResourceSpec,
externalAddress,
workingDirectory.getTmpDirectory());
String metricQueryServiceAddress = metricRegistry.getMetricQueryServiceGatewayRpcAddress();
return new TaskExecutor(
rpcService,
taskManagerConfiguration,
highAvailabilityServices,
taskManagerServices,
externalResourceInfoProvider,
heartbeatServices,
taskManagerMetricGroup.f0,
metricQueryServiceAddress,
taskExecutorBlobService,
fatalErrorHandler,
new TaskExecutorPartitionTrackerImpl(taskManagerServices.getShuffleEnvironment()),
delegationTokenReceiverRepository);
}
/**
* Create a RPC service for the task manager.
*
* @param configuration The configuration for the TaskManager.
* @param haServices to use for the task manager hostname retrieval
*/
@VisibleForTesting
static RpcService createRpcService(
final Configuration configuration,
final HighAvailabilityServices haServices,
final RpcSystem rpcSystem)
throws Exception {
checkNotNull(configuration);
checkNotNull(haServices);
return RpcUtils.createRemoteRpcService(
rpcSystem,
configuration,
determineTaskManagerBindAddress(configuration, haServices, rpcSystem),
configuration.get(TaskManagerOptions.RPC_PORT),
configuration.get(TaskManagerOptions.BIND_HOST),
configuration.getOptional(TaskManagerOptions.RPC_BIND_PORT));
}
private static String determineTaskManagerBindAddress(
final Configuration configuration,
final HighAvailabilityServices haServices,
RpcSystemUtils rpcSystemUtils)
throws Exception {
final String configuredTaskManagerHostname = configuration.get(TaskManagerOptions.HOST);
if (configuredTaskManagerHostname != null) {
LOG.info(
"Using configured hostname/address for TaskManager: {}.",
configuredTaskManagerHostname);
return configuredTaskManagerHostname;
} else {
return determineTaskManagerBindAddressByConnectingToResourceManager(
configuration, haServices, rpcSystemUtils);
}
}
private static String determineTaskManagerBindAddressByConnectingToResourceManager(
final Configuration configuration,
final HighAvailabilityServices haServices,
RpcSystemUtils rpcSystemUtils)
throws LeaderRetrievalException {
final Duration lookupTimeout = configuration.get(RpcOptions.LOOKUP_TIMEOUT_DURATION);
final InetAddress taskManagerAddress =
LeaderRetrievalUtils.findConnectingAddress(
haServices.getResourceManagerLeaderRetriever(),
lookupTimeout,
rpcSystemUtils);
LOG.info(
"TaskManager will use hostname/address '{}' ({}) for communication.",
taskManagerAddress.getHostName(),
taskManagerAddress.getHostAddress());
HostBindPolicy bindPolicy =
HostBindPolicy.fromString(configuration.get(TaskManagerOptions.HOST_BIND_POLICY));
return bindPolicy == HostBindPolicy.IP
? taskManagerAddress.getHostAddress()
: taskManagerAddress.getHostName();
}
@VisibleForTesting
static DeterminismEnvelope<ResourceID> getTaskManagerResourceID(
Configuration config, String rpcAddress, int rpcPort) {
final String metadata =
config.get(TaskManagerOptionsInternal.TASK_MANAGER_RESOURCE_ID_METADATA, "");
return config.getOptional(TaskManagerOptions.TASK_MANAGER_RESOURCE_ID)
.map(
value ->
DeterminismEnvelope.deterministicValue(
new ResourceID(value, metadata)))
.orElseGet(
FunctionUtils.uncheckedSupplier(
() -> {
final String hostName =
InetAddress.getLocalHost().getHostName();
final String value =
StringUtils.isNullOrWhitespaceOnly(rpcAddress)
? hostName
+ "-"
+ new AbstractID()
.toString()
.substring(0, 6)
: rpcAddress
+ ":"
+ rpcPort
+ "-"
+ new AbstractID()
.toString()
.substring(0, 6);
return DeterminismEnvelope.nondeterministicValue(
new ResourceID(value, metadata));
}));
}
/** Factory for {@link TaskExecutor}. */
public
|
TaskManagerRunner
|
java
|
apache__flink
|
flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/ParquetVectorizedInputFormat.java
|
{
"start": 3609,
"end": 14473
}
|
class ____<T, SplitT extends FileSourceSplit>
implements BulkFormat<T, SplitT> {
private static final Logger LOG = LoggerFactory.getLogger(ParquetVectorizedInputFormat.class);
private static final long serialVersionUID = 1L;
protected final SerializableConfiguration hadoopConfig;
private final String[] projectedFields;
private final LogicalType[] projectedTypes;
private final ColumnBatchFactory<SplitT> batchFactory;
private final int batchSize;
protected final boolean isUtcTimestamp;
private final boolean isCaseSensitive;
public ParquetVectorizedInputFormat(
SerializableConfiguration hadoopConfig,
RowType projectedType,
ColumnBatchFactory<SplitT> batchFactory,
int batchSize,
boolean isUtcTimestamp,
boolean isCaseSensitive) {
this.hadoopConfig = hadoopConfig;
this.projectedFields = projectedType.getFieldNames().toArray(new String[0]);
this.projectedTypes = projectedType.getChildren().toArray(new LogicalType[0]);
this.batchFactory = batchFactory;
this.batchSize = batchSize;
this.isUtcTimestamp = isUtcTimestamp;
this.isCaseSensitive = isCaseSensitive;
}
@Override
public ParquetReader createReader(final Configuration config, final SplitT split)
throws IOException {
final Path filePath = split.path();
final long splitOffset = split.offset();
final long splitLength = split.length();
// Using Flink FileSystem instead of Hadoop FileSystem directly, so we can get the hadoop
// config that create inputFile needed from config.yaml
final FileSystem fs = filePath.getFileSystem();
final ParquetInputFile inputFile =
new ParquetInputFile(fs.open(filePath), fs.getFileStatus(filePath).getLen());
// Notice: This filter is RowGroups level, not individual records.
FilterCompat.Filter filter = getFilter(hadoopConfig.conf());
ParquetReadOptions parquetReadOptions =
ParquetReadOptions.builder()
.withRange(splitOffset, splitOffset + splitLength)
.withRecordFilter(filter)
.build();
ParquetFileReader parquetFileReader = ParquetFileReader.open(inputFile, parquetReadOptions);
Set<Integer> unknownFieldsIndices = new HashSet<>();
MessageType fileSchema = parquetFileReader.getFooter().getFileMetaData().getSchema();
// Pruning unnecessary column, we should set the projection schema before running any
// filtering (e.g. getting filtered record count) because projection impacts filtering
MessageType requestedSchema =
clipParquetSchema(fileSchema, unknownFieldsIndices, hadoopConfig.conf());
parquetFileReader.setRequestedSchema(requestedSchema);
checkSchema(fileSchema, requestedSchema);
final long totalRowCount = parquetFileReader.getRecordCount();
final Pool<ParquetReaderBatch<T>> poolOfBatches =
createPoolOfBatches(split, requestedSchema, numBatchesToCirculate(config));
RowType projectedType = RowType.of(projectedTypes, projectedFields);
MessageColumnIO columnIO = new ColumnIOFactory().getColumnIO(requestedSchema);
List<ParquetField> fields =
buildFieldsList(projectedType.getFields(), projectedType.getFieldNames(), columnIO);
return new ParquetReader(
parquetFileReader,
requestedSchema,
unknownFieldsIndices,
totalRowCount,
poolOfBatches,
fields);
}
protected int numBatchesToCirculate(Configuration config) {
return config.get(SourceReaderOptions.ELEMENT_QUEUE_CAPACITY);
}
@Override
public ParquetReader restoreReader(final Configuration config, final SplitT split)
throws IOException {
assert split.getReaderPosition().isPresent();
final CheckpointedPosition checkpointedPosition = split.getReaderPosition().get();
Preconditions.checkArgument(
checkpointedPosition.getOffset() == CheckpointedPosition.NO_OFFSET,
"The offset of CheckpointedPosition should always be NO_OFFSET");
ParquetReader reader = createReader(config, split);
reader.seek(checkpointedPosition.getRecordsAfterOffset());
return reader;
}
@Override
public boolean isSplittable() {
return true;
}
/** Clips `parquetSchema` according to `fieldNames`. */
private MessageType clipParquetSchema(
GroupType parquetSchema,
Collection<Integer> unknownFieldsIndices,
org.apache.hadoop.conf.Configuration config) {
Type[] types = new Type[projectedFields.length];
if (isCaseSensitive) {
for (int i = 0; i < projectedFields.length; ++i) {
String fieldName = projectedFields[i];
if (!parquetSchema.containsField(fieldName)) {
LOG.warn(
"{} does not exist in {}, will fill the field with null.",
fieldName,
parquetSchema);
types[i] =
ParquetSchemaConverter.convertToParquetType(
fieldName, projectedTypes[i], config);
unknownFieldsIndices.add(i);
} else {
types[i] = parquetSchema.getType(fieldName);
}
}
} else {
Map<String, Type> caseInsensitiveFieldMap = new HashMap<>();
for (Type type : parquetSchema.getFields()) {
caseInsensitiveFieldMap.compute(
type.getName().toLowerCase(Locale.ROOT),
(key, previousType) -> {
if (previousType != null) {
throw new FlinkRuntimeException(
"Parquet with case insensitive mode should have no duplicate key: "
+ key);
}
return type;
});
}
for (int i = 0; i < projectedFields.length; ++i) {
Type type =
caseInsensitiveFieldMap.get(projectedFields[i].toLowerCase(Locale.ROOT));
if (type == null) {
LOG.warn(
"{} does not exist in {}, will fill the field with null.",
projectedFields[i],
parquetSchema);
type =
ParquetSchemaConverter.convertToParquetType(
projectedFields[i].toLowerCase(Locale.ROOT),
projectedTypes[i],
config);
unknownFieldsIndices.add(i);
}
// TODO clip for array,map,row types.
types[i] = type;
}
}
return Types.buildMessage().addFields(types).named("flink-parquet");
}
private void checkSchema(MessageType fileSchema, MessageType requestedSchema)
throws IOException, UnsupportedOperationException {
if (projectedFields.length != requestedSchema.getFieldCount()) {
throw new RuntimeException(
"The quality of field type is incompatible with the request schema!");
}
/*
* Check that the requested schema is supported.
*/
for (int i = 0; i < requestedSchema.getFieldCount(); ++i) {
String[] colPath = requestedSchema.getPaths().get(i);
if (fileSchema.containsPath(colPath)) {
ColumnDescriptor fd = fileSchema.getColumnDescription(colPath);
if (!fd.equals(requestedSchema.getColumns().get(i))) {
throw new UnsupportedOperationException("Schema evolution not supported.");
}
} else {
if (requestedSchema.getColumns().get(i).getMaxDefinitionLevel() == 0) {
// Column is missing in data but the required data is non-nullable. This file is
// invalid.
throw new IOException(
"Required column is missing in data file. Col: "
+ Arrays.toString(colPath));
}
}
}
}
private Pool<ParquetReaderBatch<T>> createPoolOfBatches(
SplitT split, MessageType requestedSchema, int numBatches) {
final Pool<ParquetReaderBatch<T>> pool = new Pool<>(numBatches);
for (int i = 0; i < numBatches; i++) {
pool.add(createReaderBatch(split, requestedSchema, pool.recycler()));
}
return pool;
}
private ParquetReaderBatch<T> createReaderBatch(
SplitT split,
MessageType requestedSchema,
Pool.Recycler<ParquetReaderBatch<T>> recycler) {
WritableColumnVector[] writableVectors = createWritableVectors(requestedSchema);
VectorizedColumnBatch columnarBatch =
batchFactory.create(split, createReadableVectors(writableVectors));
return createReaderBatch(writableVectors, columnarBatch, recycler);
}
private WritableColumnVector[] createWritableVectors(MessageType requestedSchema) {
WritableColumnVector[] columns = new WritableColumnVector[projectedTypes.length];
List<Type> types = requestedSchema.getFields();
for (int i = 0; i < projectedTypes.length; i++) {
columns[i] =
createWritableColumnVector(
batchSize,
projectedTypes[i],
types.get(i),
requestedSchema.getColumns(),
0);
}
return columns;
}
/**
* Create readable vectors from writable vectors. Especially for decimal, see {@link
* ParquetDecimalVector}.
*/
private ColumnVector[] createReadableVectors(WritableColumnVector[] writableVectors) {
ColumnVector[] vectors = new ColumnVector[writableVectors.length];
for (int i = 0; i < writableVectors.length; i++) {
vectors[i] =
projectedTypes[i].getTypeRoot() == LogicalTypeRoot.DECIMAL
? new ParquetDecimalVector(writableVectors[i])
: writableVectors[i];
}
return vectors;
}
private
|
ParquetVectorizedInputFormat
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/validation/MultibindsMethodValidator.java
|
{
"start": 2633,
"end": 4922
}
|
class ____ extends MethodValidator {
private final XMethodElement method;
Validator(XMethodElement method) {
super(method);
this.method = method;
}
@Override
protected void checkParameters() {
if (!method.getParameters().isEmpty()) {
report.addError(bindingMethods("cannot have parameters"));
}
}
/** Adds an error unless the method returns a {@code Map<K, V>} or {@code Set<T>}. */
@Override
protected void checkType() {
if (MapType.isMap(method.getReturnType())) {
checkMapType(MapType.from(method.getReturnType()));
} else if (SetType.isSet(method.getReturnType())) {
checkSetType(SetType.from(method.getReturnType()));
} else {
report.addError(bindingMethods("return type must be either a Set or Map type."));
}
}
private void checkMapType(MapType mapType) {
if (mapType.isRawType()) {
report.addError(bindingMethods("return type cannot be a raw Map type"));
} else if (isWildcard(mapType.keyType())) {
report.addError(
bindingMethods("return type cannot use a wildcard as the Map key type."));
} else if (isWildcard(mapType.valueType())) {
report.addError(
bindingMethods("return type cannot use a wildcard as the Map value type."));
} else if (isMapValueFrameworkType(mapType.valueType())) {
String frameworkTypeName = getSimpleName(mapType.valueType().getTypeElement());
report.addError(
bindingMethods(
"return type cannot use '%s' in the Map value type.", frameworkTypeName));
}
}
private void checkSetType(SetType setType) {
if (setType.isRawType()) {
report.addError(bindingMethods("return type cannot be a raw Set type"));
} else if (isWildcard(setType.elementType())) {
report.addError(bindingMethods("return type cannot use a wildcard as the Set value type."));
} else if (isSetValueFrameworkType(setType.elementType())) {
String frameworkTypeName = getSimpleName(setType.elementType().getTypeElement());
report.addError(
bindingMethods(
"return type cannot use '%s' in the Set value type.", frameworkTypeName));
}
}
}
}
|
Validator
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/server/sse/Headline.java
|
{
"start": 711,
"end": 1270
}
|
class ____ {
private String title;
private String description;
public Headline() {}
public Headline(String title, String description) {
this.title = title;
this.description = description;
}
public String getTitle() {
return title;
}
public String getDescription() {
return description;
}
public void setTitle(String title) {
this.title = title;
}
public void setDescription(String description) {
this.description = description;
}
}
// end::class[]
|
Headline
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDisableConnCache.java
|
{
"start": 1203,
"end": 1296
}
|
class ____ disabling client connection caching in a single node
* mini-cluster.
*/
public
|
tests
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/SQLReplaceable.java
|
{
"start": 704,
"end": 784
}
|
interface ____ {
boolean replace(SQLExpr expr, SQLExpr target);
}
|
SQLReplaceable
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/AnnotationConfigApplicationContextTests.java
|
{
"start": 27994,
"end": 28119
}
|
class ____ {
BeanB b;
BeanC c;
@Autowired
BeanA(BeanB b, BeanC c) {
this.b = b;
this.c = c;
}
}
static
|
BeanA
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java
|
{
"start": 24739,
"end": 40532
}
|
enum ____ {
CORRUPTED_FILE,
TRUNCATED_FILE,
LARGER_THAN_EXPECTED_FILE,
FETCH_ERROR
}
public void testSnapshotFileIsDeletedAfterFailure() throws Exception {
DiscoveryNode pNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
DiscoveryNode rNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
IndexShard shard = newShard(false);
shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
shard.prepareForIndexRecovery();
RecoveryState.Index recoveryStateIndex = shard.recoveryState().getIndex();
Directory directory = shard.store().directory();
String[] filesBeforeRestoringSnapshotFile = directory.listAll();
String fileName = randomAlphaOfLength(10);
Tuple<StoreFileMetadata, byte[]> storeFileMetadataAndData = createStoreFileMetadataWithRandomContent(fileName);
StoreFileMetadata storeFileMetadata = storeFileMetadataAndData.v1();
byte[] fileData = storeFileMetadataAndData.v2();
final DownloadFileErrorType downloadFileErrorType = randomFrom(DownloadFileErrorType.values());
SnapshotFilesProvider snapshotFilesProvider = new SnapshotFilesProvider(mock(RepositoriesService.class)) {
@Override
public InputStream getInputStreamForSnapshotFile(
String requestedRepositoryName,
IndexId requestedIndexId,
ShardId requestedShardId,
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo,
LongConsumer rateLimiterListener
) {
return getFaultyInputStream(downloadFileErrorType, fileData);
}
@Override
public int getReadSnapshotFileBufferSizeForRepo(String repository) {
return (int) ByteSizeValue.of(128, ByteSizeUnit.KB).getBytes();
}
};
recoveryStateIndex.addFileDetail(storeFileMetadata.name(), storeFileMetadata.length(), false);
recoveryStateIndex.setFileDetailsComplete();
RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, 0L, snapshotFilesProvider, () -> {}, null);
String repositoryName = "repo";
IndexId indexId = new IndexId("index", "uuid");
BlobStoreIndexShardSnapshot.FileInfo fileInfo = new BlobStoreIndexShardSnapshot.FileInfo(
"name",
storeFileMetadata,
SNAPSHOT_FILE_PART_SIZE
);
PlainActionFuture<Void> writeSnapshotFileFuture = new PlainActionFuture<>();
recoveryTarget.restoreFileFromSnapshot(repositoryName, indexId, fileInfo, writeSnapshotFileFuture);
ExecutionException executionException = expectThrows(ExecutionException.class, writeSnapshotFileFuture::get);
Throwable downloadFileError = executionException.getCause();
switch (downloadFileErrorType) {
case CORRUPTED_FILE, LARGER_THAN_EXPECTED_FILE ->
// Files larger than expected are caught by VerifyingIndexInput too
assertThat(downloadFileError, is(instanceOf(CorruptIndexException.class)));
case TRUNCATED_FILE -> assertThat(downloadFileError, is(instanceOf(EOFException.class)));
case FETCH_ERROR -> assertThat(downloadFileError, is(instanceOf(RuntimeException.class)));
default -> throw new IllegalStateException("Unexpected value: " + downloadFileErrorType);
}
assertThat(filesBeforeRestoringSnapshotFile, equalTo(directory.listAll()));
RecoveryState.FileDetail fileDetails = recoveryStateIndex.getFileDetails(storeFileMetadata.name());
assertThat(fileDetails.recovered(), equalTo(0L));
// Subsequent writes on the same file can proceed without issues
PlainActionFuture<Void> writeChunkFuture = new PlainActionFuture<>();
ReleasableBytesReference bytesRef = ReleasableBytesReference.wrap(new BytesArray(fileData));
recoveryTarget.writeFileChunk(storeFileMetadata, 0, bytesRef, true, 0, writeChunkFuture);
writeChunkFuture.get();
assertThat(fileDetails.recovered(), equalTo(storeFileMetadata.length()));
recoveryTarget.decRef();
closeShards(shard);
}
public void testReceiveFileInfoDeletesRecoveredFiles() throws Exception {
DiscoveryNode pNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
DiscoveryNode rNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
IndexShard shard = newShard(false);
shard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.PeerRecoverySource.INSTANCE));
shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
shard.prepareForIndexRecovery();
RecoveryState.Index recoveryStateIndex = shard.recoveryState().getIndex();
Directory directory = shard.store().directory();
String repositoryName = "repo";
IndexId indexId = new IndexId("index", "uuid");
ShardId shardId = shard.shardId();
Map<BlobStoreIndexShardSnapshot.FileInfo, byte[]> snapshotFiles = new IdentityHashMap<>();
for (int i = 0; i < randomIntBetween(5, 10); i++) {
String fileName = randomAlphaOfLength(10);
Tuple<StoreFileMetadata, byte[]> storeFileMetadataAndData = createStoreFileMetadataWithRandomContent(fileName);
StoreFileMetadata storeFileMetadata = storeFileMetadataAndData.v1();
byte[] fileData = storeFileMetadataAndData.v2();
recoveryStateIndex.addFileDetail(storeFileMetadata.name(), storeFileMetadata.length(), false);
BlobStoreIndexShardSnapshot.FileInfo fileInfo = new BlobStoreIndexShardSnapshot.FileInfo(
"name",
storeFileMetadata,
SNAPSHOT_FILE_PART_SIZE
);
snapshotFiles.put(fileInfo, fileData);
}
recoveryStateIndex.setFileDetailsComplete();
BlobStoreIndexShardSnapshot.FileInfo failingDownloadFile = randomFrom(snapshotFiles.keySet());
SnapshotFilesProvider snapshotFilesProvider = new SnapshotFilesProvider(mock(RepositoriesService.class)) {
@Override
public InputStream getInputStreamForSnapshotFile(
String requestedRepositoryName,
IndexId requestedIndexId,
ShardId requestedShardId,
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo,
LongConsumer rateLimiterListener
) {
assertThat(requestedRepositoryName, equalTo(repositoryName));
assertThat(requestedIndexId, equalTo(indexId));
assertThat(requestedShardId, equalTo(shardId));
byte[] fileData = snapshotFiles.get(snapshotFileInfo);
assertThat(fileData, is(notNullValue()));
if (snapshotFileInfo.isSame(failingDownloadFile)) {
return getFaultyInputStream(randomFrom(DownloadFileErrorType.values()), fileData);
}
return new ByteArrayInputStream(fileData);
}
@Override
public int getReadSnapshotFileBufferSizeForRepo(String repository) {
return (int) ByteSizeValue.of(128, ByteSizeUnit.KB).getBytes();
}
};
RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, 0L, snapshotFilesProvider, () -> {}, null);
String[] fileNamesBeforeRecoveringSnapshotFiles = directory.listAll();
for (Map.Entry<BlobStoreIndexShardSnapshot.FileInfo, byte[]> fileInfoEntry : snapshotFiles.entrySet()) {
BlobStoreIndexShardSnapshot.FileInfo fileInfo = fileInfoEntry.getKey();
PlainActionFuture<Void> writeSnapshotFileFuture = new PlainActionFuture<>();
recoveryTarget.restoreFileFromSnapshot(repositoryName, indexId, fileInfo, writeSnapshotFileFuture);
// Simulate error, that stops downloading snapshot files
if (fileInfo.isSame(failingDownloadFile)) {
expectThrows(Exception.class, writeSnapshotFileFuture::get);
break;
}
writeSnapshotFileFuture.get();
}
PlainActionFuture<Void> future = new PlainActionFuture<>();
recoveryTarget.receiveFileInfo(emptyList(), emptyList(), emptyList(), emptyList(), 0, future);
future.get();
assertThat(fileNamesBeforeRecoveringSnapshotFiles, is(equalTo(directory.listAll())));
recoveryTarget.decRef();
closeShards(shard);
}
public void testSnapshotFileAreDeletedAfterCancel() throws Exception {
DiscoveryNode pNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
DiscoveryNode rNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
IndexShard shard = newShard(false);
shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
shard.prepareForIndexRecovery();
RecoveryState.Index recoveryStateIndex = shard.recoveryState().getIndex();
Directory directory = shard.store().directory();
String[] filesBeforeRestoringSnapshotFile = directory.listAll();
String fileName = randomAlphaOfLength(10);
Tuple<StoreFileMetadata, byte[]> storeFileMetadataAndData = createStoreFileMetadataWithRandomContent(fileName);
StoreFileMetadata storeFileMetadata = storeFileMetadataAndData.v1();
byte[] fileData = storeFileMetadataAndData.v2();
SnapshotFilesProvider snapshotFilesProvider = new SnapshotFilesProvider(mock(RepositoriesService.class)) {
@Override
public InputStream getInputStreamForSnapshotFile(
String requestedRepositoryName,
IndexId requestedIndexId,
ShardId requestedShardId,
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo,
LongConsumer rateLimiterListener
) {
return new ByteArrayInputStream(fileData);
}
@Override
public int getReadSnapshotFileBufferSizeForRepo(String repository) {
return (int) ByteSizeValue.of(128, ByteSizeUnit.KB).getBytes();
}
};
recoveryStateIndex.addFileDetail(storeFileMetadata.name(), storeFileMetadata.length(), false);
recoveryStateIndex.setFileDetailsComplete();
RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, 0L, snapshotFilesProvider, () -> {}, null);
String repository = "repo";
IndexId indexId = new IndexId("index", "uuid");
BlobStoreIndexShardSnapshot.FileInfo fileInfo = new BlobStoreIndexShardSnapshot.FileInfo(
"name",
storeFileMetadata,
ByteSizeValue.of(Long.MAX_VALUE, ByteSizeUnit.BYTES)
);
recoveryTarget.incRef();
PlainActionFuture<Void> writeSnapshotFileFuture = new PlainActionFuture<>();
recoveryTarget.restoreFileFromSnapshot(repository, indexId, fileInfo, writeSnapshotFileFuture);
writeSnapshotFileFuture.get();
RecoveryState.FileDetail fileDetails = recoveryStateIndex.getFileDetails(storeFileMetadata.name());
assertThat(fileDetails.recovered(), equalTo(storeFileMetadata.length()));
final String[] filesBeforeCancellingRecovery = directory.listAll();
recoveryTarget.cancel("This is a test");
final String[] filesAfterCancellingRecoveryWithOneOutstandingReference = directory.listAll();
// Since there's still one outstanding reference the snapshot file is kept around
assertThat(filesBeforeCancellingRecovery, equalTo(filesAfterCancellingRecoveryWithOneOutstandingReference));
recoveryTarget.decRef();
// Once the reference is released, the tmp file should be deleted
assertThat(filesBeforeRestoringSnapshotFile, equalTo(directory.listAll()));
closeShards(shard);
}
public void testSnapshotFileDownloadPermitIsReleasedAfterClosingRecoveryTarget() throws Exception {
DiscoveryNode pNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
DiscoveryNode rNode = DiscoveryNodeUtils.builder("foo").roles(Collections.emptySet()).build();
IndexShard shard = newShard(false);
shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
shard.prepareForIndexRecovery();
AtomicBoolean snapshotFileDownloadsPermitFlag = new AtomicBoolean();
Releasable snapshotFileDownloadsPermit = () -> {
assertThat(snapshotFileDownloadsPermitFlag.compareAndSet(false, true), is(equalTo(true)));
};
RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, 0L, null, snapshotFileDownloadsPermit, null);
recoveryTarget.decRef();
assertThat(snapshotFileDownloadsPermitFlag.get(), is(equalTo(true)));
closeShards(shard);
}
private Tuple<StoreFileMetadata, byte[]> createStoreFileMetadataWithRandomContent(String fileName) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput("test", "file", out, 1024)) {
byte[] buffer = randomByteArrayOfLength(1024);
indexOutput.writeBytes(buffer, buffer.length);
CodecUtil.writeFooter(indexOutput);
}
byte[] luceneEncodedFileBytes = out.toByteArray();
long checksum = CodecUtil.retrieveChecksum(new ByteArrayIndexInput("test", luceneEncodedFileBytes));
String encodedChecksum = Store.digestToString(checksum);
String writtenBy = org.apache.lucene.util.Version.LATEST.toString();
return Tuple.tuple(
new StoreFileMetadata(fileName, luceneEncodedFileBytes.length, encodedChecksum, writtenBy),
luceneEncodedFileBytes
);
}
private InputStream getFaultyInputStream(DownloadFileErrorType downloadFileErrorType, byte[] fileData) {
switch (downloadFileErrorType) {
case CORRUPTED_FILE -> {
byte[] fileDataCopy = new byte[fileData.length];
System.arraycopy(fileData, 0, fileDataCopy, 0, fileData.length);
// Corrupt the file
for (int i = 0; i < randomIntBetween(1, fileDataCopy.length); i++) {
fileDataCopy[i] ^= (byte) 0xFF;
}
return new ByteArrayInputStream(fileDataCopy);
}
case TRUNCATED_FILE -> {
final int truncatedFileLength = fileData.length / 2;
byte[] truncatedCopy = new byte[truncatedFileLength];
System.arraycopy(fileData, 0, truncatedCopy, 0, truncatedFileLength);
return new ByteArrayInputStream(truncatedCopy);
}
case LARGER_THAN_EXPECTED_FILE -> {
byte[] largerData = new byte[fileData.length + randomIntBetween(1, 250)];
System.arraycopy(fileData, 0, largerData, 0, fileData.length);
for (int i = fileData.length; i < largerData.length; i++) {
largerData[i] = randomByte();
}
return new ByteArrayInputStream(largerData);
}
case FETCH_ERROR -> throw new RuntimeException("Unexpected error");
default -> throw new IllegalStateException("Unexpected value: " + downloadFileErrorType);
}
}
}
|
DownloadFileErrorType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/version/TablePerClassAbstractRootTypeVersionUpdateTest.java
|
{
"start": 2604,
"end": 3014
}
|
class ____ {
@Id
private Long id;
private String name;
@Version
private Long version;
public Animal() {
}
public Animal(Long id, String name) {
this.id = id;
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Long getVersion() {
return version;
}
}
@Entity( name = "Dog" )
static
|
Animal
|
java
|
netty__netty
|
transport-native-kqueue/src/test/java/io/netty/channel/kqueue/KQueueSocketEchoTest.java
|
{
"start": 899,
"end": 1144
}
|
class ____ extends SocketEchoTest {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return KQueueSocketTestPermutation.INSTANCE.socket();
}
}
|
KQueueSocketEchoTest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/GenericTypeSerializerSnapshot.java
|
{
"start": 2368,
"end": 2834
}
|
class ____ not be NULL");
out.writeUTF(typeClass.getName());
}
@Override
public final void readSnapshot(
int readVersion, DataInputView in, ClassLoader userCodeClassLoader) throws IOException {
typeClass = InstantiationUtil.resolveClassByName(in, userCodeClassLoader);
}
@Override
@SuppressWarnings("unchecked")
public final TypeSerializer<T> restoreSerializer() {
checkState(typeClass != null, "type
|
can
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/list/KeyValue.java
|
{
"start": 77,
"end": 1043
}
|
class ____<K, V> {
public final K key;
public final V value;
public KeyValue(K key, V value) {
this.key = key;
this.value = value;
}
public static <K, V> KeyValue<K, V> of(K key, V value) {
return new KeyValue<>(key, value);
}
public K key() {
return key;
}
public V value() {
return value;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
KeyValue<?, ?> keyValue = (KeyValue<?, ?>) o;
return key.equals(keyValue.key) && Objects.equals(value, keyValue.value);
}
@Override
public int hashCode() {
return Objects.hash(key, value);
}
@Override
public String toString() {
return "KeyValue{" +
"key=" + key +
", value=" + value +
'}';
}
}
|
KeyValue
|
java
|
processing__processing4
|
java/src/processing/mode/java/preproc/PdeIssueEmitter.java
|
{
"start": 4578,
"end": 7175
}
|
class ____ {
private final String message;
private final boolean attributeToPriorToken;
/**
* Create a new issue message simplification.
*
* <p>
* Create a new issue message simplification that leaves the token attribution alone (the token
* on which the error was reported will be the same before error message simplification).
* </p>
*
* @param newMessage The message to show to the user.
*/
public IssueMessageSimplification(String newMessage) {
message = newMessage;
attributeToPriorToken = false;
}
/**
* Create a new issue message simplification.
*
* <p>
* Create a new issue message simplification. Note that there is an option to have the error
* attributed to the "prior token". This is helpful, for example, when a semicolon is missing.
* The error is generated on the token after the line on which the semicolon was omitted so,
* while the error technically emerges on the next line, it is better for the user for it to
* appear earlier. Specifically, it is most sensible for it to appear on the "prior token".
* </p>
*
* @param newMessage The message to show to the user.
* @param newAttributeToPriorToken Boolean flag indicating if the error should be shown on the
* token prior to the one on which the error was originally generated. True if the error should
* be attributed to the prior token. False otherwise.
*/
public IssueMessageSimplification(String newMessage, boolean newAttributeToPriorToken) {
message = newMessage;
attributeToPriorToken = newAttributeToPriorToken;
}
/**
* Get the error message text that should be shown to the user.
*
* @return The error message text that should be shown to the user.
*/
public String getMessage() {
return message;
}
/**
* Flag indicating if the error should be attributed to the prior token.
*
* @return True if the error should be attributed to the prior non-skip token (not whitepsace or
* comment). This is useful when a mistake on a prior line like omitted semicolon causes an
* error on a later line but one wants error highlighting closer to the mistake itself. False
* if the error should be attributed to the original offending token.
*/
public boolean getAttributeToPriorToken() {
return attributeToPriorToken;
}
}
/**
* Data structure describing where an issue occurred.
*/
public static
|
IssueMessageSimplification
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/accessibility/AccessibilityTest.java
|
{
"start": 758,
"end": 2286
}
|
class ____ {
@ProcessorTest
@IssueKey("103")
public void testGeneratedModifiersFromAbstractClassAreCorrect() throws Exception {
Class<?> defaultFromAbstract = loadForMapper( DefaultSourceTargetMapperAbstr.class );
assertTrue( isDefault( defaultFromAbstract.getModifiers() ) );
assertTrue( isPublic( modifiersFor( defaultFromAbstract, "publicSourceToTarget" ) ) );
assertTrue( isProtected( modifiersFor( defaultFromAbstract, "protectedSourceToTarget" ) ) );
assertTrue( isDefault( modifiersFor( defaultFromAbstract, "defaultSourceToTarget" ) ) );
}
@ProcessorTest
@IssueKey("103")
public void testGeneratedModifiersFromInterfaceAreCorrect() throws Exception {
Class<?> defaultFromIfc = loadForMapper( DefaultSourceTargetMapperIfc.class );
assertTrue( isDefault( defaultFromIfc.getModifiers() ) );
assertTrue( isPublic( modifiersFor( defaultFromIfc, "implicitlyPublicSourceToTarget" ) ) );
}
private static Class<?> loadForMapper(Class<?> mapper) throws ClassNotFoundException {
return Thread.currentThread().getContextClassLoader().loadClass( mapper.getName() + "Impl" );
}
private int modifiersFor(Class<?> clazz, String method) throws Exception {
return clazz.getDeclaredMethod( method, Source.class ).getModifiers();
}
private static boolean isDefault(int modifiers) {
return !isPublic( modifiers ) && !isProtected( modifiers ) && !isPrivate( modifiers );
}
}
|
AccessibilityTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/PojoProduceProxyInterceptEndpointTest.java
|
{
"start": 1121,
"end": 3168
}
|
class ____ {
@Test
public void testPojoProduceInterceptAlreadyStarted() throws Exception {
CamelContext context = new DefaultCamelContext();
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
interceptSendToEndpoint("direct:start").to("language:simple:${body}${body}");
from("direct:start").to("mock:result");
}
});
// start Camel before POJO being injected
context.start();
// use the injector (will use the default)
// which should post process the bean to inject the @Produce
MyBean bean = context.getInjector().newInstance(MyBean.class);
MockEndpoint mock = context.getEndpoint("mock:result", MockEndpoint.class);
mock.expectedBodiesReceived("WorldWorld");
Object reply = bean.doSomething("World");
assertEquals("WorldWorld", reply);
mock.assertIsSatisfied();
context.stop();
}
@Test
public void testPojoProduceInterceptNotStarted() throws Exception {
CamelContext context = new DefaultCamelContext();
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
interceptSendToEndpoint("direct:start").to("language:simple:${body}${body}");
from("direct:start").to("mock:result");
}
});
// use the injector (will use the default)
// which should post process the bean to inject the @Produce
MyBean bean = context.getInjector().newInstance(MyBean.class);
// do NOT start Camel before POJO being injected
context.start();
MockEndpoint mock = context.getEndpoint("mock:result", MockEndpoint.class);
mock.expectedBodiesReceived("WorldWorld");
Object reply = bean.doSomething("World");
assertEquals("WorldWorld", reply);
mock.assertIsSatisfied();
context.stop();
}
public
|
PojoProduceProxyInterceptEndpointTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java
|
{
"start": 6758,
"end": 7049
}
|
interface ____ extends Builder permits BooleanVectorFixedBuilder {
/**
* Appends a boolean to the current entry.
*/
@Override
FixedBuilder appendBoolean(boolean value);
FixedBuilder appendBoolean(int index, boolean value);
}
}
|
FixedBuilder
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/rsocket/DefaultRSocketRequesterBuilderTests.java
|
{
"start": 8686,
"end": 9534
}
|
class ____ implements DuplexConnection {
private ByteBuf setupFrame;
public ByteBuf setupFrame() {
return this.setupFrame;
}
@Override
public void sendFrame(int i, ByteBuf byteBuf) {
this.setupFrame = this.setupFrame == null ? byteBuf : this.setupFrame;
}
@Override
public void sendErrorAndClose(RSocketErrorException e) {
}
@Override
public Flux<ByteBuf> receive() {
return Flux.empty();
}
@Override
public ByteBufAllocator alloc() {
return ByteBufAllocator.DEFAULT;
}
@Override
public Mono<Void> onClose() {
return Mono.never();
}
@Override
public void dispose() {
}
@Override
public boolean isDisposed() {
return false;
}
@Override
public SocketAddress remoteAddress() {
return InetSocketAddress.createUnresolved("localhost", 9090);
}
}
static
|
MockConnection
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/DefaultLastHttpContent.java
|
{
"start": 1029,
"end": 4839
}
|
class ____ extends DefaultHttpContent implements LastHttpContent {
private final HttpHeaders trailingHeaders;
/**
* Create a new empty, last HTTP content message.
*/
public DefaultLastHttpContent() {
this(Unpooled.buffer(0));
}
/**
* Create a new last HTTP content message with the given contents.
*/
public DefaultLastHttpContent(ByteBuf content) {
this(content, trailersFactory());
}
/**
* Create a new last HTTP content message with the given contents, and optional trailing header validation.
* <p>
* <b>Warning!</b> Setting {@code validateHeaders} to {@code false} will mean that Netty won't
* validate & protect against user-supplied header values that are malicious.
* This can leave your server implementation vulnerable to
* <a href="https://cwe.mitre.org/data/definitions/113.html">
* CWE-113: Improper Neutralization of CRLF Sequences in HTTP Headers ('HTTP Response Splitting')
* </a>.
* When disabling this validation, it is the responsibility of the caller to ensure that the values supplied
* do not contain a non-url-escaped carriage return (CR) and/or line feed (LF) characters.
*
* @deprecated Prefer the {@link #DefaultLastHttpContent(ByteBuf)} constructor instead, to always have header
* validation enabled.
*/
@Deprecated
public DefaultLastHttpContent(ByteBuf content, boolean validateHeaders) {
this(content, trailersFactory().withValidation(validateHeaders));
}
/**
* Create a new last HTTP content message with the given contents, and trailing headers from the given factory.
*/
public DefaultLastHttpContent(ByteBuf content, HttpHeadersFactory trailersFactory) {
super(content);
trailingHeaders = trailersFactory.newHeaders();
}
/**
* Create a new last HTTP content message with the given contents, and trailing headers.
*/
public DefaultLastHttpContent(ByteBuf content, HttpHeaders trailingHeaders) {
super(content);
this.trailingHeaders = checkNotNull(trailingHeaders, "trailingHeaders");
}
@Override
public LastHttpContent copy() {
return replace(content().copy());
}
@Override
public LastHttpContent duplicate() {
return replace(content().duplicate());
}
@Override
public LastHttpContent retainedDuplicate() {
return replace(content().retainedDuplicate());
}
@Override
public LastHttpContent replace(ByteBuf content) {
return new DefaultLastHttpContent(content, trailingHeaders().copy());
}
@Override
public LastHttpContent retain(int increment) {
super.retain(increment);
return this;
}
@Override
public LastHttpContent retain() {
super.retain();
return this;
}
@Override
public LastHttpContent touch() {
super.touch();
return this;
}
@Override
public LastHttpContent touch(Object hint) {
super.touch(hint);
return this;
}
@Override
public HttpHeaders trailingHeaders() {
return trailingHeaders;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(super.toString());
buf.append(StringUtil.NEWLINE);
appendHeaders(buf);
// Remove the last newline.
buf.setLength(buf.length() - StringUtil.NEWLINE.length());
return buf.toString();
}
private void appendHeaders(StringBuilder buf) {
for (Entry<String, String> e : trailingHeaders()) {
buf.append(e.getKey());
buf.append(": ");
buf.append(e.getValue());
buf.append(StringUtil.NEWLINE);
}
}
}
|
DefaultLastHttpContent
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java
|
{
"start": 7529,
"end": 9434
}
|
class ____. */
private final double avgRecall;
public Result(List<PerClassSingleValue> classes, double avgRecall) {
this.classes = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(classes, CLASSES));
this.avgRecall = avgRecall;
}
public Result(StreamInput in) throws IOException {
this.classes = in.readCollectionAsImmutableList(PerClassSingleValue::new);
this.avgRecall = in.readDouble();
}
@Override
public String getWriteableName() {
return registeredMetricName(Classification.NAME, NAME);
}
@Override
public String getMetricName() {
return NAME.getPreferredName();
}
public List<PerClassSingleValue> getClasses() {
return classes;
}
public double getAvgRecall() {
return avgRecall;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(classes);
out.writeDouble(avgRecall);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CLASSES.getPreferredName(), classes);
builder.field(AVG_RECALL.getPreferredName(), avgRecall);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Result that = (Result) o;
return Objects.equals(this.classes, that.classes) && this.avgRecall == that.avgRecall;
}
@Override
public int hashCode() {
return Objects.hash(classes, avgRecall);
}
}
}
|
recalls
|
java
|
netty__netty
|
transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollTest.java
|
{
"start": 1066,
"end": 2795
}
|
class ____ {
@Test
public void testIsAvailable() {
assertTrue(Epoll.isAvailable());
}
// Testcase for https://github.com/netty/netty/issues/8444
@Test
@Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
public void testEpollWaitWithTimeOutMinusOne() throws Exception {
final EpollEventArray eventArray = new EpollEventArray(8);
try {
final FileDescriptor epoll = Native.newEpollCreate();
final FileDescriptor timerFd = Native.newTimerFd();
final FileDescriptor eventfd = Native.newEventFd();
Native.epollCtlAdd(epoll.intValue(), timerFd.intValue(), Native.EPOLLIN);
Native.epollCtlAdd(epoll.intValue(), eventfd.intValue(), Native.EPOLLIN);
final AtomicReference<Throwable> ref = new AtomicReference<Throwable>();
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
assertEquals(1, Native.epollWait(epoll, eventArray, false));
// This should have been woken up because of eventfd_write.
assertEquals(eventfd.intValue(), eventArray.fd(0));
} catch (Throwable cause) {
ref.set(cause);
}
}
});
t.start();
t.join(1000);
assertTrue(t.isAlive());
Native.eventFdWrite(eventfd.intValue(), 1);
t.join();
assertNull(ref.get());
epoll.close();
timerFd.close();
eventfd.close();
} finally {
eventArray.free();
}
}
}
|
EpollTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
|
{
"start": 5804,
"end": 6171
}
|
class ____ implements ColumnName {
SquareConstraint(int num, int x, int y) {
this.num = num;
this.x = x;
this.y = y;
}
int num;
int x;
int y;
public String toString() {
return num + " in square " + x + "," + y;
}
}
/**
* A constraint that each cell can only be used once.
*/
static private
|
SquareConstraint
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/QueryFederationQueuePoliciesRequest.java
|
{
"start": 1611,
"end": 3934
}
|
class ____ {
@Private
@Unstable
public static QueryFederationQueuePoliciesRequest newInstance(
int pageSize, int currentPage, String queue, List<String> queues) {
QueryFederationQueuePoliciesRequest request =
Records.newRecord(QueryFederationQueuePoliciesRequest.class);
request.setPageSize(pageSize);
request.setCurrentPage(currentPage);
request.setQueue(queue);
request.setQueues(queues);
return request;
}
/**
* Sets the page size for FederationQueuePolicies pagination.
*
* @param pageSize The number of policies to display per page.
*/
@Private
@Unstable
public abstract void setPageSize(int pageSize);
/**
* Retrieves the page size.
*
* @return The number of policies to display per page.
*/
@Public
@Unstable
public abstract int getPageSize();
/**
* Sets the current page in the FederationQueuePolicies pagination.
*
* @param currentPage The current page number.
*/
@Private
@Unstable
public abstract void setCurrentPage(int currentPage);
/**
* Returns the current page number in the FederationQueuePolicies pagination.
*
* @return The current page number.
*/
@Public
@Unstable
public abstract int getCurrentPage();
/**
* Retrieves the queue.
*
* @return The name or identifier of the current queue.
*/
@Public
@Unstable
public abstract String getQueue();
/**
* Sets the queue to the specified value.
*
* We will use the fully qualified name matching for queues.
* For example, if the user inputs 'a', we will match
* queues that contain 'a' in their fully qualified names,
* such as 'root.a', 'root.b.a', and so on.
*
* @param queue queue name.
*/
@Private
@Unstable
public abstract void setQueue(String queue);
/**
* Retrieves a list of queues.
*
* This part contains exact matches,
* which will match the queues contained in the list.
*
* @return A list of queue names or identifiers.
*/
@Public
@Unstable
public abstract List<String> getQueues();
/**
* Sets the list of queues to the specified values.
*
* @param queues A list of queue names or identifiers to set.
*/
@Private
@Unstable
public abstract void setQueues(List<String> queues);
}
|
QueryFederationQueuePoliciesRequest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NamedLikeContextualKeywordTest.java
|
{
"start": 7083,
"end": 7224
}
|
class ____ {",
" // BUG: Diagnostic contains: [NamedLikeContextualKeyword]",
" void yield() {}",
"
|
A
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/NotWriteDefaultValueTest_NoneASM.java
|
{
"start": 2622,
"end": 2984
}
|
class ____ {
private int f0;
private int f1;
public int getF0() {
return f0;
}
public void setF0(int f0) {
this.f0 = f0;
}
public int getF1() {
return f1;
}
public void setF1(int f1) {
this.f1 = f1;
}
}
private static
|
VO_Int
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/MatrixVariablesMethodArgumentResolverTests.java
|
{
"start": 1894,
"end": 5951
}
|
class ____ {
private MatrixVariableMethodArgumentResolver resolver;
private ModelAndViewContainer mavContainer;
private ServletWebRequest webRequest;
private MockHttpServletRequest request;
private ResolvableMethod testMethod = ResolvableMethod.on(getClass()).named("handle").build();
@BeforeEach
void setup() throws Exception {
this.resolver = new MatrixVariableMethodArgumentResolver();
this.mavContainer = new ModelAndViewContainer();
this.request = new MockHttpServletRequest();
this.webRequest = new ServletWebRequest(request, new MockHttpServletResponse());
Map<String, MultiValueMap<String, String>> params = new LinkedHashMap<>();
this.request.setAttribute(HandlerMapping.MATRIX_VARIABLES_ATTRIBUTE, params);
}
@Test
void supportsParameter() {
assertThat(this.resolver.supportsParameter(this.testMethod.arg(String.class))).isFalse();
assertThat(this.resolver.supportsParameter(
this.testMethod.annot(matrixAttribute().noName()).arg(List.class, String.class))).isTrue();
assertThat(this.resolver.supportsParameter(
this.testMethod.annot(matrixAttribute().name("year")).arg(int.class))).isTrue();
}
@Test
void resolveArgument() throws Exception {
MultiValueMap<String, String> params = getVariablesFor("cars");
params.add("colors", "red");
params.add("colors", "green");
params.add("colors", "blue");
MethodParameter param = this.testMethod.annot(matrixAttribute().noName()).arg(List.class, String.class);
assertThat(this.resolver.resolveArgument(param, this.mavContainer, this.webRequest, null)).isEqualTo(Arrays.asList("red", "green", "blue"));
}
@Test
void resolveArgumentPathVariable() throws Exception {
getVariablesFor("cars").add("year", "2006");
getVariablesFor("bikes").add("year", "2005");
MethodParameter param = this.testMethod.annot(matrixAttribute().name("year")).arg(int.class);
assertThat(this.resolver.resolveArgument(param, this.mavContainer, this.webRequest, null)).isEqualTo("2006");
}
@Test
void resolveArgumentDefaultValue() throws Exception {
MethodParameter param = this.testMethod.annot(matrixAttribute().name("year")).arg(int.class);
assertThat(resolver.resolveArgument(param, this.mavContainer, this.webRequest, null)).isEqualTo("2013");
}
@Test
void resolveArgumentMultipleMatches() {
getVariablesFor("var1").add("colors", "red");
getVariablesFor("var2").add("colors", "green");
MethodParameter param = this.testMethod.annot(matrixAttribute().noName()).arg(List.class, String.class);
assertThatExceptionOfType(ServletRequestBindingException.class).isThrownBy(() ->
this.resolver.resolveArgument(param, this.mavContainer, this.webRequest, null));
}
@Test
void resolveArgumentRequired() {
MethodParameter param = this.testMethod.annot(matrixAttribute().noName()).arg(List.class, String.class);
assertThatExceptionOfType(ServletRequestBindingException.class).isThrownBy(() ->
this.resolver.resolveArgument(param, this.mavContainer, this.webRequest, null));
}
@Test
void resolveArgumentNoMatch() throws Exception {
MultiValueMap<String, String> params = getVariablesFor("cars");
params.add("anotherYear", "2012");
MethodParameter param = this.testMethod.annot(matrixAttribute().name("year")).arg(int.class);
assertThat(this.resolver.resolveArgument(param, this.mavContainer, this.webRequest, null)).isEqualTo("2013");
}
@SuppressWarnings("unchecked")
private MultiValueMap<String, String> getVariablesFor(String pathVarName) {
Map<String, MultiValueMap<String, String>> matrixVariables =
(Map<String, MultiValueMap<String, String>>) this.request.getAttribute(
HandlerMapping.MATRIX_VARIABLES_ATTRIBUTE);
MultiValueMap<String, String> params = new LinkedMultiValueMap<>();
matrixVariables.put(pathVarName, params);
return params;
}
public void handle(
String stringArg,
@MatrixVariable List<String> colors,
@MatrixVariable(name = "year", pathVar = "cars", required = false, defaultValue = "2013") int preferredYear) {
}
}
|
MatrixVariablesMethodArgumentResolverTests
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-qos/src/main/java/org/apache/dubbo/qos/command/impl/PublishMetadata.java
|
{
"start": 1734,
"end": 4111
}
|
class ____ implements BaseCommand {
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(PublishMetadata.class);
private final FrameworkModel frameworkModel;
public PublishMetadata(FrameworkModel frameworkModel) {
this.frameworkModel = frameworkModel;
}
@Override
public String execute(CommandContext commandContext, String[] args) {
logger.info("received publishMetadata command.");
StringBuilder stringBuilder = new StringBuilder();
List<ApplicationModel> applicationModels = frameworkModel.getApplicationModels();
for (ApplicationModel applicationModel : applicationModels) {
if (ArrayUtils.isEmpty(args)) {
ServiceInstanceMetadataUtils.refreshMetadataAndInstance(applicationModel);
stringBuilder
.append("publish metadata succeeded. App:")
.append(applicationModel.getApplicationName())
.append("\n");
} else {
try {
int delay = Integer.parseInt(args[0]);
FrameworkExecutorRepository frameworkExecutorRepository = applicationModel
.getFrameworkModel()
.getBeanFactory()
.getBean(FrameworkExecutorRepository.class);
frameworkExecutorRepository
.nextScheduledExecutor()
.schedule(
() -> ServiceInstanceMetadataUtils.refreshMetadataAndInstance(applicationModel),
delay,
TimeUnit.SECONDS);
} catch (NumberFormatException e) {
logger.error(CONFIG_PARAMETER_FORMAT_ERROR, "", "", "Wrong delay param", e);
return "publishMetadata failed! Wrong delay param!";
}
stringBuilder
.append("publish task submitted, will publish in ")
.append(args[0])
.append(" seconds. App:")
.append(applicationModel.getApplicationName())
.append("\n");
}
}
return stringBuilder.toString();
}
}
|
PublishMetadata
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/support/GenericGroovyApplicationContext.java
|
{
"start": 8020,
"end": 9396
}
|
class ____ package will be used as a prefix when
* loading each specified resource name
* @param resourceNames relatively-qualified names of resources to load
*/
public void load(Class<?> relativeClass, String... resourceNames) {
Resource[] resources = new Resource[resourceNames.length];
for (int i = 0; i < resourceNames.length; i++) {
resources[i] = new ClassPathResource(resourceNames[i], relativeClass);
}
load(resources);
}
// Implementation of the GroovyObject interface
@Override
public void setMetaClass(MetaClass metaClass) {
this.metaClass = metaClass;
}
@Override
public MetaClass getMetaClass() {
return this.metaClass;
}
@Override
public Object invokeMethod(String name, Object args) {
return this.metaClass.invokeMethod(this, name, args);
}
@Override
public void setProperty(String property, Object newValue) {
if (newValue instanceof BeanDefinition beanDefinition) {
registerBeanDefinition(property, beanDefinition);
}
else {
this.metaClass.setProperty(this, property, newValue);
}
}
@Override
public @Nullable Object getProperty(String property) {
if (containsBean(property)) {
return getBean(property);
}
else if (this.contextWrapper.isReadableProperty(property)) {
return this.contextWrapper.getPropertyValue(property);
}
throw new NoSuchBeanDefinitionException(property);
}
}
|
whose
|
java
|
quarkusio__quarkus
|
extensions/spring-scheduled/deployment/src/test/java/io/quarkus/spring/scheduled/deployment/InvalidCronExpressionTest.java
|
{
"start": 417,
"end": 818
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.setExpectedException(DeploymentException.class)
.withApplicationRoot((jar) -> jar
.addClasses(InvalidBean.class));
@Test
public void test() throws InterruptedException {
fail();
}
@ApplicationScoped
static
|
InvalidCronExpressionTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java
|
{
"start": 2183,
"end": 2539
}
|
class ____ extends ActionType<SearchResponse> {
public static final EnrichCoordinatorProxyAction INSTANCE = new EnrichCoordinatorProxyAction();
public static final String NAME = "indices:data/read/xpack/enrich/coordinate_lookups";
private EnrichCoordinatorProxyAction() {
super(NAME);
}
public static
|
EnrichCoordinatorProxyAction
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CheckedExceptionNotThrownTest.java
|
{
"start": 4117,
"end": 4413
}
|
class ____ {
void test() throws IllegalStateException {}
}
""")
.doTest();
}
@Test
public void oneCheckedOneUnchecked_finding() {
compilationHelper
.addSourceLines(
"Test.java",
"""
public final
|
Test
|
java
|
apache__maven
|
impl/maven-impl/src/test/java/org/apache/maven/impl/util/PhasingExecutorTest.java
|
{
"start": 959,
"end": 1496
}
|
class ____ {
@Test
void testPhaser() {
try (PhasingExecutor p = new PhasingExecutor(Executors.newFixedThreadPool(4))) {
p.execute(() -> waitSomeTime(p, 2));
}
}
private void waitSomeTime(Executor executor, int nb) {
try {
Thread.sleep(10);
if (nb > 0) {
executor.execute(() -> waitSomeTime(executor, nb - 1));
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
|
PhasingExecutorTest
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/http/impl/CookieImpl.java
|
{
"start": 810,
"end": 3702
}
|
class ____ implements ServerCookie {
private final io.netty.handler.codec.http.cookie.Cookie nettyCookie;
// denotes if a cookie has been created from an HTTP request (true) or during the
// application/response life cycle (false)
private final boolean fromUserAgent;
private boolean changed;
// extension feature(s)
private CookieSameSite sameSite;
public CookieImpl(String name, String value) {
this.nettyCookie = new DefaultCookie(name, value);
fromUserAgent = false;
this.changed = true;
}
/**
* Internal constructor, only used by the CookieJar.
*
* @param nettyCookie the underlying cookie object
*/
CookieImpl(io.netty.handler.codec.http.cookie.Cookie nettyCookie) {
this.nettyCookie = nettyCookie;
fromUserAgent = true;
}
@Override
public String getValue() {
return nettyCookie.value();
}
@Override
public Cookie setValue(final String value) {
nettyCookie.setValue(value);
this.changed = true;
return this;
}
@Override
public String getName() {
return nettyCookie.name();
}
@Override
public Cookie setDomain(final String domain) {
nettyCookie.setDomain(domain);
this.changed = true;
return this;
}
@Override
public String getDomain() {
return nettyCookie.domain();
}
@Override
public Cookie setPath(final String path) {
nettyCookie.setPath(path);
this.changed = true;
return this;
}
@Override
public String getPath() {
return nettyCookie.path();
}
@Override
public Cookie setMaxAge(final long maxAge) {
nettyCookie.setMaxAge(maxAge);
this.changed = true;
return this;
}
@Override
public long getMaxAge() {
return nettyCookie.maxAge();
}
@Override
public Cookie setSecure(final boolean secure) {
nettyCookie.setSecure(secure);
this.changed = true;
return this;
}
@Override
public boolean isSecure() {
return nettyCookie.isSecure();
}
@Override
public Cookie setHttpOnly(final boolean httpOnly) {
nettyCookie.setHttpOnly(httpOnly);
this.changed = true;
return this;
}
@Override
public boolean isHttpOnly() {
return nettyCookie.isHttpOnly();
}
@Override
public Cookie setSameSite(final CookieSameSite sameSite) {
this.sameSite = sameSite;
this.changed = true;
return this;
}
@Override
public CookieSameSite getSameSite() {
return this.sameSite;
}
@Override
public String encode() {
if (sameSite != null) {
return ServerCookieEncoder.STRICT.encode(nettyCookie) + "; SameSite=" + sameSite.toString();
} else {
return ServerCookieEncoder.STRICT.encode(nettyCookie);
}
}
public boolean isChanged() {
return changed;
}
public void setChanged(boolean changed) {
this.changed = changed;
}
public boolean isFromUserAgent() {
return fromUserAgent;
}
}
|
CookieImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/hhh12973/SequenceMismatchStrategyUpperCaseStringValueTest.java
|
{
"start": 473,
"end": 856
}
|
class ____ {
@Test
public void test() {
assertEquals( SequenceMismatchStrategy.EXCEPTION, SequenceMismatchStrategy.interpret( "EXCEPTION" ) );
assertEquals( SequenceMismatchStrategy.LOG, SequenceMismatchStrategy.interpret( "LOG" ) );
assertEquals( SequenceMismatchStrategy.FIX, SequenceMismatchStrategy.interpret( "FIX" ) );
}
}
|
SequenceMismatchStrategyUpperCaseStringValueTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ShellCommandFencer.java
|
{
"start": 2176,
"end": 8245
}
|
class ____
extends Configured implements FenceMethod {
/** Length at which to abbreviate command in long messages */
private static final int ABBREV_LENGTH = 20;
/** Prefix for target parameters added to the environment */
private static final String TARGET_PREFIX = "target_";
/** Prefix for source parameters added to the environment */
private static final String SOURCE_PREFIX = "source_";
private static final String ARG_DELIMITER = ",";
@VisibleForTesting
static Logger LOG = LoggerFactory.getLogger(ShellCommandFencer.class);
@Override
public void checkArgs(String args) throws BadFencingConfigurationException {
if (args == null || args.isEmpty()) {
throw new BadFencingConfigurationException(
"No argument passed to 'shell' fencing method");
}
// Nothing else we can really check without actually running the command
}
@Override
public boolean tryFence(HAServiceTarget target, String args) {
ProcessBuilder builder;
String cmd = parseArgs(target.getTransitionTargetHAStatus(), args);
if (!Shell.WINDOWS) {
builder = new ProcessBuilder("bash", "-e", "-c", cmd);
} else {
builder = new ProcessBuilder("cmd.exe", "/c", cmd);
}
setConfAsEnvVars(builder.environment());
addTargetInfoAsEnvVars(target, builder.environment());
Process p;
try {
p = builder.start();
p.getOutputStream().close();
} catch (IOException e) {
LOG.warn("Unable to execute " + cmd, e);
return false;
}
String pid = tryGetPid(p);
LOG.info("Launched fencing command '" + cmd + "' with "
+ ((pid != null) ? ("pid " + pid) : "unknown pid"));
String logPrefix = abbreviate(cmd, ABBREV_LENGTH);
if (pid != null) {
logPrefix = "[PID " + pid + "] " + logPrefix;
}
// Pump logs to stderr
StreamPumper errPumper = new StreamPumper(
LOG, logPrefix, p.getErrorStream(),
StreamPumper.StreamType.STDERR);
errPumper.start();
StreamPumper outPumper = new StreamPumper(
LOG, logPrefix, p.getInputStream(),
StreamPumper.StreamType.STDOUT);
outPumper.start();
int rc;
try {
rc = p.waitFor();
errPumper.join();
outPumper.join();
} catch (InterruptedException ie) {
LOG.warn("Interrupted while waiting for fencing command: " + cmd);
return false;
}
return rc == 0;
}
private String parseArgs(HAServiceProtocol.HAServiceState state,
String cmd) {
String[] args = cmd.split(ARG_DELIMITER);
if (args.length == 1) {
// only one command is given, assuming both src and dst
// will execute the same command/script.
return args[0];
}
if (args.length > 2) {
throw new IllegalArgumentException("Expecting arguments size of at most "
+ "two, getting " + Arrays.asList(args));
}
if (HAServiceProtocol.HAServiceState.ACTIVE.equals(state)) {
return args[0];
} else if (HAServiceProtocol.HAServiceState.STANDBY.equals(state)) {
return args[1];
} else {
throw new IllegalArgumentException(
"Unexpected HA service state:" + state);
}
}
/**
* Abbreviate a string by putting '...' in the middle of it,
* in an attempt to keep logs from getting too messy.
* @param cmd the string to abbreviate
* @param len maximum length to abbreviate to
* @return abbreviated string
*/
static String abbreviate(String cmd, int len) {
if (cmd.length() > len && len >= 5) {
int firstHalf = (len - 3) / 2;
int rem = len - firstHalf - 3;
return cmd.substring(0, firstHalf) +
"..." + cmd.substring(cmd.length() - rem);
} else {
return cmd;
}
}
/**
* Attempt to use evil reflection tricks to determine the
* pid of a launched process. This is helpful to ops
* if debugging a fencing process that might have gone
* wrong. If running on a system or JVM where this doesn't
* work, it will simply return null.
*/
private static String tryGetPid(Process p) {
try {
Class<? extends Process> clazz = p.getClass();
if (clazz.getName().equals("java.lang.UNIXProcess")) {
Field f = clazz.getDeclaredField("pid");
f.setAccessible(true);
return String.valueOf(f.getInt(p));
} else {
LOG.trace("Unable to determine pid for " + p
+ " since it is not a UNIXProcess");
return null;
}
} catch (Throwable t) {
LOG.trace("Unable to determine pid for " + p, t);
return null;
}
}
/**
* Set the environment of the subprocess to be the Configuration,
* with '.'s and '-'s replaced by '_'s.
*/
private void setConfAsEnvVars(Map<String, String> env) {
for (Map.Entry<String, String> pair : getConf()) {
env.put(pair.getKey().replaceAll("[.-]", "_"), pair.getValue());
}
}
/**
* Add information about the target to the the environment of the
* subprocess.
*
* @param target
* @param environment
*/
private void addTargetInfoAsEnvVars(HAServiceTarget target,
Map<String, String> environment) {
String prefix;
HAServiceProtocol.HAServiceState targetState =
target.getTransitionTargetHAStatus();
if (targetState == null ||
HAServiceProtocol.HAServiceState.ACTIVE.equals(targetState)) {
// null is assumed to be same as ACTIVE, this is to be compatible
// with existing tests/use cases where target state is not specified
// but assuming it's active.
prefix = TARGET_PREFIX;
} else if (HAServiceProtocol.HAServiceState.STANDBY.equals(targetState)) {
prefix = SOURCE_PREFIX;
} else {
throw new IllegalArgumentException(
"Unexpected HA service state:" + targetState);
}
for (Map.Entry<String, String> e :
target.getFencingParameters().entrySet()) {
String key = prefix + e.getKey();
key = key.replaceAll("[.-]", "_");
environment.put(key, e.getValue());
}
}
}
|
ShellCommandFencer
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/net/SharedServersConcurrencyTest.java
|
{
"start": 2047,
"end": 2569
}
|
class ____ extends AbstractVerticle {
@Override
public void start(Promise<Void> startPromise) {
vertx.createHttpServer(new HttpServerOptions())
.requestHandler(req -> {
})
.listen(15152).onComplete(ar -> {
if (ar.succeeded()) {
System.out.println("REST listening on port: " + ar.result().actualPort());
startPromise.complete();
} else {
startPromise.fail(ar.cause());
}
});
}
}
private static
|
RestVerticle
|
java
|
junit-team__junit5
|
documentation/src/test/java/example/extensions/RandomNumberDemo.java
|
{
"start": 484,
"end": 1182
}
|
class ____ {
// Use static randomNumber0 field anywhere in the test class,
// including @BeforeAll or @AfterEach lifecycle methods.
@Random
// end::user_guide[]
@Nullable
// tag::user_guide[]
private static Integer randomNumber0;
// Use randomNumber1 field in test methods and @BeforeEach
// or @AfterEach lifecycle methods.
@Random
private int randomNumber1;
RandomNumberDemo(@Random int randomNumber2) {
// Use randomNumber2 in constructor.
}
@BeforeEach
void beforeEach(@Random int randomNumber3) {
// Use randomNumber3 in @BeforeEach method.
}
@Test
void test(@Random int randomNumber4) {
// Use randomNumber4 in test method.
}
}
// end::user_guide[]
|
RandomNumberDemo
|
java
|
apache__maven
|
impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/GAVUtilsTest.java
|
{
"start": 7122,
"end": 12315
}
|
class ____ {
@Test
@DisplayName("should compute GAVs from multiple POMs")
void shouldComputeGAVsFromMultiplePOMs() throws Exception {
String parentPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
<packaging>pom</packaging>
</project>
""";
String childPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
</parent>
<artifactId>child-project</artifactId>
</project>
""";
Document parentDoc = Document.of(parentPomXml);
Document childDoc = Document.of(childPomXml);
Map<Path, Document> pomMap = new HashMap<>();
pomMap.put(Paths.get("/project/pom.xml"), parentDoc);
pomMap.put(Paths.get("/project/child/pom.xml"), childDoc);
UpgradeContext context = createMockContext();
Set<Coordinates> gavs = InferenceStrategy.computeAllArtifactCoordinates(context, pomMap);
assertEquals(2, gavs.size());
assertTrue(gavs.contains(Coordinates.of("com.example", "parent-project", "1.0.0")));
assertTrue(gavs.contains(Coordinates.of("com.example", "child-project", "1.0.0")));
}
@Test
@DisplayName("should handle empty POM map")
void shouldHandleEmptyPOMMap() {
UpgradeContext context = createMockContext();
Map<Path, Document> pomMap = new HashMap<>();
Set<Coordinates> gavs = AbstractUpgradeStrategy.computeAllArtifactCoordinates(context, pomMap);
assertNotNull(gavs);
assertTrue(gavs.isEmpty(), "Expected collection to be empty but had " + gavs.size() + " elements: " + gavs);
}
@Test
@DisplayName("should deduplicate identical GAVs")
void shouldDeduplicateIdenticalGAVs() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>duplicate-project</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc1 = Document.of(pomXml);
Document doc2 = Document.of(pomXml);
Map<Path, Document> pomMap = new HashMap<>();
pomMap.put(Paths.get("/project/pom1.xml"), doc1);
pomMap.put(Paths.get("/project/pom2.xml"), doc2);
UpgradeContext context = createMockContext();
Set<Coordinates> gavs = InferenceStrategy.computeAllArtifactCoordinates(context, pomMap);
assertEquals(1, gavs.size());
assertTrue(gavs.contains(Coordinates.of("com.example", "duplicate-project", "1.0.0")));
}
@Test
@DisplayName("should skip POMs with incomplete GAVs")
void shouldSkipPOMsWithIncompleteGAVs() throws Exception {
String validPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>valid-project</artifactId>
<version>1.0.0</version>
</project>
""";
String invalidPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<artifactId>invalid-project</artifactId>
<!-- Missing groupId and version -->
</project>
""";
Document validDoc = Document.of(validPomXml);
Document invalidDoc = Document.of(invalidPomXml);
Map<Path, Document> pomMap = new HashMap<>();
pomMap.put(Paths.get("/project/valid.xml"), validDoc);
pomMap.put(Paths.get("/project/invalid.xml"), invalidDoc);
UpgradeContext context = createMockContext();
Set<Coordinates> gavs = InferenceStrategy.computeAllArtifactCoordinates(context, pomMap);
assertEquals(1, gavs.size());
assertTrue(gavs.contains(Coordinates.of("com.example", "valid-project", "1.0.0")));
}
}
@Nested
@DisplayName("Edge Cases")
|
GAVComputationTests
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-testng/src/main/java/smoketest/testng/web/SampleController.java
|
{
"start": 962,
"end": 1167
}
|
class ____ {
@Autowired
private HelloWorldService helloWorldService;
@GetMapping("/")
@ResponseBody
public String helloWorld() {
return this.helloWorldService.getHelloMessage();
}
}
|
SampleController
|
java
|
alibaba__nacos
|
auth/src/test/java/com/alibaba/nacos/auth/parser/grpc/ConfigGrpcResourceParserTest.java
|
{
"start": 1315,
"end": 5965
}
|
class ____ {
private ConfigGrpcResourceParser resourceParser;
@BeforeEach
void setUp() throws Exception {
resourceParser = new ConfigGrpcResourceParser();
}
@Test
@Secured(signType = Constants.Config.CONFIG_MODULE)
void testParseWithFullContext() throws NoSuchMethodException {
Secured secured = getMethodSecure();
Request request = mockConfigRequest("testNs", "testG", "testD");
Resource actual = resourceParser.parse(request, secured);
assertEquals("testNs", actual.getNamespaceId());
assertEquals("testG", actual.getGroup());
assertEquals("testD", actual.getName());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getType());
}
@Test
@Secured(signType = Constants.Config.CONFIG_MODULE)
void testParseWithoutNamespace() throws NoSuchMethodException {
Secured secured = getMethodSecure();
Request request = mockConfigRequest("", "testG", "testD");
Resource actual = resourceParser.parse(request, secured);
assertEquals("", actual.getNamespaceId());
assertEquals("testG", actual.getGroup());
assertEquals("testD", actual.getName());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getType());
}
@Test
@Secured(signType = Constants.Config.CONFIG_MODULE)
void testParseWithoutGroup() throws NoSuchMethodException {
Secured secured = getMethodSecure();
Request request = mockConfigRequest("testNs", "", "testD");
Resource actual = resourceParser.parse(request, secured);
assertEquals("testNs", actual.getNamespaceId());
assertEquals(StringUtils.EMPTY, actual.getGroup());
assertEquals("testD", actual.getName());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getType());
}
@Test
@Secured(signType = Constants.Config.CONFIG_MODULE)
void testParseWithoutDataId() throws NoSuchMethodException {
Secured secured = getMethodSecure();
Request request = mockConfigRequest("testNs", "testG", "");
Resource actual = resourceParser.parse(request, secured);
assertEquals("testNs", actual.getNamespaceId());
assertEquals("testG", actual.getGroup());
assertEquals(StringUtils.EMPTY, actual.getName());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getType());
}
@Test
@Secured(signType = Constants.Config.CONFIG_MODULE)
void testParseWithConfigBatchListenRequest() throws NoSuchMethodException {
Secured secured = getMethodSecure();
ConfigBatchListenRequest request = new ConfigBatchListenRequest();
request.addConfigListenContext("testG", "testD", "testNs", "111");
Resource actual = resourceParser.parse(request, secured);
assertEquals("testNs", actual.getNamespaceId());
assertEquals(StringUtils.EMPTY, actual.getGroup());
assertEquals(StringUtils.EMPTY, actual.getName());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getType());
request.getConfigListenContexts().clear();
actual = resourceParser.parse(request, secured);
assertEquals(StringUtils.EMPTY, actual.getNamespaceId());
assertEquals(StringUtils.EMPTY, actual.getGroup());
assertEquals(StringUtils.EMPTY, actual.getName());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getType());
}
@Test
@Secured(signType = Constants.Config.CONFIG_MODULE)
void testParseWithReflectionRequest() throws NoSuchMethodException {
Secured secured = getMethodSecure();
Request request = ConfigChangeNotifyRequest.build("rTestD", "rTestG", "rTestNs");
Resource actual = resourceParser.parse(request, secured);
assertEquals("rTestNs", actual.getNamespaceId());
assertEquals("rTestG", actual.getGroup());
assertEquals("rTestD", actual.getName());
}
private Request mockConfigRequest(String tenant, String group, String dataId) {
ConfigPublishRequest request = new ConfigPublishRequest();
request.setTenant(tenant);
request.setGroup(group);
request.setDataId(dataId);
return request;
}
private Secured getMethodSecure() throws NoSuchMethodException {
StackTraceElement[] traces = new Exception().getStackTrace();
StackTraceElement callerElement = traces[1];
String methodName = callerElement.getMethodName();
Method method = this.getClass().getDeclaredMethod(methodName);
return method.getAnnotation(Secured.class);
}
}
|
ConfigGrpcResourceParserTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3296/MapperNotExtendingConfig.java
|
{
"start": 289,
"end": 366
}
|
interface ____ {
Entity toEntity(Payload payload);
}
|
MapperNotExtendingConfig
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PrimitiveArrayPassedToVarargsMethodTest.java
|
{
"start": 882,
"end": 1472
}
|
class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(PrimitiveArrayPassedToVarargsMethod.class, getClass());
@Test
public void positiveCase() {
compilationHelper
.addSourceLines(
"PrimitiveArrayPassedToVarargsMethodPositiveCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import java.util.Arrays;
/**
* @author eaftan@google.com (Eddie Aftandilian)
*/
public
|
PrimitiveArrayPassedToVarargsMethodTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/JUnitIncompatibleTypeTest.java
|
{
"start": 858,
"end": 1302
}
|
class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(JUnitIncompatibleType.class, getClass());
@Test
public void assertEquals_mismatched() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
|
JUnitIncompatibleTypeTest
|
java
|
grpc__grpc-java
|
services/src/generated/test/grpc/io/grpc/reflection/testing/ReflectableServiceGrpc.java
|
{
"start": 5595,
"end": 6497
}
|
class ____
extends io.grpc.stub.AbstractAsyncStub<ReflectableServiceStub> {
private ReflectableServiceStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected ReflectableServiceStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new ReflectableServiceStub(channel, callOptions);
}
/**
*/
public void method(io.grpc.reflection.testing.Request request,
io.grpc.stub.StreamObserver<io.grpc.reflection.testing.Reply> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getMethodMethod(), getCallOptions()), request, responseObserver);
}
}
/**
* A stub to allow clients to do synchronous rpc calls to service ReflectableService.
*/
public static final
|
ReflectableServiceStub
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/legacy/RecursiveComparisonAssert_isEqualTo_withTypeComparators_Test.java
|
{
"start": 2635,
"end": 14281
}
|
class ____ extends WithLegacyIntrospectionStrategyBaseTest {
@ParameterizedTest(name = "{3}: actual={0} / expected={1} - comparatorsByType: {2}")
@MethodSource("recursivelyEqualObjectsWhenUsingTypeComparators")
void should_pass_for_objects_with_the_same_data_when_using_registered_comparator_by_types(Object actual,
Object expected,
Map<Class<?>, Comparator<Object>> comparatorByTypes,
String testDescription) {
// GIVEN
comparatorByTypes.forEach((key, value) -> recursiveComparisonConfiguration.registerComparatorForType(value,
key));
// THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(expected);
}
@ParameterizedTest(name = "{3}: actual={0} / expected={1} - comparatorsByType: {2}")
@MethodSource("recursivelyEqualObjectsWhenUsingTypeComparators")
void should_pass_for_objects_with_the_same_data_when_using_registered_equals_by_types(Object actual,
Object expected,
Map<Class<?>, Comparator<Object>> comparatorByTypes,
String testDescription) {
// GIVEN
comparatorByTypes.forEach((key, value) -> recursiveComparisonConfiguration.registerEqualsForType(asBiPredicate(value),
key));
// THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(expected);
}
private static BiPredicate<Object, Object> asBiPredicate(Comparator<Object> comparator) {
return (Object o1, Object o2) -> comparator.compare(o1, o2) == 0;
}
private static Stream<Arguments> recursivelyEqualObjectsWhenUsingTypeComparators() {
Person person1 = new Person("John");
person1.home.address.number = 1;
Person person2 = new Person("JoHN");
person2.home.address.number = 2;
Person person3 = new Person("John");
person3.home.address.number = 1;
Person person4 = new Person("John");
person4.home.address.number = 2;
Person person5 = new Person("John");
person5.home.address.number = 1;
person5.dateOfBirth = new Date(123);
person5.neighbour = new Person("Jack");
person5.neighbour.home.address.number = 123;
Person person6 = new Person("John");
person6.home.address.number = 1;
person6.dateOfBirth = new Date(123);
person6.neighbour = new Person("Jim");
person6.neighbour.home.address.number = 456;
MapEntry<Class<?>, Comparator<?>> stringComparator = entry(String.class, CaseInsensitiveStringComparator.INSTANCE);
MapEntry<Class<?>, Comparator<?>> intComparator = entry(Integer.class, new AlwaysEqualComparator<Integer>());
MapEntry<Class<?>, Comparator<?>> personComparator = entry(Person.class, new AlwaysEqualComparator<Person>());
return Stream.of(arguments(person1, person2, mapOf(stringComparator, intComparator),
"same data except int fields and case for strings"),
arguments(person3, person4, mapOf(intComparator), "same data except for int fields"),
// any neighbour differences should be ignored as we compare persons with AlwaysEqualComparator
arguments(person5, person6, mapOf(personComparator),
"same data except for persons, person's fields should not be compared recursively except at the root level"));
}
/** Registering always-failing comparators must surface a difference for every field of that type. */
@Test
void should_fail_when_actual_differs_from_expected_when_using_comparators_by_type() {
    // GIVEN
    Person actual = new Person("John");
    actual.home.address.number = 1;
    actual.dateOfBirth = new Date(123);
    actual.neighbour = new Person("Jack");
    actual.neighbour.home.address.number = 123;
    // actually a clone of actual
    Person expected = new Person("John");
    expected.home.address.number = 1;
    expected.dateOfBirth = new Date(123);
    expected.neighbour = new Person("Jack");
    expected.neighbour.home.address.number = 123;
    // register comparators for some type that will fail the comparison
    recursiveComparisonConfiguration.registerComparatorForType(new AlwaysDifferentComparator<>(), Date.class);
    recursiveComparisonConfiguration.registerEqualsForType((Address a1, Address a2) -> false, Address.class);
    // WHEN/THEN
    // Expect one difference per Date field and per Address field (root and nested).
    ComparisonDifference dateOfBirthDiff = diff("dateOfBirth", actual.dateOfBirth, expected.dateOfBirth);
    ComparisonDifference addressDiff = diff("home.address", actual.home.address, expected.home.address);
    ComparisonDifference neighbourAddressDiff = diff("neighbour.home.address",
                                                     actual.neighbour.home.address,
                                                     expected.neighbour.home.address);
    compareRecursivelyFailsWithDifferences(actual, expected, dateOfBirthDiff, addressDiff, neighbourAddressDiff);
}
/** Doubles within the registered tolerance (0.2) must be treated as equal, via comparator or BiPredicate. */
@Test
void should_be_able_to_compare_objects_recursively_using_some_precision_for_numerical_types() {
    // GIVEN
    Giant goliath = new Giant("Goliath", 3.0);
    Giant goliathTwin = new Giant("Goliath", 3.1);  // heights differ by 0.1 < 0.2 tolerance
    // THEN
    then(goliath).usingRecursiveComparison(recursiveComparisonConfiguration)
                 .withComparatorForType(new AtPrecisionComparator<>(0.2), Double.class)
                 .isEqualTo(goliathTwin);
    // Same check expressed with the BiPredicate API instead of a Comparator.
    then(goliath).usingRecursiveComparison(recursiveComparisonConfiguration)
                 .withEqualsForType((d1, d2) -> Math.abs(d1 - d2) < 0.2, Double.class)
                 .isEqualTo(goliathTwin);
}
/** A null actual field must not NPE when a type comparator/equals covers that field's type. */
@Test
void should_handle_null_field_with_type_comparator() {
    // GIVEN
    Patient actual = new Patient(null);
    Patient expected = new Patient(new Timestamp(3L));
    // THEN
    then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                .withComparatorForType(ALWAYS_EQUALS_TIMESTAMP, Timestamp.class)
                .isEqualTo(expected);
    then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                .withEqualsForType((o1, o2) -> true, Timestamp.class)
                .isEqualTo(expected);
}
/** A registered comparator must win even when both sides hold the exact same object reference. */
@Test
void should_use_custom_comparator_over_reference_comparison() {
    // GIVEN
    Timestamp dateOfBirth = new Timestamp(3L);  // shared reference on purpose
    Patient actual = new Patient(dateOfBirth);
    Patient expected = new Patient(dateOfBirth);
    // WHEN
    var assertionError = expectAssertionError(() -> assertThat(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                                                                      .withComparatorForType(NEVER_EQUALS, Timestamp.class)
                                                                      .isEqualTo(expected));
    // THEN
    then(assertionError).hasMessageContaining("- java.sql.Timestamp -> org.assertj.tests.core.testkit.NeverEqualComparator");
}
/** A registered BiPredicate must win even when both sides hold the exact same object reference. */
@Test
void should_use_custom_equal_over_reference_comparison() {
    // GIVEN
    Timestamp dateOfBirth = new Timestamp(3L);  // shared reference on purpose
    Patient actual = new Patient(dateOfBirth);
    Patient expected = new Patient(dateOfBirth);
    // WHEN
    var assertionError = expectAssertionError(() -> assertThat(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                                                                      .withEqualsForType((o1, o2) -> false, Timestamp.class)
                                                                      .isEqualTo(expected));
    // THEN
    then(assertionError).hasMessageContaining("- java.sql.Timestamp -> ");
}
/**
 * A symmetric Date/Timestamp comparator must make the comparison pass in both directions
 * (actual vs expected and expected vs actual).
 */
@Test
void should_treat_timestamp_as_equal_to_date_when_registering_a_Date_symmetric_comparator() {
    // GIVEN
    Person actual = new Person("Fred");
    actual.dateOfBirth = new Timestamp(1000L);
    Person expected = new Person(actual.name);
    expected.dateOfBirth = new Date(1000L);  // same instant, different type
    // THEN
    then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                .withComparatorForType(SYMMETRIC_DATE_COMPARATOR, Timestamp.class)
                .isEqualTo(expected);
    // reversed direction must pass too thanks to the comparator's symmetry
    then(expected).usingRecursiveComparison(recursiveComparisonConfiguration)
                  .withComparatorForType(SYMMETRIC_DATE_COMPARATOR, Timestamp.class)
                  .isEqualTo(actual);
}
/**
 * ignoringOverriddenEqualsForFields must not disable type comparators: the comparison only
 * passes because the AlwaysEqualPerson comparator is registered (it fails if removed).
 */
@Test
void ignoringOverriddenEquals_should_not_interfere_with_comparators_by_type() {
    // GIVEN
    Person actual = new Person("Fred");
    actual.neighbour = new AlwaysEqualPerson();
    actual.neighbour.name = "Omar";
    Person expected = new Person("Fred");
    expected.neighbour = new AlwaysEqualPerson();
    expected.neighbour.name = "Omar2";  // differs, equality comes from the type comparator
    // THEN
    then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                .withComparatorForType(ALWAYS_EQUALS, AlwaysEqualPerson.class) // fails if commented
                .ignoringOverriddenEqualsForFields("neighbour")
                .isEqualTo(expected);
    then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
                .withEqualsForType((o1, o2) -> true, AlwaysEqualPerson.class) // fails if commented
                .ignoringOverriddenEqualsForFields("neighbour")
                .isEqualTo(expected);
}
/** A cross-type BiPredicate (LocalTime vs String) must match fields of different declared types. */
@Test
void should_pass_using_a_BiPredicate_to_compare_fields_with_different_types_but_same_values() {
    // GIVEN
    TimeOffset actual = new TimeOffset();
    actual.time = LocalTime.now();
    TimeOffsetDto expected = new TimeOffsetDto();
    expected.time = actual.time.toString();  // same instant encoded as a String
    // WHEN/THEN
    then(actual).usingRecursiveComparison()
                .withEqualsForTypes((t, s) -> LocalTime.parse(s).equals(t), LocalTime.class, String.class)
                .isEqualTo(expected);
}
/** Several cross-type BiPredicates may be registered at once; each applies to its own type pair. */
@Test
void should_pass_using_two_BiPredicates_that_matches_fields_with_different_types_but_same_values() {
    // GIVEN
    TimeOffset actual = new TimeOffset();
    actual.time = LocalTime.now();
    actual.offset = ZoneOffset.UTC;
    TimeOffsetDto expected = new TimeOffsetDto();
    expected.time = actual.time.toString();
    expected.offset = actual.offset.getId();
    // WHEN/THEN
    then(actual).usingRecursiveComparison()
                .withEqualsForTypes((z, s) -> ZoneOffset.of(s).equals(z), ZoneOffset.class, String.class)
                .withEqualsForTypes((t, s) -> LocalTime.parse(s).equals(t), LocalTime.class, String.class)
                .isEqualTo(expected);
}
/** Two BiPredicates sharing the same left type (String) must each be resolved by the right type. */
@Test
void should_pass_having_two_BiPredicates_with_same_left_type() {
    // GIVEN
    LocalTime now = LocalTime.now();
    TimeOffsetDto actual = new TimeOffsetDto();
    actual.time = now.toString();
    actual.offset = "Z";
    TimeOffset expected = new TimeOffset();
    expected.time = now;
    expected.offset = ZoneOffset.UTC;  // "Z" parses to UTC
    // WHEN/THEN
    then(actual).usingRecursiveComparison()
                .withEqualsForTypes((s, z) -> ZoneOffset.of(s).equals(z), String.class, ZoneOffset.class)
                .withEqualsForTypes((s, t) -> LocalTime.parse(s).equals(t), String.class, LocalTime.class)
                .isEqualTo(expected);
}
}
|
RecursiveComparisonAssert_isEqualTo_withTypeComparators_Test
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestFederationUtil.java
|
{
"start": 1649,
"end": 2899
}
|
/**
 * Verifies that {@code FederationUtil} factory methods can instantiate both resolver types,
 * with and without a context object (state store / router).
 */
class ____ {
    @Test
    public void testInstanceCreation() {
        // Point both resolver settings at the mock implementation.
        Configuration configuration = new HdfsConfiguration();
        configuration.setClass(FEDERATION_NAMENODE_RESOLVER_CLIENT_CLASS,
            MockResolver.class, ActiveNamenodeResolver.class);
        configuration.setClass(FEDERATION_FILE_RESOLVER_CLIENT_CLASS,
            MockResolver.class, FileSubclusterResolver.class);
        StateStoreService stateStoreService = new StateStoreService();
        Router router = new Router();
        // Instantiate each resolver with and without its context argument.
        ActiveNamenodeResolver withStateStore =
            FederationUtil.newActiveNamenodeResolver(configuration, stateStoreService);
        ActiveNamenodeResolver withoutStateStore =
            FederationUtil.newActiveNamenodeResolver(configuration, null);
        FileSubclusterResolver withRouter =
            FederationUtil.newFileSubclusterResolver(configuration, router);
        FileSubclusterResolver withoutRouter =
            FederationUtil.newFileSubclusterResolver(configuration, null);
        // Every combination must produce a non-null instance.
        assertNotNull(withStateStore);
        assertNotNull(withoutStateStore);
        assertNotNull(withRouter);
        assertNotNull(withoutRouter);
    }
}
|
TestFederationUtil
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemLease.java
|
{
"start": 3428,
"end": 19027
}
|
/**
 * Integration tests for ABFS infinite-lease behaviour: lease acquisition on create/append under
 * configured infinite-lease directories, release on close, lease break, writer exclusion, and
 * lease-thread configuration. NOTE(review): runs against a live Azure storage account via
 * AbstractAbfsIntegrationTest — confirm account config before relying on results locally.
 */
class ____ extends AbstractAbfsIntegrationTest {
    // Timeouts in milliseconds for normal and long-running tests.
    private static final int TEST_EXECUTION_TIMEOUT = 30 * 1000;
    private static final int LONG_TEST_EXECUTION_TIMEOUT = 90 * 1000;
    private static final String TEST_FILE = "testfile";
    // Whether the test account has hierarchical namespace enabled (from raw configuration).
    private final boolean isHNSEnabled;
    private static final int TEST_BYTES = 20;
    private static final String PARALLEL_ACCESS = "Parallel access to the create path "
        + "detected";

    public ITestAzureBlobFileSystemLease() throws Exception {
        super();
        this.isHNSEnabled = getConfiguration()
            .getBoolean(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT, false);
    }

    /** Builds a non-cached filesystem with the given infinite-lease dir and lease thread count. */
    private AzureBlobFileSystem getCustomFileSystem(Path infiniteLeaseDirs, int numLeaseThreads) throws Exception {
        Configuration conf = getRawConfiguration();
        // Disable the FS cache so each test gets a fresh instance with its own lease config.
        conf.setBoolean(String.format("fs.%s.impl.disable.cache", getAbfsScheme()), true);
        conf.set(FS_AZURE_INFINITE_LEASE_KEY, infiniteLeaseDirs.toUri().getPath());
        conf.setInt(FS_AZURE_LEASE_THREADS, numLeaseThreads);
        return getFileSystem(conf);
    }

    /** Without an infinite-lease directory configured, created streams take no lease. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testNoInfiniteLease() throws IOException {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getFileSystem();
        fs.mkdirs(testFilePath.getParent());
        try (FSDataOutputStream out = fs.create(testFilePath)) {
            Assertions.assertFalse(
                ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should not have lease");
        }
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** With zero lease threads, creating under an infinite-lease dir must fail fast. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testNoLeaseThreads() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 0);
        fs.mkdirs(testFilePath.getParent());
        LambdaTestUtils.intercept(IOException.class, ERR_NO_LEASE_THREADS, () -> {
            try (FSDataOutputStream out = fs.create(testFilePath)) {
            }
            return "No failure when lease requested with 0 lease threads";
        });
    }

    /** A single writer acquires a lease on create and releases it on close. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testOneWriter() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        fs.mkdirs(testFilePath.getParent());
        FSDataOutputStream out = fs.create(testFilePath);
        Assertions.assertTrue(
            ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should have lease");
        out.close();
        Assertions.assertFalse(
            ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should not have lease");
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** Infinite lease applies to files in subdirectories of the configured lease dir too. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testSubDir() throws Exception {
        final Path testFilePath = new Path(new Path(path(methodName.getMethodName()), "subdir"),
            TEST_FILE);
        final AzureBlobFileSystem fs =
            getCustomFileSystem(testFilePath.getParent().getParent(), 1);
        fs.mkdirs(testFilePath.getParent().getParent());
        FSDataOutputStream out = fs.create(testFilePath);
        Assertions.assertTrue(
            ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should have lease");
        out.close();
        Assertions.assertFalse(
            ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should not have lease");
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** A second create on a leased path must fail; the expected message depends on endpoint/HNS. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testTwoCreate() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        AbfsClient client = fs.getAbfsStore().getClientHandler().getIngressClient();
        assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT);
        fs.mkdirs(testFilePath.getParent());
        try (FSDataOutputStream out = fs.create(testFilePath)) {
            // Error text differs: HNS+DFS reports parallel access, blob endpoint its own lease error.
            LambdaTestUtils.intercept(IOException.class,
                isHNSEnabled && getIngressServiceType() == AbfsServiceType.DFS ? PARALLEL_ACCESS
                    : client instanceof AbfsBlobClient
                        ? ERR_NO_LEASE_ID_SPECIFIED_BLOB
                        : ERR_NO_LEASE_ID_SPECIFIED, () -> {
                            try (FSDataOutputStream out2 = fs.create(testFilePath)) {
                            }
                            return "Expected second create on infinite lease dir to fail";
                        });
        }
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /**
     * Opens a create stream and a concurrent append stream on the same path; when
     * expectException is true the second writer must fail to acquire the lease.
     * Blob endpoint may instead surface an etag-mismatch (condition-not-met) error.
     */
    private void twoWriters(AzureBlobFileSystem fs, Path testFilePath, boolean expectException) throws Exception {
        AbfsClient client = fs.getAbfsStore().getClientHandler().getIngressClient();
        try (FSDataOutputStream out = fs.create(testFilePath)) {
            try (FSDataOutputStream out2 = fs.append(testFilePath)) {
                out2.writeInt(2);
                out2.hsync();
            } catch (IOException e) {
                if (expectException) {
                    GenericTestUtils.assertExceptionContains(ERR_ACQUIRING_LEASE, e);
                } else {
                    throw e;
                }
            }
            out.writeInt(1);
            try {
                out.hsync();
            } catch (IOException e) {
                // Etag mismatch leads to condition not met error for blob endpoint.
                if (client instanceof AbfsBlobClient) {
                    GenericTestUtils.assertExceptionContains(CONDITION_NOT_MET, e);
                } else {
                    throw e;
                }
            }
        } catch (IOException e) {
            // Etag mismatch leads to condition not met error for blob endpoint.
            if (client instanceof AbfsBlobClient) {
                GenericTestUtils.assertExceptionContains(CONDITION_NOT_MET, e);
            } else {
                throw e;
            }
        }
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** Without infinite lease, two concurrent writers are allowed (no lease exception expected). */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testTwoWritersCreateAppendNoInfiniteLease() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getFileSystem();
        assumeThat(isAppendBlobEnabled()).as("Parallel Writes Not Allowed on Append Blobs").isFalse();
        fs.mkdirs(testFilePath.getParent());
        twoWriters(fs, testFilePath, false);
    }

    /** With infinite lease enabled, the second writer must fail to acquire the lease. */
    @Test
    @Timeout(value = LONG_TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testTwoWritersCreateAppendWithInfiniteLeaseEnabled() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        assumeThat(isAppendBlobEnabled()).as("Parallel Writes Not Allowed on Append Blobs").isFalse();
        fs.mkdirs(testFilePath.getParent());
        twoWriters(fs, testFilePath, true);
    }

    /** Closing the stream must free its lease and the store's lease bookkeeping. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testLeaseFreedOnClose() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        fs.mkdirs(testFilePath.getParent());
        FSDataOutputStream out;
        out = fs.create(testFilePath);
        out.write(0);
        Assertions.assertTrue(
            ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should have lease");
        out.close();
        Assertions.assertFalse(
            ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should not have lease after close");
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** After breakLease, writes and close on the old stream fail; a fresh append succeeds. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testWriteAfterBreakLease() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        AbfsClient client = fs.getAbfsStore().getClientHandler().getIngressClient();
        fs.mkdirs(testFilePath.getParent());
        FSDataOutputStream out;
        out = fs.create(testFilePath);
        out.write(0);
        out.hsync();
        // Validate the tracing header emitted by the break-lease operation.
        fs.registerListener(new TracingHeaderValidator(
            getConfiguration().getClientCorrelationId(), fs.getFileSystemId(),
            FSOperationType.BREAK_LEASE, false, 0));
        fs.breakLease(testFilePath);
        fs.registerListener(null);
        LambdaTestUtils.intercept(IOException.class, client instanceof AbfsBlobClient
            ? ERR_LEASE_EXPIRED_BLOB : ERR_LEASE_EXPIRED, () -> {
                out.write(1);
                out.hsync();
                return "Expected exception on write after lease break but got " + out;
            });
        LambdaTestUtils.intercept(IOException.class, client instanceof AbfsBlobClient
            ? ERR_LEASE_EXPIRED_BLOB : ERR_LEASE_EXPIRED, () -> {
                if (isAppendBlobEnabled() && getIngressServiceType() == AbfsServiceType.BLOB) {
                    out.write(TEST_BYTES);
                }
                out.close();
                return "Expected exception on close after lease break but got " + out;
            });
        Assertions.assertTrue(
            ((AbfsOutputStream) out.getWrappedStream()).isLeaseFreed(), "Output stream lease should be freed");
        try (FSDataOutputStream out2 = fs.append(testFilePath)) {
            out2.write(2);
            out2.hsync();
        }
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** Breaking the lease before close still leaves the stream and store with freed leases. */
    @Test
    @Timeout(value = LONG_TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testLeaseFreedAfterBreak() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        AbfsClient client = fs.getAbfsStore().getClientHandler().getIngressClient();
        fs.mkdirs(testFilePath.getParent());
        FSDataOutputStream out = fs.create(testFilePath);
        out.write(0);
        fs.breakLease(testFilePath);
        LambdaTestUtils.intercept(IOException.class, client instanceof AbfsBlobClient
            ? ERR_LEASE_EXPIRED_BLOB : ERR_LEASE_EXPIRED, () -> {
                out.close();
                return "Expected exception on close after lease break but got " + out;
            });
        Assertions.assertTrue(
            ((AbfsOutputStream) out.getWrappedStream()).isLeaseFreed(), "Output stream lease should be freed");
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** Both create and a later append on an infinite-lease path acquire (and release) a lease. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testInfiniteLease() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        fs.mkdirs(testFilePath.getParent());
        try (FSDataOutputStream out = fs.create(testFilePath)) {
            Assertions.assertTrue(
                ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should have lease");
            out.write(0);
        }
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed());
        try (FSDataOutputStream out = fs.append(testFilePath)) {
            Assertions.assertTrue(
                ((AbfsOutputStream) out.getWrappedStream()).hasLease(), "Output stream should have lease");
            out.write(1);
        }
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
    }

    /** Closing the filesystem frees leases; subsequent appends must fail with a closed-FS error. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testFileSystemClose() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        fs.mkdirs(testFilePath.getParent());
        try (FSDataOutputStream out = fs.create(testFilePath)) {
            out.write(0);
            Assertions.assertFalse(
                fs.getAbfsStore().areLeasesFreed(), "Store leases should exist");
        }
        fs.close();
        Assertions.assertTrue(fs.getAbfsStore().areLeasesFreed(), "Store leases were not freed");
        Callable<String> exceptionRaisingCallable = () -> {
            try (FSDataOutputStream out2 = fs.append(testFilePath)) {
            }
            return "Expected exception on new append after closed FS";
        };
        /*
         * For ApacheHttpClient, the failure would happen when trying to get a connection
         * from KeepAliveCache, which is not possible after the FS is closed, as that
         * also closes the cache.
         *
         * For JDK_Client, the failure happens when trying to submit a task to the
         * executor service, which is not possible after the FS is closed, as that
         * also shuts down the executor service.
         */
        try {
            exceptionRaisingCallable.call();
            fail("Expected exception was not thrown");
        } catch (Exception e) {
            if (!(e instanceof AbfsDriverException
                || e instanceof RejectedExecutionException)) {
                fail("Unexpected exception type: " + e.getClass());
            }
        }
    }

    /** Lease acquisition retries on failure; two injected failures yield retry count 2. */
    @Test
    @Timeout(value = TEST_EXECUTION_TIMEOUT, unit = TimeUnit.MILLISECONDS)
    public void testAcquireRetry() throws Exception {
        final Path testFilePath = new Path(path(methodName.getMethodName()), TEST_FILE);
        final AzureBlobFileSystem fs = getCustomFileSystem(testFilePath.getParent(), 1);
        fs.mkdirs(testFilePath.getParent());
        fs.createNewFile(testFilePath);
        TracingContext tracingContext = getTestTracingContext(fs, true);
        Listener listener = new TracingHeaderValidator(
            getConfiguration().getClientCorrelationId(), fs.getFileSystemId(),
            FSOperationType.TEST_OP, true, 0);
        tracingContext.setListener(listener);
        AbfsLease lease = new AbfsLease(fs.getAbfsClient(),
            testFilePath.toUri().getPath(), true, INFINITE_LEASE_DURATION,
            null, tracingContext);
        Assertions.assertNotNull(lease.getLeaseID(), "Did not successfully lease file");
        listener.setOperation(FSOperationType.RELEASE_LEASE);
        lease.free();
        lease.getTracingContext().setListener(null);
        Assertions.assertEquals(0, lease.getAcquireRetryCount(), "Unexpected acquire retry count");
        // Inject two acquire failures, then succeed: retry count must be exactly 2.
        AbfsClient mockClient = spy(fs.getAbfsClient());
        doThrow(new AbfsLease.LeaseException("failed to acquire 1"))
            .doThrow(new AbfsLease.LeaseException("failed to acquire 2"))
            .doCallRealMethod().when(mockClient)
            .acquireLease(anyString(), anyInt(), any(), any(TracingContext.class));
        lease = new AbfsLease(mockClient, testFilePath.toUri().getPath(), true, 5, 1,
            INFINITE_LEASE_DURATION, null, tracingContext);
        Assertions.assertNotNull(lease.getLeaseID(), "Acquire lease should have retried");
        lease.free();
        Assertions.assertEquals(2, lease.getAcquireRetryCount(), "Unexpected acquire retry count");
        // Permanent acquire failure must surface as AzureBlobFileSystemException.
        doThrow(new AbfsLease.LeaseException("failed to acquire")).when(mockClient)
            .acquireLease(anyString(), anyInt(), any(), any(TracingContext.class));
        LambdaTestUtils.intercept(AzureBlobFileSystemException.class, () -> {
            new AbfsLease(mockClient, testFilePath.toUri().getPath(), true, 5, 1,
                INFINITE_LEASE_DURATION, null, tracingContext);
        });
    }
}
|
ITestAzureBlobFileSystemLease
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/mutation/spi/AfterUseAction.java
|
{
"start": 254,
"end": 299
}
|
/**
 * Action applied after a temporary structure has been used.
 * NOTE(review): names suggest CLEAN = remove contents, DROP = remove the structure itself,
 * NONE = leave untouched — confirm against the callers of this enum.
 */
enum ____ {
    CLEAN,
    DROP,
    NONE
}
|
AfterUseAction
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlShowRuleStatement.java
|
{
"start": 924,
"end": 2333
}
|
/**
 * AST node for a MySQL "SHOW [FULL] RULE" style statement, carrying an optional target name,
 * WHERE filter, ORDER BY, and LIMIT.
 */
class ____ extends MySqlStatementImpl implements MySqlShowStatement {
    private SQLName name;        // optional rule/target name
    private SQLOrderBy orderBy;  // optional ORDER BY clause
    private SQLExpr where;       // optional WHERE filter
    private SQLLimit limit;      // optional LIMIT clause
    private boolean full;        // SHOW FULL variant
    private boolean version;     // version flag of the SHOW statement

    public MySqlShowRuleStatement() {
    }

    public SQLLimit getLimit() {
        return limit;
    }

    public void setLimit(SQLLimit limit) {
        this.limit = limit;
    }

    public SQLOrderBy getOrderBy() {
        return orderBy;
    }

    public void setOrderBy(SQLOrderBy orderBy) {
        this.orderBy = orderBy;
    }

    public SQLExpr getWhere() {
        return where;
    }

    public void setWhere(SQLExpr where) {
        this.where = where;
    }

    public SQLName getName() {
        return name;
    }

    public void setName(SQLName name) {
        this.name = name;
    }

    public boolean isFull() {
        return full;
    }

    public void setFull(boolean full) {
        this.full = full;
    }

    public boolean isVersion() {
        return version;
    }

    public void setVersion(boolean version) {
        this.version = version;
    }

    // NOTE(review): only where/orderBy/limit are visited as children; the `name` child is not
    // passed to acceptChild — confirm whether that is intentional.
    public void accept0(MySqlASTVisitor visitor) {
        if (visitor.visit(this)) {
            acceptChild(visitor, where);
            acceptChild(visitor, orderBy);
            acceptChild(visitor, limit);
        }
        visitor.endVisit(this);
    }
}
|
MySqlShowRuleStatement
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/visitor/OracleOutputVisitor.java
|
{
"start": 2542,
"end": 84645
}
|
class ____ extends SQLASTOutputVisitor implements OracleASTVisitor {
private final boolean printPostSemi;
{
this.dbType = DbType.oracle;
}
/** Creates a visitor that prints trailing semicolons/slashes after statements (printPostSemi = true). */
public OracleOutputVisitor(StringBuilder appender) {
    this(appender, true);
}
/**
 * Creates an Oracle SQL output visitor writing into the given appender.
 *
 * @param printPostSemi whether statement terminators should be printed after each statement
 */
public OracleOutputVisitor(StringBuilder appender, boolean printPostSemi) {
    super(appender, DbType.oracle, Oracle.DIALECT);
    this.printPostSemi = printPostSemi;
}
/** Returns whether this visitor prints statement terminators after each statement. */
public boolean isPrintPostSemi() {
    return printPostSemi;
}
/**
 * Prints the Oracle optimizer hint comment block (slash-star-plus, the comma-separated
 * hints, then star-slash) when any hints are present; prints nothing otherwise.
 */
private void printHints(List<SQLHint> hints) {
    // isEmpty() is the idiomatic emptiness check (vs. size() > 0).
    if (!hints.isEmpty()) {
        print0("/*+ ");
        printAndAccept(hints, ", ");
        print0(" */");
    }
}
/**
 * Prints an Oracle analytic clause: "(PARTITION BY ... ORDER BY ... &lt;windowing&gt;)",
 * inserting single spaces between the sections that are present.
 */
public boolean visit(OracleAnalytic x) {
    // The original `print0(ucase ? "(" : "(")` had identical branches; print the char directly.
    print('(');
    boolean space = false;
    if (x.getPartitionBy().size() > 0) {
        print0(ucase ? "PARTITION BY " : "partition by ");
        printAndAccept(x.getPartitionBy(), ", ");
        space = true;
    }
    SQLOrderBy orderBy = x.getOrderBy();
    if (orderBy != null) {
        if (space) {
            print(' ');
        }
        visit(orderBy);
        space = true;
    }
    OracleAnalyticWindowing windowing = x.getWindowing();
    if (windowing != null) {
        if (space) {
            print(' ');
        }
        visit(windowing);
    }
    if (x.isWindowingPreceding()) {
        print0(ucase ? " PRECEDING" : " preceding");
    }
    if (x.isWindowingBetweenEndFollowing()) {
        print0(ucase ? " FOLLOWING" : " following");
    }
    print(')');
    return false;
}
/**
 * Prints an analytic windowing clause (e.g. ROWS/RANGE ...). When the expression is a BETWEEN
 * inside a SQLOver that carries an explicit begin bound, the bound keyword is interleaved:
 * "BETWEEN &lt;begin&gt; &lt;bound&gt; AND &lt;end&gt;"; otherwise the raw expression is printed.
 */
public boolean visit(OracleAnalyticWindowing x) {
    print0(x.getType().name().toUpperCase());
    print(' ');
    SQLExpr expr = x.getExpr();
    if (expr instanceof SQLBetweenExpr && x.getParent() instanceof SQLOver) {
        SQLOver over = (SQLOver) x.getParent();
        SQLBetweenExpr betweenExpr = (SQLBetweenExpr) expr;
        SQLOver.WindowingBound beginBound = over.getWindowingBetweenBeginBound();
        if (beginBound != null) {
            print0(ucase ? " BETWEEN " : " between ");
            betweenExpr.getBeginExpr().accept(this);
            print(' ');
            print0(ucase ? beginBound.name : beginBound.nameLCase);
            print0(ucase ? " AND " : " and ");
            betweenExpr.getEndExpr().accept(this);
            return false;  // fully printed; skip the default expression path
        }
    }
    expr.accept(this);
    return false;
}
/**
 * Prints an Oracle DELETE statement: hints, FROM (optionally wrapped in ONLY(...)),
 * WHERE, and an optional RETURNING clause.
 */
public boolean visit(OracleDeleteStatement x) {
    print0(ucase ? "DELETE " : "delete ");
    SQLTableSource tableSource = x.getTableSource();
    if (x.getHints().size() > 0) {
        printAndAccept(x.getHints(), ", ");
        print(' ');
    }
    print0(ucase ? "FROM " : "from ");
    if (x.isOnly()) {
        // DELETE FROM ONLY (table): restricts to the table itself, printed with its alias.
        print0(ucase ? "ONLY (" : "only (");
        x.getTableName().accept(this);
        print(')');
        printAlias(x.getAlias());
    } else {
        x.getTableSource().accept(this);
    }
    if (x.getWhere() != null) {
        println();
        this.indentCount++;
        print0(ucase ? "WHERE " : "where ");
        x.getWhere().accept(this);
        this.indentCount--;
    }
    if (x.getReturning() != null) {
        println();
        x.getReturning().accept(this);
    }
    return false;
}
/**
 * Prints an Oracle INTERVAL expression: "INTERVAL &lt;value&gt; &lt;unit&gt;[(p[, fsp])] [TO &lt;unit&gt;[(fsp)]]".
 * The INTERVAL keyword is only emitted for literal/bind values; other expressions print bare.
 */
public boolean visit(OracleIntervalExpr x) {
    if (x.getValue() instanceof SQLLiteralExpr || x.getValue() instanceof SQLVariantRefExpr) {
        print0(ucase ? "INTERVAL " : "interval ");
    }
    x.getValue().accept(this);
    print(' ');
    print0(x.getType().name());
    if (x.getPrecision() != null) {
        // e.g. DAY(2) or SECOND(2, 6) with fractional-seconds precision
        print('(');
        printExpr(x.getPrecision(), parameterized);
        if (x.getFactionalSecondsPrecision() != null) {
            print0(", ");
            print(x.getFactionalSecondsPrecision().intValue());
        }
        print(')');
    }
    if (x.getToType() != null) {
        // e.g. DAY TO SECOND(6)
        print0(ucase ? " TO " : " to ");
        print0(x.getToType().name());
        if (x.getToFactionalSecondsPrecision() != null) {
            print('(');
            printExpr(x.getToFactionalSecondsPrecision(), parameterized);
            print(')');
        }
    }
    return false;
}
/** Prints Oracle's legacy outer-join marker: "&lt;expr&gt;(+)". */
public boolean visit(OracleOuterExpr x) {
    x.getExpr().accept(this);
    print0("(+)");
    return false;
}
/** Prints a lone "/" on its own line — presumably the SQL*Plus script run terminator; confirm. */
public boolean visit(SQLScriptCommitStatement astNode) {
    print('/');
    println();
    return false;
}
/**
 * Prints a SELECT: WITH clause, query body, optional WITH restriction, then ORDER BY —
 * unless the query block carries a FIRST clause, in which case the ORDER BY is suppressed here.
 */
public boolean visit(SQLSelect x) {
    SQLWithSubqueryClause with = x.getWithSubQuery();
    if (with != null) {
        with.accept(this);
        println();
    }
    SQLSelectQuery query = x.getQuery();
    query.accept(this);
    if (x.getRestriction() != null) {
        println();
        print("WITH ");
        x.getRestriction().accept(this);
    }
    SQLOrderBy orderBy = x.getOrderBy();
    if (orderBy != null) {
        // ORDER BY is skipped when the query block has a FIRST clause.
        boolean hasFirst = false;
        if (query instanceof SQLSelectQueryBlock) {
            SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) query;
            hasFirst = queryBlock.getFirst() != null;
        }
        if (!hasFirst) {
            println();
            orderBy.accept(this);
        }
    }
    return false;
}
/**
 * Prints an Oracle join table source: left side, then either "," (comma join) or the join
 * keyword on a new line with the right side (parenthesized if itself a join), ON / USING
 * clauses, flashback, and trailing PIVOT/UNPIVOT clauses.
 */
public boolean visit(OracleSelectJoin x) {
    x.getLeft().accept(this);
    SQLTableSource right = x.getRight();
    if (x.getJoinType() == JoinType.COMMA) {
        print0(", ");
        x.getRight().accept(this);
    } else {
        // Indent one level only when this join is the root FROM of a query block.
        boolean isRoot = x.getParent() instanceof SQLSelectQueryBlock;
        if (isRoot) {
            this.indentCount++;
        }
        println();
        print0(ucase ? x.getJoinType().name : x.getJoinType().nameLCase);
        print(' ');
        if (right instanceof SQLJoinTableSource) {
            // Nested join on the right: wrap in parentheses for correct grouping.
            print('(');
            incrementIndent();
            println();
            right.accept(this);
            decrementIndent();
            println();
            print(')');
        } else {
            right.accept(this);
        }
        if (isRoot) {
            this.indentCount--;
        }
        if (x.getCondition() != null) {
            print0(ucase ? " ON " : " on ");
            incrementIndent();
            x.getCondition().accept(this);
            decrementIndent();
            print(' ');
            if (x.getAfterCommentsDirect() != null) {
                printAfterComments(x.getAfterCommentsDirect());
                println();
            }
        }
        if (x.getUsing().size() > 0) {
            print0(ucase ? " USING (" : " using (");
            printAndAccept(x.getUsing(), ", ");
            print(')');
        }
        printFlashback(x.getFlashback());
    }
    SQLPivot pivot = x.getPivot();
    if (pivot != null) {
        println();
        pivot.accept(this);
    }
    SQLUnpivot unpivot = x.getUnpivot();
    if (unpivot != null) {
        println();
        unpivot.accept(this);
    }
    return false;
}
/** Prints one ORDER BY item: expression, optional ASC/DESC, optional NULLS FIRST/LAST. */
public boolean visit(SQLSelectOrderByItem x) {
    x.getExpr().accept(this);
    if (x.getType() != null) {
        print(' ');
        String typeName = x.getType().name();
        print0(ucase ? typeName.toUpperCase() : typeName.toLowerCase());
    }
    if (x.getNullsOrderType() != null) {
        print(' ');
        print0(x.getNullsOrderType().toFormalString());
    }
    return false;
}
/**
 * Dispatches query-block printing: Oracle-specific blocks go through the Oracle overload,
 * anything else falls back to the generic implementation.
 */
public boolean visit(SQLSelectQueryBlock select) {
    return select instanceof OracleSelectQueryBlock
        ? visit((OracleSelectQueryBlock) select)
        : super.visit(select);
}
/**
 * Prints an Oracle query block: SELECT with hints and set quantifier, select list, INTO, FROM,
 * WHERE, hierarchical (CONNECT BY), GROUP BY, MODEL, ORDER BY, FETCH FIRST, and FOR UPDATE
 * with its NOWAIT / SKIP LOCKED / WAIT n variants.
 */
public boolean visit(OracleSelectQueryBlock x) {
    if (isPrettyFormat() && x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }
    if (x.isParenthesized()) {
        print('(');
    }
    print0(ucase ? "SELECT " : "select ");
    if (x.getHintsSize() > 0) {
        printAndAccept(x.getHints(), ", ");
        print(' ');
    }
    // Set quantifier: ALL / DISTINCT / UNIQUE (Oracle synonym for DISTINCT).
    if (SQLSetQuantifier.ALL == x.getDistionOption()) {
        print0(ucase ? "ALL " : "all ");
    } else if (SQLSetQuantifier.DISTINCT == x.getDistionOption()) {
        print0(ucase ? "DISTINCT " : "distinct ");
    } else if (SQLSetQuantifier.UNIQUE == x.getDistionOption()) {
        print0(ucase ? "UNIQUE " : "unique ");
    }
    printSelectList(x.getSelectList());
    printInto(x);
    printFrom(x);
    printWhere(x);
    printHierarchical(x);
    printGroupBy(x);
    printModel(x);
    printOrderBy(x);
    printFetchFirst(x);
    if (x.isForUpdate()) {
        println();
        print0(ucase ? "FOR UPDATE" : "for update");
        if (x.getForUpdateOfSize() > 0) {
            print(" OF ");
            printAndAccept(x.getForUpdateOf(), ", ");
        }
        // NOWAIT, SKIP LOCKED and WAIT n are mutually exclusive lock-wait options.
        if (x.isNoWait()) {
            print0(ucase ? " NOWAIT" : " nowait");
        } else if (x.isSkipLocked()) {
            print0(ucase ? " SKIP LOCKED" : " skip locked");
        } else if (x.getWaitTime() != null) {
            print0(ucase ? " WAIT " : " wait ");
            x.getWaitTime().accept(this);
        }
    }
    if (x.isParenthesized()) {
        print(')');
    }
    return false;
}
/**
 * Prints the FROM clause; Oracle requires one, so a missing table source is rendered as
 * "FROM DUAL". Comments captured after FROM are re-emitted before the table source.
 */
@Override
protected void printFrom(SQLSelectQueryBlock x) {
    println();
    print0(ucase ? "FROM " : "from ");
    if (x.getCommentsAfterFrom() != null) {
        printAfterComments(x.getCommentsAfterFrom());
        println();
    }
    SQLTableSource from = x.getFrom();
    if (from == null) {
        print0(ucase ? "DUAL" : "dual");
    } else {
        from.accept(this);
    }
}
/** Prints the "INTO &lt;target&gt;" clause on its own line when the query block has one. */
private void printInto(OracleSelectQueryBlock x) {
    SQLExprTableSource into = x.getInto();
    if (into != null) {
        println();
        print0(ucase ? "INTO " : "into ");
        into.accept(this);
    }
}
/** Prints the Oracle MODEL clause on its own line when the query block has one. */
private void printModel(OracleSelectQueryBlock x) {
    ModelClause model = x.getModelClause();
    if (model != null) {
        println();
        model.accept(this);
    }
}
/** Prints "CHECK OPTION" with an optional named constraint: "CHECK OPTION CONSTRAINT &lt;name&gt;". */
public boolean visit(CheckOption x) {
    print0(ucase ? "CHECK OPTION" : "check option");
    if (x.getConstraint() != null) {
        print0(ucase ? " CONSTRAINT" : " constraint");
        print(' ');
        x.getConstraint().accept(this);
    }
    return false;
}
/** Prints "READ ONLY" with an optional named constraint: "READ ONLY CONSTRAINT &lt;name&gt;". */
public boolean visit(ReadOnly x) {
    print0(ucase ? "READ ONLY" : "read only");
    if (x.getConstraint() != null) {
        print0(ucase ? " CONSTRAINT" : " constraint");
        print(' ');
        x.getConstraint().accept(this);
    }
    return false;
}
/**
 * Prints a subquery table source: the SELECT wrapped in parentheses and indented, followed by
 * optional PIVOT/UNPIVOT clauses, flashback, and the alias.
 */
public boolean visit(OracleSelectSubqueryTableSource x) {
    print('(');
    this.indentCount++;
    println();
    x.getSelect().accept(this);
    this.indentCount--;
    println();
    print(')');
    SQLPivot pivot = x.getPivot();
    if (pivot != null) {
        println();
        pivot.accept(this);
    }
    SQLUnpivot unpivot = x.getUnpivot();
    if (unpivot != null) {
        println();
        unpivot.accept(this);
    }
    printFlashback(x.getFlashback());
    if ((x.getAlias() != null) && (x.getAlias().length() != 0)) {
        print(' ');
        print0(x.getAlias());
    }
    return false;
}
public boolean visit(OracleSelectTableReference x) {
    // Oracle table reference with optional ONLY(...), PARTITION/SUBPARTITION,
    // optimizer hints, SAMPLE clause, PIVOT/UNPIVOT, flashback (AS OF) and alias.
    if (x.isOnly()) {
        print0(ucase ? "ONLY (" : "only (");
        printTableSourceExpr(x.getExpr());
        if (x.getPartition() != null) {
            print(' ');
            x.getPartition().accept(this);
        }
        print(')');
    } else {
        printTableSourceExpr(x.getExpr());
        if (x.getPartition() != null) {
            print(' ');
            x.getPartition().accept(this);
        }
    }
    if (x.getHints().size() > 0) {
        this.printHints(x.getHints());
    }
    if (x.getSampleClause() != null) {
        print(' ');
        x.getSampleClause().accept(this);
    }
    if (x.getAfterCommentsDirect() != null) {
        // Re-emit comments that followed the table name in the original SQL.
        printAfterComments(x.getAfterCommentsDirect());
        println();
    }
    if (x.getPivot() != null) {
        println();
        x.getPivot().accept(this);
    }
    SQLUnpivot unpivot = x.getUnpivot();
    if (unpivot != null) {
        println();
        unpivot.accept(this);
    }
    printFlashback(x.getFlashback());
    printAlias(x.getAlias());
    return false;
}
private void printFlashback(SQLExpr flashback) {
if (flashback == null) {
return;
}
println();
if (flashback instanceof SQLBetweenExpr) {
flashback.accept(this);
} else {
print0(ucase ? "AS OF " : "as of ");
flashback.accept(this);
}
}
public boolean visit(OracleUpdateStatement x) {
    // UPDATE [hints] [ONLY (<table>)] [alias] SET item, ... [WHERE ...]
    // [RETURNING ... INTO ...].
    print0(ucase ? "UPDATE " : "update ");
    if (x.getHints().size() > 0) {
        printAndAccept(x.getHints(), ", ");
        print(' ');
    }
    if (x.isOnly()) {
        // ONLY(...) restricts the update to the view itself (no hierarchy).
        print0(ucase ? "ONLY (" : "only (");
        x.getTableSource().accept(this);
        print(')');
    } else {
        x.getTableSource().accept(this);
    }
    printAlias(x.getAlias());
    println();
    print0(ucase ? "SET " : "set ");
    for (int i = 0, size = x.getItems().size(); i < size; ++i) {
        if (i != 0) {
            print0(", ");
        }
        x.getItems().get(i).accept(this);
    }
    if (x.getWhere() != null) {
        println();
        print0(ucase ? "WHERE " : "where ");
        this.indentCount++;
        x.getWhere().accept(this);
        this.indentCount--;
    }
    if (x.getReturning().size() > 0) {
        // RETURNING expr-list INTO bind-list (lists are printed pairwise).
        println();
        print0(ucase ? "RETURNING " : "returning ");
        printAndAccept(x.getReturning(), ", ");
        print0(ucase ? " INTO " : " into ");
        printAndAccept(x.getReturningInto(), ", ");
    }
    return false;
}
@Override
public boolean visit(SampleClause x) {
print0(ucase ? "SAMPLE " : "sample ");
if (x.isBlock()) {
print0(ucase ? "BLOCK " : "block ");
}
print('(');
printAndAccept(x.getPercent(), ", ");
print(')');
if (x.getSeedValue() != null) {
print0(ucase ? " SEED (" : " seed (");
x.getSeedValue().accept(this);
print(')');
}
return false;
}
@Override
public boolean visit(PartitionExtensionClause x) {
if (x.isSubPartition()) {
print0(ucase ? "SUBPARTITION " : "subpartition ");
} else {
print0(ucase ? "PARTITION " : "partition ");
}
if (x.getPartition() != null) {
print('(');
x.getPartition().accept(this);
print(')');
} else {
print0(ucase ? "FOR (" : "for (");
printAndAccept(x.getFor(), ",");
print(')');
}
return false;
}
// @Override
// public boolean visit(VersionsFlashbackQueryClause x) {
// print0(ucase ? "VERSIONS BETWEEN " : "versions between ");
// print0(x.getType().name());
// print(' ');
// x.getBegin().accept(this);
// print0(ucase ? " AND " : " and ");
// x.getEnd().accept(this);
// return false;
// }
//
// @Override
// public void endVisit(VersionsFlashbackQueryClause x) {
//
// }
//
// @Override
// public boolean visit(AsOfFlashbackQueryClause x) {
// print0(ucase ? "AS OF " : "as of ");
// print0(x.getType().name());
// print0(" (");
// x.getExpr().accept(this);
// print(')');
// return false;
// }
//
// @Override
// public void endVisit(AsOfFlashbackQueryClause x) {
//
// }
@Override
public boolean visit(OracleWithSubqueryEntry x) {
print0(x.getAlias());
if (x.getColumns().size() > 0) {
print0(" (");
printAndAccept(x.getColumns(), ", ");
print(')');
}
print0(ucase ? " AS " : " as ");
print('(');
this.indentCount++;
println();
x.getSubQuery().accept(this);
this.indentCount--;
println();
print(')');
if (x.getSearchClause() != null) {
println();
x.getSearchClause().accept(this);
}
if (x.getCycleClause() != null) {
println();
x.getCycleClause().accept(this);
}
return false;
}
@Override
public boolean visit(SearchClause x) {
print0(ucase ? "SEARCH " : "search ");
print0(x.getType().name());
print0(ucase ? " FIRST BY " : " first by ");
printAndAccept(x.getItems(), ", ");
print0(ucase ? " SET " : " set ");
x.getOrderingColumn().accept(this);
return false;
}
@Override
public boolean visit(CycleClause x) {
print0(ucase ? "CYCLE " : "cycle ");
printAndAccept(x.getAliases(), ", ");
print0(ucase ? " SET " : " set ");
x.getMark().accept(this);
print0(ucase ? " TO " : " to ");
x.getValue().accept(this);
print0(ucase ? " DEFAULT " : " default ");
x.getDefaultValue().accept(this);
return false;
}
@Override
public boolean visit(OracleBinaryFloatExpr x) {
    // A BINARY_FLOAT literal is rendered as "<value>F"; nothing is
    // printed when the node or its value is absent.
    if (x == null || x.getValue() == null) {
        return false;
    }
    print0(String.valueOf(x.getValue()));
    print('F');
    return false;
}
@Override
public boolean visit(OracleBinaryDoubleExpr x) {
    // A BINARY_DOUBLE literal is rendered as "<value>D"; nothing is
    // printed when the node or its value is absent.
    if (x == null || x.getValue() == null) {
        return false;
    }
    print0(String.valueOf(x.getValue()));
    print('D');
    return false;
}
@Override
public boolean visit(OracleIsSetExpr x) {
    // Renders "<nested-table> IS A SET".
    x.getNestedTable().accept(this);
    final String suffix = ucase ? " IS A SET" : " is a set";
    print0(suffix);
    return false;
}
@Override
public boolean visit(ReturnRowsClause x) {
    // Model clause row-return option: ALL rows vs UPDATED rows only.
    final String text;
    if (x.isAll()) {
        text = ucase ? "RETURN ALL ROWS" : "return all rows";
    } else {
        text = ucase ? "RETURN UPDATED ROWS" : "return updated rows";
    }
    print0(text);
    return false;
}
@Override
public boolean visit(ModelClause x) {
print0(ucase ? "MODEL" : "model");
this.indentCount++;
for (CellReferenceOption opt : x.getCellReferenceOptions()) {
print(' ');
print0(opt.name);
}
if (x.getReturnRowsClause() != null) {
print(' ');
x.getReturnRowsClause().accept(this);
}
for (ReferenceModelClause item : x.getReferenceModelClauses()) {
print(' ');
item.accept(this);
}
x.getMainModel().accept(this);
this.indentCount--;
return false;
}
@Override
public boolean visit(MainModelClause x) {
if (x.getMainModelName() != null) {
print0(ucase ? " MAIN " : " main ");
x.getMainModelName().accept(this);
}
println();
x.getModelColumnClause().accept(this);
for (CellReferenceOption opt : x.getCellReferenceOptions()) {
println();
print0(opt.name);
}
println();
x.getModelRulesClause().accept(this);
return false;
}
@Override
public boolean visit(ModelColumnClause x) {
if (x.getQueryPartitionClause() != null) {
x.getQueryPartitionClause().accept(this);
println();
}
print0(ucase ? "DIMENSION BY (" : "dimension by (");
printAndAccept(x.getDimensionByColumns(), ", ");
print(')');
println();
print0(ucase ? "MEASURES (" : "measures (");
printAndAccept(x.getMeasuresColumns(), ", ");
print(')');
return false;
}
@Override
public boolean visit(QueryPartitionClause x) {
print0(ucase ? "PARTITION BY (" : "partition by (");
printAndAccept(x.getExprList(), ", ");
print(')');
return false;
}
@Override
public boolean visit(ModelColumn x) {
x.getExpr().accept(this);
if (x.getAlias() != null) {
print(' ');
print0(x.getAlias());
}
return false;
}
@Override
public boolean visit(ModelRulesClause x) {
if (x.getOptions().size() > 0) {
print0(ucase ? "RULES" : "rules");
for (ModelRuleOption opt : x.getOptions()) {
print(' ');
print0(opt.name);
}
}
if (x.getIterate() != null) {
print0(ucase ? " ITERATE (" : " iterate (");
x.getIterate().accept(this);
print(')');
if (x.getUntil() != null) {
print0(ucase ? " UNTIL (" : " until (");
x.getUntil().accept(this);
print(')');
}
}
print0(" (");
printAndAccept(x.getCellAssignmentItems(), ", ");
print(')');
return false;
}
@Override
public boolean visit(CellAssignmentItem x) {
if (x.getOption() != null) {
print0(x.getOption().name);
print(' ');
}
x.getCellAssignment().accept(this);
if (x.getOrderBy() != null) {
print(' ');
x.getOrderBy().accept(this);
}
print0(" = ");
x.getExpr().accept(this);
return false;
}
@Override
public boolean visit(CellAssignment x) {
x.getMeasureColumn().accept(this);
print0("[");
printAndAccept(x.getConditions(), ", ");
print0("]");
return false;
}
@Override
public boolean visit(OracleReturningClause x) {
print0(ucase ? "RETURNING " : "returning ");
printAndAccept(x.getItems(), ", ");
print0(ucase ? " INTO " : " into ");
printAndAccept(x.getValues(), ", ");
return false;
}
@Override
public boolean visit(OracleInsertStatement x) {
//visit((SQLInsertStatement) x);
print0(ucase ? "INSERT " : "insert ");
if (x.getHints().size() > 0) {
printAndAccept(x.getHints(), ", ");
print(' ');
}
print0(ucase ? "INTO " : "into ");
x.getTableSource().accept(this);
printInsertColumns(x.getColumns());
if (x.getValues() != null) {
println();
print0(ucase ? "VALUES " : "values ");
x.getValues().accept(this);
} else {
if (x.getQuery() != null) {
println();
x.getQuery().accept(this);
}
}
if (x.getReturning() != null) {
println();
x.getReturning().accept(this);
}
if (x.getErrorLogging() != null) {
println();
x.getErrorLogging().accept(this);
}
return false;
}
@Override
public boolean visit(InsertIntoClause x) {
print0(ucase ? "INTO " : "into ");
x.getTableSource().accept(this);
if (x.getColumns().size() > 0) {
this.indentCount++;
println();
print('(');
for (int i = 0, size = x.getColumns().size(); i < size; ++i) {
if (i != 0) {
if (i % 5 == 0) {
println();
}
print0(", ");
}
x.getColumns().get(i).accept(this);
}
print(')');
this.indentCount--;
}
if (x.getValues() != null) {
println();
print0(ucase ? "VALUES " : "values ");
x.getValues().accept(this);
} else {
if (x.getQuery() != null) {
println();
x.getQuery().accept(this);
}
}
return false;
}
@Override
public boolean visit(OracleMultiInsertStatement x) {
print0(ucase ? "INSERT " : "insert ");
if (x.getHints().size() > 0) {
this.printHints(x.getHints());
}
if (x.getOption() != null) {
print0(x.getOption().name());
print(' ');
}
for (int i = 0, size = x.getEntries().size(); i < size; ++i) {
this.indentCount++;
println();
x.getEntries().get(i).accept(this);
this.indentCount--;
}
println();
x.getSubQuery().accept(this);
return false;
}
@Override
public boolean visit(ConditionalInsertClause x) {
for (int i = 0, size = x.getItems().size(); i < size; ++i) {
if (i != 0) {
println();
}
ConditionalInsertClauseItem item = x.getItems().get(i);
item.accept(this);
}
if (x.getElseItem() != null) {
println();
print0(ucase ? "ELSE" : "else");
this.indentCount++;
println();
x.getElseItem().accept(this);
this.indentCount--;
}
return false;
}
@Override
public boolean visit(ConditionalInsertClauseItem x) {
print0(ucase ? "WHEN " : "when ");
x.getWhen().accept(this);
print0(ucase ? " THEN" : " then");
this.indentCount++;
println();
x.getThen().accept(this);
this.indentCount--;
return false;
}
@Override
public boolean visit(OracleLockTableStatement x) {
    // LOCK TABLE <t> [PARTITION (<p>)] IN <mode> MODE [NOWAIT | WAIT <n>].
    print0(ucase ? "LOCK TABLE " : "lock table ");
    x.getTable().accept(this);
    if (x.getPartition() != null) {
        // NOTE(review): this fragment ignores the ucase flag and always
        // prints upper case — confirm whether that is intentional.
        print0(" PARTITION (");
        x.getPartition().accept(this);
        print0(") ");
    }
    print0(ucase ? " IN " : " in ");
    print0(x.getLockMode().toString());
    print0(ucase ? " MODE " : " mode ");
    if (x.isNoWait()) {
        print0(ucase ? "NOWAIT" : "nowait");
    } else if (x.getWait() != null) {
        print0(ucase ? "WAIT " : "wait ");
        x.getWait().accept(this);
    }
    return false;
}
@Override
public boolean visit(OracleAlterSessionStatement x) {
print0(ucase ? "ALTER SESSION SET " : "alter session set ");
printAndAccept(x.getItems(), ", ");
return false;
}
@Override
public boolean visit(OracleDatetimeExpr x) {
    // Prints an Oracle datetime expression: "<expr> AT LOCAL" or
    // "<expr> AT TIME ZONE <tz>".
    x.getExpr().accept(this);
    SQLExpr timeZone = x.getTimeZone();
    if (timeZone instanceof SQLIdentifierExpr) {
        if (((SQLIdentifierExpr) timeZone).getName().equalsIgnoreCase("LOCAL")) {
            // BUG FIX: the lowercase branch previously printed
            // "alter session set " (copy-paste from another visitor),
            // producing invalid SQL whenever ucase == false.
            print0(ucase ? " AT LOCAL" : " at local");
            return false;
        }
    }
    print0(ucase ? " AT TIME ZONE " : " at time zone ");
    timeZone.accept(this);
    return false;
}
@Override
public boolean visit(OracleSysdateExpr x) {
    // SYSDATE, optionally qualified as "SYSDATE@<option>".
    print0(ucase ? "SYSDATE" : "sysdate");
    final String option = x.getOption();
    if (option != null) {
        print('@');
        print0(option);
    }
    return false;
}
@Override
public boolean visit(OracleArgumentExpr x) {
    // Named argument syntax: "<name> => <value>".
    print0(x.getArgumentName() + " => ");
    x.getValue().accept(this);
    return false;
}
@Override
public boolean visit(OracleSetTransactionStatement x) {
if (x.isReadOnly()) {
print0(ucase ? "SET TRANSACTION READ ONLY" : "set transaction read only");
} else {
print0(ucase ? "SET TRANSACTION" : "set transaction");
}
SQLExpr name = x.getName();
if (name != null) {
print0(ucase ? " NAME " : " name ");
name.accept(this);
}
return false;
}
@Override
public boolean visit(OracleExplainStatement x) {
print0(ucase ? "EXPLAIN PLAN" : "explain plan");
this.indentCount++;
println();
if (x.getStatementId() != null) {
print0(ucase ? "SET STATEMENT_ID = " : "set statement_id = ");
x.getStatementId().accept(this);
println();
}
if (x.getInto() != null) {
print0(ucase ? "INTO " : "into ");
x.getInto().accept(this);
println();
}
print0(ucase ? "FOR" : "for");
println();
x.getStatement().accept(this);
this.indentCount--;
return false;
}
@Override
public boolean visit(SQLAlterProcedureStatement x) {
print0(ucase ? "ALTER PROCEDURE " : "alter procedure ");
x.getName().accept(this);
if (x.isCompile()) {
print0(ucase ? " COMPILE" : " compile");
}
if (x.isReuseSettings()) {
print0(ucase ? " REUSE SETTINGS" : " reuse settings");
}
return false;
}
@Override
public boolean visit(OracleAlterTableDropPartition x) {
print0(ucase ? "DROP PARTITION " : "drop partition ");
x.getName().accept(this);
return false;
}
@Override
public boolean visit(SQLAlterTableStatement x) {
    // A single RENAME item is printed using Oracle's standalone
    // "RENAME <old> TO <new>" statement form instead of ALTER TABLE.
    if (x.getItems().size() == 1) {
        SQLAlterTableItem item = x.getItems().get(0);
        if (item instanceof SQLAlterTableRename) {
            SQLExpr to = ((SQLAlterTableRename) item).getTo().getExpr();
            print0(ucase ? "RENAME " : "rename ");
            x.getName().accept(this);
            print0(ucase ? " TO " : " to ");
            to.accept(this);
            return false;
        }
    }
    print0(ucase ? "ALTER TABLE " : "alter table ");
    printTableSourceExpr(x.getName());
    this.indentCount++;
    for (SQLAlterTableItem item : x.getItems()) {
        println();
        item.accept(this);
    }
    if (x.isUpdateGlobalIndexes()) {
        println();
        // BUG FIX: previously printed the misspelled keyword "GLOABL",
        // which is not valid Oracle SQL.
        print0(ucase ? "UPDATE GLOBAL INDEXES" : "update global indexes");
    }
    this.indentCount--;
    return false;
}
@Override
public boolean visit(OracleAlterTableTruncatePartition x) {
print0(ucase ? "TRUNCATE PARTITION " : "truncate partition ");
x.getName().accept(this);
return false;
}
@Override
public boolean visit(TableSpaceItem x) {
print0(ucase ? "TABLESPACE " : "tablespace ");
x.getTablespace().accept(this);
return false;
}
@Override
public boolean visit(UpdateIndexesClause x) {
print0(ucase ? "UPDATE INDEXES" : "update indexes");
if (x.getItems().size() > 0) {
print('(');
printAndAccept(x.getItems(), ", ");
print(')');
}
return false;
}
@Override
public boolean visit(OracleAlterTableSplitPartition x) {
print0(ucase ? "SPLIT PARTITION " : "split partition ");
x.getName().accept(this);
if (x.getAt().size() > 0) {
this.indentCount++;
println();
print0(ucase ? "AT (" : "at (");
printAndAccept(x.getAt(), ", ");
print(')');
this.indentCount--;
}
if (x.getInto().size() > 0) {
println();
this.indentCount++;
print0(ucase ? "INTO (" : "into (");
printAndAccept(x.getInto(), ", ");
print(')');
this.indentCount--;
}
if (x.getUpdateIndexes() != null) {
println();
this.indentCount++;
x.getUpdateIndexes().accept(this);
this.indentCount--;
}
return false;
}
@Override
public boolean visit(NestedTablePartitionSpec x) {
print0(ucase ? "PARTITION " : "partition ");
x.getPartition().accept(this);
for (SQLObject item : x.getSegmentAttributeItems()) {
print(' ');
item.accept(this);
}
return false;
}
@Override
public boolean visit(OracleAlterTableModify x) {
print0(ucase ? "MODIFY (" : "modify (");
this.indentCount++;
for (int i = 0, size = x.getColumns().size(); i < size; ++i) {
println();
SQLColumnDefinition column = x.getColumns().get(i);
column.accept(this);
if (i != size - 1) {
print0(", ");
}
}
this.indentCount--;
println();
print(')');
return false;
}
@Override
public boolean visit(OracleCreateIndexStatement x) {
print0(ucase ? "CREATE " : "create ");
if (x.getType() != null) {
print0(x.getType());
print(' ');
}
print0(ucase ? "INDEX " : "index ");
x.getName().accept(this);
print0(ucase ? " ON " : " on ");
if (x.isCluster()) {
print0(ucase ? "CLUSTER " : "cluster ");
}
x.getTable().accept(this);
List<SQLSelectOrderByItem> items = x.getItems();
if (items.size() > 0) {
print('(');
printAndAccept(items, ", ");
print(')');
}
if (x.isIndexOnlyTopLevel()) {
println();
print0(ucase ? "INDEX ONLY TOPLEVEL" : "index only toplevel");
}
if (x.isComputeStatistics()) {
println();
print0(ucase ? "COMPUTE STATISTICS" : "compute statistics");
}
if (x.isReverse()) {
println();
print0(ucase ? "REVERSE" : "reverse");
}
this.printOracleSegmentAttributes(x);
if (x.isOnline()) {
print0(ucase ? " ONLINE" : " online");
}
if (x.isNoParallel()) {
print0(ucase ? " NOPARALLEL" : " noparallel");
} else if (x.getParallel() != null) {
print0(ucase ? " PARALLEL " : " parallel ");
x.getParallel().accept(this);
}
Boolean sort = x.getSort();
if (sort != null) {
if (sort.booleanValue()) {
print0(ucase ? " SORT" : " sort");
} else {
print0(ucase ? " NOSORT" : " nosort");
}
}
if (x.getLocalPartitions().size() > 0) {
println();
print0(ucase ? "LOCAL (" : "local (");
this.indentCount++;
println();
printlnAndAccept(x.getLocalPartitions(), ",");
this.indentCount--;
println();
print(')');
} else if (x.isLocal()) {
print0(ucase ? " LOCAL" : " local");
}
List<SQLName> localStoreIn = x.getLocalStoreIn();
if (localStoreIn.size() > 0) {
print0(ucase ? " STORE IN (" : " store in (");
printAndAccept(localStoreIn, ", ");
print(')');
}
List<SQLPartitionBy> globalPartitions = x.getGlobalPartitions();
if (globalPartitions.size() > 0) {
for (SQLPartitionBy globalPartition : globalPartitions) {
println();
print0(ucase ? "GLOBAL " : "global ");
print0(ucase ? "PARTITION BY " : "partition by ");
globalPartition.accept(this);
}
} else {
if (x.isGlobal()) {
print0(ucase ? " GLOBAL" : " global");
}
}
return false;
}
@Override
public boolean visit(OracleForStatement x) {
    // PL/SQL FOR / FORALL statement. FORALL has no LOOP/END LOOP body
    // delimiters; a plain FOR wraps its statements in LOOP ... END LOOP.
    boolean all = x.isAll();
    if (all) {
        print0(ucase ? "FORALL " : "forall ");
    } else {
        print0(ucase ? "FOR " : "for ");
    }
    x.getIndex().accept(this);
    print0(ucase ? " IN " : " in ");
    if (x.isReverse()) {
        print0(ucase ? "REVERSE " : "reverse ");
    }
    SQLExpr range = x.getRange();
    range.accept(this);
    if (!all) {
        println();
        print0(ucase ? "LOOP" : "loop");
    }
    this.indentCount++;
    println();
    for (int i = 0, size = x.getStatements().size(); i < size; ++i) {
        SQLStatement stmt = x.getStatements().get(i);
        stmt.accept(this);
        if (!all) {
            // Separate body statements with newlines, but not after the last.
            if (i != size - 1) {
                println();
            }
        }
    }
    this.indentCount--;
    if (!all) {
        println();
        print0(ucase ? "END LOOP" : "end loop");
        SQLName endLabel = x.getEndLabel();
        if (endLabel != null) {
            print(' ');
            endLabel.accept(this);
        }
    }
    return false;
}
@Override
public boolean visit(SQLIfStatement.Else x) {
print0(ucase ? "ELSE" : "else");
this.indentCount++;
println();
for (int i = 0, size = x.getStatements().size(); i < size; ++i) {
if (i != 0) {
println();
}
SQLStatement item = x.getStatements().get(i);
item.accept(this);
}
this.indentCount--;
return false;
}
@Override
public boolean visit(SQLIfStatement.ElseIf x) {
print0(ucase ? "ELSIF " : "elsif ");
x.getCondition().accept(this);
print0(ucase ? " THEN" : " then");
this.indentCount++;
for (int i = 0, size = x.getStatements().size(); i < size; ++i) {
println();
SQLStatement item = x.getStatements().get(i);
item.accept(this);
}
this.indentCount--;
return false;
}
@Override
public boolean visit(SQLIfStatement x) {
    // PL/SQL IF <cond> THEN ... [ELSIF ...] [ELSE ...] END IF.
    print0(ucase ? "IF " : "if ");
    int lines = this.lines;
    this.indentCount++;
    x.getCondition().accept(this);
    this.indentCount--;
    // If printing the condition emitted line breaks, put THEN on its own
    // line; otherwise keep it on the same line as the condition.
    if (lines != this.lines) {
        println();
    } else {
        print(' ');
    }
    print0(ucase ? "THEN" : "then");
    this.indentCount++;
    for (int i = 0, size = x.getStatements().size(); i < size; ++i) {
        println();
        SQLStatement item = x.getStatements().get(i);
        item.accept(this);
    }
    this.indentCount--;
    for (SQLIfStatement.ElseIf elseIf : x.getElseIfList()) {
        println();
        elseIf.accept(this);
    }
    if (x.getElseItem() != null) {
        println();
        x.getElseItem().accept(this);
    }
    println();
    print0(ucase ? "END IF" : "end if");
    return false;
}
@Override
public boolean visit(OracleRangeExpr x) {
    // PL/SQL range expression "low..high" (e.g. the bounds of a FOR loop).
    x.getLowBound().accept(this);
    print0("..");
    x.getUpBound().accept(this);
    return false;
}
protected void visitColumnDefault(SQLColumnDefinition x) {
if (x.getParent() instanceof SQLBlockStatement) {
print0(" := ");
} else {
print0(ucase ? " DEFAULT " : " default ");
}
printExpr(x.getDefaultExpr(), false);
}
@Override
public boolean visit(OraclePrimaryKey x) {
if (x.getName() != null) {
print0(ucase ? "CONSTRAINT " : "constraint ");
x.getName().accept(this);
print(' ');
}
print0(ucase ? "PRIMARY KEY (" : "primary key (");
printAndAccept(x.getColumns(), ", ");
print(')');
Boolean rely = x.getRely();
if (rely != null) {
if (rely.booleanValue()) {
print0(ucase ? " RELY" : " rely");
}
}
printConstraintState(x);
Boolean validate = x.getValidate();
if (validate != null) {
if (validate.booleanValue()) {
print0(ucase ? " VALIDATE" : " validate");
} else {
print0(ucase ? " NOVALIDATE" : " novalidate");
}
}
return false;
}
protected void printConstraintState(OracleConstraint x) {
    // Prints the optional Oracle constraint-state clauses: USING INDEX,
    // EXCEPTIONS INTO, ENABLE/DISABLE, INITIALLY, [NOT] DEFERRABLE.
    this.indentCount++;
    if (x.getUsing() != null) {
        println();
        x.getUsing().accept(this);
    }
    if (x.getExceptionsInto() != null) {
        println();
        print0(ucase ? "EXCEPTIONS INTO " : "exceptions into ");
        x.getExceptionsInto().accept(this);
    }
    Boolean enable = x.getEnable();
    if (enable != null) {
        // Tri-state: null means the clause was absent in the original SQL.
        if (enable.booleanValue()) {
            print0(ucase ? " ENABLE" : " enable");
        } else {
            print0(ucase ? " DISABLE" : " disable");
        }
    }
    if (x.getInitially() != null) {
        print0(ucase ? " INITIALLY " : " initially ");
        print0(x.getInitially().name());
    }
    if (x.getDeferrable() != null) {
        if (x.getDeferrable().booleanValue()) {
            print0(ucase ? " DEFERRABLE" : " deferrable");
        } else {
            print0(ucase ? " NOT DEFERRABLE" : " not deferrable");
        }
    }
    this.indentCount--;
}
@Override
public boolean visit(OracleCreateTableStatement x) {
printCreateTable(x, false);
if (x.getOf() != null) {
println();
print0(ucase ? "OF " : "of ");
x.getOf().accept(this);
}
OracleCreateTableStatement.OIDIndex oidIndex = x.getOidIndex();
if (oidIndex != null) {
println();
oidIndex.accept(this);
}
OracleCreateTableStatement.Organization organization = x.getOrganization();
if (organization != null) {
println();
this.indentCount++;
organization.accept(this);
this.indentCount--;
}
if (x.getIncluding().size() > 0) {
print0(ucase ? " INCLUDING " : " including ");
printAndAccept(x.getIncluding(), ", ");
print0(ucase ? " OVERFLOW " : " overflow ");
}
printOracleSegmentAttributes(x);
if (x.isInMemoryMetadata()) {
println();
print0(ucase ? "IN_MEMORY_METADATA" : "in_memory_metadata");
}
if (x.isCursorSpecificSegment()) {
println();
print0(ucase ? "CURSOR_SPECIFIC_SEGMENT" : "cursor_specific_segment");
}
if (Boolean.TRUE.equals(x.getParallel())) {
println();
print0(ucase ? "PARALLEL" : "parallel");
final SQLExpr parallelValue = x.getParallelValue();
if (parallelValue != null) {
print(' ');
printExpr(parallelValue);
}
} else if (Boolean.FALSE.equals(x.getParallel())) {
println();
print0(ucase ? "NOPARALLEL" : "noparallel");
}
if (Boolean.TRUE.equals(x.getCache())) {
println();
print0(ucase ? "CACHE" : "cache");
} else if (Boolean.FALSE.equals(x.getCache())) {
println();
print0(ucase ? "NOCACHE" : "nocache");
}
if (x.getLobStorage() != null) {
println();
x.getLobStorage().accept(this);
}
if (x.isOnCommitPreserveRows()) {
println();
print0(ucase ? "ON COMMIT PRESERVE ROWS" : "on commit preserve rows");
} else if (x.isOnCommitDeleteRows()) {
println();
print0(ucase ? "ON COMMIT DELETE ROWS" : "on commit delete rows");
}
if (x.isMonitoring()) {
println();
print0(ucase ? "MONITORING" : "monitoring");
}
printPartitionBy(x);
if (x.getCluster() != null) {
println();
print0(ucase ? "CLUSTER " : "cluster ");
x.getCluster().accept(this);
print0(" (");
printAndAccept(x.getClusterColumns(), ",");
print0(")");
}
final OracleXmlColumnProperties xmlTypeColumnProperties = x.getXmlTypeColumnProperties();
if (xmlTypeColumnProperties != null) {
println();
xmlTypeColumnProperties.accept(this);
}
final SQLSelect select = x.getSelect();
if (select != null) {
println();
print0(ucase ? "AS" : "as");
println();
select.accept(this);
}
return false;
}
@Override
public boolean visit(OracleStorageClause x) {
print0(ucase ? "STORAGE (" : "storage (");
this.indentCount++;
final SQLExpr initial = x.getInitial();
if (initial != null) {
println();
print0(ucase ? "INITIAL " : "initial ");
printExpr(initial, false);
}
final SQLExpr next = x.getNext();
if (next != null) {
println();
print0(ucase ? "NEXT " : "next ");
printExpr(next, false);
}
final SQLExpr minExtents = x.getMinExtents();
if (minExtents != null) {
println();
print0(ucase ? "MINEXTENTS " : "minextents ");
printExpr(minExtents, false);
}
final SQLExpr maxExtents = x.getMaxExtents();
if (maxExtents != null) {
println();
print0(ucase ? "MAXEXTENTS " : "maxextents ");
printExpr(maxExtents, false);
}
final SQLExpr pctIncrease = x.getPctIncrease();
if (pctIncrease != null) {
println();
print0(ucase ? "PCTINCREASE " : "pctincrease ");
printExpr(pctIncrease, false);
}
final SQLExpr maxSize = x.getMaxSize();
if (maxSize != null) {
println();
print0(ucase ? "MAXSIZE " : "maxsize ");
printExpr(maxSize, false);
}
final SQLExpr freeLists = x.getFreeLists();
if (freeLists != null) {
println();
print0(ucase ? "FREELISTS " : "freelists ");
printExpr(freeLists, false);
}
final SQLExpr freeListGroups = x.getFreeListGroups();
if (freeListGroups != null) {
println();
print0(ucase ? "FREELIST GROUPS " : "freelist groups ");
printExpr(freeListGroups, false);
}
final SQLExpr bufferPool = x.getBufferPool();
if (bufferPool != null) {
println();
print0(ucase ? "BUFFER_POOL " : "buffer_pool ");
printExpr(bufferPool, false);
}
final SQLExpr objno = x.getObjno();
if (objno != null) {
println();
print0(ucase ? "OBJNO " : "objno ");
printExpr(objno, false);
}
if (x.getFlashCache() != null) {
println();
print0(ucase ? "FLASH_CACHE " : "flash_cache ");
print0(ucase ? x.getFlashCache().name() : x.getFlashCache().name().toLowerCase());
}
if (x.getCellFlashCache() != null) {
println();
print0(ucase ? "CELL_FLASH_CACHE " : "cell_flash_cache ");
print0(ucase ? x.getCellFlashCache().name() : x.getCellFlashCache().name().toLowerCase());
}
this.indentCount--;
println();
print(')');
return false;
}
@Override
public boolean visit(OracleGotoStatement x) {
    // Prints a PL/SQL "GOTO <label>" statement.
    // BUG FIX: the lowercase branch previously printed "GOTO " as well,
    // ignoring the ucase setting.
    print0(ucase ? "GOTO " : "goto ");
    x.getLabel().accept(this);
    return false;
}
@Override
public boolean visit(OracleLabelStatement x) {
    // PL/SQL label: "<<label>>".
    print0("<<");
    x.getLabel().accept(this);
    print0(">>");
    return false;
}
@Override
public boolean visit(OracleAlterTriggerStatement x) {
print0(ucase ? "ALTER TRIGGER " : "alter trigger ");
x.getName().accept(this);
if (x.isCompile()) {
print0(ucase ? " COMPILE" : " compile");
}
if (x.getEnable() != null) {
if (x.getEnable().booleanValue()) {
print0(ucase ? "ENABLE" : "enable");
} else {
print0(ucase ? "DISABLE" : "disable");
}
}
return false;
}
@Override
public boolean visit(OracleAlterSynonymStatement x) {
print0(ucase ? "ALTER SYNONYM " : "alter synonym ");
x.getName().accept(this);
if (x.isCompile()) {
print0(ucase ? " COMPILE" : " compile");
}
if (x.getEnable() != null) {
if (x.getEnable().booleanValue()) {
print0(ucase ? "ENABLE" : "enable");
} else {
print0(ucase ? "DISABLE" : "disable");
}
}
return false;
}
// @Override
// public boolean visit(AsOfSnapshotClause x) {
// print0(ucase ? "AS OF SNAPSHOT(" : "as of snapshot(");
// x.getExpr().accept(this);
// print(')');
// return false;
// }
//
// @Override
// public void endVisit(AsOfSnapshotClause x) {
//
// }
@Override
public boolean visit(OracleAlterViewStatement x) {
print0(ucase ? "ALTER VIEW " : "alter view ");
x.getName().accept(this);
if (x.isCompile()) {
print0(ucase ? " COMPILE" : " compile");
}
if (x.getEnable() != null) {
if (x.getEnable().booleanValue()) {
print0(ucase ? "ENABLE" : "enable");
} else {
print0(ucase ? "DISABLE" : "disable");
}
}
return false;
}
@Override
public boolean visit(OracleAlterTableMoveTablespace x) {
print0(ucase ? " MOVE TABLESPACE " : " move tablespace ");
x.getName().accept(this);
return false;
}
@Override
public boolean visit(OracleAlterTableRowMovement x) {
    // ALTER TABLE clause toggling row movement on or off.
    final String text;
    if (x.isEnable()) {
        text = ucase ? " ENABLE ROW MOVEMENT " : " enable row movement ";
    } else {
        text = ucase ? " DISABLE ROW MOVEMENT " : " disable row movement ";
    }
    print0(text);
    return false;
}
@Override
public boolean visit(OracleAlterTableShrinkSpace x) {
print0(ucase ? " SHRINK SPACE " : " shrink space ");
if (x.isCompact()) {
print0(ucase ? "COMPACT " : "compact ");
}
if (x.isCascade()) {
print0(ucase ? "CASCADE " : "cascade ");
}
if (x.isCheck()) {
print0(ucase ? "CHECK " : "check ");
}
return false;
}
@Override
public boolean visit(OracleAlterSummaryStatement x) {
print0(ucase ? "ALTER SUMMARY " : "alter summary ");
x.getName().accept(this);
if (x.isCompile()) {
print0(ucase ? " COMPILE " : " compile ");
}
return false;
}
@Override
public boolean visit(OracleFileSpecification x) {
printAndAccept(x.getFileNames(), ", ");
if (x.getSize() != null) {
print0(ucase ? " SIZE " : " size ");
x.getSize().accept(this);
}
if (x.isAutoExtendOff()) {
print0(ucase ? " AUTOEXTEND OFF" : " autoextend off");
} else if (x.getAutoExtendOn() != null) {
print0(ucase ? " AUTOEXTEND ON " : " autoextend on ");
x.getAutoExtendOn().accept(this);
}
return false;
}
@Override
public boolean visit(OracleAlterTablespaceAddDataFile x) {
print0(ucase ? "ADD DATAFILE" : "add datafile");
this.indentCount++;
for (OracleFileSpecification file : x.getFiles()) {
println();
file.accept(this);
}
this.indentCount--;
return false;
}
@Override
public boolean visit(OracleAlterTablespaceStatement x) {
print0(ucase ? "ALTER TABLESPACE " : "alter tablespace ");
x.getName().accept(this);
println();
x.getItem().accept(this);
return false;
}
@Override
public boolean visit(SQLTruncateStatement x) {
print0(ucase ? "TRUNCATE TABLE " : "truncate table ");
printAndAccept(x.getTableSources(), ", ");
if (x.isPurgeSnapshotLog()) {
print0(ucase ? " PURGE SNAPSHOT LOG" : " purge snapshot log");
}
return false;
}
@Override
public boolean visit(OracleExitStatement x) {
print0(ucase ? "EXIT" : "exit");
if (x.getLabel() != null) {
print(' ');
print0(x.getLabel());
}
if (x.getWhen() != null) {
print0(ucase ? " WHEN " : " when ");
x.getWhen().accept(this);
}
return false;
}
@Override
public boolean visit(OracleContinueStatement x) {
print0(ucase ? "CONTINUE" : "continue");
String label = x.getLabel();
if (label != null) {
print(' ');
print0(label);
}
if (x.getWhen() != null) {
print0(ucase ? " WHEN " : " when ");
x.getWhen().accept(this);
}
return false;
}
@Override
public boolean visit(OracleRaiseStatement x) {
print0(ucase ? "RAISE" : "raise");
if (x.getException() != null) {
print(' ');
x.getException().accept(this);
}
print(';');
return false;
}
@Override
public boolean visit(SQLSavePointStatement x) {
    // Prints "SAVEPOINT", followed by " TO <name>" when a name is present.
    // NOTE(review): Oracle's SAVEPOINT syntax is "SAVEPOINT <name>" without
    // TO (TO belongs to ROLLBACK TO SAVEPOINT) — confirm against the parser
    // before changing, as this printer may be shared with ROLLBACK handling.
    print0(ucase ? "SAVEPOINT" : "savepoint");
    if (x.getName() != null) {
        print0(ucase ? " TO " : " to ");
        x.getName().accept(this);
    }
    return false;
}
@Override
protected void printCreateFunctionBody(SQLCreateFunctionStatement x) {
String wrappedSource = x.getWrappedSource();
if (wrappedSource != null) {
print0(ucase ? " WRAPPED " : " wrapped ");
print0(wrappedSource);
if (x.isAfterSemi()) {
print(';');
}
return;
}
printCreateFunctionReturns(x);
if (x.isPipelined()) {
print(ucase ? "PIPELINED " : "pipelined ");
}
if (x.isDeterministic()) {
print(ucase ? "DETERMINISTIC " : "deterministic ");
}
SQLName authid = x.getAuthid();
if (authid != null) {
print(ucase ? " AUTHID " : " authid ");
authid.accept(this);
}
SQLStatement block = x.getBlock();
if (block != null && !x.isCreate()) {
println();
println("IS");
} else {
println();
if (block instanceof SQLBlockStatement) {
SQLBlockStatement blockStatement = (SQLBlockStatement) block;
if (blockStatement.getParameters().size() > 0 || authid != null) {
println(ucase ? "AS" : "as");
}
}
}
String javaCallSpec = x.getJavaCallSpec();
if (javaCallSpec != null) {
print0(ucase ? "LANGUAGE JAVA NAME '" : "language java name '");
print0(javaCallSpec);
print('\'');
return;
}
if (x.isParallelEnable()) {
print0(ucase ? "PARALLEL_ENABLE" : "parallel_enable");
println();
}
if (x.isAggregate()) {
print0(ucase ? "AGGREGATE" : "aggregate");
println();
}
SQLName using = x.getUsing();
if (using != null) {
print0(ucase ? "USING " : "using ");
using.accept(this);
}
if (block != null) {
block.accept(this);
}
}
// Prints CREATE [SHARED] [PUBLIC] DATABASE LINK with optional CONNECT TO /
// AUTHENTICATED BY credentials and a USING connect string.
// NOTE(review): emits "SHARE " for the shared flag, but the Oracle keyword
// is "SHARED" — confirm this matches what the parser accepts.
@Override
public boolean visit(OracleCreateDatabaseDbLinkStatement x) {
    print0(ucase ? "CREATE " : "create ");
    if (x.isShared()) {
        print0(ucase ? "SHARE " : "share ");
    }
    if (x.isPublic()) {
        print0(ucase ? "PUBLIC " : "public ");
    }
    print0(ucase ? "DATABASE LINK " : "database link ");
    x.getName().accept(this);
    if (x.getUser() != null) {
        print0(ucase ? " CONNECT TO " : " connect to ");
        x.getUser().accept(this);
        if (x.getPassword() != null) {
            print0(ucase ? " IDENTIFIED BY " : " identified by ");
            print0(x.getPassword());
        }
    }
    if (x.getAuthenticatedUser() != null) {
        print0(ucase ? " AUTHENTICATED BY " : " authenticated by ");
        x.getAuthenticatedUser().accept(this);
        if (x.getAuthenticatedPassword() != null) {
            print0(ucase ? " IDENTIFIED BY " : " identified by ");
            print0(x.getAuthenticatedPassword());
        }
    }
    if (x.getUsing() != null) {
        print0(ucase ? " USING " : " using ");
        x.getUsing().accept(this);
    }
    return false;
}
// Prints DROP [PUBLIC] DATABASE LINK <name>.
@Override
public boolean visit(OracleDropDbLinkStatement x) {
    print0(ucase ? "DROP " : "drop ");
    if (x.isPublic()) {
        print0(ucase ? "PUBLIC " : "public ");
    }
    print0(ucase ? "DATABASE LINK " : "database link ");
    x.getName().accept(this);
    return false;
}
// Prints a character datatype, e.g. "VARCHAR2(10 CHAR)": the type name,
// then an optional length argument with an optional CHAR/BYTE qualifier.
public boolean visit(SQLCharacterDataType x) {
    print0(x.getName());
    final List<SQLExpr> arguments = x.getArguments();
    if (arguments.size() > 0) {
        print('(');
        SQLExpr arg0 = arguments.get(0);
        printExpr(arg0, false);
        if (x.getCharType() != null) {
            print(' ');
            print0(x.getCharType());
        }
        print(')');
    }
    return false;
}
// Prints "INTERVAL YEAR[(precision)] TO MONTH"; the name carries the
// leading keywords, this method adds precision and the fixed suffix.
@Override
public boolean visit(OracleDataTypeIntervalYear x) {
    print0(x.getName());
    if (x.getArguments().size() > 0) {
        print('(');
        x.getArguments().get(0).accept(this);
        print(')');
    }
    print0(ucase ? " TO MONTH" : " to month");
    return false;
}
// Prints "INTERVAL DAY[(precision)] TO SECOND[(fractional_precision)]".
@Override
public boolean visit(OracleDataTypeIntervalDay x) {
    print0(x.getName());
    if (x.getArguments().size() > 0) {
        print('(');
        x.getArguments().get(0).accept(this);
        print(')');
    }
    print0(ucase ? " TO SECOND" : " to second");
    // Optional fractional-seconds precision on the SECOND part.
    if (x.getFractionalSeconds().size() > 0) {
        print('(');
        x.getFractionalSeconds().get(0).accept(this);
        print(')');
    }
    return false;
}
// Prints a USING INDEX clause: an optional index (an inline CREATE INDEX is
// parenthesized; a bare name is not), then segment attributes and the
// COMPUTE STATISTICS / ENABLE / DISABLE / REVERSE options.
@Override
public boolean visit(OracleUsingIndexClause x) {
    print0(ucase ? "USING INDEX" : "using index");
    final SQLObject index = x.getIndex();
    if (index != null) {
        print(' ');
        if (index instanceof SQLCreateIndexStatement) {
            print('(');
            index.accept(this);
            print(')');
        } else {
            index.accept(this);
        }
    }
    printOracleSegmentAttributes(x);
    if (x.isComputeStatistics()) {
        println();
        print0(ucase ? "COMPUTE STATISTICS" : "compute statistics");
    }
    // Tri-state: null means the option was absent in the original SQL.
    if (x.getEnable() != null) {
        if (x.getEnable().booleanValue()) {
            println();
            print0(ucase ? "ENABLE" : "enable");
        } else {
            println();
            print0(ucase ? "DISABLE" : "disable");
        }
    }
    if (x.isReverse()) {
        println();
        print0(ucase ? "REVERSE" : "reverse");
    }
    return false;
}
// Prints "LOB (items) STORE AS [SECUREFILE|BASICFILE] [segment] ( ... )"
// with segment attributes and the LOB storage options.
// NOTE(review): "segement" is a pre-existing typo in the AST accessor name;
// it cannot be renamed here without touching the model class.
@Override
public boolean visit(OracleLobStorageClause x) {
    print0(ucase ? "LOB (" : "lob (");
    printAndAccept(x.getItems(), ",");
    print0(ucase ? ") STORE AS" : ") store as");
    if (x.isSecureFile()) {
        print0(ucase ? " SECUREFILE" : " securefile");
    }
    if (x.isBasicFile()) {
        print0(ucase ? " BASICFILE" : " basicfile");
    }
    SQLName segementName = x.getSegementName();
    if (segementName != null) {
        print(' ');
        segementName.accept(this);
    }
    print0(" (");
    this.indentCount++;
    printOracleSegmentAttributes(x);
    // All option getters are tri-state Booleans: null means "not specified".
    if (x.getEnable() != null) {
        println();
        if (x.getEnable().booleanValue()) {
            print0(ucase ? "ENABLE STORAGE IN ROW" : "enable storage in row");
        } else {
            print0(ucase ? "DISABLE STORAGE IN ROW" : "disable storage in row");
        }
    }
    if (x.getChunk() != null) {
        println();
        print0(ucase ? "CHUNK " : "chunk ");
        x.getChunk().accept(this);
    }
    if (x.getCache() != null) {
        println();
        if (x.getCache().booleanValue()) {
            print0(ucase ? "CACHE" : "cache");
        } else {
            print0(ucase ? "NOCACHE" : "nocache");
        }
    }
    if (x.getKeepDuplicate() != null) {
        println();
        if (x.getKeepDuplicate().booleanValue()) {
            print0(ucase ? "KEEP_DUPLICATES" : "keep_duplicates");
        } else {
            print0(ucase ? "DEDUPLICATE" : "deduplicate");
        }
    }
    if (x.isRetention()) {
        println();
        print0(ucase ? "RETENTION" : "retention");
    }
    this.indentCount--;
    println();
    print(')');
    return false;
}
// UNIQUE constraint: default rendering plus the Oracle constraint state
// (e.g. USING INDEX / ENABLE / DEFERRABLE).
@Override
public boolean visit(OracleUnique x) {
    visit((SQLUnique) x);
    printConstraintState(x);
    return false;
}
// FOREIGN KEY constraint: default rendering plus the Oracle constraint state.
@Override
public boolean visit(OracleForeignKey x) {
    visit((SQLForeignKeyImpl) x);
    printConstraintState(x);
    return false;
}
// CHECK constraint: default rendering plus the Oracle constraint state.
@Override
public boolean visit(OracleCheck x) {
    visit((SQLCheck) x);
    printConstraintState(x);
    return false;
}
// Oracle spells the drop-cascade clause "CASCADE CONSTRAINTS".
@Override
protected void printCascade() {
    print0(ucase ? " CASCADE CONSTRAINTS" : " cascade constraints");
}
// Oracle treats the empty string '' as NULL, so an empty literal is
// rendered as NULL; any other literal uses the default rendering.
public boolean visit(SQLCharExpr x, boolean parameterized) {
    if (x.getText() != null && x.getText().length() == 0) {
        print0(ucase ? "NULL" : "null");
    } else {
        super.visit(x, parameterized);
    }
    return false;
}
// Prints "SUPPLEMENTAL LOG DATA (<options>) COLUMNS" with the enabled
// options comma-joined; `count` tracks whether a separator is needed.
@Override
public boolean visit(OracleSupplementalIdKey x) {
    print0(ucase ? "SUPPLEMENTAL LOG DATA (" : "supplemental log data (");
    int count = 0;
    if (x.isAll()) {
        print0(ucase ? "ALL" : "all");
        count++;
    }
    if (x.isPrimaryKey()) {
        if (count != 0) {
            print0(", ");
        }
        print0(ucase ? "PRIMARY KEY" : "primary key");
        count++;
    }
    if (x.isUnique()) {
        if (count != 0) {
            print0(", ");
        }
        print0(ucase ? "UNIQUE" : "unique");
        count++;
    }
    if (x.isUniqueIndex()) {
        if (count != 0) {
            print0(", ");
        }
        print0(ucase ? "UNIQUE INDEX" : "unique index");
        count++;
    }
    if (x.isForeignKey()) {
        if (count != 0) {
            print0(", ");
        }
        print0(ucase ? "FOREIGN KEY" : "foreign key");
        count++;
    }
    print0(ucase ? ") COLUMNS" : ") columns");
    return false;
}
// Prints "SUPPLEMENTAL LOG GROUP <name> (cols) [ALWAYS]".
@Override
public boolean visit(OracleSupplementalLogGrp x) {
    print0(ucase ? "SUPPLEMENTAL LOG GROUP " : "supplemental log group ");
    x.getGroup().accept(this);
    print0(" (");
    printAndAccept(x.getColumns(), ", ");
    print(')');
    if (x.isAlways()) {
        print0(ucase ? " ALWAYS" : " always");
    }
    return false;
}
/**
 * Prints an ORGANIZATION clause (HEAP / INDEX / EXTERNAL). For EXTERNAL
 * organizations the driver type, default directory, access parameters,
 * location list and reject limit are printed inside parentheses.
 */
public boolean visit(OracleCreateTableStatement.Organization x) {
    String type = x.getType();
    print0(ucase ? "ORGANIZATION " : "organization ");
    print0(ucase ? type : type.toLowerCase());
    printOracleSegmentAttributes(x);
    if (x.getPctthreshold() != null) {
        println();
        print0(ucase ? "PCTTHRESHOLD " : "pctthreshold ");
        print(x.getPctthreshold());
    }
    if ("EXTERNAL".equalsIgnoreCase(type)) {
        print0(" (");
        this.indentCount++;
        if (x.getExternalType() != null) {
            println();
            print0(ucase ? "TYPE " : "type ");
            x.getExternalType().accept(this);
        }
        if (x.getExternalDirectory() != null) {
            println();
            print0(ucase ? "DEFAULT DIRECTORY " : "default directory ");
            x.getExternalDirectory().accept(this);
        }
        if (x.getExternalDirectoryRecordFormat() != null) {
            println();
            this.indentCount++;
            print0(ucase ? "ACCESS PARAMETERS (" : "access parameters (");
            x.getExternalDirectoryRecordFormat().accept(this);
            this.indentCount--;
            println();
            print(')');
        }
        if (x.getExternalDirectoryLocation().size() > 0) {
            println();
            // Fix: the lower-case variant was " location(" — a stray leading
            // space (wrong right after println()) and no space before '(' —
            // now mirrors the upper-case "LOCATION (" form.
            print0(ucase ? "LOCATION (" : "location (");
            printAndAccept(x.getExternalDirectoryLocation(), ", ");
            print(')');
        }
        this.indentCount--;
        println();
        print(')');
        if (x.getExternalRejectLimit() != null) {
            println();
            print0(ucase ? "REJECT LIMIT " : "reject limit ");
            x.getExternalRejectLimit().accept(this);
        }
    }
    return false;
}
// Prints an OIDINDEX clause: optional index name, then segment attributes
// inside parentheses.
public boolean visit(OracleCreateTableStatement.OIDIndex x) {
    print0(ucase ? "OIDINDEX" : "oidindex");
    if (x.getName() != null) {
        print(' ');
        x.getName().accept(this);
    }
    print(" (");
    this.indentCount++;
    printOracleSegmentAttributes(x);
    this.indentCount--;
    println();
    print(")");
    return false;
}
/**
 * Prints CREATE [OR REPLACE] PACKAGE [BODY] <name> followed by the nested
 * statements, and for bodies (or non-empty packages) a closing
 * "END <name>;".
 */
@Override
public boolean visit(OracleCreatePackageStatement x) {
    // Fix: the lower-case branches previously printed "procedure" instead
    // of "package", producing invalid SQL when ucase == false.
    if (x.isOrReplace()) {
        print0(ucase ? "CREATE OR REPLACE PACKAGE " : "create or replace package ");
    } else {
        print0(ucase ? "CREATE PACKAGE " : "create package ");
    }
    if (x.isBody()) {
        print0(ucase ? "BODY " : "body ");
    }
    x.getName().accept(this);
    if (x.isBody()) {
        println();
        print0(ucase ? "BEGIN" : "begin");
    }
    this.indentCount++;
    List<SQLStatement> statements = x.getStatements();
    for (int i = 0, size = statements.size(); i < size; ++i) {
        println();
        SQLStatement stmt = statements.get(i);
        stmt.accept(this);
    }
    this.indentCount--;
    if (x.isBody() || statements.size() > 0) {
        println();
        print0(ucase ? "END " : "end ");
        x.getName().accept(this);
        print(';');
    }
    return false;
}
// Renders a PL/SQL assignment: "target := value".
@Override
public boolean visit(SQLAssignItem x) {
    x.getTarget().accept(this);
    print0(" := ");
    x.getValue().accept(this);
    return false;
}
/**
 * Prints EXECUTE IMMEDIATE <dynamic_sql> [INTO ...] [USING ...]
 * [RETURNING INTO ...].
 */
@Override
public boolean visit(OracleExecuteImmediateStatement x) {
    print0(ucase ? "EXECUTE IMMEDIATE " : "execute immediate ");
    x.getDynamicSql().accept(this);
    List<SQLExpr> into = x.getInto();
    if (into.size() > 0) {
        print0(ucase ? " INTO " : " into ");
        printAndAccept(into, ", ");
    }
    List<SQLArgument> using = x.getArguments();
    if (using.size() > 0) {
        print0(ucase ? " USING " : " using ");
        printAndAccept(using, ", ");
    }
    List<SQLExpr> returnInto = x.getReturnInto();
    if (returnInto.size() > 0) {
        // Fix: keyword was misspelled "RETURNNING"; the Oracle clause is
        // "RETURNING INTO".
        print0(ucase ? " RETURNING INTO " : " returning into ");
        printAndAccept(returnInto, ", ");
    }
    return false;
}
// Prints "TREAT (expr AS [REF] type)".
@Override
public boolean visit(OracleTreatExpr x) {
    print0(ucase ? "TREAT (" : "treat (");
    x.getExpr().accept(this);
    print0(ucase ? " AS " : " as ");
    if (x.isRef()) {
        print0(ucase ? "REF " : "ref ");
    }
    x.getType().accept(this);
    print(')');
    return false;
}
// Prints CREATE [OR REPLACE] [PUBLIC] SYNONYM <name> FOR <object>.
@Override
public boolean visit(OracleCreateSynonymStatement x) {
    if (x.isOrReplace()) {
        print0(ucase ? "CREATE OR REPLACE " : "create or replace ");
    } else {
        print0(ucase ? "CREATE " : "create ");
    }
    if (x.isPublic()) {
        print0(ucase ? "PUBLIC " : "public ");
    }
    print0(ucase ? "SYNONYM " : "synonym ");
    x.getName().accept(this);
    print0(ucase ? " FOR " : " for ");
    x.getObject().accept(this);
    return false;
}
/**
 * Prints CREATE [OR REPLACE] TYPE [BODY] with its many optional parts:
 * UNDER supertype, AUTHID, FORCE, OBJECT/record members, TABLE OF,
 * VARRAY, FINAL / INSTANTIABLE modifiers, and wrapped source.
 */
@Override
public boolean visit(OracleCreateTypeStatement x) {
    if (x.isOrReplace()) {
        print0(ucase ? "CREATE OR REPLACE TYPE " : "create or replace type ");
    } else {
        print0(ucase ? "CREATE TYPE " : "create type ");
    }
    if (x.isBody()) {
        print0(ucase ? "BODY " : "body ");
    }
    x.getName().accept(this);
    SQLName under = x.getUnder();
    if (under != null) {
        print0(ucase ? " UNDER " : " under ");
        under.accept(this);
    }
    SQLName authId = x.getAuthId();
    if (authId != null) {
        print0(ucase ? " AUTHID " : " authid ");
        authId.accept(this);
    }
    if (x.isForce()) {
        print0(ucase ? "FORCE " : "force ");
    }
    List<SQLParameter> parameters = x.getParameters();
    SQLDataType tableOf = x.getTableOf();
    if (x.isObject()) {
        // NOTE(review): no lower-case variant here, unlike the rest of this
        // method — confirm whether " as object" should follow ucase.
        print0(" AS OBJECT");
    }
    if (parameters.size() > 0) {
        // Member list header: "(" for parenthesized types, "AS" for the
        // ENUM special case, otherwise "IS".
        if (x.isParen()) {
            print(" (");
        } else if (x.getParameters() != null && x.getParameters().size() > 0 && "ENUM".equals(x.getParameters().get(0).getDataType().getName())) {
            print0(ucase ? " AS" : " as");
        } else {
            print0(ucase ? " IS" : " is");
        }
        indentCount++;
        println();
        for (int i = 0; i < parameters.size(); ++i) {
            SQLParameter param = parameters.get(i);
            param.accept(this);
            SQLDataType dataType = param.getDataType();
            if (i < parameters.size() - 1) {
                // Function/procedure members with bodies are separated by a
                // bare newline instead of a comma.
                if (dataType instanceof OracleFunctionDataType
                        && ((OracleFunctionDataType) dataType).getBlock() != null) {
                    // skip the comma separator
                    println();
                } else if (dataType instanceof OracleProcedureDataType
                        && ((OracleProcedureDataType) dataType).getBlock() != null) {
                    // skip the comma separator
                    println();
                } else {
                    println(", ");
                }
            }
        }
        indentCount--;
        println();
        // Closing token mirrors the header chosen above.
        if (x.isParen()) {
            print0(")");
        } else if (x.getParameters() != null && x.getParameters().size() > 0 && "ENUM".equals(x.getParameters().get(0).getDataType().getName())) {
            // ENUM form has no closing token
        } else {
            print0("END");
        }
    } else if (tableOf != null) {
        print0(ucase ? " AS TABLE OF " : " as table of ");
        tableOf.accept(this);
        SQLDataType indexBy = x.getIndexBy();
        if (indexBy != null) {
            print0(ucase ? " INDEX BY " : " index by ");
            indexBy.accept(this);
        }
    } else if (x.getVarraySizeLimit() != null) {
        print0(ucase ? " VARRAY (" : " varray (");
        x.getVarraySizeLimit().accept(this);
        print0(ucase ? ") OF " : ") of ");
        x.getVarrayDataType().accept(this);
    }
    Boolean isFinal = x.getFinal();
    if (isFinal != null) {
        if (isFinal.booleanValue()) {
            print0(ucase ? " FINAL" : " final");
        } else {
            print0(ucase ? " NOT FINAL" : " not final");
        }
    }
    Boolean instantiable = x.getInstantiable();
    if (instantiable != null) {
        if (instantiable.booleanValue()) {
            print0(ucase ? " INSTANTIABLE" : " instantiable");
        } else {
            print0(ucase ? " NOT INSTANTIABLE" : " not instantiable");
        }
    }
    String wrappedSource = x.getWrappedSource();
    if (wrappedSource != null) {
        // NOTE(review): no space between "WRAPPED" and the source, unlike
        // printCreateFunctionBody which prints " WRAPPED " — confirm.
        print0(ucase ? " WRAPPED" : " wrapped");
        print0(wrappedSource);
    }
    return false;
}
// Prints a pipelined-function "PIPE ROW(args)" statement.
@Override
public boolean visit(OraclePipeRowStatement x) {
    print0(ucase ? "PIPE ROW(" : "pipe row(");
    printAndAccept(x.getParameters(), ", ");
    print(')');
    return false;
}
// Prints "expr IS OF TYPE (t1, t2, ...)"; a type carrying the "ONLY"
// attribute is prefixed with the ONLY keyword.
@Override
public boolean visit(OracleIsOfTypeExpr x) {
    printExpr(x.getExpr());
    print0(ucase ? " IS OF TYPE (" : " is of type (");
    List<SQLExpr> types = x.getTypes();
    for (int i = 0, size = types.size(); i < size; ++i) {
        if (i != 0) {
            print0(", ");
        }
        SQLExpr type = types.get(i);
        if (Boolean.TRUE.equals(type.getAttribute("ONLY"))) {
            print0(ucase ? "ONLY " : "only ");
        }
        type.accept(this);
    }
    print(')');
    return false;
}
// SQL*Plus "@@script" run directive.
@Override
public boolean visit(OracleRunStatement x) {
    print0("@@");
    printExpr(x.getExpr());
    return false;
}
// Prints "XMLTYPE <column>" with optional storage clause and the
// ALLOW/DISALLOW NONSCHEMA / ANYSCHEMA tri-state options.
@Override
public boolean visit(OracleXmlColumnProperties x) {
    print0(ucase ? "XMLTYPE " : "xmltype ");
    x.getColumn().accept(this);
    final OracleXmlColumnProperties.OracleXMLTypeStorage storage = x.getStorage();
    if (storage != null) {
        storage.accept(this);
    }
    final Boolean allowNonSchema = x.getAllowNonSchema();
    if (allowNonSchema != null) {
        if (allowNonSchema.booleanValue()) {
            print0(ucase ? " ALLOW NONSCHEMA" : " allow nonschema");
        } else {
            print0(ucase ? " DISALLOW NONSCHEMA" : " disallow nonschema");
        }
    }
    final Boolean allowAnySchema = x.getAllowAnySchema();
    if (allowAnySchema != null) {
        if (allowAnySchema.booleanValue()) {
            print0(ucase ? " ALLOW ANYSCHEMA" : " allow anyschema");
        } else {
            print0(ucase ? " DISALLOW ANYSCHEMA" : " disallow anyschema");
        }
    }
    return false;
}
/**
 * Prints a table source: the expression, optional SAMPLE clause, optional
 * alias (bare for CREATE INDEX / MERGE / DELETE contexts, "AS alias"
 * otherwise), an optional column list, and trailing comments.
 */
@Override
public boolean visit(SQLExprTableSource x) {
    printTableSourceExpr(x.getExpr());
    final SQLTableSampling sampling = x.getSampling();
    if (sampling != null) {
        print(' ');
        sampling.accept(this);
    }
    String alias = x.getAlias();
    List<SQLName> columns = x.getColumnsDirect();
    if (alias != null) {
        SQLObject parent = x.getParent();
        if (parent instanceof SQLCreateIndexStatement
                || parent instanceof SQLMergeStatement
                || parent instanceof SQLDeleteStatement) {
            print(' ');
            print0(alias);
        } else {
            // Fix: previously printed a lone ' ' followed by " AS ",
            // emitting two spaces before the keyword ("t1  AS a").
            print0(ucase ? " AS " : " as ");
            print0(alias);
        }
    }
    if (columns != null && columns.size() > 0) {
        print(" (");
        printAndAccept(columns, ", ");
        print(')');
    }
    if (isPrettyFormat() && x.hasAfterComment()) {
        print(' ');
        printlnComment(x.getAfterCommentsDirect());
    }
    return false;
}
// Intentionally prints nothing for the XMLType storage clause.
// NOTE(review): this reads as unimplemented rather than deliberate —
// confirm whether STORE AS output is expected here.
@Override
public boolean visit(OracleXmlColumnProperties.OracleXMLTypeStorage x) {
    return false;
}
// Default subpartition rendering, then Oracle segment attributes printed
// one indent level deeper.
public boolean visit(SQLSubPartition x) {
    super.visit(x);
    incrementIndent();
    printOracleSegmentAttributes(x);
    decrementIndent();
    return false;
}
// Default partition-value rendering, then Oracle segment attributes printed
// one indent level deeper.
public boolean visit(SQLPartitionValue x) {
    super.visit(x);
    incrementIndent();
    printOracleSegmentAttributes(x);
    decrementIndent();
    return false;
}
// No Oracle-specific teardown; defers to the base visitor.
@Override
public void endVisit(SQLDropUserStatement x) {
    super.endVisit(x);
}
// Prints DROP USER [IF EXISTS] u1[, u2 ...] [CASCADE].
@Override
public boolean visit(SQLDropUserStatement x) {
    print0(ucase ? "DROP USER " : "drop user ");
    if (x.isIfExists()) {
        print0(ucase ? "IF EXISTS " : "if exists ");
    }
    printAndAccept(x.getUsers(), ", ");
    if (x.isCascade()) {
        print0(ucase ? " CASCADE" : " cascade");
    }
    return false;
}
// Prints ALTER PACKAGE <name> [COMPILE] [PACKAGE] [BODY].
@Override
public boolean visit(OracleAlterPackageStatement x) {
    print0(ucase ? "ALTER PACKAGE " : "alter package ");
    x.getName().accept(this);
    if (x.isCompile()) {
        print0(ucase ? " COMPILE" : " compile");
    }
    if (x.isPack()) {
        print0(ucase ? " PACKAGE" : " package");
    }
    if (x.isBody()) {
        print0(ucase ? " BODY" : " body");
    }
    return false;
}
/**
 * Prints DROP PACKAGE [BODY] <name>.
 *
 * Fix: the BODY keyword must precede the package name ("DROP PACKAGE BODY
 * pkg"); the previous code emitted it after the name, producing invalid
 * Oracle syntax.
 */
@Override
public boolean visit(OracleDropPackageStatement x) {
    print0(ucase ? "DROP PACKAGE " : "drop package ");
    if (x.isBody()) {
        print0(ucase ? "BODY " : "body ");
    }
    x.getName().accept(this);
    return false;
}
// Re-emits the raw SQL captured at parse time, case-folded to match the
// visitor's keyword casing.
// NOTE(review): toUpperCase()/toLowerCase() use the default locale; under
// e.g. a Turkish locale 'i'/'I' fold unexpectedly — consider Locale.ROOT.
@Override
public boolean visit(OracleCreateTableSpaceStatement x) {
    print0(ucase ? x.getSql().toUpperCase() : x.getSql().toLowerCase());
    return false;
}
// Dispatches Oracle-specific partitions to the dedicated overload;
// everything else uses the default rendering.
@Override
public boolean visit(SQLPartitionSingle x) {
    if (x instanceof OraclePartitionSingle) {
        return visit((OraclePartitionSingle) x);
    }
    return super.visit(x);
}
// Prints a single partition definition. The keyword depends on context:
// DBPARTITION / TBPARTITION when the parent is the corresponding clause of
// a MySqlCreateTableStatement (DRDS-style sharding), otherwise PARTITION.
public boolean visit(OraclePartitionSingle x) {
    boolean isDbPartiton = false, isTbPartition = false;
    final SQLObject parent = x.getParent();
    if (parent != null) {
        final SQLObject parent2 = parent.getParent();
        if (parent2 instanceof MySqlCreateTableStatement) {
            MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) parent2;
            isDbPartiton = parent == stmt.getDbPartitionBy();
            isTbPartition = parent == stmt.getTablePartitionBy();
        }
    }
    if (isDbPartiton) {
        print0(ucase ? "DBPARTITION " : "dbpartition ");
    } else if (isTbPartition) {
        print0(ucase ? "TBPARTITION " : "tbpartition ");
    } else {
        print0(ucase ? "PARTITION " : "partition ");
    }
    x.getName().accept(this);
    if (x.getValues() != null) {
        print(' ');
        x.getValues().accept(this);
    }
    this.indentCount++;
    printOracleSegmentAttributes(x);
    this.indentCount--;
    if (x.getSubPartitionsCount() != null) {
        this.indentCount++;
        println();
        print0(ucase ? "SUBPARTITIONS " : "subpartitions ");
        x.getSubPartitionsCount().accept(this);
        this.indentCount--;
    }
    if (x.getSubPartitions().size() > 0) {
        print(" (");
        this.indentCount++;
        for (int i = 0; i < x.getSubPartitions().size(); ++i) {
            if (i != 0) {
                print(',');
            }
            println();
            x.getSubPartitions().get(i).accept(this);
        }
        this.indentCount--;
        println();
        print(')');
    }
    SQLExpr locality = x.getLocality();
    if (locality != null) {
        print(ucase ? " LOCALITY = " : " locality = ");
        locality.accept(this);
    }
    return false;
}
/**
 * Prints a MERGE "WHEN [NOT] MATCHED ... DELETE [WHERE cond]" action.
 */
@Override
public boolean visit(SQLMergeStatement.WhenDelete x) {
    print0(ucase ? "DELETE" : "delete");
    if (x.isNot()) {
        print0(ucase ? " NOT MATCHED" : " not matched");
    }
    SQLExpr where = x.getWhere();
    if (where != null) {
        // Fix: the lower-case branch was " where" (no trailing space),
        // which glued the keyword to the condition, e.g. "whereid = 1".
        print0(ucase ? " WHERE " : " where ");
        printExpr(where, parameterized);
    }
    return false;
}
}
|
OracleOutputVisitor
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/single/SingleDetachTest.java
|
{
"start": 1145,
"end": 3999
}
|
/**
 * Tests for {@code Single.onTerminateDetach()}: verifies pass-through of
 * success/error signals and, via weak references, that the operator drops
 * its upstream {@code Disposable} once the sequence terminates or is
 * disposed, so the upstream can be garbage collected.
 */
class ____ extends RxJavaTest {

    @Test
    public void doubleSubscribe() {
        TestHelper.checkDoubleOnSubscribeSingle(new Function<Single<Object>, SingleSource<Object>>() {
            @Override
            public SingleSource<Object> apply(Single<Object> m) throws Exception {
                return m.onTerminateDetach();
            }
        });
    }

    @Test
    public void dispose() {
        TestHelper.checkDisposed(PublishProcessor.create().singleOrError().onTerminateDetach());
    }

    @Test
    public void onError() {
        Single.error(new TestException())
        .onTerminateDetach()
        .test()
        .assertFailure(TestException.class);
    }

    @Test
    public void onSuccess() {
        Single.just(1)
        .onTerminateDetach()
        .test()
        .assertResult(1);
    }

    @Test
    public void cancelDetaches() throws Exception {
        Disposable d = Disposable.empty();
        final WeakReference<Disposable> wr = new WeakReference<>(d);

        TestObserver<Object> to = new Single<Object>() {
            @Override
            protected void subscribeActual(SingleObserver<? super Object> observer) {
                observer.onSubscribe(wr.get());
            } // fix: removed stray ';' (empty statement) after method body
        }
        .onTerminateDetach()
        .test();

        d = null;

        to.dispose();

        // NOTE(review): GC-based assertion — System.gc() is only a hint, so
        // this can be flaky on some JVMs; kept as-is to preserve behavior.
        System.gc();
        Thread.sleep(200);

        to.assertEmpty();

        assertNull(wr.get());
    }

    @Test
    public void errorDetaches() throws Exception {
        Disposable d = Disposable.empty();
        final WeakReference<Disposable> wr = new WeakReference<>(d);

        TestObserver<Integer> to = new Single<Integer>() {
            @Override
            protected void subscribeActual(SingleObserver<? super Integer> observer) {
                observer.onSubscribe(wr.get());
                observer.onError(new TestException());
                // Second onError is deliberate: the terminated operator must
                // ignore it (protocol violation after termination).
                observer.onError(new IOException());
            } // fix: removed stray ';' (empty statement) after method body
        }
        .onTerminateDetach()
        .test();

        d = null;

        System.gc();
        Thread.sleep(200);

        to.assertFailure(TestException.class);

        assertNull(wr.get());
    }

    @Test
    public void successDetaches() throws Exception {
        Disposable d = Disposable.empty();
        final WeakReference<Disposable> wr = new WeakReference<>(d);

        TestObserver<Integer> to = new Single<Integer>() {
            @Override
            protected void subscribeActual(SingleObserver<? super Integer> observer) {
                observer.onSubscribe(wr.get());
                observer.onSuccess(1);
                // Second onSuccess is deliberate: it must be ignored.
                observer.onSuccess(2);
            } // fix: removed stray ';' (empty statement) after method body
        }
        .onTerminateDetach()
        .test();

        d = null;

        System.gc();
        Thread.sleep(200);

        to.assertResult(1);

        assertNull(wr.get());
    }
}
|
SingleDetachTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
|
{
"start": 29198,
"end": 29364
}
|
class ____ {}
}
""")
.expectUnchanged()
.addInputLines(
"in/ReplaceReturnType.java",
"""
|
Inner
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/jmx/annotation/EndpointJmxExtension.java
|
{
"start": 1393,
"end": 1466
}
|
interface ____ {
/**
* The {@link Endpoint endpoint}
|
EndpointJmxExtension
|
java
|
apache__maven
|
compat/maven-compat/src/main/java/org/apache/maven/project/path/PathTranslator.java
|
{
"start": 935,
"end": 1267
}
|
/**
 * Maven-compat component for rewriting paths in a {@code Model} relative to
 * a base directory, and reversing that rewrite.
 * NOTE(review): exact path semantics (absolute vs. relative forms) are
 * defined by the implementations — confirm against DefaultPathTranslator.
 */
interface ____ {
    // Legacy Plexus role key used to look this component up.
    String ROLE = PathTranslator.class.getName();

    // Rewrites the model's configured paths against {@code basedir}.
    void alignToBaseDirectory(Model model, File basedir);

    // Returns {@code path} aligned to {@code basedir}.
    String alignToBaseDirectory(String path, File basedir);

    // Reverts previously aligned paths in the model.
    void unalignFromBaseDirectory(Model model, File basedir);

    // Returns {@code directory} un-aligned from {@code basedir}.
    String unalignFromBaseDirectory(String directory, File basedir);
}
|
PathTranslator
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/event/spi/EnversPostCollectionRecreateEventListenerImpl.java
|
{
"start": 574,
"end": 1241
}
|
/**
 * Envers listener invoked after a collection is (re)created; routes the
 * event to the audit machinery, choosing the "inversed" variant for
 * inverse (mappedBy) collections.
 */
class ____
        extends BaseEnversCollectionEventListener
        implements PostCollectionRecreateEventListener {

    public EnversPostCollectionRecreateEventListenerImpl(EnversService enversService) {
        super( enversService );
    }

    @Override
    public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
        final CollectionEntry collectionEntry = getCollectionEntry( event );
        if ( !collectionEntry.getLoadedPersister().isInverse() ) {
            onCollectionAction( event, event.getCollection(), null, collectionEntry );
        }
        else {
            // NOTE(review): the inversed path's exact audit semantics live in
            // BaseEnversCollectionEventListener — confirm before relying.
            onCollectionActionInversed( event, event.getCollection(), null, collectionEntry );
        }
    }
}
|
EnversPostCollectionRecreateEventListenerImpl
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/EnumUtils.java
|
{
"start": 3832,
"end": 4008
}
|
enum ____.
* @throws NullPointerException if {@code enumClass} or {@code values} is {@code null}.
* @throws IllegalArgumentException if {@code enumClass} is not an
|
values
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/DispatcherHandlerErrorTests.java
|
{
"start": 8428,
"end": 9077
}
|
// Spring configuration wiring the minimal WebFlux infrastructure for these
// tests: annotation-based mapping/adapter, a text-only response-body result
// handler, and the test controller.
class ____ {

    @Bean
    public RequestMappingHandlerMapping handlerMapping() {
        return new RequestMappingHandlerMapping();
    }

    @Bean
    public RequestMappingHandlerAdapter handlerAdapter() {
        return new RequestMappingHandlerAdapter();
    }

    @Bean
    public ResponseBodyResultHandler resultHandler() {
        // Writes only CharSequence payloads as text/plain; content type is
        // resolved from the request's Accept header.
        return new ResponseBodyResultHandler(Collections.singletonList(
                new EncoderHttpMessageWriter<>(CharSequenceEncoder.textPlainOnly())),
                new HeaderContentTypeResolver());
    }

    @Bean
    public TestController testController() {
        return new TestController();
    }
}
@Controller
@SuppressWarnings("unused")
private static
|
TestConfig
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java
|
{
"start": 1035,
"end": 3092
}
|
/**
 * Nodes-level transport action that collects ES|QL usage statistics from
 * each node's {@code PlanExecutor} metrics and aggregates them into an
 * {@code EsqlStatsResponse}.
 */
class ____ extends TransportNodesAction<
    EsqlStatsRequest,
    EsqlStatsResponse,
    EsqlStatsRequest.NodeStatsRequest,
    EsqlStatsResponse.NodeStatsResponse,
    Void> {

    // the plan executor holds the metrics
    private final PlanExecutor planExecutor;

    @Inject
    public TransportEsqlStatsAction(
        TransportService transportService,
        ClusterService clusterService,
        ThreadPool threadPool,
        ActionFilters actionFilters,
        PlanExecutor planExecutor
    ) {
        super(
            EsqlStatsAction.NAME,
            clusterService,
            transportService,
            actionFilters,
            EsqlStatsRequest.NodeStatsRequest::new,
            threadPool.executor(ThreadPool.Names.MANAGEMENT)
        );
        this.planExecutor = planExecutor;
    }

    // Fix: removed a redundant resolveRequest(...) override that only
    // delegated to super — the inherited implementation is used unchanged.

    @Override
    protected EsqlStatsResponse newResponse(
        EsqlStatsRequest request,
        List<EsqlStatsResponse.NodeStatsResponse> nodes,
        List<FailedNodeException> failures
    ) {
        return new EsqlStatsResponse(clusterService.getClusterName(), nodes, failures);
    }

    @Override
    protected EsqlStatsRequest.NodeStatsRequest newNodeRequest(EsqlStatsRequest request) {
        return new EsqlStatsRequest.NodeStatsRequest(request);
    }

    @Override
    protected EsqlStatsResponse.NodeStatsResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException {
        return new EsqlStatsResponse.NodeStatsResponse(in);
    }

    @Override
    protected EsqlStatsResponse.NodeStatsResponse nodeOperation(EsqlStatsRequest.NodeStatsRequest request, Task task) {
        // Per-node leg: snapshot this node's ES|QL metrics.
        EsqlStatsResponse.NodeStatsResponse statsResponse = new EsqlStatsResponse.NodeStatsResponse(clusterService.localNode());
        statsResponse.setStats(planExecutor.metrics().stats());
        return statsResponse;
    }
}
|
TransportEsqlStatsAction
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java
|
{
"start": 1613,
"end": 1724
}
|
class ____ extends EsqlArithmeticOperation {
/** Arithmetic (quad) function. */
|
DateTimeArithmeticOperation
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-vertx-exporter/src/test/java/io/quarkus/it/opentelemetry/vertx/exporter/grpc/GrpcWithTLSWithTrustAllWithCompressionTest.java
|
{
"start": 611,
"end": 1168
}
|
// Test profile: trusts all TLS certificates while the collector runs with
// TLS + gzip compression and withholds its trusted cert, exercising the
// quarkus.tls.trust-all code path.
class ____ implements QuarkusTestProfile {
    @Override
    public Map<String, String> getConfigOverrides() {
        return Map.of("quarkus.tls.trust-all", "true");
    }

    @Override
    public List<TestResourceEntry> testResources() {
        return Collections.singletonList(
                new TestResourceEntry(
                        OtelCollectorLifecycleManager.class,
                        Map.of("enableTLS", "true", "enableCompression", "true", "preventTrustCert", "true")));
    }
}
}
|
Profile
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/streaming/api/datastream/extension/eventtime/EventTimeWatermarkHandlerTest.java
|
{
"start": 2250,
"end": 10493
}
|
class ____ {
private static final List<Watermark> outputWatermarks = new ArrayList<>();
private static final List<Long> advancedEventTimes = new ArrayList<>();
// Reset the shared capture lists so each test starts from a clean slate.
@AfterEach
void after() {
    outputWatermarks.clear();
    advancedEventTimes.clear();
}
@Test
void testOneInputWatermarkHandler() throws Exception {
// The test scenario is as follows:
// -----------------------------------------------------------------------------
// test scenario| expected result
// -----------------------------------------------------------------------------
// Step|Input 0|updateStatus|eventTimes| idleStatus |advancedEventTimes
// -----------------------------------------------------------------------------
// 1 | 1 | true,1 | [1] | [] | [1]
// 2 | 2 | true,2 | [1,2] | [] | [1,2]
// 3 | 1 | false,-1 | [1,2] | [] | [1,2]
// 4 | true | false,-1 | [1,2] | [true] | [1,2]
// 5 | false | false,-1 | [1,2] |[true,false]| [1,2]
// -----------------------------------------------------------------------------
// For example, Step 1 indicates that Input 0 will receive an event time watermark with a
// value of 1.
// After Step 1 is executed, the `updateStatus.isEventTimeUpdated` returned by the handler
// should be true,
// and `updateStatus.getNewEventTime` should be equal to 1.
// Additionally, the handler should output an event time watermark with a value of 1 and
// advance the current event time to 2.
EventTimeWatermarkHandler watermarkHandler =
new EventTimeWatermarkHandler(
1, new TestOutput(), new TestInternalTimeServiceManager());
EventTimeWatermarkHandler.EventTimeUpdateStatus updateStatus;
// Step 1
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION.newWatermark(1L), 0);
assertThat(updateStatus.isEventTimeUpdated()).isTrue();
assertThat(updateStatus.getNewEventTime()).isEqualTo(1L);
checkOutputEventTimeWatermarkValues(1L);
checkOutputIdleStatusWatermarkValues();
checkAdvancedEventTimes(1L);
// Step 2
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION.newWatermark(2L), 0);
assertThat(updateStatus.isEventTimeUpdated()).isTrue();
assertThat(updateStatus.getNewEventTime()).isEqualTo(2L);
checkOutputEventTimeWatermarkValues(1L, 2L);
checkOutputIdleStatusWatermarkValues();
checkAdvancedEventTimes(1L, 2L);
// Step 3
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION.newWatermark(1L), 0);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
assertThat(updateStatus.getNewEventTime()).isEqualTo(-1L);
checkOutputEventTimeWatermarkValues(1L, 2L);
checkOutputIdleStatusWatermarkValues();
checkAdvancedEventTimes(1L, 2L);
// Step 4
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(true), 0);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
checkOutputEventTimeWatermarkValues(1L, 2L);
checkOutputIdleStatusWatermarkValues(true);
checkAdvancedEventTimes(1L, 2L);
// Step 5
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(false),
0);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
checkOutputEventTimeWatermarkValues(1L, 2L);
checkOutputIdleStatusWatermarkValues(true, false);
checkAdvancedEventTimes(1L, 2L);
}
@Test
void testTwoInputWatermarkHandler() throws Exception {
// The test scenario is as follows:
// ---------------------------------------------------------------------------------
// test scenario | expected result
// ---------------------------------------------------------------------------------
// Step| Input 0 | Input 1 |updateStatus|eventTimes| idleStatus |advancedEventTimes
// ---------------------------------------------------------------------------------
// 1 | 1 | | false,-1 | [] | [] | []
// 2 | | 2 | true,1 | [1] | [] | [1]
// 3 | true | | false,-1 | [1] | [] | [1]
// 4 | | true | false,-1 | [1] | [true] | [1]
// 5 | | false | false,-1 | [1] |[true,false]| [1]
// -----------------------------------------------------------------------------
// For example, Step 1 indicates that Input 0 will receive an event time watermark with a
// value of 1.
// After Step 1 is executed, the `updateStatus.isEventTimeUpdated` returned by the handler
// should be false,
// and `updateStatus.getNewEventTime` should be equal to -1.
// Additionally, the handler should not output any event time watermark and idle status
// watermark.
EventTimeWatermarkHandler watermarkHandler =
new EventTimeWatermarkHandler(
2, new TestOutput(), new TestInternalTimeServiceManager());
EventTimeWatermarkHandler.EventTimeUpdateStatus updateStatus;
// Step 1
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION.newWatermark(1L), 0);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
checkOutputEventTimeWatermarkValues();
checkOutputIdleStatusWatermarkValues();
checkAdvancedEventTimes();
// Step 2
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION.newWatermark(2L), 1);
assertThat(updateStatus.isEventTimeUpdated()).isTrue();
assertThat(updateStatus.getNewEventTime()).isEqualTo(1L);
checkOutputEventTimeWatermarkValues(1L);
checkOutputIdleStatusWatermarkValues();
checkAdvancedEventTimes(1L);
// Step 3
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(true), 0);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
assertThat(updateStatus.getNewEventTime()).isEqualTo(-1L);
checkOutputEventTimeWatermarkValues(1L);
checkOutputIdleStatusWatermarkValues();
checkAdvancedEventTimes(1L);
// Step 4
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(true), 1);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
assertThat(updateStatus.getNewEventTime()).isEqualTo(-1L);
checkOutputEventTimeWatermarkValues(1L);
checkOutputIdleStatusWatermarkValues(true);
checkAdvancedEventTimes(1L);
// Step 5
updateStatus =
watermarkHandler.processWatermark(
EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(false),
1);
assertThat(updateStatus.isEventTimeUpdated()).isFalse();
assertThat(updateStatus.getNewEventTime()).isEqualTo(-1L);
checkOutputEventTimeWatermarkValues(1L);
checkOutputIdleStatusWatermarkValues(true, false);
checkAdvancedEventTimes(1L);
}
private static
|
EventTimeWatermarkHandlerTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/naturalid/mutable/cached/CachedMutableNaturalIdStrictReadWriteTest.java
|
{
"start": 1422,
"end": 6497
}
|
/**
 * Tests second-level caching of mutable natural ids under a strict read/write
 * access strategy. Verifies that natural-id cache entries are only made visible
 * after a successful commit (puts and subsequent hits), and that rolled-back
 * inserts/updates leave no stale cache entries behind.
 */
class ____ extends CachedMutableNaturalIdTest {
	@Test
	@JiraKey( value = "HHH-9203" )
	public void testToMapConversion(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		// Persisting an entity whose natural id is cached should register
		// exactly one natural-id cache put for that region.
		scope.inTransaction(
				(session) -> session.persist( new AllCached( "IT" ) )
		);
		final NaturalIdStatistics naturalIdStatistics = statistics.getNaturalIdStatistics( AllCached.class.getName() );
		assertEquals( 1, naturalIdStatistics.getCachePutCount() );
	}
	@Test
	@JiraKey( value = "HHH-7278" )
	public void testInsertedNaturalIdCachedAfterTransactionSuccess(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		// Insert commits successfully, so the natural-id mapping should be
		// in the cache...
		scope.inTransaction(
				(session) -> session.persist( new Another( "it" ) )
		);
		// ...and a natural-id lookup in a later transaction should hit it.
		scope.inTransaction(
				(session) -> {
					final Another it = session.bySimpleNaturalId( Another.class ).load( "it" );
					assertNotNull( it );
				}
		);
		assertEquals( 1, statistics.getNaturalIdCacheHitCount() );
	}
	@Test
	@JiraKey( value = "HHH-7278" )
	public void testInsertedNaturalIdNotCachedAfterTransactionFailure(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		// Flush the insert (so the natural id is resolved) but roll the
		// transaction back: no cache entry may become visible.
		scope.inSession(
				(session) -> {
					final Transaction transaction = session.getTransaction();
					transaction.begin();
					session.persist( new Another( "it" ) );
					session.flush();
					transaction.rollback();
				}
		);
		// The rolled-back entity must be neither loadable nor served from cache.
		scope.inTransaction(
				(session) -> {
					final Another it = session.bySimpleNaturalId( Another.class ).load( "it" );
					assertNull( it );
					assertEquals( 0, statistics.getNaturalIdCacheHitCount() );
				}
		);
	}
	@Test
	@JiraKey( value = "HHH-7278" )
	public void testChangedNaturalIdCachedAfterTransactionSuccess(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		scope.inTransaction(
				(session) -> session.persist( new Another( "it" ) )
		);
		// Mutate the natural id in a committed transaction; the cache should
		// track the new value.
		scope.inTransaction(
				(session) -> {
					final Another it = session.bySimpleNaturalId( Another.class ).load( "it" );
					assertNotNull( it );
					it.setName( "modified" );
				}
		);
		// Clear so the hit count below reflects only the post-update lookup.
		statistics.clear();
		scope.inTransaction(
				(session) -> {
					final Another it = session.bySimpleNaturalId( Another.class ).load( "modified" );
					assertNotNull( it );
				}
		);
		assertEquals( 1, statistics.getNaturalIdCacheHitCount() );
	}
	@Test
	@JiraKey( value = "HHH-7278" )
	public void testChangedNaturalIdNotCachedAfterTransactionFailure(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		scope.inTransaction(
				(session) -> session.persist( new Another( "it" ) )
		);
		// Flush a natural-id change, then force a rollback: the change must
		// not leak into the natural-id cache.
		scope.inTransaction(
				(session) -> {
					final Another it = session.bySimpleNaturalId( Another.class ).load( "it" );
					assertNotNull( it );
					it.setName( "modified" );
					session.flush();
					session.getTransaction().markRollbackOnly();
				}
		);
		statistics.clear();
		// The rolled-back value is invisible; the original natural id still loads.
		scope.inTransaction(
				(session) -> {
					final Another modified = session.bySimpleNaturalId( Another.class ).load( "modified" );
					assertNull( modified );
					final Another original = session.bySimpleNaturalId( Another.class ).load( "it" );
					assertNotNull( original );
				}
		);
		assertEquals(0, statistics.getNaturalIdCacheHitCount());
	}
	@Test
	@JiraKey( value = "HHH-7309" )
	public void testInsertUpdateEntity_NaturalIdCachedAfterTransactionSuccess(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		scope.inTransaction(
				(session) -> {
					Another it = new Another( "it" );
					// schedules an InsertAction
					session.persist( it );
					// schedules an UpdateAction
					// - without bug-fix this will re-cache natural-id with identical key and at same time invalidate it
					it.setSurname( "1234" );
				}
		);
		scope.inTransaction(
				(session) -> {
					final Another it = session.bySimpleNaturalId( Another.class ).load( "it" );
					assertNotNull( it );
				}
		);
		assertEquals( 1, statistics.getNaturalIdCacheHitCount(), "In a strict access strategy we would expect a hit here" );
	}
	@Test
	@JiraKey( value = "HHH-9200" )
	public void testNaturalIdCacheStatisticsReset(SessionFactoryScope scope) {
		final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
		statistics.clear();
		scope.inTransaction(
				(session) -> {
					session.persist( new Another( "IT" ) );
				}
		);
		NaturalIdStatistics stats = statistics.getNaturalIdStatistics( Another.class.getName() );
		assertEquals( 1, stats.getCachePutCount() );
		// Clearing the statistics must also reset the per-region natural-id
		// counters, not just the global ones.
		statistics.clear();
		// Refresh statistics reference.
		stats = statistics.getNaturalIdStatistics( Another.class.getName() );
		assertEquals( 0, stats.getCachePutCount() );
	}
}
|
CachedMutableNaturalIdStrictReadWriteTest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java
|
{
"start": 21914,
"end": 22134
}
|
class ____ {",
" @Inject Foo(Bar bar) {}",
"}");
Source packagePrivateBar =
CompilerTests.javaSource(
"test.Bar",
"package test;",
"",
"
|
Foo
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamSessionWindowAggregate.java
|
{
"start": 17677,
"end": 18332
}
|
class ____ implements KTableValueGetter<Windowed<KIn>, VAgg> {

    // Session store backing this getter; resolved from the processor context
    // (by the enclosing operator's store name) when init() runs.
    private SessionStore<KIn, VAgg> sessionStore;

    @Override
    public void init(final ProcessorContext<?, ?> context) {
        sessionStore = context.getStateStore(storeName);
    }

    /**
     * Looks up the aggregate for the given session window and returns it
     * stamped with the window end time.
     */
    @Override
    public ValueAndTimestamp<VAgg> get(final Windowed<KIn> windowedKey) {
        final KIn recordKey = windowedKey.key();
        final long sessionStart = windowedKey.window().start();
        final long sessionEnd = windowedKey.window().end();
        final VAgg aggregate = sessionStore.fetchSession(recordKey, sessionStart, sessionEnd);
        return ValueAndTimestamp.make(aggregate, sessionEnd);
    }

    @Override
    public boolean isVersioned() {
        // Session stores are not versioned state stores.
        return false;
    }
}
}
|
KTableSessionWindowValueGetter
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/selection/qualifier/errors/ErroneousMessageByNamedWithIterableMapper.java
|
{
"start": 560,
"end": 650
}
|
class ____ {
    // Iterable target property; public so the generated mapper (and the test)
    // can access it directly without an accessor.
    public Collection<String> elements;
}
// CHECKSTYLE ON
}
|
Target
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.