language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/binding/BoundAuthorMapper.java
|
{
"start": 1351,
"end": 6782
}
|
// Mapper interface exercised by the MyBatis binding tests; methods are mapped either
// by external XML statements or by the inline annotations below.
// ("____" is the dataset-masked identifier; the metadata names it BoundAuthorMapper.)
interface ____ {
// ======================================================
// Selects posts whose ids are in the given array (no inline annotation here —
// presumably mapped in XML; confirm against the mapper XML).
List<Post> findPostsInArray(Integer[] ids);
// ======================================================
// Selects posts whose ids are in the given list (presumably XML-mapped as well).
List<Post> findPostsInList(List<Integer> ids);
// ======================================================
// Insert variants; the "Invalid*" methods exercise mis-configured statements.
int insertAuthor(Author author);
int insertAuthorInvalidSelectKey(Author author);
int insertAuthorInvalidInsert(Author author);
int insertAuthorDynamic(Author author);
// ======================================================
// Constructor arg for the id, plus a @Results container for the remaining properties.
@ConstructorArgs({ @Arg(column = "AUTHOR_ID", javaType = int.class) })
// @formatter:off
@Results({
@Result(property = "username", column = "AUTHOR_USERNAME"),
@Result(property = "password", column = "AUTHOR_PASSWORD"),
@Result(property = "email", column = "AUTHOR_EMAIL"),
@Result(property = "bio", column = "AUTHOR_BIO")
})
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME,",
" PASSWORD as AUTHOR_PASSWORD,",
" EMAIL as AUTHOR_EMAIL,",
" BIO as AUTHOR_BIO",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthor(int id);
// ======================================================
// Same property mapping expressed with repeatable @Result annotations
// (no @Results wrapper).
@Result(property = "id", column = "AUTHOR_ID", id = true)
@Result(property = "username", column = "AUTHOR_USERNAME")
@Result(property = "password", column = "AUTHOR_PASSWORD")
@Result(property = "email", column = "AUTHOR_EMAIL")
@Result(property = "bio", column = "AUTHOR_BIO")
// @formatter:off
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME,",
" PASSWORD as AUTHOR_PASSWORD,",
" EMAIL as AUTHOR_EMAIL,",
" BIO as AUTHOR_BIO",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthorMapToPropertiesUsingRepeatable(int id);
// ======================================================
// All columns mapped through the constructor via a @ConstructorArgs container.
// @formatter:off
@ConstructorArgs({
@Arg(column = "AUTHOR_ID", javaType = Integer.class),
@Arg(column = "AUTHOR_USERNAME", javaType = String.class),
@Arg(column = "AUTHOR_PASSWORD", javaType = String.class),
@Arg(column = "AUTHOR_EMAIL", javaType = String.class),
@Arg(column = "AUTHOR_BIO", javaType = String.class),
@Arg(column = "AUTHOR_SECTION", javaType = Section.class)
})
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME,",
" PASSWORD as AUTHOR_PASSWORD,",
" EMAIL as AUTHOR_EMAIL,",
" BIO as AUTHOR_BIO,",
" FAVOURITE_SECTION as AUTHOR_SECTION",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthorConstructor(int id);
// ======================================================
// Same constructor mapping expressed with repeatable @Arg annotations.
@Arg(column = "AUTHOR_ID", javaType = Integer.class, id = true)
@Arg(column = "AUTHOR_USERNAME", javaType = String.class)
@Arg(column = "AUTHOR_PASSWORD", javaType = String.class)
@Arg(column = "AUTHOR_EMAIL", javaType = String.class)
@Arg(column = "AUTHOR_BIO", javaType = String.class)
@Arg(column = "AUTHOR_SECTION", javaType = Section.class)
// @formatter:off
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME,",
" PASSWORD as AUTHOR_PASSWORD,",
" EMAIL as AUTHOR_EMAIL,",
" BIO as AUTHOR_BIO,",
" FAVOURITE_SECTION as AUTHOR_SECTION",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthorMapToConstructorUsingRepeatable(int id);
// ======================================================
// A single repeatable @Arg combined with a single repeatable @Result.
@Arg(column = "AUTHOR_ID", javaType = int.class)
@Result(property = "username", column = "AUTHOR_USERNAME")
// @formatter:off
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME,",
" PASSWORD as AUTHOR_PASSWORD,",
" EMAIL as AUTHOR_EMAIL,",
" BIO as AUTHOR_BIO",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthorUsingSingleRepeatable(int id);
// ======================================================
// Mixes a @ConstructorArgs container with an additional repeatable @Arg.
// @formatter:off
@ConstructorArgs({
@Arg(column = "AUTHOR_ID", javaType = Integer.class),
@Arg(column = "AUTHOR_USERNAME", javaType = String.class),
@Arg(column = "AUTHOR_PASSWORD", javaType = String.class),
@Arg(column = "AUTHOR_EMAIL", javaType = String.class),
@Arg(column = "AUTHOR_BIO", javaType = String.class)
})
@Arg(column = "AUTHOR_SECTION", javaType = Section.class)
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME,",
" PASSWORD as AUTHOR_PASSWORD,",
" EMAIL as AUTHOR_EMAIL,",
" BIO as AUTHOR_BIO,",
" FAVOURITE_SECTION as AUTHOR_SECTION",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthorUsingBothArgAndConstructorArgs(int id);
// ======================================================
// Mixes a @Results container with an additional repeatable @Result.
@Results(@Result(property = "id", column = "AUTHOR_ID"))
@Result(property = "username", column = "AUTHOR_USERNAME")
// @formatter:off
@Select({
"SELECT ",
" ID as AUTHOR_ID,",
" USERNAME as AUTHOR_USERNAME",
"FROM AUTHOR WHERE ID = #{id}"})
// @formatter:on
Author selectAuthorUsingBothResultAndResults(int id);
// ======================================================
// Mixes @Param-annotated ints, a positional int, and a RowBounds argument.
List<Post> findThreeSpecificPosts(@Param("one") int one, RowBounds rowBounds, @Param("two") int two, int three);
// Flushes pending batch statements and returns their results.
@Flush
List<BatchResult> flush();
}
|
BoundAuthorMapper
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/filter/AbstractFilterableTest.java
|
{
"start": 5484,
"end": 10963
}
|
class
____.addFilter(notInCompositeFilterFilter);
assertInstanceOf(CompositeFilter.class, filterable.getFilter());
assertEquals(3, ((CompositeFilter) filterable.getFilter()).getFilters().size());
}
@Test
// Removing the exact filter instance that was added leaves no filter installed.
void testRemoveSimpleFilterFromSimpleFilter() {
final Filter filter = ThresholdFilter.createFilter(Level.ERROR, null, null);
filterable.addFilter(filter);
filterable.removeFilter(filter);
assertNull(filterable.getFilter());
}
@Test
// Removal matches by equals(), not identity: an equal copy removes the original.
void testRemoveSimpleEqualFilterFromSimpleFilter() {
final Filter filterOriginal = new EqualFilter("test");
final Filter filterCopy = new EqualFilter("test");
filterable.addFilter(filterOriginal);
filterable.removeFilter(filterCopy);
assertNull(filterable.getFilter());
}
@Test
// With the same filter added twice, each equals()-based removal strips one
// occurrence; the second removal leaves no filter.
void testRemoveSimpleEqualFilterFromTwoSimpleFilters() {
final Filter filterOriginal = new EqualFilter("test");
final Filter filterCopy = new EqualFilter("test");
filterable.addFilter(filterOriginal);
filterable.addFilter(filterOriginal);
filterable.removeFilter(filterCopy);
assertSame(filterOriginal, filterable.getFilter());
filterable.removeFilter(filterCopy);
assertNull(filterable.getFilter());
}
@Test
// Three equal filters collapse stepwise: composite(3) -> composite(2) -> single
// -> null as equal filters are removed one at a time.
void testRemoveSimpleEqualFilterFromMultipleSimpleFilters() {
final Filter filterOriginal = new EqualFilter("test");
final Filter filterCopy = new EqualFilter("test");
filterable.addFilter(filterOriginal);
filterable.addFilter(filterOriginal);
filterable.addFilter(filterCopy);
filterable.removeFilter(filterCopy);
assertInstanceOf(CompositeFilter.class, filterable.getFilter());
assertEquals(2, ((CompositeFilter) filterable.getFilter()).getFilters().size());
filterable.removeFilter(filterCopy);
assertEquals(filterOriginal, filterable.getFilter());
filterable.removeFilter(filterOriginal);
assertNull(filterable.getFilter());
}
@Test
// Removing null is a no-op: the installed filter is untouched.
void testRemoveNullFromSingleSimpleFilter() {
final Filter filter = ThresholdFilter.createFilter(Level.ERROR, null, null);
filterable.addFilter(filter);
filterable.removeFilter(null);
assertSame(filter, filterable.getFilter());
}
@Test
// Removing a filter that was never added (different threshold) leaves the
// installed filter in place.
void testRemoveNonExistingFilterFromSingleSimpleFilter() {
final Filter filter = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter newFilter = ThresholdFilter.createFilter(Level.WARN, null, null);
filterable.addFilter(filter);
filterable.removeFilter(newFilter);
assertSame(filter, filterable.getFilter());
}
@Test
// When a CompositeFilter was added directly, removing one of its members
// unwraps the composite down to the single remaining filter.
void testRemoveSimpleFilterFromCompositeFilter() {
final Filter filter1 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter filter2 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter compositeFilter = CompositeFilter.createFilters(filter1, filter2);
filterable.addFilter(compositeFilter);
// should remove internal filter of compositeFilter
filterable.removeFilter(filter1);
assertFalse(filterable.getFilter() instanceof CompositeFilter);
assertEquals(filter2, filterable.getFilter());
}
@Test
// Adding a composite plus a simple filter yields a 3-element composite;
// removing the top-level simple filter must NOT touch the composite's members.
void testRemoveSimpleFilterFromCompositeAndSimpleFilter() {
final Filter filter1 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter filter2 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter compositeFilter = CompositeFilter.createFilters(filter1, filter2);
final Filter anotherFilter = ThresholdFilter.createFilter(Level.WARN, null, null);
filterable.addFilter(compositeFilter);
filterable.addFilter(anotherFilter);
// should not remove internal filter of compositeFilter
filterable.removeFilter(anotherFilter);
assertInstanceOf(CompositeFilter.class, filterable.getFilter());
assertEquals(2, ((CompositeFilter) filterable.getFilter()).getFilters().size());
}
@Test
// Removing the composite instance itself clears the filter entirely.
void testRemoveCompositeFilterFromCompositeFilter() {
final Filter filter1 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter filter2 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter compositeFilter = CompositeFilter.createFilters(filter1, filter2);
filterable.addFilter(compositeFilter);
filterable.removeFilter(compositeFilter);
assertNull(filterable.getFilter());
}
@Test
// Members of an added composite can be removed individually; once only one
// filter remains, getFilter() returns it directly (unwrapped).
void testRemoveFiltersFromComposite() {
final Filter filter1 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter filter2 = ThresholdFilter.createFilter(Level.ERROR, null, null);
final Filter compositeFilter = CompositeFilter.createFilters(filter1, filter2);
final Filter anotherFilter = ThresholdFilter.createFilter(Level.WARN, null, null);
filterable.addFilter(compositeFilter);
filterable.addFilter(anotherFilter);
assertEquals(3, ((CompositeFilter) filterable.getFilter()).getFilters().size());
filterable.removeFilter(filter1);
assertEquals(2, ((CompositeFilter) filterable.getFilter()).getFilters().size());
filterable.removeFilter(filter2);
assertSame(anotherFilter, filterable.getFilter());
}
private static
|
filterable
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/processor/MapperCreationProcessor.java
|
{
"start": 20672,
"end": 38432
}
|
interface ____ and hence, the generated
// implementation as well. The check below must only be executed if there's no factory
// method that could be responsible.
reportErrorIfNoImplementationTypeIsRegisteredForInterfaceReturnType( method );
}
}
return mappingMethods;
}
// Builds a Javadoc model from the @Javadoc gem on the given element.
// Returns null when the gem is absent or fails the consistency check
// (isConsistent also reports a compiler message in that case).
private Javadoc getJavadoc(TypeElement element) {
JavadocGem javadocGem = JavadocGem.instanceOn( element );
if ( javadocGem == null || !isConsistent( javadocGem, element, messager ) ) {
return null;
}
Javadoc javadoc = new Javadoc.Builder()
.value( javadocGem.value().getValue() )
.authors( javadocGem.authors().getValue() )
.deprecated( javadocGem.deprecated().getValue() )
.since( javadocGem.since().getValue() )
.build();
return javadoc;
}
// Returns the result type the user requested via @BeanMapping's selection
// parameters, or null when no explicit result type was configured.
private Type getUserDesiredReturnType(SourceMethod method) {
SelectionParameters selectionParameters = method.getOptions().getBeanMapping().getSelectionParameters();
if ( selectionParameters != null && selectionParameters.getResultType() != null ) {
return typeFactory.getType( selectionParameters.getResultType() );
}
return null;
}
// Builds a container (iterable/map/stream) mapping method via the supplied
// builder, forwarding any @IterableMapping formatting/selection parameters.
private <M extends ContainerMappingMethod> M createWithElementMappingMethod(SourceMethod method,
MappingMethodOptions mappingMethodOptions, ContainerMappingMethodBuilder<?, M> builder) {
FormattingParameters formattingParameters = null;
SelectionParameters selectionParameters = null;
if ( mappingMethodOptions.getIterableMapping() != null ) {
formattingParameters = mappingMethodOptions.getIterableMapping().getFormattingParameters();
selectionParameters = mappingMethodOptions.getIterableMapping().getSelectionParameters();
}
return builder
.mappingContext( mappingContext )
.method( method )
.formattingParameters( formattingParameters )
.selectionParameters( selectionParameters )
.build();
}
// Resolves and applies inherited mapping options for a method: first the
// explicitly declared @InheritConfiguration / @InheritInverseConfiguration
// templates, then auto-inherited prototype methods (per the mapper's
// inheritance strategy), then @BeanMapping(ignoreByDefault). Recurses through
// extractInitializedOptions; initializingMethods doubles as the cycle guard.
private void mergeInheritedOptions(SourceMethod method, MapperOptions mapperConfig,
List<SourceMethod> availableMethods, List<SourceMethod> initializingMethods,
AnnotationMirror annotationMirror) {
if ( initializingMethods.contains( method ) ) {
// cycle detected
initializingMethods.add( method );
messager.printMessage(
method.getExecutable(),
Message.INHERITCONFIGURATION_CYCLE,
Strings.join( initializingMethods, " -> " ) );
return;
}
initializingMethods.add( method );
MappingMethodOptions mappingOptions = method.getOptions();
List<SourceMethod> applicableReversePrototypeMethods = method.getApplicableReversePrototypeMethods();
SourceMethod inverseTemplateMethod =
getInverseTemplateMethod( join( availableMethods, applicableReversePrototypeMethods ),
method,
initializingMethods,
mapperConfig );
List<SourceMethod> applicablePrototypeMethods = method.getApplicablePrototypeMethods();
SourceMethod forwardTemplateMethod =
getForwardTemplateMethod(
join( availableMethods, applicablePrototypeMethods ),
method,
initializingMethods,
mapperConfig );
// apply defined (@InheritConfiguration, @InheritInverseConfiguration) mappings
if ( forwardTemplateMethod != null ) {
mappingOptions.applyInheritedOptions( method, forwardTemplateMethod, false, annotationMirror );
}
if ( inverseTemplateMethod != null ) {
mappingOptions.applyInheritedOptions( method, inverseTemplateMethod, true, annotationMirror );
}
// apply auto inherited options
MappingInheritanceStrategyGem inheritanceStrategy = mapperConfig.getMappingInheritanceStrategy();
if ( inheritanceStrategy.isAutoInherit() ) {
// but.. there should not be an @InheritedConfiguration
if ( forwardTemplateMethod == null && inheritanceStrategy.isApplyForward() ) {
if ( applicablePrototypeMethods.size() == 1 ) {
mappingOptions.applyInheritedOptions( method, first( applicablePrototypeMethods ), false,
annotationMirror );
}
else if ( applicablePrototypeMethods.size() > 1 ) {
// auto-inheritance must be unambiguous; more than one prototype is an error
messager.printMessage(
method.getExecutable(),
Message.INHERITCONFIGURATION_MULTIPLE_PROTOTYPE_METHODS_MATCH,
Strings.join( applicablePrototypeMethods, ", " ) );
}
}
// or no @InheritInverseConfiguration
if ( inverseTemplateMethod == null && inheritanceStrategy.isApplyReverse() ) {
if ( applicableReversePrototypeMethods.size() == 1 ) {
mappingOptions.applyInheritedOptions( method, first( applicableReversePrototypeMethods ), true,
annotationMirror );
}
else if ( applicableReversePrototypeMethods.size() > 1 ) {
messager.printMessage(
method.getExecutable(),
Message.INHERITINVERSECONFIGURATION_MULTIPLE_PROTOTYPE_METHODS_MATCH,
Strings.join( applicableReversePrototypeMethods, ", " ) );
}
}
}
// @BeanMapping( ignoreByDefault = true )
if ( mappingOptions.getBeanMapping() != null && mappingOptions.getBeanMapping().isIgnoredByDefault() ) {
mappingOptions.applyIgnoreAll( method, typeFactory, mappingContext.getMessager() );
}
// mark done so extractInitializedOptions will not re-merge this method
mappingOptions.markAsFullyInitialized();
}
// Emits GENERAL_NO_IMPLEMENTATION when a non-void method returns an interface
// for which no concrete implementation type is registered.
private void reportErrorIfNoImplementationTypeIsRegisteredForInterfaceReturnType(Method method) {
if ( method.getReturnType().getTypeMirror().getKind() != TypeKind.VOID &&
method.getReturnType().isInterface() &&
method.getReturnType().getImplementationType() == null ) {
messager.printMessage( method.getExecutable(), Message.GENERAL_NO_IMPLEMENTATION, method.getReturnType() );
}
}
/**
 * Returns the configuring inverse method's options in case the given method is annotated with
 * {@code @InheritInverseConfiguration} and exactly one such configuring method can unambiguously be selected (as
 * per the source/target type and optionally the name given via {@code @InheritInverseConfiguration}).
 *
 * @param rawMethods candidate methods (available methods plus applicable reverse prototype methods)
 * @param method the method whose inverse template is searched for
 * @param initializingMethods methods currently being initialized (cycle detection)
 * @param mapperConfig the active mapper configuration
 * @return the selected, fully initialized inverse template method, or {@code null}
 */
private SourceMethod getInverseTemplateMethod(List<SourceMethod> rawMethods, SourceMethod method,
                                              List<SourceMethod> initializingMethods,
                                              MapperOptions mapperConfig) {
    SourceMethod resultMethod = null;
    InheritInverseConfigurationGem inverseConfiguration =
        InheritInverseConfigurationGem.instanceOn( method.getExecutable() );
    if ( inverseConfiguration != null ) {
        // method is configured as being inverse method, collect candidates
        List<SourceMethod> candidates = new ArrayList<>();
        for ( SourceMethod oneMethod : rawMethods ) {
            if ( method.inverses( oneMethod ) ) {
                candidates.add( oneMethod );
            }
        }
        String name = inverseConfiguration.name().get();
        if ( candidates.size() == 1 ) {
            // no ambiguity: if no configuredBy is specified, or configuredBy specified and match
            if ( name.isEmpty() ) {
                resultMethod = candidates.get( 0 );
            }
            else if ( candidates.get( 0 ).getName().equals( name ) ) {
                resultMethod = candidates.get( 0 );
            }
            else {
                reportErrorWhenNonMatchingName( candidates.get( 0 ), method, inverseConfiguration );
            }
        }
        else if ( candidates.size() > 1 ) {
            // ambiguity: find a matching method that matches configuredBy
            // (local renamed from "nameFilteredcandidates" for naming consistency
            // with getForwardTemplateMethod)
            List<SourceMethod> nameFilteredCandidates = new ArrayList<>();
            for ( SourceMethod candidate : candidates ) {
                if ( candidate.getName().equals( name ) ) {
                    nameFilteredCandidates.add( candidate );
                }
            }
            if ( nameFilteredCandidates.size() == 1 ) {
                resultMethod = nameFilteredCandidates.get( 0 );
            }
            else if ( nameFilteredCandidates.size() > 1 ) {
                reportErrorWhenSeveralNamesMatch( nameFilteredCandidates, method, inverseConfiguration );
            }
            else {
                reportErrorWhenAmbiguousReverseMapping( candidates, method, inverseConfiguration );
            }
        }
    }
    return extractInitializedOptions( resultMethod, rawMethods, mapperConfig, initializingMethods,
        getAnnotationMirror( inverseConfiguration ) );
}
// Null-safe accessor for the @InheritInverseConfiguration annotation mirror.
private AnnotationMirror getAnnotationMirror(InheritInverseConfigurationGem inverseConfiguration) {
return inverseConfiguration == null ? null : inverseConfiguration.mirror();
}
// Ensures the selected template method's own inherited options are merged
// (recursively) before it is returned; passes through null unchanged.
private SourceMethod extractInitializedOptions(SourceMethod resultMethod,
List<SourceMethod> rawMethods,
MapperOptions mapperConfig,
List<SourceMethod> initializingMethods,
AnnotationMirror annotationMirror) {
if ( resultMethod != null ) {
if ( !resultMethod.getOptions().isFullyInitialized() ) {
mergeInheritedOptions( resultMethod, mapperConfig, rawMethods, initializingMethods,
annotationMirror );
}
return resultMethod;
}
return null;
}
/**
 * Returns the configuring forward method's options in case the given method is annotated with
 * {@code @InheritConfiguration} and exactly one such configuring method can unambiguously be selected (as per the
 * source/target type and optionally the name given via {@code @InheritConfiguration}). The method cannot be marked
 * forward mapping itself (hence 'other'). And neither can it contain an {@code @InheritReverseConfiguration}
 *
 * @param rawMethods candidate methods (available methods plus applicable prototype methods)
 * @param method the method whose forward template is searched for
 * @param initializingMethods methods currently being initialized (cycle detection)
 * @param mapperConfig the active mapper configuration
 * @return the selected, fully initialized forward template method, or {@code null}
 */
private SourceMethod getForwardTemplateMethod(List<SourceMethod> rawMethods, SourceMethod method,
List<SourceMethod> initializingMethods,
MapperOptions mapperConfig) {
SourceMethod resultMethod = null;
InheritConfigurationGem inheritConfiguration =
InheritConfigurationGem.instanceOn( method.getExecutable() );
if ( inheritConfiguration != null ) {
List<SourceMethod> candidates = new ArrayList<>();
for ( SourceMethod oneMethod : rawMethods ) {
// method must be similar but not equal
if ( method.canInheritFrom( oneMethod ) && !( oneMethod.equals( method ) ) ) {
candidates.add( oneMethod );
}
}
String name = inheritConfiguration.name().get();
if ( candidates.size() == 1 ) {
// no ambiguity: if no configuredBy is specified, or configuredBy specified and match
SourceMethod sourceMethod = first( candidates );
if ( name.isEmpty() ) {
resultMethod = sourceMethod;
}
else if ( sourceMethod.getName().equals( name ) ) {
resultMethod = sourceMethod;
}
else {
reportErrorWhenNonMatchingName( sourceMethod, method, inheritConfiguration );
}
}
else if ( candidates.size() > 1 ) {
// ambiguity: find a matching method that matches configuredBy
List<SourceMethod> nameFilteredCandidates = new ArrayList<>();
for ( SourceMethod candidate : candidates ) {
if ( candidate.getName().equals( name ) ) {
nameFilteredCandidates.add( candidate );
}
}
if ( nameFilteredCandidates.size() == 1 ) {
resultMethod = first( nameFilteredCandidates );
}
else if ( nameFilteredCandidates.size() > 1 ) {
reportErrorWhenSeveralNamesMatch( nameFilteredCandidates, method, inheritConfiguration );
}
else {
reportErrorWhenAmbiguousMapping( candidates, method, inheritConfiguration );
}
}
}
return extractInitializedOptions( resultMethod, rawMethods, mapperConfig, initializingMethods,
getAnnotationMirror( inheritConfiguration ) );
}
// Null-safe accessor for the @InheritConfiguration annotation mirror.
private AnnotationMirror getAnnotationMirror(InheritConfigurationGem inheritConfiguration) {
return inheritConfiguration == null ? null : inheritConfiguration.mirror();
}
// Reports ambiguity for @InheritInverseConfiguration: DUPLICATES when no name
// was given, INVALID_NAME when a name was given but matched none.
private void reportErrorWhenAmbiguousReverseMapping(List<SourceMethod> candidates, SourceMethod method,
InheritInverseConfigurationGem inverseGem) {
List<String> candidateNames = new ArrayList<>();
for ( SourceMethod candidate : candidates ) {
candidateNames.add( candidate.getName() );
}
String name = inverseGem.name().get();
if ( name.isEmpty() ) {
messager.printMessage( method.getExecutable(),
inverseGem.mirror(),
Message.INHERITINVERSECONFIGURATION_DUPLICATES,
Strings.join( candidateNames, "(), " )
);
}
else {
messager.printMessage( method.getExecutable(),
inverseGem.mirror(),
Message.INHERITINVERSECONFIGURATION_INVALID_NAME,
Strings.join( candidateNames, "(), " ),
name
);
}
}
// Reports that the name given in @InheritInverseConfiguration matched more
// than one candidate method.
private void reportErrorWhenSeveralNamesMatch(List<SourceMethod> candidates, SourceMethod method,
InheritInverseConfigurationGem inverseGem) {
messager.printMessage( method.getExecutable(),
inverseGem.mirror(),
Message.INHERITINVERSECONFIGURATION_DUPLICATE_MATCHES,
inverseGem.name().get(),
Strings.join( candidates, ", " )
);
}
// Reports that the single inverse candidate's name does not match the name
// given in @InheritInverseConfiguration.
private void reportErrorWhenNonMatchingName(SourceMethod onlyCandidate, SourceMethod method,
InheritInverseConfigurationGem inverseGem) {
messager.printMessage( method.getExecutable(),
inverseGem.mirror(),
Message.INHERITINVERSECONFIGURATION_NO_NAME_MATCH,
inverseGem.name().get(),
onlyCandidate.getName()
);
}
// Reports ambiguity for @InheritConfiguration: DUPLICATES when no name was
// given, INVALIDNAME when a name was given but matched none.
private void reportErrorWhenAmbiguousMapping(List<SourceMethod> candidates, SourceMethod method,
InheritConfigurationGem gem) {
List<String> candidateNames = new ArrayList<>();
for ( SourceMethod candidate : candidates ) {
candidateNames.add( candidate.getName() );
}
String name = gem.name().get();
if ( name.isEmpty() ) {
messager.printMessage( method.getExecutable(),
gem.mirror(),
Message.INHERITCONFIGURATION_DUPLICATES,
Strings.join( candidateNames, "(), " )
);
}
else {
messager.printMessage(
method.getExecutable(),
gem.mirror(),
Message.INHERITCONFIGURATION_INVALIDNAME,
Strings.join( candidateNames, "(), " ),
name
);
}
}
// Reports that the name given in @InheritConfiguration matched more than one
// candidate method.
private void reportErrorWhenSeveralNamesMatch(List<SourceMethod> candidates, SourceMethod method,
InheritConfigurationGem gem) {
messager.printMessage(
method.getExecutable(),
gem.mirror(),
Message.INHERITCONFIGURATION_DUPLICATE_MATCHES,
gem.name().get(),
Strings.join( candidates, ", " )
);
}
// Reports that the single forward candidate's name does not match the name
// given in @InheritConfiguration.
private void reportErrorWhenNonMatchingName(SourceMethod onlyCandidate, SourceMethod method,
InheritConfigurationGem gem) {
messager.printMessage(
method.getExecutable(),
gem.mirror(),
Message.INHERITCONFIGURATION_NO_NAME_MATCH,
gem.name().get(),
onlyCandidate.getName()
);
}
// A @Javadoc gem with no elements at all is inconsistent: reports
// JAVADOC_NO_ELEMENTS and returns false; otherwise true.
private boolean isConsistent( JavadocGem gem, TypeElement element, FormattingMessager messager ) {
if ( !gem.value().hasValue()
&& !gem.authors().hasValue()
&& !gem.deprecated().hasValue()
&& !gem.since().hasValue() ) {
messager.printMessage( element, gem.mirror(), Message.JAVADOC_NO_ELEMENTS );
return false;
}
return true;
}
}
|
type
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/slowtaskdetector/ExecutionTimeBasedSlowTaskDetector.java
|
{
"start": 13136,
"end": 14863
}
|
/**
 * Immutable (execution time, input bytes) pair, ordered by time-per-byte so
 * executions can be ranked for slow-task baseline computation.
 * ("____" is the dataset-masked identifier ExecutionTimeWithInputBytes.)
 */
class ____ implements Comparable<ExecutionTimeWithInputBytes> {
    private final long executionTime;
    private final long inputBytes;

    public ExecutionTimeWithInputBytes(long executionTime, long inputBytes) {
        this.executionTime = executionTime;
        this.inputBytes = inputBytes;
    }

    public long getExecutionTime() {
        return executionTime;
    }

    public long getInputBytes() {
        return inputBytes;
    }

    @Override
    public int compareTo(ExecutionTimeWithInputBytes other) {
        // In order to ensure the stability of comparison, it requires both elements' input
        // bytes should be both valid or both UNKNOWN, unless the execution time is 0.
        // (When baselineRatio is 0, a baseline of 0 execution time will be generated.)
        if (inputBytes == NUM_BYTES_UNKNOWN || other.getInputBytes() == NUM_BYTES_UNKNOWN) {
            if (inputBytes == NUM_BYTES_UNKNOWN && other.getInputBytes() == NUM_BYTES_UNKNOWN
                    || executionTime == 0
                    || other.executionTime == 0) {
                // Fix: use Long.compare instead of casting the long difference to
                // int — the subtraction can overflow/truncate for large values,
                // which would violate the Comparable contract.
                return Long.compare(executionTime, other.getExecutionTime());
            } else {
                throw new IllegalArgumentException(
                        "Both compared elements should be NUM_BYTES_UNKNOWN.");
            }
        }
        // Compare throughput (time per byte); Double.MIN_VALUE guards against
        // division by zero when inputBytes is 0.
        return Double.compare(
                (double) executionTime / Math.max(inputBytes, Double.MIN_VALUE),
                (double) other.getExecutionTime()
                        / Math.max(other.getInputBytes(), Double.MIN_VALUE));
    }
}
}
|
ExecutionTimeWithInputBytes
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/NamenodePriorityComparator.java
|
{
"start": 1234,
"end": 2214
}
|
/**
 * Orders namenode contexts by service state first (enum declaration order,
 * documented here as priority order), then — for equal states — by
 * modification date, newest first.
 * ("____" is the dataset-masked identifier NamenodePriorityComparator.)
 */
class ____
    implements Comparator<FederationNamenodeContext>, Serializable {

  private static final long serialVersionUID = 2304924292036293331L;

  @Override
  public int compare(FederationNamenodeContext o1,
      FederationNamenodeContext o2) {
    FederationNamenodeServiceState state1 = o1.getState();
    FederationNamenodeServiceState state2 = o2.getState();
    if (state1 == state2) {
      // Both have the same state, use mod dates
      return compareModDates(o1, o2);
    } else {
      // Enum is ordered by priority
      return state1.compareTo(state2);
    }
  }

  /**
   * Compare the modification dates.
   *
   * @param o1 Context 1.
   * @param o2 Context 2.
   * @return Comparison between dates (reverse order: newest sorts first).
   */
  private int compareModDates(FederationNamenodeContext o1,
      FederationNamenodeContext o2) {
    // Reverse sort, lowest position is highest priority.
    // Fix: Long.compare avoids the int-cast truncation/overflow of subtracting
    // two long timestamps, which could return a wrong sign for large deltas.
    return Long.compare(o2.getDateModified(), o1.getDateModified());
  }
}
|
NamenodePriorityComparator
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AInputStreamLeakage.java
|
{
"start": 1934,
"end": 6684
}
|
// Integration test verifying that an S3A input stream abandoned without close()
// is reported by the stream-leak detector when it is garbage collected.
// ("____" is the dataset-masked identifier ITestS3AInputStreamLeakage.)
class ____ extends AbstractS3ATestBase {
/**
* How big a file to create?
*/
public static final int FILE_SIZE = 1024;
public static final byte[] DATASET = dataset(FILE_SIZE, '0', 10);
/**
* Time to wait after a GC/finalize is triggered before looking at the log.
*/
public static final long GC_DELAY = Duration.ofSeconds(1).toMillis();
@BeforeEach
@Override
// Skips the whole class unless the filesystem advertises leak detection.
public void setup() throws Exception {
super.setup();
assume("Stream leak detection not available",
getFileSystem().hasCapability(STREAM_LEAKS));
}
/**
* This test forces a GC of an open file then verifies that the
* log contains the error message.
* <p>
* Care is needed here to ensure that no strong references are held to the
* stream, otherwise: no GC.
* <p>
* It also assumes that {@code System.gc()} will do enough of a treewalk to
* prepare the stream for garbage collection (a weak ref is used to verify
* that it was removed as a reference), and that
* {@code System.runFinalization()} will then
* invoke the finalization.
* <p>
* The finalize code runs its own thread "Finalizer"; this is async enough
* that assertions on log entries only work if there is a pause after
* finalization is triggered and the log is reviewed.
* <p>
* The stream leak counter of the FileSystem is also updated; this
* is verified.
*/
@Test
public void testFinalizer() throws Throwable {
Path path = methodPath();
// Analytics accelerator currently does not support stream leak detection. This work is tracked
// in https://issues.apache.org/jira/browse/HADOOP-19451
skipIfAnalyticsAcceleratorEnabled(getConfiguration(),
"Analytics Accelerator currently does not support leak detection");
final S3AFileSystem fs = getFileSystem();
ContractTestUtils.createFile(fs, path, true, DATASET);
// DO NOT use try-with-resources; this
// test MUST be able to remove all references
// to the stream
FSDataInputStream in = fs.open(path);
try {
Assertions.assertThat(in.hasCapability(STREAM_LEAKS))
.describedAs("Stream leak detection not supported in: %s", in.getWrappedStream())
.isTrue();
Assertions.assertThat(in.read())
.describedAs("first byte read from %s", in)
.isEqualTo(DATASET[0]);
// get a weak ref so that after a GC we can look for it and verify it is gone
// NOTE(review): ObjectInputStream here is presumably the S3A wrapped-stream
// type, not java.io.ObjectInputStream — confirm against the imports.
WeakReference<ObjectInputStream> wrs =
new WeakReference<>((ObjectInputStream) in.getWrappedStream());
boolean isClassicStream = wrs.get() instanceof S3AInputStream;
final IOStatistics fsStats = fs.getIOStatistics();
final long leaks = fsStats.counters().getOrDefault(STREAM_LEAKS, 0L);
// Capture the logs
GenericTestUtils.LogCapturer logs =
captureLogs(LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME));
LOG.info("captured log");
// remove strong reference to the stream
in = null;
// force the gc.
System.gc();
// make sure the GC removed the Stream.
Assertions.assertThat(wrs.get())
.describedAs("weak stream reference wasn't GC'd")
.isNull();
// finalize
System.runFinalization();
// finalize is async, so add a brief wait for it to be called.
// without this the log may or may not be empty
Thread.sleep(GC_DELAY);
LOG.info("end of log");
// check the log
logs.stopCapturing();
final String output = logs.getOutput();
LOG.info("output of leak log is {}", output);
Assertions.assertThat(output)
.describedAs("output from the logs during GC")
.contains("Stream not closed") // stream release
.contains(path.toUri().toString()) // path
.contains(Thread.currentThread().getName()) // thread
.contains("testFinalizer"); // stack
// verify that leakages are added to the FS statistics
// for classic stream the counter is 1, but for prefetching
// the count is greater -the inner streams can also
// get finalized while open so increment the leak counter
// multiple times.
assertThatStatisticCounter(fsStats, STREAM_LEAKS)
.isGreaterThanOrEqualTo(leaks + 1);
if (isClassicStream) {
Assertions.assertThat(output)
.describedAs("output from the logs during GC")
.contains("drain or abort reason finalize()"); // stream release
assertThatStatisticCounter(fsStats, STREAM_LEAKS)
.isEqualTo(leaks + 1);
}
} finally {
// the stream may have survived (assertion failure path): clean it up
if (in != null) {
IOUtils.cleanupWithLogger(LOG, in);
}
}
}
}
|
ITestS3AInputStreamLeakage
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/RestApiManualIT.java
|
{
"start": 5438,
"end": 47054
}
|
// Merchandise__c subtype used for XML deserialization tests; the "type" field
// captures the SObject type name present in XML responses.
class ____ extends Merchandise__c {
// XML response contains a type string with the SObject type name
private String type;
public String getType() {
return type;
}
public void setType(final String type) {
this.type = type;
}
}
private static final AtomicInteger NEW_LINE_ITEM_ID = new AtomicInteger(100);
private static final String TEST_DOCUMENT_ID = "Test Document";
private static final AtomicInteger TEST_LINE_ITEM_ID = new AtomicInteger(1);
private String merchandiseId;
private String accountId;
private String contactId;
@AfterEach
// Cleans up test data: deletes the merchandise record created in setupData and
// all line items (via the delete route); responses are intentionally ignored.
public void removeData() {
template.request("salesforce:deleteSObject?sObjectName=Merchandise__c&sObjectId=" + merchandiseId, (Processor) e -> {
// NOOP
});
template.requestBody("direct:deleteLineItems", "");
}
@BeforeEach
// Creates one Merchandise__c record per test and remembers its id for cleanup.
public void setupData() {
final Merchandise__c merchandise = new Merchandise__c();
merchandise.setName("Test Merchandise");
merchandise.setPrice__c(10.0);
merchandise.setTotal_Inventory__c(100.0);
merchandise.setDescription__c("Test Merchandise!");
final CreateSObjectResult merchandiseResult
= template().requestBody("salesforce:createSObject", merchandise, CreateSObjectResult.class);
merchandiseId = merchandiseResult.getId();
}
/**
 * Creates a single Line_Item__c named with the next sequential test id.
 * Callers never use the created record's id (cleanup goes through the
 * "direct:deleteLineItems" route), so the create result is not retained.
 */
private void createLineItem() {
    Line_Item__c lineItem = new Line_Item__c();
    final String lineItemId = String.valueOf(TEST_LINE_ITEM_ID.incrementAndGet());
    lineItem.setName(lineItemId);
    // Fix: the CreateSObjectResult was previously bound to an unused local;
    // the request is still made and its result discarded explicitly.
    template().requestBody("direct:createLineItem", lineItem, CreateSObjectResult.class);
}
// Creates `count` Line_Item__c fixtures in one call via the
// composite-collections route (which batches them in groups of 200).
private void createLineItems(int count) {
    List<Line_Item__c> lineItems = new ArrayList<>();
    for (int i = 0; i < count; i++) {
        Line_Item__c lineItem = new Line_Item__c();
        final String lineItemId = String.valueOf(TEST_LINE_ITEM_ID.incrementAndGet());
        lineItem.setName(lineItemId);
        lineItems.add(lineItem);
    }
    template().requestBody("direct:createLineItems", lineItems);
}
// Creates an Account and a Contact linked to it through the Account's
// external id field; stores both ids for later cleanup.
private void createAccountAndContact() {
    final Account account = new Account();
    account.setName("Child Test");
    String accountExternalId = UUID.randomUUID().toString();
    account.setExternal_Id__c(accountExternalId);
    CreateSObjectResult accountResult
            = template().requestBody("salesforce:createSObject", account, CreateSObjectResult.class);
    accountId = accountResult.getId();
    // Reference the parent Account by external id rather than Salesforce id —
    // this exercises the relationship-by-external-id create path.
    final Account accountRef = new Account();
    accountRef.setExternal_Id__c(accountExternalId);
    final Contact contact = new Contact();
    contact.setAccount(accountRef);
    contact.setLastName("RelationshipTest");
    CreateSObjectResult contactResult
            = template().requestBody("salesforce:createSObject", contact, CreateSObjectResult.class);
    contactId = contactResult.getId();
}
// Best-effort removal of the records created by createAccountAndContact();
// replies are ignored so cleanup never fails a test.
private void deleteAccountAndContact() {
    if (accountId != null) {
        template.request("salesforce:deleteSObject?sObjectName=Account&sObjectId=" + accountId, (Processor) e -> {
            // NOOP
        });
    }
    if (contactId != null) {
        template.request("salesforce:deleteSObject?sObjectName=Contact&sObjectId=" + contactId, (Processor) e -> {
            // NOOP
        });
    }
}
// Exercises the apexCall producer: GET via URI template, GET via query param,
// PATCH with a request wrapper, and a POST that returns a custom error body.
@Test
public void testApexCall() throws Exception {
    // request merchandise with id in URI template
    Merchandise__c merchandise
            = template().requestBodyAndHeader("direct:apexCallGet", null, "id", merchandiseId, Merchandise__c.class);
    assertNotNull(merchandise);
    // request merchandise with id as query param
    merchandise = template().requestBodyAndHeader("direct:apexCallGetWithId", null,
            SalesforceEndpointConfig.APEX_QUERY_PARAM_PREFIX + "id", merchandiseId,
            Merchandise__c.class);
    assertNotNull(merchandise);
    // patch merchandise
    // clear fields that won't be modified
    merchandise.clearBaseFields();
    merchandise.setId(merchandiseId);
    merchandise.setPrice__c(null);
    merchandise.setTotal_Inventory__c(null);
    merchandise = template().requestBody("direct:apexCallPatch", new MerchandiseRequest(merchandise), Merchandise__c.class);
    assertNotNull(merchandise);
    // a POST that fails server-side must surface the raw response body on the
    // SalesforceException for the caller to inspect.
    Exchange exchange = new DefaultExchange(context);
    template.send("direct:apexCallPostCustomError", exchange);
    SalesforceException exception = exchange.getException(SalesforceException.class);
    assertNotNull(exception);
    assertEquals("test response", IOUtils.toString(exception.getResponseContent(), StandardCharsets.UTF_8));
}
// Same GET call but without sObjectName on the endpoint — the response type
// must be detected from the payload.
@Test
public void testApexCallDetectResponseType() throws Exception {
    // request merchandise with id in URI template
    Merchandise__c merchandise
            = template().requestBodyAndHeader("direct:apexCallGetDetectResponseType", null, "id", merchandiseId,
                    Merchandise__c.class);
    assertNotNull(merchandise);
}
// The HTTP status code and reason text must be propagated as message headers.
@Test
public void returnsHttpResponseStatusAndText() {
    Exchange exchange = new DefaultExchange(context);
    template().send("direct:query", exchange);
    assertEquals("200", exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));
    assertNotNull(exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_TEXT));
}
// Full create/update/delete round trip for a Merchandise__c record.
@Test
public void testCreateUpdateDelete() throws Exception {
    final Merchandise__c merchandise = new Merchandise__c();
    merchandise.setName("Wee Wee Wee Plane");
    merchandise.setDescription__c("Microlite plane");
    merchandise.setPrice__c(2000.0);
    merchandise.setTotal_Inventory__c(50.0);
    final CreateSObjectResult result
            = template().requestBody("salesforce:createSObject", merchandise, CreateSObjectResult.class);
    assertNotNull(result);
    assertTrue(result.getSuccess(), "Create success");
    // test JSON update
    // make the plane cheaper
    merchandise.setPrice__c(1500.0);
    // change inventory to half
    merchandise.setTotal_Inventory__c(25.0);
    // also need to set the Id
    merchandise.setId(result.getId());
    assertNotNull(
            template().requestBodyAndHeader("salesforce:updateSObject", merchandise, SalesforceEndpointConfig.SOBJECT_ID,
                    result.getId()));
    // delete the newly created SObject
    assertNotNull(template().requestBody("salesforce:deleteSObject?sObjectName=Merchandise__c", result.getId()));
}
// Creates a ContentVersion whose binary payload is streamed, exercising the
// multipart create path.
@Test
public void testCreateMultipart() {
    final ContentVersion cv = new ContentVersion();
    cv.setPathOnClient("camel-test-doc.pdf");
    cv.setVersionDataBinary(getClass().getClassLoader().getResourceAsStream("camel-test-doc.pdf"));
    final CreateSObjectResult result
            = template.requestBody("salesforce:createSObject?sObjectName=ContentVersion",
                    cv,
                    CreateSObjectResult.class);
    assertTrue(result.getSuccess());
}
// Creates a Document with a binary body, then updates it (rename plus new
// body) via the multipart update path.
@Test
public void testUpdateMultipart() {
    // Look up the pre-provisioned "Test Documents" folder to file the doc under.
    final QueryRecordsFolder queryResult = template.requestBody("salesforce:query" +
            "?sObjectQuery=SELECT Id FROM Folder WHERE Name = 'Test Documents'"
            +
            "&sObjectName=QueryRecordsFolder",
            null, QueryRecordsFolder.class);
    final Folder folder = queryResult.getRecords().get(0);
    // Create a Document
    final Document doc = new Document();
    doc.setFolderId(folder.getId());
    doc.setName("camel-test-doc.pdf");
    doc.setBodyBinary(getClass().getClassLoader().getResourceAsStream("camel-test-doc.pdf"));
    final CreateSObjectResult createResult = template.requestBody(
            "salesforce:createSObject?sObjectName=Document",
            doc,
            CreateSObjectResult.class);
    assertTrue(createResult.getSuccess());
    assertNotNull(createResult.getId());
    // Update the Document (e.g., change the name)
    final Document updateDoc = new Document();
    updateDoc.setId(createResult.getId());
    updateDoc.setName("camel-test-doc-updated.pdf");
    updateDoc.setBodyBinary(getClass().getClassLoader().getResourceAsStream("camel-test-doc.pdf"));
    final Object updateResult = template.requestBody(
            "salesforce:updateSObject?sObjectName=Document",
            updateDoc);
    assertNotNull(updateResult);
}
// Creates a Contact whose parent Account is referenced by external id rather
// than Salesforce id, then deletes both.
@Test
public void testRelationshipCreateDelete() throws Exception {
    final Account account = new Account();
    account.setName("Account 1");
    String accountExternalId = UUID.randomUUID().toString();
    account.setExternal_Id__c(accountExternalId);
    final CreateSObjectResult accountResult
            = template().requestBody("salesforce:createSObject", account, CreateSObjectResult.class);
    assertNotNull(accountResult);
    assertTrue(accountResult.getSuccess(), "Create success");
    // Parent reference carries only the external id.
    final Account accountRef = new Account();
    accountRef.setExternal_Id__c(accountExternalId);
    final Contact contact = new Contact();
    contact.setAccount(accountRef);
    contact.setLastName("RelationshipTest");
    final CreateSObjectResult contactResult
            = template().requestBody("salesforce:createSObject", contact, CreateSObjectResult.class);
    assertNotNull(contactResult);
    assertTrue(contactResult.getSuccess(), "Create success");
    // delete the Contact
    template().requestBodyAndHeader("salesforce:deleteSObject", contactResult.getId(), "sObjectName", "Contact");
    // delete the Account
    template().requestBodyAndHeader("salesforce:deleteSObject", accountResult.getId(), "sObjectName", "Account");
}
// A field listed in fieldsToNull must be blanked by an update; verified by
// re-reading the record afterwards.
@Test
public void testFieldsToNull() throws Exception {
    final Account account = new Account();
    account.setName("Account 1");
    account.setSite("test site");
    final CreateSObjectResult accountResult
            = template().requestBody("salesforce:createSObject", account, CreateSObjectResult.class);
    assertNotNull(accountResult);
    assertTrue(accountResult.getSuccess(), "Create success");
    account.setId(accountResult.getId());
    account.setSite(null);
    account.getFieldsToNull().add("Site");
    final Object updateAccountResult = template().requestBodyAndHeader("salesforce:updateSObject", account,
            SalesforceEndpointConfig.SOBJECT_ID, account.getId());
    assertNotNull(updateAccountResult);
    Account updatedAccount = (Account) template().requestBodyAndHeader("salesforce:getSObject?sObjectFields=Id,Name,Site",
            account.getId(), "sObjectName", "Account");
    assertNull(updatedAccount.getSite());
    // delete the Account
    template().requestBodyAndHeader("salesforce:deleteSObject", accountResult.getId(), "sObjectName", "Account");
}
// Re-parents an existing Contact onto an Account via external-id reference in
// an update call, then cleans both records up.
@Test
public void testRelationshipUpdate() throws Exception {
    final Contact contact = new Contact();
    contact.setLastName("RelationshipTest");
    final CreateSObjectResult contactResult
            = template().requestBody("salesforce:createSObject", contact, CreateSObjectResult.class);
    assertNotNull(contactResult);
    assertTrue(contactResult.getSuccess(), "Create success");
    final Account account = new Account();
    account.setName("Account 1");
    String accountExternalId = UUID.randomUUID().toString();
    account.setExternal_Id__c(accountExternalId);
    final CreateSObjectResult accountResult
            = template().requestBody("salesforce:createSObject", account, CreateSObjectResult.class);
    assertNotNull(accountResult);
    assertTrue(accountResult.getSuccess(), "Create success");
    final Account accountRef = new Account();
    accountRef.setExternal_Id__c(accountExternalId);
    contact.setAccount(accountRef);
    contact.setId(contactResult.getId());
    final Object updateContactResult = template().requestBodyAndHeader("salesforce:updateSObject", contact,
            SalesforceEndpointConfig.SOBJECT_ID, contact.getId());
    assertNotNull(updateContactResult);
    // delete the Contact
    template().requestBodyAndHeader("salesforce:deleteSObject", contactResult.getId(), "sObjectName", "Contact");
    // delete the Account
    template().requestBodyAndHeader("salesforce:deleteSObject", accountResult.getId(), "sObjectName", "Account");
}
// Create/update/delete round trip for a Task, exercising ZonedDateTime
// serialization on the ActivityDate field.
@Test
public void testCreateUpdateDeleteTasks() throws Exception {
    final Task taken = new Task();
    taken.setDescription("Task1");
    taken.setActivityDate(ZonedDateTime.of(1700, 1, 2, 3, 4, 5, 6, ZoneId.systemDefault()));
    final CreateSObjectResult result = template().requestBody("salesforce:createSObject", taken, CreateSObjectResult.class);
    assertNotNull(result);
    assertTrue(result.getSuccess(), "Create success");
    // test JSON update
    // make the plane cheaper
    taken.setId(result.getId());
    taken.setActivityDate(ZonedDateTime.of(1991, 1, 2, 3, 4, 5, 6, ZoneId.systemDefault()));
    assertNotNull(template().requestBodyAndHeader("salesforce:updateSObject", taken, SalesforceEndpointConfig.SOBJECT_ID,
            result.getId()));
    // delete the newly created SObject
    assertNotNull(template().requestBody("salesforce:deleteSObject?sObjectName=Task", result.getId()));
}
// Exercises the external-id operations: getSObjectWithId, upsert (create and
// update paths) and deleteSObjectWithId, all keyed on the Name field.
@Test
public void testCreateUpdateDeleteWithId() throws Exception {
    Line_Item__c lineItem = new Line_Item__c();
    final String lineItemId = String.valueOf(TEST_LINE_ITEM_ID.incrementAndGet());
    lineItem.setName(lineItemId);
    CreateSObjectResult result = template().requestBody("direct:createLineItem", lineItem, CreateSObjectResult.class);
    assertNotNull(result);
    assertTrue(result.getSuccess());
    // get line item with Name 1
    lineItem = template().requestBody("salesforce:getSObjectWithId?sObjectIdName=Name&sObjectName=Line_Item__c",
            lineItemId, Line_Item__c.class);
    assertNotNull(lineItem);
    // test insert with id
    // set the unit price and sold
    lineItem.setUnit_Price__c(1000.0);
    lineItem.setUnits_Sold__c(50.0);
    // update line item with Name NEW_LINE_ITEM_ID
    final String newLineItemId = String.valueOf(NEW_LINE_ITEM_ID.incrementAndGet());
    lineItem.setName(newLineItemId);
    UpsertSObjectResult upsertResult = template().requestBodyAndHeader("direct:upsertSObject", lineItem,
            SalesforceEndpointConfig.SOBJECT_EXT_ID_VALUE, newLineItemId, UpsertSObjectResult.class);
    assertNotNull(upsertResult);
    assertTrue(upsertResult.getSuccess());
    // clear read only parent type fields
    lineItem.setMerchandise__c(null);
    // change the units sold
    lineItem.setUnits_Sold__c(25.0);
    // update line item with Name NEW_LINE_ITEM_ID
    upsertResult = template().requestBodyAndHeader("direct:upsertSObject", lineItem,
            SalesforceEndpointConfig.SOBJECT_EXT_ID_VALUE, newLineItemId, UpsertSObjectResult.class);
    assertNotNull(upsertResult);
    // delete the SObject with Name NEW_LINE_ITEM_ID
    assertNotNull(template().requestBody("direct:deleteSObjectWithId", newLineItemId));
}
// Upserting an unseen external id must take the create path
// (result.getCreated() == true).
@Test
public void testUpsert() throws Exception {
    Line_Item__c lineItem = new Line_Item__c();
    final String lineItemId = String.valueOf(TEST_LINE_ITEM_ID.incrementAndGet());
    lineItem.setName(lineItemId);
    UpsertSObjectResult result = template().requestBody("direct:upsertSObject", lineItem, UpsertSObjectResult.class);
    assertNotNull(result);
    assertNotNull(lineItem.getName());
    assertTrue(result.getSuccess());
    assertTrue(result.getCreated());
}
// getBasicInfo must return recent items for Merchandise__c; the first recent
// id becomes the fixture id for subsequent lookups.
@Test
public void testGetBasicInfo() throws Exception {
    final SObjectBasicInfo objectBasicInfo = template().requestBody("direct:getBasicInfo", null, SObjectBasicInfo.class);
    assertNotNull(objectBasicInfo);
    // set test Id for testGetSObject
    assertFalse(objectBasicInfo.getRecentItems().isEmpty(), "RecentItems is empty");
    merchandiseId = objectBasicInfo.getRecentItems().get(0).getId();
}
// Fetches a Document by Name, then streams its Body blob field and checks the
// stream is non-empty.
@Test
public void testGetBlobField() throws Exception {
    // get document with Name "Test Document"
    final HashMap<String, Object> headers = new HashMap<>();
    headers.put(SalesforceEndpointConfig.SOBJECT_NAME, "Document");
    headers.put(SalesforceEndpointConfig.SOBJECT_EXT_ID_NAME, "Name");
    final Document document
            = template().requestBodyAndHeaders("salesforce:getSObjectWithId", TEST_DOCUMENT_ID, headers, Document.class);
    assertNotNull(document);
    // get Body field for this document
    try (final InputStream body = template().requestBody("direct:getBlobField", document, InputStream.class)) {
        assertNotNull(body);
        assertTrue(body.available() > 0);
    }
}
/**
 * Uploads a binary document as a ContentVersion by base64-encoding the bytes
 * into the versionData field (the REST API expects base64 text there).
 */
@Test
public void testUploadBlob() throws Exception {
    final byte[] bytes;
    // try-with-resources: the stream was previously opened but never closed.
    try (InputStream inputStream = this.getClass().getResourceAsStream("/camel-test-doc.pdf")) {
        bytes = inputStream.readAllBytes();
    }
    // Jackson serializes byte[] as a base64 string, matching the API contract.
    ObjectMapper mapper = new ObjectMapper();
    String enc = mapper.convertValue(bytes, String.class);
    ContentVersion cv = new ContentVersion();
    cv.setVersionData(enc);
    cv.setPathOnClient("camel-test-doc.pdf");
    cv.setTitle("Camel Test Doc");
    final CreateSObjectResult result = template.requestBody("salesforce:createSObject", cv, CreateSObjectResult.class);
    assertNotNull(result.getId());
}
// getDescription must return the full Merchandise__c metadata description.
@Test
public void testGetDescription() throws Exception {
    final SObjectDescription sObjectDescription
            = template().requestBody("direct:getDescription", null, SObjectDescription.class);
    assertNotNull(sObjectDescription);
}
// getGlobalObjects must return the org-wide SObject catalogue.
@Test
public void testGetGlobalObjects() throws Exception {
    final GlobalObjects globalObjects = template().requestBody("direct:getGlobalObjects", null, GlobalObjects.class);
    assertNotNull(globalObjects);
}
// getResources returns a name-to-URL map of available REST resources.
@Test
public void testGetResources() throws Exception {
    @SuppressWarnings("unchecked")
    final Map<String, String> resources = (Map<String, String>) template().requestBody("direct:getResources", "");
    assertNotNull(resources);
    assertTrue(resources.containsKey("metadata"));
}
// The endpoint restricts sObjectFields to Description__c,Price__c, so
// Total_Inventory__c must come back null.
@Test
public void testGetSObject() throws Exception {
    final Merchandise__c merchandise = template().requestBody("direct:getSObject", merchandiseId, Merchandise__c.class);
    assertNotNull(merchandise);
    assertNull(merchandise.getTotal_Inventory__c());
    assertNotNull(merchandise.getPrice__c());
}
// getVersions may return either a Versions wrapper or a raw List depending on
// payload format; both shapes are accepted here.
@Test
public void testGetVersions() throws Exception {
    // test getVersions doesn't need a body
    // assert expected result
    final Object o = template().requestBody("direct:getVersions", (Object) null);
    List<Version> versions = null;
    if (o instanceof Versions) {
        versions = ((Versions) o).getVersions();
    } else {
        @SuppressWarnings("unchecked")
        final List<Version> tmp = (List<Version>) o;
        versions = tmp;
    }
    assertNotNull(versions);
}
// Fetches a platform-event schema by event name, in both the default
// (expanded) and "compact" formats.
@Test
public void testGetEventSchemaByEventName() {
    final Object expandedResult
            = template.requestBodyAndHeader("salesforce:getEventSchema", "", EVENT_NAME, "BatchApexErrorEvent");
    assertNotNull(expandedResult);
    final Object compactResult = template.requestBodyAndHeaders("salesforce:getEventSchema", "",
            Map.of(EVENT_NAME, "BatchApexErrorEvent",
                    SalesforceEndpointConfig.EVENT_SCHEMA_FORMAT, "compact"));
    assertNotNull(compactResult);
}
// Resolves a schema id ("uuid") from a by-name lookup, then re-fetches the
// schema by that id.
@Test
public void testGetEventSchemaBySchemaId() throws IOException {
    final Object schemaResult = template.requestBodyAndHeaders("salesforce:getEventSchema", "",
            Map.of(EVENT_NAME, "BatchApexErrorEvent",
                    SalesforceEndpointConfig.EVENT_SCHEMA_FORMAT, "compact"));
    assertNotNull(schemaResult);
    ObjectMapper mapper = new ObjectMapper();
    @SuppressWarnings("unchecked")
    final Map<String, Object> map = (Map<String, Object>) mapper.readValue((InputStream) schemaResult, Map.class);
    final String schemaId = (String) map.get("uuid");
    final Object idResult = template.requestBodyAndHeader("salesforce:getEventSchema", "", EVENT_SCHEMA_ID, schemaId);
    assertNotNull(idResult);
}
// Polymorphic TYPEOF query: the Owner relationship must deserialize to the
// concrete User type, and RecordType must be populated.
@Test
public void testQuery() throws Exception {
    createLineItem();
    final QueryRecordsLine_Item__c queryRecords
            = template().requestBody("direct:query", null, QueryRecordsLine_Item__c.class);
    assertNotNull(queryRecords);
    // verify polymorphic query resulted in the correct type
    assertEquals(User.class, queryRecords.getRecords().get(0).getOwner().getClass());
    final Line_Item__c lineItem = queryRecords.getRecords().get(0);
    User user = (User) queryRecords.getRecords().get(0).getOwner();
    assertNotNull(user.getUsername());
    assertNotNull(lineItem.getRecordType());
}
// Same query without an explicit sObjectClass — the response class must be
// detected automatically.
@Test
public void testQueryDetectResponseClass() throws Exception {
    createLineItem();
    final QueryRecordsLine_Item__c queryRecords
            = template().requestBody("direct:queryDetectResponseClass", null, QueryRecordsLine_Item__c.class);
    assertNotNull(queryRecords);
}
// Same polymorphic query but resolved via sObjectName instead of sObjectClass.
@Test
public void testQueryWithSObjectName() throws Exception {
    createLineItem();
    final QueryRecordsLine_Item__c queryRecords
            = template().requestBody("direct:queryWithSObjectName", null, QueryRecordsLine_Item__c.class);
    assertNotNull(queryRecords);
    // verify polymorphic query resulted in the correct type
    assertEquals(User.class, queryRecords.getRecords().get(0).getOwner().getClass());
    final Line_Item__c lineItem = queryRecords.getRecords().get(0);
    User user = (User) queryRecords.getRecords().get(0).getOwner();
    assertNotNull(user.getUsername());
    assertNotNull(lineItem.getRecordType());
}
// With streamQueryResult=true the body is an Iterator that transparently pages
// through result batches; 300 records at batchSize=200 forces at least 2 pages.
@Test
public void testQueryStreamResults() throws Exception {
    final int createCount = 300;
    createLineItems(createCount);
    Exchange exchange = new DefaultExchange(context);
    template().send("direct:queryStreamResult", exchange);
    Iterator<?> queryRecords = exchange.getMessage(Iterator.class);
    assertNotNull(exchange.getMessage().getHeader("CamelSalesforceQueryResultTotalSize"));
    int count = 0;
    while (queryRecords.hasNext()) {
        count = count + 1;
        queryRecords.next();
    }
    assertTrue(count >= createCount);
}
// Mixing a synchronous and an asynchronous query route in one pipeline must
// not deadlock or time out.
@Test
public void querySyncAsyncDoesntTimeout() throws Exception {
    final Object result = template.requestBody("direct:querySyncAsync", "");
    assertNotNull(result);
}
// Parent-to-child navigation: a Contact query selecting Account.Name must
// populate the nested Account object.
@Test
public void testParentRelationshipQuery() throws Exception {
    try {
        createAccountAndContact();
        final QueryRecordsContact queryRecords
                = template().requestBody("direct:parentRelationshipQuery", null, QueryRecordsContact.class);
        Account account = queryRecords.getRecords().get(0).getAccount();
        assertNotNull(account, "Account was null");
    } finally {
        deleteAccountAndContact();
    }
}
// Child subquery: an Account query with a nested (SELECT ... FROM Contacts)
// must populate the child record collection.
@Test
public void testChildRelationshipQuery() throws Exception {
    try {
        createAccountAndContact();
        final QueryRecordsAccount queryRecords
                = template().requestBody("direct:childRelationshipQuery", null, QueryRecordsAccount.class);
        assertFalse(queryRecords.getRecords().isEmpty());
        Account account1 = queryRecords.getRecords().get(0);
        assertFalse(account1.getContacts().getRecords().isEmpty());
    } finally {
        deleteAccountAndContact();
    }
}
// queryAll (includes deleted/archived records) must succeed with the mapped
// response class.
@Test
public void testQueryAll() throws Exception {
    final QueryRecordsLine_Item__c queryRecords
            = template().requestBody("direct:queryAll", null, QueryRecordsLine_Item__c.class);
    assertNotNull(queryRecords);
}
// Streaming variant of queryAll; paging is exercised the same way as in
// testQueryStreamResults.
@Test
public void testQueryAllStreamResults() throws Exception {
    final int createCount = 300;
    createLineItems(createCount);
    final Iterator<Line_Item__c> queryRecords
            = template().requestBody("direct:queryAllStreamResult", "", Iterator.class);
    int count = 0;
    while (queryRecords.hasNext()) {
        count = count + 1;
        queryRecords.next();
    }
    assertTrue(count >= createCount);
}
/**
 * Revokes the component's current OAuth token out-of-band and verifies that
 * the next request transparently re-authenticates and succeeds.
 */
@Test
public void testRetry() throws Exception {
    final SalesforceComponent sf = context().getComponent("salesforce", SalesforceComponent.class);
    final String accessToken = sf.getSession().getAccessToken();
    final SslContextFactory.Client sslContextFactory = new SslContextFactory.Client();
    sslContextFactory.setSslContext(new SSLContextParameters().createSSLContext(context));
    final ClientConnector connector = new ClientConnector();
    connector.setSslContextFactory(sslContextFactory);
    final HttpClientTransport transport = new HttpClientTransportOverHTTP(connector);
    final HttpClient httpClient = new HttpClient(transport);
    httpClient.setConnectTimeout(60000);
    httpClient.start();
    try {
        // Revoke the token directly against the OAuth endpoint so the next
        // component request is forced to re-login.
        final String uri = sf.getLoginConfig().getLoginUrl() + "/services/oauth2/revoke?token=" + accessToken;
        final Request logoutGet = httpClient.newRequest(uri).method(HttpMethod.GET).timeout(1, TimeUnit.MINUTES);
        final ContentResponse response = logoutGet.send();
        assertEquals(HttpStatus.OK_200, response.getStatus());
    } finally {
        // Stop the ad-hoc Jetty client; it was previously started and leaked.
        httpClient.stop();
    }
    testGetGlobalObjects();
}
/**
 * Revokes the OAuth token and then sabotages the stored password so the
 * re-login attempt must fail with HTTP 400; the original password is restored
 * afterwards so later tests can authenticate.
 */
@Test
public void testRetryFailure() throws Exception {
    final SalesforceComponent sf = context().getComponent("salesforce", SalesforceComponent.class);
    final String accessToken = sf.getSession().getAccessToken();
    final SslContextFactory.Client sslContextFactory = new SslContextFactory.Client();
    sslContextFactory.setSslContext(new SSLContextParameters().createSSLContext(context));
    final ClientConnector connector = new ClientConnector();
    connector.setSslContextFactory(sslContextFactory);
    final HttpClientTransport transport = new HttpClientTransportOverHTTP(connector);
    final HttpClient httpClient = new HttpClient(transport);
    httpClient.setConnectTimeout(60000);
    httpClient.start();
    try {
        final String uri = sf.getLoginConfig().getLoginUrl() + "/services/oauth2/revoke?token=" + accessToken;
        final Request logoutGet = httpClient.newRequest(uri).method(HttpMethod.GET).timeout(1, TimeUnit.MINUTES);
        final ContentResponse response = logoutGet.send();
        assertEquals(HttpStatus.OK_200, response.getStatus());
    } finally {
        // Stop the ad-hoc Jetty client; it was previously started and leaked.
        httpClient.stop();
    }
    // set component config to bad password to cause relogin attempts to
    // fail
    final String password = sf.getLoginConfig().getPassword();
    sf.getLoginConfig().setPassword("bad_password");
    try {
        testGetGlobalObjects();
        fail("Expected CamelExecutionException!");
    } catch (final CamelExecutionException e) {
        if (e.getCause() instanceof SalesforceException) {
            final SalesforceException cause = (SalesforceException) e.getCause();
            assertEquals(HttpStatus.BAD_REQUEST_400, cause.getStatusCode(), "Expected 400 on authentication retry failure");
        } else {
            fail("Expected SalesforceException!");
        }
    } finally {
        // reset password and retries to allow other tests to pass
        sf.getLoginConfig().setPassword(password);
    }
}
// SOSL search via the search producer must return a non-null result.
@Test
public void testSearch() throws Exception {
    final Object obj = template().requestBody("direct:search", (Object) null);
    assertNotNull(obj);
}
// Clones the fixture record so a by-Name lookup matches multiple records,
// which the API reports as HTTP 300 (multiple choices).
@Test
public void testStatus300() throws Exception {
    // get test merchandise
    // note that the header value overrides sObjectFields in endpoint
    final Merchandise__c merchandise = template().requestBodyAndHeader("direct:getSObject", merchandiseId, "sObjectFields",
            "Name,Description__c,Price__c,Total_Inventory__c",
            Merchandise__c.class);
    assertNotNull(merchandise);
    assertNotNull(merchandise.getName());
    assertNotNull(merchandise.getPrice__c());
    assertNotNull(merchandise.getTotal_Inventory__c());
    CreateSObjectResult result = null;
    try {
        merchandise.clearBaseFields();
        result = template().requestBody("salesforce:createSObject", merchandise, CreateSObjectResult.class);
        assertNotNull(result);
        assertNotNull(result.getId());
        // look by external Id to cause 300 error
        // note that the request SObject overrides settings on the endpoint
        // for LineItem__c
        try {
            template().requestBody("salesforce:getSObjectWithId?sObjectIdName=Name", merchandise, Merchandise__c.class);
            fail("Expected SalesforceException with statusCode 300");
        } catch (final CamelExecutionException e) {
            final Throwable cause = e.getCause();
            assertTrue(cause instanceof SalesforceMultipleChoicesException);
            final SalesforceMultipleChoicesException multipleChoices = (SalesforceMultipleChoicesException) cause;
            assertEquals(300, multipleChoices.getStatusCode());
            final List<String> choices = multipleChoices.getChoices();
            assertNotNull(choices);
            assertFalse(choices.isEmpty());
        }
    } finally {
        // delete the test clone
        if (result != null) {
            template().requestBody("salesforce:deleteSObject?sObjectName=Merchandise__c", result.getId());
        }
    }
}
// Creating a record without the required Total_Inventory__c field must fail
// with HTTP 400 and name the missing field in the error details.
@Test
public void testStatus400() throws Exception {
    // get test merchandise
    // note that the header value overrides sObjectFields in endpoint
    final Merchandise__c merchandise = template().requestBodyAndHeader("direct:getSObject", merchandiseId, "sObjectFields",
            "Description__c,Price__c", Merchandise__c.class);
    assertNotNull(merchandise);
    assertNotNull(merchandise.getPrice__c());
    assertNull(merchandise.getTotal_Inventory__c());
    merchandise.clearBaseFields();
    // required field Total_Inventory__c is missing
    CreateSObjectResult result = null;
    try {
        result = template().requestBody("salesforce:createSObject", merchandise, CreateSObjectResult.class);
        fail("Expected SalesforceException with statusCode 400");
    } catch (final CamelExecutionException e) {
        final Throwable cause = e.getCause();
        assertTrue(cause instanceof SalesforceException);
        final SalesforceException badRequest = (SalesforceException) cause;
        assertEquals(400, badRequest.getStatusCode());
        assertEquals(1, badRequest.getErrors().size());
        assertEquals("[Total_Inventory__c]", badRequest.getErrors().get(0).getFields().toString());
    } finally {
        // delete the clone if created
        if (result != null) {
            template().requestBody("salesforce:deleteSObject", result.getId());
        }
    }
}
// Looking up a nonexistent id must surface NoSuchSObjectException (HTTP 404)
// with one error entry.
@Test
public void testStatus404() throws Exception {
    // try to get a non existent SObject
    try {
        template().requestBody("direct:getSObject", "ILLEGAL_ID", Merchandise__c.class);
        fail("Expected SalesforceException");
    } catch (final CamelExecutionException e) {
        final Throwable cause = e.getCause();
        assertTrue(cause instanceof NoSuchSObjectException);
        final NoSuchSObjectException noSuchObject = (NoSuchSObjectException) cause;
        assertEquals(404, noSuchObject.getStatusCode());
        assertEquals(1, noSuchObject.getErrors().size());
    }
}
// getGlobalObjects called directly on the component URI (no direct: route)
// must return a non-empty SObject list.
@Test
public void testFetchingGlobalObjects() throws Exception {
    final GlobalObjects globalObjects = template().requestBody("salesforce:getGlobalObjects", null, GlobalObjects.class);
    assertNotNull(globalObjects);
    assertFalse(globalObjects.getSobjects().isEmpty());
}
// When onException(...).continued(true) swallows a failure, the original
// message body (the same Contact instance) must be preserved.
@Test
public void testBodyIsPreservedAfterError() throws Exception {
    Contact contact = new Contact();
    final Object result = template.requestBody("direct:createSObjectContinueOnException", contact);
    assertNotNull(result);
    assertEquals(contact, result);
}
// Defines all direct: test routes used above. Endpoint URIs and SOQL strings
// are exact; do not reformat them.
@Override
protected RouteBuilder doCreateRouteBuilder() throws Exception {
    // create test route
    return new RouteBuilder() {
        @Override
        public void configure() {
            // testGetVersion
            from("direct:getVersions").to("salesforce:getVersions");
            // testGetResources
            from("direct:getResources").to("salesforce:getResources");
            // testGetGlobalObjects
            from("direct:getGlobalObjects").to("salesforce:getGlobalObjects");
            // testGetBasicInfo
            from("direct:getBasicInfo").to("salesforce:getBasicInfo?sObjectName=Merchandise__c");
            // testGetDescription
            from("direct:getDescription").to("salesforce:getDescription?sObjectName=Merchandise__c");
            // testGetSObject
            from("direct:getSObject")
                    .to("salesforce:getSObject?sObjectName=Merchandise__c&sObjectFields=Description__c,Price__c");
            // Queries all Line_Item__c ids, collects them, then deletes them in
            // batches of 200 via the composite-collections API.
            from("direct:deleteLineItems")
                    .to("salesforce:query?sObjectQuery=SELECT Id FROM Line_Item__C&sObjectClass="
                        + QueryRecordsLine_Item__c.class.getName())
                    .filter(simple("${body.records.size} > 0"))
                    .split(simple("${body.records}"),
                            AggregationStrategies.flexible().accumulateInCollection(ArrayList.class))
                    .transform(simple("${body.id}"))
                    .end()
                    .split(simple("${collate(200)}"))
                    .to("salesforce:compositeDeleteSObjectCollections")
                    .end();
            from("direct:createLineItem").to("salesforce:createSObject?sObjectName=Line_Item__c");
            // Batched create: composite API accepts at most 200 records per call.
            from("direct:createLineItems")
                    .split(simple("${collate(200)}"))
                    .to("salesforce:compositeCreateSObjectCollections");
            from("direct:upsertSObject")
                    .to("salesforce:upsertSObject?sObjectName=Line_Item__c&sObjectIdName=Name");
            // testDeleteSObjectWithId
            from("direct:deleteSObjectWithId")
                    .to("salesforce:deleteSObjectWithId?sObjectName=Line_Item__c&sObjectIdName=Name");
            // testGetBlobField
            from("direct:getBlobField")
                    .to("salesforce:getBlobField?sObjectName=Document&sObjectBlobFieldName=Body");
            // testQuery
            from("direct:queryDetectResponseClass")
                    .to("salesforce:query?sObjectQuery=SELECT Id, name, Typeof Owner WHEN User Then Username End, recordTypeId, RecordType.Name "
                        + "from Line_Item__c "
                        + "ORDER BY CreatedDate DESC "
                        + "LIMIT 1");
            // testQuery
            from("direct:query")
                    .to("salesforce:query?sObjectQuery=SELECT Id, name, Typeof Owner WHEN User Then Username End, recordTypeId, RecordType.Name "
                        + "from Line_Item__c "
                        + "ORDER BY CreatedDate DESC "
                        + "LIMIT 1"
                        + "&sObjectClass=" + QueryRecordsLine_Item__c.class.getName());
            // testQuery
            from("direct:queryWithSObjectName")
                    .to("salesforce:query?sObjectQuery=SELECT Id, name, Typeof Owner WHEN User Then Username End, recordTypeId, RecordType.Name from Line_Item__c"
                        + "&sObjectName=QueryRecordsLine_Item__c");
            // testQuery
            from("direct:queryStreamResult")
                    .setHeader("sObjectClass", constant(QueryRecordsLine_Item__c.class.getName()))
                    .setHeader("Sforce-Query-Options", constant("batchSize=200"))
                    .to("salesforce:query?sObjectQuery=SELECT Id, name, Typeof Owner WHEN User Then Username End, recordTypeId, RecordType.Name from Line_Item__c Order By Name"
                        + "&streamQueryResult=true");
            // testQuery
            from("direct:queryAllStreamResult")
                    .setHeader("sObjectClass", constant(QueryRecordsLine_Item__c.class.getName()))
                    .setHeader("Sforce-Query-Options", constant("batchSize=200"))
                    .to("salesforce:queryAll?sObjectQuery=SELECT Id, name, Typeof Owner WHEN User Then Username End, recordTypeId, RecordType.Name from Line_Item__c Order By Name"
                        + "&streamQueryResult=true");
            // testParentRelationshipQuery — SOQL is built at runtime so it can
            // embed the contactId created by the test.
            from("direct:parentRelationshipQuery")
                    .process(exchange -> exchange.getIn()
                            .setBody("SELECT LastName, Account.Name FROM Contact WHERE Id = '" + contactId + "'"))
                    .to("salesforce:query?sObjectClass=" + QueryRecordsContact.class.getName() + "");
            // testChildRelationshipQuery
            from("direct:childRelationshipQuery")
                    .process(exchange -> exchange.getIn()
                            .setBody("SELECT Id, Name, (SELECT Id, LastName FROM Contacts)" + " FROM Account WHERE Id = '"
                                     + accountId + "'"))
                    .to("salesforce:query?sObjectClass=" + QueryRecordsAccount.class.getName() + "");
            // testQueryAll
            from("direct:queryAll")
                    .to("salesforce:queryAll?sObjectQuery=SELECT name from Line_Item__c&sObjectClass="
                        + QueryRecordsLine_Item__c.class.getName() + "");
            // Chains a synchronous and an asynchronous query route.
            from("direct:querySyncAsync")
                    .to("direct:querySync")
                    .to("direct:queryAsync");
            from("direct:querySync?synchronous=false").routeId("r.querySync")
                    .to("salesforce:query?rawPayload=true&sObjectQuery=Select Id From Contact Where Name = 'Sync'");
            from("direct:queryAsync?synchronous=true").routeId("r.queryAsync")
                    .to("salesforce:query?rawPayload=true&sObjectQuery=Select Id From Contact Where Name = 'Sync'");
            // testSearch
            from("direct:search").to("salesforce:search?sObjectSearch=FIND {Wee}");
            // testApexCall
            from("direct:apexCallGet")
                    .to("salesforce:apexCall?apexMethod=GET&apexUrl=Merchandise/{id}&sObjectName=Merchandise__c");
            // testApexCall
            from("direct:apexCallGetDetectResponseType")
                    .to("salesforce:apexCall?apexMethod=GET&apexUrl=Merchandise/{id}");
            from("direct:apexCallGetWithId")
                    .to("salesforce:apexCall/Merchandise/?apexMethod=GET&id=dummyId" + "&sObjectClass="
                        + Merchandise__c.class.getName());
            from("direct:apexCallPatch").to("salesforce:apexCall/Merchandise/"
                                            + "?apexMethod=PATCH&sObjectClass=" + MerchandiseResponse.class.getName());
            from("direct:apexCallPostCustomError").to("salesforce:apexCall/Merchandise/"
                                                      + "?apexMethod=POST&sObjectClass=java.lang.String");
            // testBodyIsPreservedAfterError: continued(true) swallows the failure.
            from("direct:createSObjectContinueOnException").onException(Exception.class).continued(true).end()
                    .to("salesforce:createSObject");
        }
    };
}
}
|
MerchandiseResponse
|
java
|
reactor__reactor-core
|
reactor-test/src/test/java/reactor/test/StepVerifierTimeoutTests.java
|
{
"start": 848,
"end": 1682
}
|
class ____ {
@Test
public void verifyThenAssert_failsAfterCustomTimeout() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() ->
StepVerifier.create(Mono.delay(Duration.ofMillis(150)))
.expectComplete()
.verifyThenAssertThat(Duration.ofMillis(50)))
.withMessageStartingWith("VerifySubscriber timed out");
}
@Test
public void verifyThenAssertUsesCustomTimeout() {
try {
StepVerifier.setDefaultTimeout(Duration.ofMillis(50));
Duration longerThanDefaultTimeout = Duration.ofMillis(150);
StepVerifier.create(Mono.delay(longerThanDefaultTimeout))
.expectNext(0L)
.expectComplete()
.verifyThenAssertThat(Duration.ofMillis(250));
} finally {
StepVerifier.resetDefaultTimeout();
}
}
}
|
StepVerifierTimeoutTests
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/binding/MapperProxy.java
|
{
"start": 4049,
"end": 4467
}
|
class ____ implements MapperMethodInvoker {
private final MethodHandle methodHandle;
public DefaultMethodInvoker(MethodHandle methodHandle) {
this.methodHandle = methodHandle;
}
@Override
public Object invoke(Object proxy, Method method, Object[] args, SqlSession sqlSession) throws Throwable {
return methodHandle.bindTo(proxy).invokeWithArguments(args);
}
}
}
|
DefaultMethodInvoker
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/network/SslVersionsTransportLayerTest.java
|
{
"start": 1857,
"end": 9272
}
|
class ____ {
private static final int BUFFER_SIZE = 4 * 1024;
private static final Time TIME = Time.SYSTEM;
public static Stream<Arguments> parameters() {
List<Arguments> parameters = new ArrayList<>();
parameters.add(Arguments.of(Collections.singletonList("TLSv1.2"), Collections.singletonList("TLSv1.2")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.2"), Collections.singletonList("TLSv1.3")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.3"), Collections.singletonList("TLSv1.2")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.3"), Collections.singletonList("TLSv1.3")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.2"), Arrays.asList("TLSv1.2", "TLSv1.3")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.2"), Arrays.asList("TLSv1.3", "TLSv1.2")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.3"), Arrays.asList("TLSv1.2", "TLSv1.3")));
parameters.add(Arguments.of(Collections.singletonList("TLSv1.3"), Arrays.asList("TLSv1.3", "TLSv1.2")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.3", "TLSv1.2"), Collections.singletonList("TLSv1.3")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.3", "TLSv1.2"), Collections.singletonList("TLSv1.2")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.3", "TLSv1.2"), Arrays.asList("TLSv1.2", "TLSv1.3")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.3", "TLSv1.2"), Arrays.asList("TLSv1.3", "TLSv1.2")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.2", "TLSv1.3"), Collections.singletonList("TLSv1.3")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.2", "TLSv1.3"), Collections.singletonList("TLSv1.2")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.2", "TLSv1.3"), Arrays.asList("TLSv1.2", "TLSv1.3")));
parameters.add(Arguments.of(Arrays.asList("TLSv1.2", "TLSv1.3"), Arrays.asList("TLSv1.3", "TLSv1.2")));
return parameters.stream();
}
/**
* Tests that connection success with the default TLS version.
* Note that debug mode for javax.net.ssl can be enabled via {@code System.setProperty("javax.net.debug", "ssl:handshake");}
*/
@ParameterizedTest(name = "testTlsDefaults(tlsServerProtocol = {0}, tlsClientProtocol = {1})")
@MethodSource("parameters")
public void testTlsDefaults(List<String> serverProtocols, List<String> clientProtocols) throws Exception {
// Create certificates for use by client and server. Add server cert to client truststore and vice versa.
CertStores serverCertStores = new CertStores(true, "server", "localhost");
CertStores clientCertStores = new CertStores(false, "client", "localhost");
Map<String, Object> sslClientConfigs = getTrustingConfig(clientCertStores, serverCertStores, clientProtocols);
Map<String, Object> sslServerConfigs = getTrustingConfig(serverCertStores, clientCertStores, serverProtocols);
NioEchoServer server = NetworkTestUtils.createEchoServer(ListenerName.forSecurityProtocol(SecurityProtocol.SSL),
SecurityProtocol.SSL,
new TestSecurityConfig(sslServerConfigs),
null,
TIME);
Selector selector = createClientSelector(sslClientConfigs);
String node = "0";
selector.connect(node, new InetSocketAddress("localhost", server.port()), BUFFER_SIZE, BUFFER_SIZE);
if (isCompatible(serverProtocols, clientProtocols)) {
NetworkTestUtils.waitForChannelReady(selector, node);
int msgSz = 1024 * 1024;
String message = TestUtils.randomString(msgSz);
selector.send(new NetworkSend(node, ByteBufferSend.sizePrefixed(ByteBuffer.wrap(message.getBytes()))));
while (selector.completedReceives().isEmpty()) {
selector.poll(100L);
}
int totalBytes = msgSz + 4; // including 4-byte size
server.waitForMetric("incoming-byte", totalBytes);
server.waitForMetric("outgoing-byte", totalBytes);
server.waitForMetric("request", 1);
server.waitForMetric("response", 1);
} else {
NetworkTestUtils.waitForChannelClose(selector, node, ChannelState.State.AUTHENTICATION_FAILED);
server.verifyAuthenticationMetrics(0, 1);
}
server.close();
selector.close();
}
/**
* <p>
* The explanation of this check in the structure of the ClientHello SSL message.
* Please, take a look at the <a href="https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html#GUID-4D421910-C36D-40A2-8BA2-7D42CCBED3C6">Guide</a>,
* "Send ClientHello Message" section.
* <p>
* > Client version: For TLS 1.3, this has a fixed value, TLSv1.2; TLS 1.3 uses the extension supported_versions and not this field to negotiate protocol version
* ...
* > supported_versions: Lists which versions of TLS the client supports. In particular, if the client
* > requests TLS 1.3, then the client version field has the value TLSv1.2 and this extension
* > contains the value TLSv1.3; if the client requests TLS 1.2, then the client version field has the
* > value TLSv1.2 and this extension either doesn't exist or contains the value TLSv1.2 but not the value TLSv1.3.
* <p>
*
* This mean that TLSv1.3 client can fallback to TLSv1.2 but TLSv1.2 client can't change protocol to TLSv1.3.
*
* @param serverProtocols Server protocols. Expected to be non empty.
* @param clientProtocols Client protocols. Expected to be non empty.
* @return {@code true} if client should be able to connect to the server.
*/
private boolean isCompatible(List<String> serverProtocols, List<String> clientProtocols) {
assertNotNull(serverProtocols);
assertFalse(serverProtocols.isEmpty());
assertNotNull(clientProtocols);
assertFalse(clientProtocols.isEmpty());
return serverProtocols.contains(clientProtocols.get(0)) ||
(clientProtocols.get(0).equals("TLSv1.3") && !Collections.disjoint(serverProtocols, clientProtocols));
}
private static Map<String, Object> getTrustingConfig(CertStores certStores, CertStores peerCertStores, List<String> tlsProtocols) {
Map<String, Object> configs = certStores.getTrustingConfig(peerCertStores);
configs.putAll(sslConfig(tlsProtocols));
return configs;
}
private static Map<String, Object> sslConfig(List<String> tlsProtocols) {
Map<String, Object> sslConfig = new HashMap<>();
sslConfig.put(SslConfigs.SSL_PROTOCOL_CONFIG, tlsProtocols.get(0));
sslConfig.put(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, tlsProtocols);
return sslConfig;
}
private Selector createClientSelector(Map<String, Object> sslClientConfigs) {
SslTransportLayerTest.TestSslChannelBuilder channelBuilder =
new SslTransportLayerTest.TestSslChannelBuilder(ConnectionMode.CLIENT);
channelBuilder.configureBufferSizes(null, null, null);
channelBuilder.configure(sslClientConfigs);
return new Selector(100 * 5000, new Metrics(), TIME, "MetricGroup", channelBuilder, new LogContext());
}
}
|
SslVersionsTransportLayerTest
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/cmd/DeployCommandDeclarationHandler.java
|
{
"start": 181,
"end": 757
}
|
class ____ implements BiConsumer<Object, BuildResult> {
@Override
public void accept(Object o, BuildResult buildResult) {
DeployCommandDeclarationResultBuildItem result = buildResult.consume(DeployCommandDeclarationResultBuildItem.class);
// FYI: AugmentAction.performCustomBuild runs in its own classloader
// so we can only pass back instances of those classes in the system classloader
Consumer<List<String>> consumer = (Consumer<List<String>>) o;
consumer.accept(result.getCommands());
}
}
|
DeployCommandDeclarationHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/Order.java
|
{
"start": 2358,
"end": 4125
}
|
interface ____<X> {
/**
* An order where an entity is sorted by the given attribute,
* with smaller values first. If the given attribute is of textual
* type, the ordering is case-sensitive.
*/
static <T> Order<T> asc(SingularAttribute<T,?> attribute) {
return new AttributeOrder<>(ASCENDING, Nulls.NONE, attribute);
}
/**
* An order where an entity is sorted by the given attribute,
* with larger values first. If the given attribute is of textual
* type, the ordering is case-sensitive.
*/
static <T> Order<T> desc(SingularAttribute<T,?> attribute) {
return new AttributeOrder<>(DESCENDING, Nulls.NONE, attribute);
}
/**
* An order where an entity is sorted by the given attribute,
* in the given direction. If the given attribute is of textual
* type, the ordering is case-sensitive.
*/
static <T> Order<T> by(SingularAttribute<T,?> attribute, SortDirection direction) {
return new AttributeOrder<>(direction, Nulls.NONE, attribute);
}
/**
* An order where an entity is sorted by the given attribute,
* in the given direction, with the specified case-sensitivity.
*/
static <T> Order<T> by(SingularAttribute<T,?> attribute, SortDirection direction, boolean ignoreCase) {
return new AttributeOrder<>(direction, Nulls.NONE, attribute, !ignoreCase);
}
/**
* An order where an entity is sorted by the given attribute,
* in the given direction, with the specified precedence for
* null values. If the given attribute is of textual type, the
* ordering is case-sensitive.
*/
static <T> Order<T> by(SingularAttribute<T, ?> attribute, SortDirection direction, Nulls nullPrecedence) {
return new AttributeOrder<>(direction, nullPrecedence, attribute);
}
/**
* An order where an entity of the given
|
Order
|
java
|
google__auto
|
common/src/test/java/com/google/auto/common/MoreElementsTest.java
|
{
"start": 14376,
"end": 14627
}
|
class ____ extends AbstractMessage.Builder<Builder> {
@Override
@SuppressWarnings("rawtypes")
Builder internalMergeFrom(AbstractMessageLite other) {
return this;
}
}
}
@SuppressWarnings("rawtypes")
static
|
Builder
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnFieldsTest.java
|
{
"start": 7918,
"end": 8643
}
|
class ____ {
@JsonAdapter(PartJsonFieldAnnotationAdapter.class)
final Part part1;
final Part part2; // Doesn't have the JsonAdapter annotation
@SuppressWarnings("unused")
GadgetWithTwoParts(Part part1, Part part2) {
this.part1 = part1;
this.part2 = part2;
}
}
@Test
public void testJsonAdapterWrappedInNullSafeAsRequested() {
Gson gson = new Gson();
String fromJson = "{'part':null}";
GadgetWithOptionalPart gadget = gson.fromJson(fromJson, GadgetWithOptionalPart.class);
assertThat(gadget.part).isNull();
String toJson = gson.toJson(gadget);
assertThat(toJson).doesNotContain("PartJsonFieldAnnotationAdapter");
}
private static final
|
GadgetWithTwoParts
|
java
|
spring-projects__spring-security
|
access/src/test/java/org/springframework/security/access/annotation/SecuredAnnotationSecurityMetadataSourceTests.java
|
{
"start": 8829,
"end": 8982
}
|
class ____ implements ReturnVoid {
@Override
@AnnotatedAnnotation
public void doSomething(List<?> param) {
}
}
}
|
AnnotatedAnnotationAtMethodLevel
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/accesslog/element/RequestProtocolElementBuilder.java
|
{
"start": 765,
"end": 1066
}
|
class ____ implements LogElementBuilder {
@Override
public LogElement build(String token, String param) {
if (RequestProtocolElement.REQUEST_PROTOCOL.equals(token)) {
return RequestProtocolElement.INSTANCE;
}
return null;
}
}
|
RequestProtocolElementBuilder
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/TreeMultimapExplicitTest.java
|
{
"start": 1611,
"end": 8605
}
|
enum ____ implements Comparator<@Nullable String> {
COMPARATOR;
@Override
public int compare(@Nullable String first, @Nullable String second) {
if (first == second) {
return 0;
} else if (first == null) {
return -1;
} else if (second == null) {
return 1;
} else if (first.length() != second.length()) {
return first.length() - second.length();
} else {
return first.compareTo(second);
}
}
}
/** Decreasing integer values. A {@code null} comes before any non-null value. */
private static final Comparator<@Nullable Integer> DECREASING_INT_COMPARATOR =
Ordering.<Integer>natural().reverse().<Integer>nullsFirst();
private SetMultimap<String, Integer> create() {
return TreeMultimap.create(StringLength.COMPARATOR, DECREASING_INT_COMPARATOR);
}
/** Create and populate a {@code TreeMultimap} with explicit comparators. */
private TreeMultimap<@Nullable String, @Nullable Integer> createPopulate() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap =
TreeMultimap.create(StringLength.COMPARATOR, DECREASING_INT_COMPARATOR);
multimap.put("google", 2);
multimap.put("google", 6);
multimap.put(null, 3);
multimap.put(null, 1);
multimap.put(null, 7);
multimap.put("tree", 0);
multimap.put("tree", null);
return multimap;
}
/** Test that a TreeMultimap created from another uses the natural ordering. */
public void testMultimapCreateFromTreeMultimap() {
TreeMultimap<String, Integer> tree =
TreeMultimap.create(StringLength.COMPARATOR, DECREASING_INT_COMPARATOR);
tree.put("google", 2);
tree.put("google", 6);
tree.put("tree", 0);
tree.put("tree", 3);
assertThat(tree.keySet()).containsExactly("tree", "google").inOrder();
assertThat(tree.get("google")).containsExactly(6, 2).inOrder();
TreeMultimap<String, Integer> copy = TreeMultimap.create(tree);
assertEquals(tree, copy);
assertThat(copy.keySet()).containsExactly("google", "tree").inOrder();
assertThat(copy.get("google")).containsExactly(2, 6).inOrder();
assertEquals(Ordering.natural(), copy.keyComparator());
assertEquals(Ordering.natural(), copy.valueComparator());
assertEquals(Ordering.natural(), copy.get("google").comparator());
}
public void testToString() {
Multimap<String, Integer> multimap = create();
multimap.put("foo", 3);
multimap.put("bar", 1);
multimap.putAll("foo", asList(-1, 2, 4));
multimap.putAll("bar", asList(2, 3));
multimap.put("foo", 1);
assertEquals("{bar=[3, 2, 1], foo=[4, 3, 2, 1, -1]}", multimap.toString());
}
public void testGetComparator() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
assertEquals(StringLength.COMPARATOR, multimap.keyComparator());
assertEquals(DECREASING_INT_COMPARATOR, multimap.valueComparator());
}
public void testOrderedGet() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
assertThat(multimap.get(null)).containsExactly(7, 3, 1).inOrder();
assertThat(multimap.get("google")).containsExactly(6, 2).inOrder();
assertThat(multimap.get("tree")).containsExactly(null, 0).inOrder();
}
public void testOrderedKeySet() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
assertThat(multimap.keySet()).containsExactly(null, "tree", "google").inOrder();
}
public void testOrderedAsMapEntries() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
Iterator<Entry<String, Collection<Integer>>> iterator = multimap.asMap().entrySet().iterator();
Entry<String, Collection<Integer>> entry = iterator.next();
assertEquals(null, entry.getKey());
assertThat(entry.getValue()).containsExactly(7, 3, 1);
entry = iterator.next();
assertEquals("tree", entry.getKey());
assertThat(entry.getValue()).containsExactly(null, 0);
entry = iterator.next();
assertEquals("google", entry.getKey());
assertThat(entry.getValue()).containsExactly(6, 2);
}
public void testOrderedEntries() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
assertThat(multimap.entries())
.containsExactly(
Maps.<@Nullable String, Integer>immutableEntry(null, 7),
Maps.<@Nullable String, Integer>immutableEntry(null, 3),
Maps.<@Nullable String, Integer>immutableEntry(null, 1),
Maps.<String, @Nullable Integer>immutableEntry("tree", null),
immutableEntry("tree", 0),
immutableEntry("google", 6),
immutableEntry("google", 2))
.inOrder();
}
public void testOrderedValues() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
assertThat(multimap.values()).containsExactly(7, 3, 1, null, 0, 6, 2).inOrder();
}
public void testComparator() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
assertEquals(DECREASING_INT_COMPARATOR, multimap.get("foo").comparator());
assertEquals(DECREASING_INT_COMPARATOR, multimap.get("missing").comparator());
}
public void testMultimapComparators() {
Multimap<String, Integer> multimap = create();
multimap.put("foo", 3);
multimap.put("bar", 1);
multimap.putAll("foo", asList(-1, 2, 4));
multimap.putAll("bar", asList(2, 3));
multimap.put("foo", 1);
TreeMultimap<String, Integer> copy =
TreeMultimap.create(StringLength.COMPARATOR, DECREASING_INT_COMPARATOR);
copy.putAll(multimap);
assertEquals(multimap, copy);
assertEquals(StringLength.COMPARATOR, copy.keyComparator());
assertEquals(DECREASING_INT_COMPARATOR, copy.valueComparator());
}
public void testSortedKeySet() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
SortedSet<@Nullable String> keySet = multimap.keySet();
assertEquals(null, keySet.first());
assertEquals("google", keySet.last());
assertEquals(StringLength.COMPARATOR, keySet.comparator());
assertEquals(Sets.<@Nullable String>newHashSet(null, "tree"), keySet.headSet("yahoo"));
assertEquals(newHashSet("google"), keySet.tailSet("yahoo"));
assertEquals(newHashSet("tree"), keySet.subSet("ask", "yahoo"));
}
@GwtIncompatible // SerializableTester
public void testExplicitComparatorSerialization() {
TreeMultimap<@Nullable String, @Nullable Integer> multimap = createPopulate();
TreeMultimap<@Nullable String, @Nullable Integer> copy =
SerializableTester.reserializeAndAssert(multimap);
assertThat(copy.values()).containsExactly(7, 3, 1, null, 0, 6, 2).inOrder();
assertThat(copy.keySet()).containsExactly(null, "tree", "google").inOrder();
assertEquals(multimap.keyComparator(), copy.keyComparator());
assertEquals(multimap.valueComparator(), copy.valueComparator());
}
}
|
StringLength
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/StreamCachingHelper.java
|
{
"start": 1220,
"end": 3725
}
|
class ____ {
private StreamCachingHelper() {
}
public static StreamCache convertToStreamCache(StreamCachingStrategy strategy, Exchange exchange, Message message) {
// check if body is already cached
try {
Object body = message.getBody();
if (body == null) {
return null;
} else if (body instanceof StreamCache sc) {
// reset so the cache is ready to be used before processing
sc.reset();
return sc;
}
} catch (Exception e) {
handleException(exchange, null, e);
}
// check if we somewhere failed due to a stream caching exception
Throwable cause = exchange.getException();
if (cause == null) {
cause = exchange.getProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, Throwable.class);
}
return tryStreamCache(strategy, exchange, message, cause);
}
private static StreamCache tryStreamCache(
StreamCachingStrategy strategy, Exchange exchange, Message inMessage, Throwable cause) {
final boolean failed = cause != null && ObjectHelper.getException(StreamCacheException.class, cause) != null;
if (!failed) {
boolean disabled = exchange.getExchangeExtension().isStreamCacheDisabled();
if (disabled) {
return null;
}
try {
// cache the body and if we could do that replace it as the new body
StreamCache sc = strategy.cache(exchange);
if (sc != null) {
inMessage.setBody(sc);
}
return sc;
} catch (Exception e) {
handleException(exchange, e);
}
}
return null;
}
private static void handleException(Exchange exchange, Exception e) {
handleException(exchange, exchange.getMessage().getBody(), e);
}
private static void handleException(Exchange exchange, Object value, Exception e) {
// lets allow Camels error handler to deal with stream cache failures
StreamCacheException tce = new StreamCacheException(value, e);
exchange.setException(tce);
// because this is stream caching error then we cannot use redelivery as the message body is corrupt
// so mark as redelivery exhausted
exchange.getExchangeExtension().setRedeliveryExhausted(true);
}
}
|
StreamCachingHelper
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerHealthInfo.java
|
{
"start": 2235,
"end": 4041
}
|
class ____ {
String operation;
long count;
ResourceInfo resources;
LastRunDetails() {
}
LastRunDetails(String operation, long count, Resource resource) {
this.operation = operation;
this.count = count;
this.resources = new ResourceInfo(resource);
}
public String getOperation() {
return operation;
}
public long getCount() {
return count;
}
public ResourceInfo getResources() {
return resources;
}
}
long lastrun;
List<OperationInformation> operationsInfo;
List<LastRunDetails> lastRunDetails;
CapacitySchedulerHealthInfo() {
}
public long getLastrun() {
return lastrun;
}
public List<OperationInformation> getOperationsInfo() {
return operationsInfo;
}
CapacitySchedulerHealthInfo(CapacityScheduler cs) {
SchedulerHealth ht = cs.getSchedulerHealth();
lastrun = ht.getLastSchedulerRunTime();
operationsInfo = new ArrayList<>();
operationsInfo.add(new OperationInformation("last-allocation",
ht.getLastAllocationDetails()));
operationsInfo.add(
new OperationInformation("last-release", ht.getLastReleaseDetails()));
operationsInfo.add(new OperationInformation("last-preemption",
ht.getLastPreemptionDetails()));
operationsInfo.add(new OperationInformation("last-reservation",
ht.getLastReservationDetails()));
lastRunDetails = new ArrayList<>();
lastRunDetails.add(new LastRunDetails("releases", ht.getReleaseCount(), ht
.getResourcesReleased()));
lastRunDetails.add(new LastRunDetails("allocations", ht
.getAllocationCount(), ht.getResourcesAllocated()));
lastRunDetails.add(new LastRunDetails("reservations", ht
.getReservationCount(), ht.getResourcesReserved()));
}
}
|
LastRunDetails
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java
|
{
"start": 895,
"end": 2951
}
|
class ____ extends BaseTasksRequest<CancelTasksRequest> {
public static final String DEFAULT_REASON = "by user request";
public static final boolean DEFAULT_WAIT_FOR_COMPLETION = false;
private String reason = DEFAULT_REASON;
private boolean waitForCompletion = DEFAULT_WAIT_FOR_COMPLETION;
public CancelTasksRequest() {}
public CancelTasksRequest(StreamInput in) throws IOException {
super(in);
this.reason = in.readString();
waitForCompletion = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(reason);
out.writeBoolean(waitForCompletion);
}
@Override
public boolean match(Task task) {
return super.match(task) && task instanceof CancellableTask;
}
/**
* Set the reason for canceling the task.
*/
public CancelTasksRequest setReason(String reason) {
this.reason = reason;
return this;
}
/**
* The reason for canceling the task.
*/
public String getReason() {
return reason;
}
/**
* If {@code true}, the request blocks until the cancellation of the task and its descendant tasks is completed.
* Otherwise, the request can return soon after the cancellation is started. Defaults to {@code false}.
*/
public void setWaitForCompletion(boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
}
public boolean waitForCompletion() {
return waitForCompletion;
}
@Override
public String getDescription() {
return "reason["
+ reason
+ "], waitForCompletion["
+ waitForCompletion
+ "], targetTaskId["
+ getTargetTaskId()
+ "], targetParentTaskId["
+ getTargetParentTaskId()
+ "], nodes"
+ Arrays.toString(getNodes())
+ ", actions"
+ Arrays.toString(getActions());
}
}
|
CancelTasksRequest
|
java
|
quarkusio__quarkus
|
integration-tests/oidc-wiremock/src/main/java/io/quarkus/it/keycloak/BearerGlobalTokenChainValidator.java
|
{
"start": 454,
"end": 1045
}
|
class ____ implements TokenCertificateValidator {
@Override
public void validate(OidcTenantConfig oidcConfig, List<X509Certificate> chain, String tokenClaims)
throws CertificateException {
String rootCertificateThumbprint = TrustStoreUtils.calculateThumprint(chain.get(chain.size() - 1));
JsonObject claims = new JsonObject(tokenClaims);
if (!rootCertificateThumbprint.equals(claims.getString("root-certificate-thumbprint"))) {
throw new CertificateException("Invalid root certificate");
}
}
}
|
BearerGlobalTokenChainValidator
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/Invocation.java
|
{
"start": 1633,
"end": 5201
}
|
interface ____
*
* @return
*/
String getServiceName();
/**
* get parameter types.
*
* @return parameter types.
* @serial
*/
Class<?>[] getParameterTypes();
/**
* get parameter's signature, string representation of parameter types.
*
* @return parameter's signature
*/
default String[] getCompatibleParamSignatures() {
return Stream.of(getParameterTypes()).map(Class::getName).toArray(String[]::new);
}
/**
* get arguments.
*
* @return arguments.
* @serial
*/
Object[] getArguments();
/**
* get attachments.
*
* @return attachments.
* @serial
*/
Map<String, String> getAttachments();
@Experimental("Experiment api for supporting Object transmission")
Map<String, Object> getObjectAttachments();
@Experimental("Experiment api for supporting Object transmission")
Map<String, Object> copyObjectAttachments();
@Experimental("Experiment api for supporting Object transmission")
void foreachAttachment(Consumer<Map.Entry<String, Object>> consumer);
void setAttachment(String key, String value);
@Experimental("Experiment api for supporting Object transmission")
void setAttachment(String key, Object value);
@Experimental("Experiment api for supporting Object transmission")
void setObjectAttachment(String key, Object value);
void setAttachmentIfAbsent(String key, String value);
@Experimental("Experiment api for supporting Object transmission")
void setAttachmentIfAbsent(String key, Object value);
@Experimental("Experiment api for supporting Object transmission")
void setObjectAttachmentIfAbsent(String key, Object value);
/**
* get attachment by key.
*
* @return attachment value.
* @serial
*/
String getAttachment(String key);
@Experimental("Experiment api for supporting Object transmission")
Object getObjectAttachment(String key);
@Experimental("Experiment api for supporting Object transmission")
default Object getObjectAttachmentWithoutConvert(String key) {
return getObjectAttachment(key);
}
/**
* get attachment by key with default value.
*
* @return attachment value.
* @serial
*/
String getAttachment(String key, String defaultValue);
@Experimental("Experiment api for supporting Object transmission")
Object getObjectAttachment(String key, Object defaultValue);
/**
* get the invoker in current context.
*
* @return invoker.
* @transient
*/
@Transient
Invoker<?> getInvoker();
void setServiceModel(ServiceModel serviceModel);
ServiceModel getServiceModel();
default ModuleModel getModuleModel() {
return ScopeModelUtil.getModuleModel(
getServiceModel() == null ? null : getServiceModel().getModuleModel());
}
Object put(Object key, Object value);
Object get(Object key);
Map<Object, Object> getAttributes();
/**
* To add invoked invokers into invocation. Can be used in ClusterFilter or Filter for tracing or debugging purpose.
* Currently, only support in consumer side.
*
* @param invoker invoked invokers
*/
void addInvokedInvoker(Invoker<?> invoker);
/**
* Get all invoked invokers in current invocation.
* NOTICE: A curtain invoker could be invoked for twice or more if retries.
*
* @return invokers
*/
List<Invoker<?>> getInvokedInvokers();
}
|
name
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CompileTimeConstantCheckerTest.java
|
{
"start": 5161,
"end": 5810
}
|
class ____ {
public void m(String s, @CompileTimeConstant String p) {}
public void r(@CompileTimeConstant final String x) {
m(x, x);
}
public void s() {
r("boo");
}
}
""")
.doTest();
}
@Test
public void matches_staticCallFailsWithNonConstant() {
compilationHelper
.addSourceLines(
"test/CompileTimeConstantTestCase.java",
"""
package test;
import com.google.errorprone.annotations.CompileTimeConstant;
public
|
CompileTimeConstantTestCase
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/ValidationSuccessTest.java
|
{
"start": 396,
"end": 2925
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Movie.class, MovieExtensions.class)
.addAsResource(new StringAsset("{@io.quarkus.qute.deployment.typesafe.Movie movie}"
+ "{@java.lang.Long age}"
+ "{@java.lang.String surname}"
+ "{@java.util.Map<String,String> map}"
+ "{@int cislo}"
// Property found
+ "{movie.name} "
// Built-in value resolvers
+ "{movie.name ?: 'Mono'} "
+ "{movie.alwaysTrue ? 'Mono' : 'Stereo'} "
+ "{movie.alwaysFalsePrimitive ? 'Mono' : 'Stereo'} "
+ "{movie.alwaysFalsePrimitive.negate} "
+ "{movie.mainCharacters.size} "
// Name and number of params ok and param type ignored
+ "{movie.findService('foo')} "
// Name and number of params ok; name type ignored, age ok
+ "{movie.findServices(name,age)} "
// Varargs method
+ "{movie.findNames(age,'foo',surname)} "
// Name, number of params and type ok for extension method
+ "{movie.toNumber(surname)} "
// Varargs extension method
+ "{movie.toLong(1l,2l)} "
// Primitive type in param declaration
+ "{movie.toInt(cislo)} "
// Field access
+ "{#each movie.mainCharacters}{it.substring(1)}{/} "
// Method param assignability
+ "{map.get('foo')}"),
"templates/movie.html"));
@Inject
Template movie;
@Test
public void testResult() {
// Validation succeeded! Yay!
assertEquals("Jason Jason Mono Stereo true 1 10 11 ok 43 3 1 ohn bar",
movie.data("movie", new Movie("John"), "name", "Vasik", "surname", "Hu", "age", 10l, "map",
Collections.singletonMap("foo", "bar")).data("cislo", 1).render());
}
}
|
ValidationSuccessTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/NamedStoredProcedureQueriesJpaAnnotation.java
|
{
"start": 740,
"end": 2027
}
|
class ____
implements NamedStoredProcedureQueries, RepeatableContainer<NamedStoredProcedureQuery> {
private jakarta.persistence.NamedStoredProcedureQuery[] value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public NamedStoredProcedureQueriesJpaAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public NamedStoredProcedureQueriesJpaAnnotation(
NamedStoredProcedureQueries annotation,
ModelsContext modelContext) {
this.value = extractJdkValue(
annotation,
JpaAnnotations.NAMED_STORED_PROCEDURE_QUERIES,
"value",
modelContext
);
}
/**
* Used in creating annotation instances from Jandex variant
*/
public NamedStoredProcedureQueriesJpaAnnotation(
Map<String, Object> attributeValues,
ModelsContext modelContext) {
this.value = (NamedStoredProcedureQuery[]) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return NamedStoredProcedureQueries.class;
}
@Override
public jakarta.persistence.NamedStoredProcedureQuery[] value() {
return value;
}
public void value(jakarta.persistence.NamedStoredProcedureQuery[] value) {
this.value = value;
}
}
|
NamedStoredProcedureQueriesJpaAnnotation
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/PojoTypeExtractionTest.java
|
{
"start": 35220,
"end": 35760
}
|
class ____<K, V> {
public PojoWithRecursiveGenericField<K, V> parent;
public PojoWithRecursiveGenericField() {}
}
@Test
void testPojoWithRecursiveGenericField() {
TypeInformation<?> ti = TypeExtractor.createTypeInfo(PojoWithRecursiveGenericField.class);
assertThat(ti).isInstanceOf(PojoTypeInfo.class);
assertThat(((PojoTypeInfo) ti).getPojoFieldAt(0).getTypeInformation().getClass())
.isEqualTo(GenericTypeInfo.class);
}
public static
|
PojoWithRecursiveGenericField
|
java
|
grpc__grpc-java
|
okhttp/src/test/java/io/grpc/okhttp/OkHttpClientTransportFactoryTest.java
|
{
"start": 831,
"end": 1091
}
|
class ____ extends AbstractClientTransportFactoryTest {
@Override
protected ClientTransportFactory newClientTransportFactory() {
return OkHttpChannelBuilder.forAddress("localhost", 0)
.buildTransportFactory();
}
}
|
OkHttpClientTransportFactoryTest
|
java
|
quarkusio__quarkus
|
devtools/cli-common/src/main/java/io/quarkus/cli/common/registry/RegistryClientMixin.java
|
{
"start": 924,
"end": 6519
}
|
class ____ {
static final boolean VALIDATE = !Boolean.parseBoolean(System.getenv("REGISTRY_CLIENT_TEST"));
/** @see io.quarkus.cli.common.registry.ToggleRegistryClientMixin#setRegistryClient */
public final String getRegistryClientProperty() {
return "-DquarkusRegistryClient=" + enabled();
}
@CommandLine.Option(names = {
"--refresh" }, description = "Refresh the local Quarkus extension registry cache", defaultValue = "false")
boolean refresh = false;
@CommandLine.Option(paramLabel = "CONFIG", names = { "--config" }, description = "Configuration file")
String config;
public boolean enabled() {
return true;
}
public String getConfigArg() {
return config;
}
public RegistriesConfig resolveConfig() throws RegistryResolutionException {
return config == null
? RegistriesConfig.resolveConfig()
: RegistriesConfig.resolveFromFile(Path.of(config));
}
public QuarkusProject createQuarkusProject(Path projectRoot, TargetQuarkusPlatformGroup targetVersion, BuildTool buildTool,
OutputOptionMixin log) throws RegistryResolutionException {
return createQuarkusProject(projectRoot, targetVersion, buildTool, log, List.of());
}
public QuarkusProject createQuarkusProject(Path projectRoot, TargetQuarkusPlatformGroup targetVersion, BuildTool buildTool,
OutputOptionMixin log, Collection<String> extensions) throws RegistryResolutionException {
ExtensionCatalog catalog = getExtensionCatalog(targetVersion, log);
if (VALIDATE && catalog.getQuarkusCoreVersion().startsWith("1.")) {
throw new UnsupportedOperationException("The version 2 CLI can not be used with Quarkus 1.x projects.\n"
+ "Use the maven/gradle plugins when working with Quarkus 1.x projects.");
}
catalog = CreateProjectHelper.completeCatalog(catalog, extensions, QuarkusProjectHelper.artifactResolver());
return QuarkusProjectHelper.getProject(projectRoot, catalog, buildTool, JavaVersion.NA, log);
}
ExtensionCatalog getExtensionCatalog(TargetQuarkusPlatformGroup targetVersion, OutputOptionMixin log)
throws RegistryResolutionException {
log.debug("Resolving Quarkus extension catalog for " + targetVersion);
QuarkusProjectHelper.setMessageWriter(log);
if (enabled()) {
QuarkusProjectHelper.setToolsConfig(resolveConfig());
}
if (VALIDATE && targetVersion.isStreamSpecified() && !enabled()) {
throw new UnsupportedOperationException(
"Specifying a stream (--stream) requires the registry client to resolve resources. " +
"Please try again with the registry client enabled (--registry-client)");
}
if (targetVersion.isPlatformSpecified()) {
ArtifactCoords coords = targetVersion.getPlatformBom();
return ToolsUtils.resolvePlatformDescriptorDirectly(coords.getGroupId(), coords.getArtifactId(),
coords.getVersion(), QuarkusProjectHelper.artifactResolver(), log);
}
final ExtensionCatalogResolver catalogResolver;
try {
catalogResolver = getExtensionCatalogResolver(log);
} catch (RegistryResolutionException e) {
log.warn(
"Configured Quarkus extension registries appear to be unavailable at the moment. "
+ "It should still be possible to create a project by providing the groupId:artifactId:version of the desired Quarkus platform BOM, "
+ "e.g. 'quarkus create -P "
+ ToolsConstants.DEFAULT_PLATFORM_BOM_GROUP_ID + ":"
+ ToolsConstants.DEFAULT_PLATFORM_BOM_ARTIFACT_ID + ":" + VersionHelper.clientVersion() + "'");
throw e;
}
if (!catalogResolver.hasRegistries()) {
log.debug("Falling back to direct resolution of the platform bom");
// Fall back to previous methods of finding registries (e.g. client has been disabled)
return ToolsUtils.resolvePlatformDescriptorDirectly(null, null, VersionHelper.clientVersion(),
QuarkusProjectHelper.artifactResolver(), log);
}
refreshRegistryCache(log);
if (targetVersion.isStreamSpecified()) {
return catalogResolver.resolveExtensionCatalog(targetVersion.getStream());
}
return catalogResolver.resolveExtensionCatalog();
}
public ExtensionCatalogResolver getExtensionCatalogResolver(OutputOptionMixin log) throws RegistryResolutionException {
return QuarkusProjectHelper.getCatalogResolver(enabled(), log);
}
public void refreshRegistryCache(OutputOptionMixin log) throws RegistryResolutionException {
if (!refresh) {
return;
}
final ExtensionCatalogResolver catalogResolver = getExtensionCatalogResolver(log);
if (!catalogResolver.hasRegistries()) {
log.warn("Skipping refresh since no registries are configured");
return;
}
log.debug("Refreshing registry cache");
try {
catalogResolver.clearRegistryCache();
} catch (Exception e) {
log.warn("Unable to refresh the registry cache: %s", e.getMessage());
}
}
@Override
public String toString() {
return "RegistryClientMixin [useRegistryClient=" + enabled() + "]";
}
}
|
RegistryClientMixin
|
java
|
netty__netty
|
transport-native-kqueue/src/test/java/io/netty/channel/kqueue/KQueueDomainSocketReuseFdTest.java
|
{
"start": 941,
"end": 1344
}
|
class ____ extends AbstractSocketReuseFdTest {
@Override
protected SocketAddress newSocketAddress() {
return KQueueSocketTestPermutation.newSocketAddress();
}
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return KQueueSocketTestPermutation.INSTANCE.domainSocket();
}
}
|
KQueueDomainSocketReuseFdTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/autoconfigure/error/ErrorMvcAutoConfiguration.java
|
{
"start": 11542,
"end": 12138
}
|
class ____ implements BeanFactoryPostProcessor {
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
String[] errorControllerBeans = beanFactory.getBeanNamesForType(ErrorController.class, false, false);
for (String errorControllerBean : errorControllerBeans) {
try {
beanFactory.getBeanDefinition(errorControllerBean)
.setAttribute(AutoProxyUtils.PRESERVE_TARGET_CLASS_ATTRIBUTE, Boolean.TRUE);
}
catch (Throwable ex) {
// Ignore
}
}
}
}
}
|
PreserveErrorControllerTargetClassPostProcessor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotileFromFieldDocValuesAndLiteralEvaluator.java
|
{
"start": 1039,
"end": 3472
}
|
class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(StGeotileFromFieldDocValuesAndLiteralEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator encoded;
private final int precision;
private final DriverContext driverContext;
private Warnings warnings;
public StGeotileFromFieldDocValuesAndLiteralEvaluator(Source source,
EvalOperator.ExpressionEvaluator encoded, int precision, DriverContext driverContext) {
this.source = source;
this.encoded = encoded;
this.precision = precision;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (LongBlock encodedBlock = (LongBlock) encoded.eval(page)) {
return eval(page.getPositionCount(), encodedBlock);
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += encoded.baseRamBytesUsed();
return baseRamBytesUsed;
}
public LongBlock eval(int positionCount, LongBlock encodedBlock) {
try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
boolean allBlocksAreNulls = true;
if (!encodedBlock.isNull(p)) {
allBlocksAreNulls = false;
}
if (allBlocksAreNulls) {
result.appendNull();
continue position;
}
try {
StGeotile.fromFieldDocValuesAndLiteral(result, p, encodedBlock, this.precision);
} catch (IllegalArgumentException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
@Override
public String toString() {
return "StGeotileFromFieldDocValuesAndLiteralEvaluator[" + "encoded=" + encoded + ", precision=" + precision + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(encoded);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static
|
StGeotileFromFieldDocValuesAndLiteralEvaluator
|
java
|
spring-projects__spring-security
|
ldap/src/main/java/org/springframework/security/ldap/search/FilterBasedLdapUserSearch.java
|
{
"start": 1472,
"end": 6433
}
|
class ____ implements LdapUserSearch {
private static final Log logger = LogFactory.getLog(FilterBasedLdapUserSearch.class);
private final ContextSource contextSource;
/**
* The LDAP SearchControls object used for the search. Shared between searches so
* shouldn't be modified once the bean has been configured.
*/
private final SearchControls searchControls = new SearchControls();
/**
* Context name to search in, relative to the base of the configured ContextSource.
*/
private final String searchBase;
/**
* The filter expression used in the user search. This is an LDAP search filter (as
* defined in 'RFC 2254') with optional arguments. See the documentation for the
* <tt>search</tt> methods in {@link javax.naming.directory.DirContext DirContext} for
* more information.
*
* <p>
* In this case, the username is the only parameter.
* </p>
* Possible examples are:
* <ul>
* <li>(uid={0}) - this would search for a username match on the uid attribute.</li>
* </ul>
*/
private final String searchFilter;
public FilterBasedLdapUserSearch(String searchBase, String searchFilter, BaseLdapPathContextSource contextSource) {
Assert.notNull(contextSource, "contextSource must not be null");
Assert.notNull(searchFilter, "searchFilter must not be null.");
Assert.notNull(searchBase, "searchBase must not be null (an empty string is acceptable).");
this.searchFilter = searchFilter;
this.contextSource = contextSource;
this.searchBase = searchBase;
setSearchSubtree(true);
if (searchBase.isEmpty()) {
logger.info(LogMessage.format("Searches will be performed from the root %s since SearchBase not set",
contextSource.getBaseLdapName()));
}
}
/**
* Return the LdapUserDetails containing the user's information
* @param username the username to search for.
* @return An LdapUserDetails object containing the details of the located user's
* directory entry
* @throws UsernameNotFoundException if no matching entry is found.
*/
@Override
public DirContextOperations searchForUser(String username) {
logger.trace(LogMessage.of(() -> "Searching for user '" + username + "', with " + this));
SpringSecurityLdapTemplate template = new SpringSecurityLdapTemplate(this.contextSource);
template.setSearchControls(this.searchControls);
try {
DirContextOperations operations = template.searchForSingleEntry(this.searchBase, this.searchFilter,
new String[] { username });
logger.debug(LogMessage.of(() -> "Found user '" + username + "', with " + this));
return operations;
}
catch (IncorrectResultSizeDataAccessException ex) {
if (ex.getActualSize() == 0) {
throw UsernameNotFoundException.fromUsername(username);
}
// Search should never return multiple results if properly configured
throw ex;
}
}
/**
* Sets the corresponding property on the {@link SearchControls} instance used in the
* search.
* @param deref the derefLinkFlag value as defined in SearchControls..
*/
public void setDerefLinkFlag(boolean deref) {
this.searchControls.setDerefLinkFlag(deref);
}
/**
* If true then searches the entire subtree as identified by context, if false (the
* default) then only searches the level identified by the context.
* @param searchSubtree true the underlying search controls should be set to
* SearchControls.SUBTREE_SCOPE rather than SearchControls.ONELEVEL_SCOPE.
*/
public void setSearchSubtree(boolean searchSubtree) {
this.searchControls
.setSearchScope(searchSubtree ? SearchControls.SUBTREE_SCOPE : SearchControls.ONELEVEL_SCOPE);
}
/**
* The time to wait before the search fails; the default is zero, meaning forever.
* @param searchTimeLimit the time limit for the search (in milliseconds).
*/
public void setSearchTimeLimit(int searchTimeLimit) {
this.searchControls.setTimeLimit(searchTimeLimit);
}
/**
* Specifies the attributes that will be returned as part of the search.
* <p>
* null indicates that all attributes will be returned. An empty array indicates no
* attributes are returned.
* @param attrs An array of attribute names identifying the attributes that will be
* returned. Can be null.
*/
public void setReturningAttributes(String[] attrs) {
this.searchControls.setReturningAttributes(attrs);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append(" [");
sb.append("searchFilter=").append(this.searchFilter).append("; ");
sb.append("searchBase=").append(this.searchBase).append("; ");
sb.append("scope=")
.append((this.searchControls.getSearchScope() != SearchControls.SUBTREE_SCOPE) ? "single-level" : "subtree")
.append("; ");
sb.append("searchTimeLimit=").append(this.searchControls.getTimeLimit()).append("; ");
sb.append("derefLinkFlag=").append(this.searchControls.getDerefLinkFlag()).append(" ]");
return sb.toString();
}
}
|
FilterBasedLdapUserSearch
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/spr10546/Spr10546Tests.java
|
{
"start": 2663,
"end": 2739
}
|
class ____ {
@Configuration
public static
|
AEnclosingWithImportResourceConfig
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/autoconfigure/JdbcConnectionDetails.java
|
{
"start": 1090,
"end": 1551
}
|
interface ____ extends ConnectionDetails {
/**
* Username for the database.
* @return the username for the database
*/
@Nullable String getUsername();
/**
* Password for the database.
* @return the password for the database
*/
@Nullable String getPassword();
/**
* JDBC url for the database.
* @return the JDBC url for the database
*/
String getJdbcUrl();
/**
* The name of the JDBC driver class. Defaults to the
|
JdbcConnectionDetails
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchRequest.java
|
{
"start": 1238,
"end": 9447
}
|
class ____ extends LegacyActionRequest {
public static final String INLINE_WATCH_ID = "_inlined_";
private String id;
private boolean ignoreCondition = false;
private boolean recordExecution = false;
@Nullable
private Map<String, Object> triggerData = null;
@Nullable
private Map<String, Object> alternativeInput = null;
private Map<String, ActionExecutionMode> actionModes = new HashMap<>();
private BytesReference watchSource;
private XContentType xContentType = XContentType.JSON;
private boolean debug = false;
public ExecuteWatchRequest() {}
/**
* @param id the id of the watch to execute
*/
public ExecuteWatchRequest(String id) {
this.id = id;
}
public ExecuteWatchRequest(StreamInput in) throws IOException {
super(in);
id = in.readOptionalString();
ignoreCondition = in.readBoolean();
recordExecution = in.readBoolean();
alternativeInput = in.readOptional(StreamInput::readGenericMap);
triggerData = in.readOptional(StreamInput::readGenericMap);
long actionModesCount = in.readLong();
actionModes = new HashMap<>();
for (int i = 0; i < actionModesCount; i++) {
actionModes.put(in.readString(), ActionExecutionMode.resolve(in.readByte()));
}
if (in.readBoolean()) {
watchSource = in.readBytesReference();
xContentType = in.readEnum(XContentType.class);
}
debug = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalString(id);
out.writeBoolean(ignoreCondition);
out.writeBoolean(recordExecution);
out.writeOptional(StreamOutput::writeGenericMap, alternativeInput);
out.writeOptional(StreamOutput::writeGenericMap, triggerData);
out.writeLong(actionModes.size());
for (Map.Entry<String, ActionExecutionMode> entry : actionModes.entrySet()) {
out.writeString(entry.getKey());
out.writeByte(entry.getValue().id());
}
out.writeBoolean(watchSource != null);
if (watchSource != null) {
out.writeBytesReference(watchSource);
XContentHelper.writeTo(out, xContentType);
}
out.writeBoolean(debug);
}
/**
* @return The id of the watch to be executed
*/
public String getId() {
return id;
}
/**
* Sets the id of the watch to be executed
*/
public void setId(String id) {
this.id = id;
}
/**
* @return Should the condition for this execution be ignored
*/
public boolean isIgnoreCondition() {
return ignoreCondition;
}
/**
* @param ignoreCondition set if the condition for this execution be ignored
*/
public void setIgnoreCondition(boolean ignoreCondition) {
this.ignoreCondition = ignoreCondition;
}
/**
* @return Should this execution be recorded in the history index
*/
public boolean isRecordExecution() {
return recordExecution;
}
/**
* @param recordExecution Sets if this execution be recorded in the history index
*/
public void setRecordExecution(boolean recordExecution) {
this.recordExecution = recordExecution;
}
/**
* @return The alertnative input to use (may be null)
*/
public Map<String, Object> getAlternativeInput() {
return alternativeInput;
}
/**
* @param alternativeInput Set's the alernative input
*/
public void setAlternativeInput(Map<String, Object> alternativeInput) {
this.alternativeInput = alternativeInput;
}
/**
* @param data The data that should be associated with the trigger event.
*/
public void setTriggerData(Map<String, Object> data) throws IOException {
this.triggerData = data;
}
/**
* @param event the trigger event to use
*/
public void setTriggerEvent(TriggerEvent event) throws IOException {
setTriggerData(event.data());
}
/**
* @return the trigger to use
*/
public Map<String, Object> getTriggerData() {
return triggerData;
}
/**
* @return the source of the watch to execute
*/
public BytesReference getWatchSource() {
return watchSource;
}
public XContentType getXContentType() {
return xContentType;
}
/**
* @param watchSource instead of using an existing watch use this non persisted watch
*/
@SuppressWarnings("HiddenField")
public void setWatchSource(BytesReference watchSource, XContentType xContentType) {
this.watchSource = watchSource;
this.xContentType = xContentType;
}
/**
* @param watchSource instead of using an existing watch use this non persisted watch
*/
public void setWatchSource(WatchSourceBuilder watchSource) {
this.watchSource = watchSource.buildAsBytes(XContentType.JSON);
this.xContentType = XContentType.JSON;
}
/**
* @return the execution modes for the actions. These modes determine the nature of the execution
* of the watch actions while the watch is executing.
*/
public Map<String, ActionExecutionMode> getActionModes() {
return actionModes;
}
/**
* Sets the action execution mode for the give action (identified by its id).
*
* @param actionId the action id.
* @param actionMode the execution mode of the action.
*/
public void setActionMode(String actionId, ActionExecutionMode actionMode) {
actionModes.put(actionId, actionMode);
}
/**
* @return whether the watch should execute in debug mode. In debug mode the execution {@code vars}
* will be returned as part of the watch record.
*/
public boolean isDebug() {
return debug;
}
/**
* @param debug indicates whether the watch should execute in debug mode. In debug mode the
* returned watch record will hold the execution {@code vars}
*/
public void setDebug(boolean debug) {
this.debug = debug;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (id == null && watchSource == null) {
validationException = ValidateActions.addValidationError(
"a watch execution request must either have a watch id or an inline " + "watch source, but both are missing",
validationException
);
}
if (id != null && WatcherUtils.isValidId(id) == false) {
validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException);
}
for (String actionId : actionModes.keySet()) {
if (actionId == null) {
validationException = ValidateActions.addValidationError(
String.format(Locale.ROOT, "action id may not be null"),
validationException
);
} else if (WatcherUtils.isValidId(actionId) == false) {
validationException = ValidateActions.addValidationError(
String.format(Locale.ROOT, "action id [%s] contains whitespace", actionId),
validationException
);
}
}
if (watchSource != null && id != null) {
validationException = ValidateActions.addValidationError(
"a watch execution request must either have a watch id or an inline " + "watch source but not both",
validationException
);
}
if (watchSource != null && recordExecution) {
validationException = ValidateActions.addValidationError(
"the execution of an inline watch cannot be recorded",
validationException
);
}
return validationException;
}
@Override
public String toString() {
return "execute[" + id + "]";
}
}
|
ExecuteWatchRequest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInPipelineTest.java
|
{
"start": 1971,
"end": 2216
}
|
class ____ {
private final String postfix;
public MyBean(String postfix) {
this.postfix = postfix;
}
public String doSomething(String body) {
return body + postfix;
}
}
}
|
MyBean
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RMap.java
|
{
"start": 1231,
"end": 23290
}
|
interface ____<K, V> extends ConcurrentMap<K, V>, RExpirable, RMapAsync<K, V>, RDestroyable {
/**
* Loads all map entries to this Redis map using {@link org.redisson.api.map.MapLoader}.
*
* @param replaceExistingValues - <code>true</code> if existed values should be replaced, <code>false</code> otherwise.
* @param parallelism - parallelism level, used to increase speed of process execution
*/
void loadAll(boolean replaceExistingValues, int parallelism);
/**
* Loads map entries using {@link org.redisson.api.map.MapLoader} whose keys are listed in defined <code>keys</code> parameter.
*
* @param keys - map keys
* @param replaceExistingValues - <code>true</code> if existed values should be replaced, <code>false</code> otherwise.
* @param parallelism - parallelism level, used to increase speed of process execution
*/
void loadAll(Set<? extends K> keys, boolean replaceExistingValues, int parallelism);
/**
* Returns the value mapped by defined <code>key</code> or {@code null} if value is absent.
* <p>
* If map doesn't contain value for specified key and {@link MapLoader} is defined
* then value will be loaded in read-through mode.
*
* @param key the key
* @return the value mapped by defined <code>key</code> or {@code null} if value is absent
*/
@Override
V get(Object key);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>.
* Returns previous value if map entry with specified <code>key</code> already existed.
* <p>
* If {@link MapWriter} is defined then map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return previous associated value
*/
@Override
V put(K key, V value);
/**
* Stores the specified <code>value</code> mapped by <code>key</code>
* only if there is no value with specified<code>key</code> stored before.
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>null</code> if key is a new one in the hash and value was set.
* Previous value if key already exists in the hash and change hasn't been made.
*/
@Override
V putIfAbsent(K key, V value);
/**
* Stores the specified <code>value</code> mapped by <code>key</code>
* only if mapping already exists.
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>null</code> if key doesn't exist in the hash and value hasn't been set.
* Previous value if key already exists in the hash and new value has been stored.
*/
V putIfExists(K key, V value);
/**
* Returns random keys from this map limited by <code>count</code>
*
* @param count - keys amount to return
* @return random keys
*/
Set<K> randomKeys(int count);
/**
* Returns random map entries from this map limited by <code>count</code>
*
* @param count - entries amount to return
* @return random entries
*/
Map<K, V> randomEntries(int count);
/**
* Returns <code>RMapReduce</code> object associated with this map
*
* @param <KOut> output key
* @param <VOut> output value
* @return MapReduce instance
*/
<KOut, VOut> RMapReduce<K, V, KOut, VOut> mapReduce();
/**
* Returns <code>RCountDownLatch</code> instance associated with key
*
* @param key - map key
* @return countdownlatch
*/
RCountDownLatch getCountDownLatch(K key);
/**
* Returns <code>RPermitExpirableSemaphore</code> instance associated with key
*
* @param key - map key
* @return permitExpirableSemaphore
*/
RPermitExpirableSemaphore getPermitExpirableSemaphore(K key);
/**
* Returns <code>RSemaphore</code> instance associated with key
*
* @param key - map key
* @return semaphore
*/
RSemaphore getSemaphore(K key);
/**
* Returns <code>RLock</code> instance associated with key
*
* @param key - map key
* @return fairlock
*/
RLock getFairLock(K key);
/**
* Returns <code>RReadWriteLock</code> instance associated with key
*
* @param key - map key
* @return readWriteLock
*/
RReadWriteLock getReadWriteLock(K key);
/**
* Returns <code>RLock</code> instance associated with key
*
* @param key - map key
* @return lock
*/
RLock getLock(K key);
/**
* Returns size of value mapped by specified <code>key</code> in bytes
*
* @param key - map key
* @return size of value
*/
int valueSize(K key);
/**
* Adds the given <code>delta</code> to the current value
* by mapped <code>key</code>.
* <p>
* Works only with codecs below
* <p>
* {@link org.redisson.codec.JsonJacksonCodec},
* <p>
* {@link org.redisson.client.codec.StringCodec},
* <p>
* {@link org.redisson.client.codec.IntegerCodec},
* <p>
* {@link org.redisson.client.codec.DoubleCodec}
* <p>
* {@link org.redisson.client.codec.LongCodec}
*
* @param key - map key
* @param delta the value to add
* @return the updated value
*/
V addAndGet(K key, Number delta);
/**
* Returns <code>true</code> if this map contains map entry
* mapped by specified <code>key</code>, otherwise <code>false</code>
*
* @param key - map key
* @return <code>true</code> if this map contains map entry
* mapped by specified <code>key</code>, otherwise <code>false</code>
*/
@Override
boolean containsKey(Object key);
/**
* Returns <code>true</code> if this map contains any map entry
* with specified <code>value</code>, otherwise <code>false</code>
*
* @param value - map value
* @return <code>true</code> if this map contains any map entry
* with specified <code>value</code>, otherwise <code>false</code>
*/
@Override
boolean containsValue(Object value);
/**
* Removes map entry by specified <code>key</code> and returns value.
* <p>
* If {@link MapWriter} is defined then <code>key</code>is deleted in write-through mode.
*
* @param key - map key
* @return deleted value, <code>null</code> if map entry doesn't exist
*/
@Override
V remove(Object key);
/**
* Replaces previous value with a new <code>value</code> mapped by specified <code>key</code>.
* Returns <code>null</code> if there is no map entry stored before and doesn't store new map entry.
* <p>
* If {@link MapWriter} is defined then new <code>value</code>is written in write-through mode.
*
* @param key - map key
* @param value - map value
* @return previous associated value
* or <code>null</code> if there is no map entry stored before and doesn't store new map entry
*/
@Override
V replace(K key, V value);
/**
* Replaces previous <code>oldValue</code> with a <code>newValue</code> mapped by specified <code>key</code>.
* Returns <code>false</code> if previous value doesn't exist or equal to <code>oldValue</code>.
* <p>
* If {@link MapWriter} is defined then <code>newValue</code>is written in write-through mode.
*
* @param key - map key
* @param oldValue - map old value
* @param newValue - map new value
* @return <code>true</code> if value has been replaced otherwise <code>false</code>.
*/
@Override
boolean replace(K key, V oldValue, V newValue);
/**
* Removes map entry only if it exists with specified <code>key</code> and <code>value</code>.
* <p>
* If {@link MapWriter} is defined then <code>key</code>is deleted in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if map entry has been removed otherwise <code>false</code>.
*/
@Override
boolean remove(Object key, Object value);
/**
* Stores map entries specified in <code>map</code> object in batch mode.
* <p>
* If {@link MapWriter} is defined then map entries will be stored in write-through mode.
*
* @param map mappings to be stored in this map
*/
@Override
void putAll(java.util.Map<? extends K, ? extends V> map);
/**
* Stores map entries specified in <code>map</code> object in batch mode.
* Batch inserted by chunks limited by <code>batchSize</code> value
* to avoid OOM and/or Redis response timeout error for map with big size.
* <p>
* If {@link MapWriter} is defined then map entries are stored in write-through mode.
*
* @param map mappings to be stored in this map
* @param batchSize - size of map entries batch
*/
void putAll(Map<? extends K, ? extends V> map, int batchSize);
/**
* Returns map slice contained the mappings with defined <code>keys</code>.
* <p>
* If map doesn't contain value/values for specified key/keys and {@link MapLoader} is defined
* then value/values will be loaded in read-through mode.
* <p>
* The returned map is <b>NOT</b> backed by the original map.
*
* @param keys map keys
* @return Map slice
*/
Map<K, V> getAll(Set<K> keys);
/**
* Removes map entries mapped by specified <code>keys</code>.
* <p>
* Works faster than <code>{@link #remove(Object)}</code> but not returning
* the value.
* <p>
* If {@link MapWriter} is defined then <code>keys</code>are deleted in write-through mode.
*
* @param keys - map keys
* @return the number of keys that were removed from the hash, not including specified but non existing keys
*/
long fastRemove(K... keys);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>.
* <p>
* Works faster than <code>{@link #put(Object, Object)}</code> but not returning
* previous value.
* <p>
* Returns <code>true</code> if key is a new key in the hash and value was set or
* <code>false</code> if key already exists in the hash and the value was updated.
* <p>
* If {@link MapWriter} is defined then map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key is a new key in the hash and value was set.
* <code>false</code> if key already exists in the hash and the value was updated.
*/
boolean fastPut(K key, V value);
/**
* Replaces previous value with a new <code>value</code> mapped by specified <code>key</code>.
* <p>
* Works faster than <code>{@link #replace(Object, Object)}</code> but not returning
* the previous value.
* <p>
* Returns <code>true</code> if key exists and value was updated or
* <code>false</code> if key doesn't exists and value wasn't updated.
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key exists and value was updated.
* <code>false</code> if key doesn't exists and value wasn't updated.
*/
boolean fastReplace(K key, V value);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>
* only if there is no value with specified<code>key</code> stored before.
* <p>
* Returns <code>true</code> if key is a new one in the hash and value was set or
* <code>false</code> if key already exists in the hash and change hasn't been made.
* <p>
* Works faster than <code>{@link #putIfAbsent(Object, Object)}</code> but not returning
* the previous value associated with <code>key</code>
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key is a new one in the hash and value was set.
* <code>false</code> if key already exists in the hash and change hasn't been made.
*/
boolean fastPutIfAbsent(K key, V value);
/**
* Stores the specified <code>value</code> mapped by <code>key</code>
* only if mapping already exists.
* <p>
* Returns <code>true</code> if key is a new one in the hash and value was set or
* <code>false</code> if key already exists in the hash and change hasn't been made.
* <p>
* Works faster than <code>{@link #putIfExists(Object, Object)}</code> but doesn't return
* previous value associated with <code>key</code>
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key already exists in the hash and new value has been stored.
* <code>false</code> if key doesn't exist in the hash and value hasn't been set.
*/
boolean fastPutIfExists(K key, V value);
/**
* Read all keys at once
*
* @return keys
*/
Set<K> readAllKeySet();
/**
* Read all values at once
*
* @return values
*/
Collection<V> readAllValues();
/**
* Read all map entries at once
*
* @return entries
*/
Set<Entry<K, V>> readAllEntrySet();
/**
* Read all map as local instance at once
*
* @return map
*/
Map<K, V> readAllMap();
/**
* Returns key set of this map.
* Keys are loaded in batch. Batch size is <code>10</code>.
*
* @see #readAllKeySet()
*
* @return key set
*/
@Override
Set<K> keySet();
/**
* Returns key set of this map.
* Keys are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @see #readAllKeySet()
*
* @param count - size of keys batch
* @return key set
*/
Set<K> keySet(int count);
/**
* Returns key set of this map.
* If <code>pattern</code> is not null then only keys match this pattern are loaded.
* Keys are loaded in batch. Batch size is defined by <code>count</code> param.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
*
* Supported glob-style patterns:
* <p>
* h?llo subscribes to hello, hallo and hxllo
* <p>
* h*llo subscribes to hllo and heeeello
* <p>
* h[ae]llo subscribes to hello and hallo, but not hillo
*
* @see #readAllKeySet()
*
* @param pattern - key pattern
* @param count - size of keys batch
* @return key set
*/
Set<K> keySet(String pattern, int count);
/**
* Returns key set of this map.
* If <code>pattern</code> is not null then only keys match this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
*
* Supported glob-style patterns:
* <p>
* h?llo subscribes to hello, hallo and hxllo
* <p>
* h*llo subscribes to hllo and heeeello
* <p>
* h[ae]llo subscribes to hello and hallo, but not hillo
*
* @see #readAllKeySet()
*
* @param pattern - key pattern
* @return key set
*/
Set<K> keySet(String pattern);
/**
* Returns values collection of this map.
* Values are loaded in batch. Batch size is <code>10</code>.
*
* @see #readAllValues()
*
* @return values collection
*/
@Override
Collection<V> values();
/**
* Returns values collection of this map.
* Values are loaded in batch. Batch size is <code>10</code>.
* If <code>keyPattern</code> is not null then only values mapped by matched keys of this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMap<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMap<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllValues()
*
* @param keyPattern - key pattern
* @return values collection
*/
Collection<V> values(String keyPattern);
/**
* Returns values collection of this map.
* Values are loaded in batch. Batch size is defined by <code>count</code> param.
* If <code>keyPattern</code> is not null then only values mapped by matched keys of this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMap<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMap<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllValues()
*
* @param keyPattern - key pattern
* @param count - size of values batch
* @return values collection
*/
Collection<V> values(String keyPattern, int count);
/**
* Returns values collection of this map.
* Values are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @see #readAllValues()
*
* @param count - size of values batch
* @return values collection
*/
Collection<V> values(int count);
/**
* Returns map entries collection.
* Map entries are loaded in batch. Batch size is <code>10</code>.
*
* @see #readAllEntrySet()
*
* @return map entries collection
*/
@Override
Set<java.util.Map.Entry<K, V>> entrySet();
/**
* Returns map entries collection.
* Map entries are loaded in batch. Batch size is <code>10</code>.
* If <code>keyPattern</code> is not null then only entries mapped by matched keys of this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMap<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMap<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllEntrySet()
*
* @param keyPattern key pattern
* @return map entries collection
*/
Set<java.util.Map.Entry<K, V>> entrySet(String keyPattern);
/**
* Returns map entries collection.
* Map entries are loaded in batch. Batch size is defined by <code>count</code> param.
* If <code>keyPattern</code> is not null then only entries mapped by matched keys of this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMap<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMap<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllEntrySet()
*
* @param keyPattern key pattern
* @param count size of entries batch
* @return map entries collection
*/
Set<java.util.Map.Entry<K, V>> entrySet(String keyPattern, int count);
/**
* Returns map entries collection.
* Map entries are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @see #readAllEntrySet()
*
* @param count - size of entries batch
* @return map entries collection
*/
Set<java.util.Map.Entry<K, V>> entrySet(int count);
/**
* Adds object event listener
*
* @see org.redisson.api.listener.TrackingListener
* @see org.redisson.api.listener.MapPutListener
* @see org.redisson.api.listener.MapRemoveListener
* @see org.redisson.api.ExpiredObjectListener
* @see org.redisson.api.DeletedObjectListener
*
* @param listener object event listener
* @return listener id
*/
int addListener(ObjectListener listener);
}
|
RMap
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IdentifierNameTest.java
|
{
"start": 15563,
"end": 15910
}
|
enum ____ {",
" ONE {",
" void f() {}",
" }",
"}")
.doTest();
}
@Test
public void unnamedVariables() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Scanner;
import java.util.function.Function;
|
Test
|
java
|
google__auto
|
value/src/it/functional/src/test/java/com/google/auto/value/AutoValueJava8Test.java
|
{
"start": 41172,
"end": 41882
}
|
class ____<T extends @Nullable Object, U extends T> {
public abstract T nullOne();
public abstract U nullTwo();
public static <T extends @Nullable Object, U extends T> NullableVariableBound<T, U> create(
T nullOne, U nullTwo) {
return new AutoValue_AutoValueJava8Test_NullableVariableBound<>(nullOne, nullTwo);
}
}
@Test
public void nullableVariableBound() {
assumeTrue(javacHandlesTypeAnnotationsCorrectly);
NullableVariableBound<@Nullable CharSequence, @Nullable String> x =
NullableVariableBound.create(null, null);
assertThat(x.nullOne()).isNull();
assertThat(x.nullTwo()).isNull();
}
@AutoValue
public abstract static
|
NullableVariableBound
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/jdbc/batch/internal/BatchImpl.java
|
{
"start": 1262,
"end": 9632
}
|
class ____ implements Batch {
private final BatchKey key;
private final int batchSizeToUse;
private final PreparedStatementGroup statementGroup;
private final JdbcCoordinator jdbcCoordinator;
private final SqlStatementLogger sqlStatementLogger;
private final SqlExceptionHelper sqlExceptionHelper;
private final LinkedHashSet<BatchObserver> observers = new LinkedHashSet<>();
private int batchPosition;
private boolean batchExecuted;
private StaleStateMapper[] staleStateMappers;
public BatchImpl(
BatchKey key,
PreparedStatementGroup statementGroup,
int batchSizeToUse,
JdbcCoordinator jdbcCoordinator) {
requireNonNull( key, "Batch key cannot be null" );
requireNonNull( jdbcCoordinator, "JDBC coordinator cannot be null" );
this.batchSizeToUse = batchSizeToUse;
this.key = key;
this.jdbcCoordinator = jdbcCoordinator;
this.statementGroup = statementGroup;
final var jdbcServices =
jdbcCoordinator.getJdbcSessionOwner().getJdbcSessionContext().getJdbcServices();
sqlStatementLogger = jdbcServices.getSqlStatementLogger();
sqlExceptionHelper = jdbcServices.getSqlExceptionHelper();
if ( BATCH_MESSAGE_LOGGER.isTraceEnabled() ) {
BATCH_MESSAGE_LOGGER.createBatch(
batchSizeToUse,
key.toLoggableString()
);
}
}
@Override
public final BatchKey getKey() {
return key;
}
@Override
public PreparedStatementGroup getStatementGroup() {
return statementGroup;
}
@Override
public void addObserver(BatchObserver observer) {
observers.add( observer );
}
@Override
public void addToBatch(
JdbcValueBindings jdbcValueBindings, TableInclusionChecker inclusionChecker,
StaleStateMapper staleStateMapper) {
if ( staleStateMapper != null ) {
if ( staleStateMappers == null ) {
staleStateMappers = new StaleStateMapper[batchSizeToUse];
}
staleStateMappers[batchPosition] = staleStateMapper;
}
addToBatch( jdbcValueBindings, inclusionChecker );
}
@Override
public void addToBatch(JdbcValueBindings jdbcValueBindings, TableInclusionChecker inclusionChecker) {
final boolean loggerTraceEnabled = BATCH_MESSAGE_LOGGER.isTraceEnabled();
if ( loggerTraceEnabled ) {
BATCH_MESSAGE_LOGGER.addToBatch(
batchPosition + 1,
batchSizeToUse,
getKey().toLoggableString()
);
}
try {
getStatementGroup().forEachStatement( (tableName, statementDetails) -> {
if ( inclusionChecker != null
&& !inclusionChecker.include( statementDetails.getMutatingTableDetails() ) ) {
if ( loggerTraceEnabled ) {
MODEL_MUTATION_LOGGER.skippingAddBatchForTable(
statementDetails.getMutatingTableDetails().getTableName(),
batchPosition+1
);
}
}
else {
MODEL_MUTATION_LOGGER.addBatchForTable(
statementDetails.getMutatingTableDetails().getTableName(),
batchPosition+1
);
//noinspection resource
final var statement = statementDetails.resolveStatement();
final String sqlString = statementDetails.getSqlString();
sqlStatementLogger.logStatement( sqlString );
jdbcValueBindings.beforeStatement( statementDetails );
try {
statement.addBatch();
}
catch (SQLException exception) {
throw sqlExceptionHelper.convert(
exception,
"Could not perform addBatch",
sqlString
);
}
finally {
jdbcValueBindings.afterStatement( statementDetails.getMutatingTableDetails() );
}
}
} );
}
catch (RuntimeException e) {
abortBatch( e );
throw e;
}
batchPosition++;
if ( batchPosition == batchSizeToUse ) {
notifyObserversImplicitExecution();
performExecution();
}
}
protected void releaseStatements() {
statementGroup.release();
}
protected void clearBatch(PreparedStatementDetails statementDetails) {
final var statement = statementDetails.getStatement();
assert statement != null;
try {
// This code can be called after the connection is released
// and the statement is closed. If the statement is closed,
// then SQLException will be thrown when PreparedStatement#clearBatch
// is called.
// Ensure the statement is not closed before
// calling PreparedStatement#clearBatch.
if ( !statement.isClosed() ) {
statement.clearBatch();
}
}
catch ( SQLException e ) {
BATCH_MESSAGE_LOGGER.unableToReleaseBatchStatement();
}
}
/**
* Convenience method to notify registered observers of an explicit execution of this batch.
*/
protected final void notifyObserversExplicitExecution() {
for ( var observer : observers ) {
observer.batchExplicitlyExecuted();
}
}
/**
* Convenience method to notify registered observers of an implicit execution of this batch.
*/
protected final void notifyObserversImplicitExecution() {
for ( var observer : observers ) {
observer.batchImplicitlyExecuted();
}
}
protected void abortBatch(Exception cause) {
try {
jdbcCoordinator.abortBatch();
}
catch (RuntimeException e) {
cause.addSuppressed( e );
}
}
@Override
public void execute() {
notifyObserversExplicitExecution();
if ( getStatementGroup().getNumberOfStatements() > 0 ) {
try {
if ( batchPosition == 0 ) {
if ( !batchExecuted && BATCH_MESSAGE_LOGGER.isTraceEnabled() ) {
BATCH_MESSAGE_LOGGER.emptyBatch( getKey().toLoggableString() );
}
}
else {
performExecution();
}
}
finally {
releaseStatements();
}
}
}
protected void performExecution() {
if ( BATCH_MESSAGE_LOGGER.isTraceEnabled() ) {
BATCH_MESSAGE_LOGGER.executeBatch(
batchPosition,
batchSizeToUse,
getKey().toLoggableString()
);
}
final var jdbcSessionOwner = jdbcCoordinator.getJdbcSessionOwner();
final var eventHandler = jdbcSessionOwner.getJdbcSessionContext().getEventHandler();
try {
getStatementGroup().forEachStatement( (tableName, statementDetails) -> {
final String sql = statementDetails.getSqlString();
final var statement = statementDetails.getStatement();
if ( statement != null ) {
try {
if ( statementDetails.getMutatingTableDetails().isIdentifierTable() ) {
final var eventMonitor = jdbcSessionOwner.getEventMonitor();
final var executionEvent = eventMonitor.beginJdbcBatchExecutionEvent();
final int[] rowCounts;
try {
eventHandler.jdbcExecuteBatchStart();
rowCounts = statement.executeBatch();
}
finally {
eventMonitor.completeJdbcBatchExecutionEvent( executionEvent, sql );
eventHandler.jdbcExecuteBatchEnd();
}
checkRowCounts( rowCounts, statementDetails );
}
else {
statement.executeBatch();
}
}
catch (SQLException e) {
abortBatch( e );
throw sqlExceptionHelper.convert( e, "could not execute batch", sql );
}
catch (RuntimeException re) {
abortBatch( re );
throw re;
}
}
} );
batchExecuted = true;
}
finally {
jdbcCoordinator.afterStatementExecution();
batchPosition = 0;
}
}
private void checkRowCounts(int[] rowCounts, PreparedStatementDetails statementDetails)
throws SQLException, HibernateException {
final int numberOfRowCounts = rowCounts.length;
if ( batchPosition != 0 && numberOfRowCounts != batchPosition ) {
JDBC_LOGGER.unexpectedRowCounts(
statementDetails.getMutatingTableDetails().getTableName(),
numberOfRowCounts,
batchPosition
);
}
final String sql = statementDetails.getSqlString();
for ( int i = 0; i < numberOfRowCounts; i++ ) {
try {
statementDetails.getExpectation()
.verifyOutcome( rowCounts[i], statementDetails.getStatement(), i, sql );
}
catch ( StaleStateException staleStateException ) {
if ( staleStateMappers != null ) {
throw staleStateMappers[i].map( staleStateException );
}
}
}
}
@Override
public void release() {
if ( BATCH_MESSAGE_LOGGER.isInfoEnabled() ) {
final var statementGroup = getStatementGroup();
if ( statementGroup.getNumberOfStatements() > 0
&& statementGroup.hasMatching( statementDetails -> statementDetails.getStatement() != null ) ) {
BATCH_MESSAGE_LOGGER.batchContainedStatementsOnRelease();
}
}
releaseStatements();
observers.clear();
}
@Override
public String toString() {
return "BatchImpl(" + getKey().toLoggableString() + ")";
}
}
|
BatchImpl
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/invocation/TypeSafeMatchingTest.java
|
{
"start": 2518,
"end": 3238
}
|
class ____ implements ArgumentMatcher<Integer> {
@Override
public boolean matches(Integer arg) {
return false;
}
@SuppressWarnings("unused")
public boolean matches(Date arg) {
throw new UnsupportedOperationException();
}
@SuppressWarnings("unused")
public boolean matches(Integer arg, Void v) {
throw new UnsupportedOperationException();
}
}
boolean match = matchesTypeSafe().apply(new TestMatcher(), 123);
assertThat(match).isFalse();
}
@Test
public void matchesWithSubTypeExtendingGenericClass() {
abstract
|
TestMatcher
|
java
|
qos-ch__slf4j
|
log4j-over-slf4j/src/main/java/org/apache/log4j/Appender.java
|
{
"start": 792,
"end": 906
}
|
interface ____ your own strategies for outputting log
* statements.
*
* @author Ceki Gülcü
*/
public
|
for
|
java
|
apache__camel
|
components/camel-milo/src/generated/java/org/apache/camel/component/milo/browse/MiloBrowseComponentConfigurer.java
|
{
"start": 738,
"end": 11851
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
private org.apache.camel.component.milo.client.MiloClientConfiguration getOrCreateConfiguration(MiloBrowseComponent target) {
if (target.getConfiguration() == null) {
target.setConfiguration(new org.apache.camel.component.milo.client.MiloClientConfiguration());
}
return target.getConfiguration();
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
MiloBrowseComponent target = (MiloBrowseComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowedsecuritypolicies":
case "allowedSecurityPolicies": getOrCreateConfiguration(target).setAllowedSecurityPolicies(property(camelContext, java.lang.String.class, value)); return true;
case "applicationname":
case "applicationName": getOrCreateConfiguration(target).setApplicationName(property(camelContext, java.lang.String.class, value)); return true;
case "applicationuri":
case "applicationUri": getOrCreateConfiguration(target).setApplicationUri(property(camelContext, java.lang.String.class, value)); return true;
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "channellifetime":
case "channelLifetime": getOrCreateConfiguration(target).setChannelLifetime(property(camelContext, java.lang.Long.class, value)); return true;
case "clientid":
case "clientId": getOrCreateConfiguration(target).setClientId(property(camelContext, java.lang.String.class, value)); return true;
case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.milo.client.MiloClientConfiguration.class, value)); return true;
case "discoveryendpointsuffix":
case "discoveryEndpointSuffix": getOrCreateConfiguration(target).setDiscoveryEndpointSuffix(property(camelContext, java.lang.String.class, value)); return true;
case "discoveryendpointuri":
case "discoveryEndpointUri": getOrCreateConfiguration(target).setDiscoveryEndpointUri(property(camelContext, java.lang.String.class, value)); return true;
case "keyalias":
case "keyAlias": getOrCreateConfiguration(target).setKeyAlias(property(camelContext, java.lang.String.class, value)); return true;
case "keypassword":
case "keyPassword": getOrCreateConfiguration(target).setKeyPassword(property(camelContext, java.lang.String.class, value)); return true;
case "keystorepassword":
case "keyStorePassword": getOrCreateConfiguration(target).setKeyStorePassword(property(camelContext, java.lang.String.class, value)); return true;
case "keystoretype":
case "keyStoreType": getOrCreateConfiguration(target).setKeyStoreType(property(camelContext, java.lang.String.class, value)); return true;
case "keystoreurl":
case "keyStoreUrl": getOrCreateConfiguration(target).setKeyStoreUrl(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "maxpendingpublishrequests":
case "maxPendingPublishRequests": getOrCreateConfiguration(target).setMaxPendingPublishRequests(property(camelContext, java.lang.Long.class, value)); return true;
case "maxresponsemessagesize":
case "maxResponseMessageSize": getOrCreateConfiguration(target).setMaxResponseMessageSize(property(camelContext, java.lang.Long.class, value)); return true;
case "miloclientconnectionmanager":
case "miloClientConnectionManager": target.setMiloClientConnectionManager(property(camelContext, org.apache.camel.component.milo.client.MiloClientConnectionManager.class, value)); return true;
case "overridehost":
case "overrideHost": getOrCreateConfiguration(target).setOverrideHost(property(camelContext, boolean.class, value)); return true;
case "producturi":
case "productUri": getOrCreateConfiguration(target).setProductUri(property(camelContext, java.lang.String.class, value)); return true;
case "requesttimeout":
case "requestTimeout": getOrCreateConfiguration(target).setRequestTimeout(property(camelContext, java.lang.Long.class, value)); return true;
case "requestedpublishinginterval":
case "requestedPublishingInterval": getOrCreateConfiguration(target).setRequestedPublishingInterval(property(camelContext, java.lang.Double.class, value)); return true;
case "sessionname":
case "sessionName": getOrCreateConfiguration(target).setSessionName(property(camelContext, java.lang.String.class, value)); return true;
case "sessiontimeout":
case "sessionTimeout": getOrCreateConfiguration(target).setSessionTimeout(property(camelContext, java.lang.Long.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"miloClientConnectionManager"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowedsecuritypolicies":
case "allowedSecurityPolicies": return java.lang.String.class;
case "applicationname":
case "applicationName": return java.lang.String.class;
case "applicationuri":
case "applicationUri": return java.lang.String.class;
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "channellifetime":
case "channelLifetime": return java.lang.Long.class;
case "clientid":
case "clientId": return java.lang.String.class;
case "configuration": return org.apache.camel.component.milo.client.MiloClientConfiguration.class;
case "discoveryendpointsuffix":
case "discoveryEndpointSuffix": return java.lang.String.class;
case "discoveryendpointuri":
case "discoveryEndpointUri": return java.lang.String.class;
case "keyalias":
case "keyAlias": return java.lang.String.class;
case "keypassword":
case "keyPassword": return java.lang.String.class;
case "keystorepassword":
case "keyStorePassword": return java.lang.String.class;
case "keystoretype":
case "keyStoreType": return java.lang.String.class;
case "keystoreurl":
case "keyStoreUrl": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "maxpendingpublishrequests":
case "maxPendingPublishRequests": return java.lang.Long.class;
case "maxresponsemessagesize":
case "maxResponseMessageSize": return java.lang.Long.class;
case "miloclientconnectionmanager":
case "miloClientConnectionManager": return org.apache.camel.component.milo.client.MiloClientConnectionManager.class;
case "overridehost":
case "overrideHost": return boolean.class;
case "producturi":
case "productUri": return java.lang.String.class;
case "requesttimeout":
case "requestTimeout": return java.lang.Long.class;
case "requestedpublishinginterval":
case "requestedPublishingInterval": return java.lang.Double.class;
case "sessionname":
case "sessionName": return java.lang.String.class;
case "sessiontimeout":
case "sessionTimeout": return java.lang.Long.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
MiloBrowseComponent target = (MiloBrowseComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowedsecuritypolicies":
case "allowedSecurityPolicies": return getOrCreateConfiguration(target).getAllowedSecurityPolicies();
case "applicationname":
case "applicationName": return getOrCreateConfiguration(target).getApplicationName();
case "applicationuri":
case "applicationUri": return getOrCreateConfiguration(target).getApplicationUri();
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "channellifetime":
case "channelLifetime": return getOrCreateConfiguration(target).getChannelLifetime();
case "clientid":
case "clientId": return getOrCreateConfiguration(target).getClientId();
case "configuration": return target.getConfiguration();
case "discoveryendpointsuffix":
case "discoveryEndpointSuffix": return getOrCreateConfiguration(target).getDiscoveryEndpointSuffix();
case "discoveryendpointuri":
case "discoveryEndpointUri": return getOrCreateConfiguration(target).getDiscoveryEndpointUri();
case "keyalias":
case "keyAlias": return getOrCreateConfiguration(target).getKeyAlias();
case "keypassword":
case "keyPassword": return getOrCreateConfiguration(target).getKeyPassword();
case "keystorepassword":
case "keyStorePassword": return getOrCreateConfiguration(target).getKeyStorePassword();
case "keystoretype":
case "keyStoreType": return getOrCreateConfiguration(target).getKeyStoreType();
case "keystoreurl":
case "keyStoreUrl": return getOrCreateConfiguration(target).getKeyStoreUrl();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "maxpendingpublishrequests":
case "maxPendingPublishRequests": return getOrCreateConfiguration(target).getMaxPendingPublishRequests();
case "maxresponsemessagesize":
case "maxResponseMessageSize": return getOrCreateConfiguration(target).getMaxResponseMessageSize();
case "miloclientconnectionmanager":
case "miloClientConnectionManager": return target.getMiloClientConnectionManager();
case "overridehost":
case "overrideHost": return getOrCreateConfiguration(target).isOverrideHost();
case "producturi":
case "productUri": return getOrCreateConfiguration(target).getProductUri();
case "requesttimeout":
case "requestTimeout": return getOrCreateConfiguration(target).getRequestTimeout();
case "requestedpublishinginterval":
case "requestedPublishingInterval": return getOrCreateConfiguration(target).getRequestedPublishingInterval();
case "sessionname":
case "sessionName": return getOrCreateConfiguration(target).getSessionName();
case "sessiontimeout":
case "sessionTimeout": return getOrCreateConfiguration(target).getSessionTimeout();
default: return null;
}
}
}
|
MiloBrowseComponentConfigurer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/jmx/annotation/JmxEndpointDiscovererTests.java
|
{
"start": 18071,
"end": 19150
}
|
class ____ {
@ManagedOperation(description = "Get all the things")
@ReadOperation
@Nullable Object getAll() {
return null;
}
@ReadOperation
@ManagedOperation(description = "Get something based on a timeUnit")
@ManagedOperationParameters({
@ManagedOperationParameter(name = "unitMs", description = "Number of milliseconds") })
@Nullable String getSomething(Long timeUnit) {
return null;
}
@WriteOperation
@ManagedOperation(description = "Update something based on bar")
@ManagedOperationParameters({ @ManagedOperationParameter(name = "foo", description = "Foo identifier"),
@ManagedOperationParameter(name = "bar", description = "Bar value") })
void update(String foo, String bar) {
}
@DeleteOperation
@ManagedOperation(description = "Delete something based on a timeUnit")
@ManagedOperationParameters({
@ManagedOperationParameter(name = "unitMs", description = "Number of milliseconds") })
void deleteSomething(Long timeUnit) {
}
}
@EndpointJmxExtension(endpoint = TestEndpoint.class)
static
|
TestJmxEndpointExtension
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/tofix/ExternalTypeCustomResolver1288Test.java
|
{
"start": 13352,
"end": 13517
}
|
class ____ {
@JsonPOJOBuilder(withPrefix = "")
@JsonPropertyOrder({ "form_of_payment", "payment_details" })
public static
|
PaymentMean
|
java
|
apache__flink
|
flink-connectors/flink-hadoop-compatibility/src/main/java/org/apache/flink/api/java/hadoop/mapreduce/HadoopInputFormat.java
|
{
"start": 1426,
"end": 2893
}
|
class ____<K, V> extends HadoopInputFormatBase<K, V, Tuple2<K, V>>
implements ResultTypeQueryable<Tuple2<K, V>> {
private static final long serialVersionUID = 1L;
public HadoopInputFormat(
org.apache.hadoop.mapreduce.InputFormat<K, V> mapreduceInputFormat,
Class<K> key,
Class<V> value,
Job job) {
super(mapreduceInputFormat, key, value, job);
}
public HadoopInputFormat(
org.apache.hadoop.mapreduce.InputFormat<K, V> mapreduceInputFormat,
Class<K> key,
Class<V> value)
throws IOException {
super(mapreduceInputFormat, key, value, Job.getInstance());
}
@Override
public Tuple2<K, V> nextRecord(Tuple2<K, V> record) throws IOException {
if (!this.fetched) {
fetchNext();
}
if (!this.hasNext) {
return null;
}
try {
record.f0 = recordReader.getCurrentKey();
record.f1 = recordReader.getCurrentValue();
} catch (InterruptedException e) {
throw new IOException("Could not get KeyValue pair.", e);
}
this.fetched = false;
return record;
}
@Override
public TypeInformation<Tuple2<K, V>> getProducedType() {
return new TupleTypeInfo<Tuple2<K, V>>(
TypeExtractor.createTypeInfo(keyClass), TypeExtractor.createTypeInfo(valueClass));
}
}
|
HadoopInputFormat
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cdi/testsupport/CdiContainerLinker.java
|
{
"start": 822,
"end": 1225
}
|
class ____ implements ServiceRegistry.SettingResolver {
@Override
public Object resolve(StandardServiceRegistryBuilder registryBuilder, ExtensionContext junitContext) {
final CdiContainerScope containerScope = CdiContainerExtension.findCdiContainerScope(
junitContext.getRequiredTestInstance(),
junitContext
);
return containerScope.getExtendedBeanManager();
}
}
}
|
ExtendedResolver
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/client/DefaultRequestExpectation.java
|
{
"start": 3720,
"end": 4599
}
|
class ____ {
private final ExpectedCount expectedCount;
private int matchedRequestCount;
public RequestCount(ExpectedCount expectedCount) {
this.expectedCount = expectedCount;
}
public ExpectedCount getExpectedCount() {
return this.expectedCount;
}
public int getMatchedRequestCount() {
return this.matchedRequestCount;
}
public void incrementAndValidate() {
this.matchedRequestCount++;
if (getMatchedRequestCount() > getExpectedCount().getMaxCount()) {
throw new AssertionError("No more calls expected.");
}
}
public boolean hasRemainingCount() {
return (getMatchedRequestCount() < getExpectedCount().getMaxCount());
}
public boolean isSatisfied() {
// Only validate min count since max count is checked on every request...
return (getMatchedRequestCount() >= getExpectedCount().getMinCount());
}
}
}
|
RequestCount
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/AbstractSqmJoin.java
|
{
"start": 964,
"end": 4611
}
|
class ____<L, R> extends AbstractSqmFrom<L, R> implements SqmJoin<L, R> {
private final SqmJoinType joinType;
private @Nullable SqmPredicate onClausePredicate;
public AbstractSqmJoin(
NavigablePath navigablePath,
SqmPathSource<R> referencedNavigable,
SqmFrom<?, L> lhs,
@Nullable String alias,
SqmJoinType joinType,
NodeBuilder nodeBuilder) {
super( navigablePath, referencedNavigable, lhs, alias, nodeBuilder );
this.joinType = joinType;
}
@Override
public SqmJoinType getSqmJoinType() {
return joinType;
}
@Override
public @Nullable SqmPredicate getJoinPredicate() {
return onClausePredicate;
}
@Override
public void setJoinPredicate(@Nullable SqmPredicate predicate) {
if ( LOG.isTraceEnabled() ) {
LOG.tracef(
"Setting join predicate [%s] (was [%s])",
predicate,
this.onClausePredicate == null ? "<null>" : this.onClausePredicate
);
}
this.onClausePredicate = predicate;
}
public void applyRestriction(SqmPredicate restriction) {
if ( this.onClausePredicate == null ) {
this.onClausePredicate = restriction;
}
else {
this.onClausePredicate = combinePredicates( onClausePredicate, restriction );
}
}
protected void copyTo(AbstractSqmJoin<L, R> target, SqmCopyContext context) {
super.copyTo( target, context );
target.onClausePredicate = onClausePredicate == null ? null : onClausePredicate.copy( context );
}
@Override
public <S extends R> SqmTreatedJoin<L, R, S> treatAs(Class<S> treatTarget) {
return treatAs( treatTarget, null );
}
@Override
public <S extends R> SqmTreatedJoin<L, R, S> treatAs(EntityDomainType<S> treatTarget) {
return treatAs( treatTarget, null );
}
@Override
public abstract <S extends R> SqmTreatedJoin<L, R, S> treatAs(Class<S> treatJavaType, @Nullable String alias);
@Override
public abstract <S extends R> SqmTreatedJoin<L, R, S> treatAs(EntityDomainType<S> treatTarget, @Nullable String alias);
@Override
public abstract <S extends R> SqmTreatedJoin<L, R, S> treatAs(Class<S> treatJavaType, @Nullable String alias, boolean fetched);
@Override
public abstract <S extends R> SqmTreatedJoin<L, R, S> treatAs(EntityDomainType<S> treatTarget, @Nullable String alias, boolean fetched);
@Override
public @Nullable SqmFrom<?, L> getLhs() {
//noinspection unchecked
return (SqmFrom<?, L>) super.getLhs();
}
@Override
public @Nullable SqmFrom<?, L> getParent() {
return getLhs();
}
@Override
public JoinType getJoinType() {
return joinType.getCorrespondingJpaJoinType();
}
@Override
public @Nullable SqmPredicate getOn() {
return getJoinPredicate();
}
@Override
public <X> SqmEntityJoin<R, X> join(Class<X> targetEntityClass) {
return super.join( targetEntityClass, joinType );
}
@Override
public <X> SqmEntityJoin<R, X> join(Class<X> targetEntityClass, SqmJoinType joinType) {
return super.join( targetEntityClass, joinType );
}
// No need for equals/hashCode or isCompatible/cacheHashCode, because the base implementation using NavigablePath
// is fine for the purpose of matching nodes "syntactically".
@Override
public boolean deepEquals(SqmFrom<?, ?> object) {
return super.deepEquals( object )
&& object instanceof AbstractSqmJoin<?,?> thatJoin
&& joinType == thatJoin.getSqmJoinType()
&& Objects.equals( onClausePredicate, thatJoin.getOn() );
}
@Override
public boolean isDeepCompatible(SqmFrom<?, ?> object) {
return super.isDeepCompatible( object )
&& object instanceof AbstractSqmJoin<?,?> thatJoin
&& joinType == thatJoin.getSqmJoinType()
&& SqmCacheable.areCompatible( onClausePredicate, thatJoin.getOn() );
}
}
|
AbstractSqmJoin
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/pattern/MessagePatternConverterTest.java
|
{
"start": 1590,
"end": 9566
}
|
class ____ {
@Test
void testPattern() {
final MessagePatternConverter converter = MessagePatternConverter.newInstance(null, null);
Message msg = new SimpleMessage("Hello!");
LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("Hello!", sb.toString(), "Unexpected result");
event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(null)
.build();
sb = new StringBuilder();
converter.format(event, sb);
assertEquals(0, sb.length(), "Incorrect length: " + sb);
msg = new SimpleMessage(null);
event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
sb = new StringBuilder();
converter.format(event, sb);
assertEquals(4, sb.length(), "Incorrect length: " + sb);
}
@Test
void testPatternAndParameterizedMessageDateLookup() {
final MessagePatternConverter converter = MessagePatternConverter.newInstance(null, null);
final Message msg = new ParameterizedMessage("${date:now:buhu}");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("${date:now:buhu}", sb.toString(), "Unexpected result");
}
@Test
void testDefaultDisabledLookup() {
final Configuration config =
new DefaultConfigurationBuilder().addProperty("foo", "bar").build(true);
final MessagePatternConverter converter = MessagePatternConverter.newInstance(config, null);
final Message msg = new ParameterizedMessage("${foo}");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("${foo}", sb.toString(), "Unexpected result");
}
@Test
void testDisabledLookup() {
final Configuration config =
new DefaultConfigurationBuilder().addProperty("foo", "bar").build(true);
final MessagePatternConverter converter =
MessagePatternConverter.newInstance(config, new String[] {"nolookups"});
final Message msg = new ParameterizedMessage("${foo}");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("${foo}", sb.toString(), "Expected the raw pattern string without lookup");
}
@Test
void testLookup() {
final Configuration config =
new DefaultConfigurationBuilder().addProperty("foo", "bar").build(true);
final MessagePatternConverter converter = MessagePatternConverter.newInstance(config, new String[] {"lookups"});
final Message msg = new ParameterizedMessage("${foo}");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("${foo}", sb.toString(), "Unexpected result");
}
@Test
void testPatternWithConfiguration() {
final Configuration config = new DefaultConfiguration();
final MessagePatternConverter converter = MessagePatternConverter.newInstance(config, null);
Message msg = new SimpleMessage("Hello!");
LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("Hello!", sb.toString(), "Unexpected result");
event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(null)
.build();
sb = new StringBuilder();
converter.format(event, sb);
assertEquals(0, sb.length(), "Incorrect length: " + sb);
msg = new SimpleMessage(null);
event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
sb = new StringBuilder();
converter.format(event, sb);
assertEquals(4, sb.length(), "Incorrect length: " + sb);
}
@Test
void testMapMessageFormatJson() {
final MessagePatternConverter converter = MessagePatternConverter.newInstance(null, new String[] {"json"});
final Message msg = new StringMapMessage().with("key", "val");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("{\"key\":\"val\"}", sb.toString(), "Unexpected result");
}
@Test
void testMapMessageFormatXml() {
final MessagePatternConverter converter = MessagePatternConverter.newInstance(null, new String[] {"xml"});
final Message msg = new StringMapMessage().with("key", "val");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("<Map>\n <Entry key=\"key\">val</Entry>\n</Map>", sb.toString(), "Unexpected result");
}
@Test
void testMapMessageFormatDefault() {
final MessagePatternConverter converter = MessagePatternConverter.newInstance(null, null);
final Message msg = new StringMapMessage().with("key", "val");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("key=\"val\"", sb.toString(), "Unexpected result");
}
@Test
void testStructuredDataFormatFull() {
final MessagePatternConverter converter = MessagePatternConverter.newInstance(null, new String[] {"FULL"});
final Message msg = new StructuredDataMessage("id", "message", "type").with("key", "val");
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("MyLogger") //
.setLevel(Level.DEBUG) //
.setMessage(msg)
.build();
final StringBuilder sb = new StringBuilder();
converter.format(event, sb);
assertEquals("type [id key=\"val\"] message", sb.toString(), "Unexpected result");
}
}
|
MessagePatternConverterTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 155470,
"end": 157313
}
|
class ____ extends ParserRuleContext {
public ConstantContext probability;
public TerminalNode SAMPLE() { return getToken(EsqlBaseParser.SAMPLE, 0); }
public ConstantContext constant() {
return getRuleContext(ConstantContext.class,0);
}
@SuppressWarnings("this-escape")
public SampleCommandContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_sampleCommand; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterSampleCommand(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitSampleCommand(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitSampleCommand(this);
else return visitor.visitChildren(this);
}
}
public final SampleCommandContext sampleCommand() throws RecognitionException {
SampleCommandContext _localctx = new SampleCommandContext(_ctx, getState());
enterRule(_localctx, 114, RULE_sampleCommand);
try {
enterOuterAlt(_localctx, 1);
{
setState(568);
match(SAMPLE);
setState(569);
((SampleCommandContext)_localctx).probability = constant();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
SampleCommandContext
|
java
|
square__moshi
|
moshi/src/test/java/com/squareup/moshi/internal/ClassJsonAdapterTest.java
|
{
"start": 3103,
"end": 3168
}
|
class ____ {
int d;
int a;
int c;
}
static
|
BaseAbcde
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
|
{
"start": 25385,
"end": 25818
}
|
class ____ implements Signer {
private static boolean s3SignerCalled = false;
@Override
public SdkHttpFullRequest sign(SdkHttpFullRequest request,
ExecutionAttributes executionAttributes) {
LOG.debug("Custom S3 signer called");
s3SignerCalled = true;
return request;
}
public static boolean isS3SignerCalled() {
return s3SignerCalled;
}
}
public static final
|
CustomS3Signer
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/RedissonLiveObjectServiceTest.java
|
{
"start": 60884,
"end": 65091
}
|
class ____ {
@RId(generator = UUIDGenerator.class)
private String id;
private List<SimpleObject> objects;
private SimpleObject so;
public String getId() {
return id;
}
public List<SimpleObject> getObjects() {
return objects;
}
public void setSo(SimpleObject so) {
this.so = so;
}
public SimpleObject getSo() {
return so;
}
}
@Test
public void testStoreInnerObject() {
RLiveObjectService service = redisson.getLiveObjectService();
ObjectWithList so = new ObjectWithList();
so = service.persist(so);
SimpleObject s = new SimpleObject();
s = service.persist(s);
so.setSo(s);
assertThat(s.getId()).isNotNull();
so.getObjects().add(s);
so = redisson.getLiveObjectService().detach(so);
assertThat(so.getSo().getId()).isEqualTo(s.getId());
assertThat(so.getObjects().get(0).getId()).isEqualTo(so.getSo().getId());
}
@Test
public void testFieldWithoutIdSetter() {
RLiveObjectService service = redisson.getLiveObjectService();
SimpleObject so = new SimpleObject();
so = service.persist(so);
so.setValue(10L);
so = redisson.getLiveObjectService().detach(so);
assertThat(so.getId()).isNotNull();
assertThat(so.getValue()).isEqualTo(10L);
so = redisson.getLiveObjectService().get(SimpleObject.class, so.getId());
assertThat(so.getId()).isNotNull();
assertThat(so.getValue()).isEqualTo(10L);
}
@Test
public void testCreateObjectsInRuntime() {
RLiveObjectService service = redisson.getLiveObjectService();
TestREntityWithMap so = new TestREntityWithMap();
so = service.persist(so);
so.getValue().put("1", "2");
so = redisson.getLiveObjectService().detach(so);
assertThat(so.getName()).isNotNull();
assertThat(so.getValue()).containsKey("1");
assertThat(so.getValue()).containsValue("2");
so = redisson.getLiveObjectService().get(TestREntityWithMap.class, so.getName());
assertThat(so.getName()).isNotNull();
assertThat(so.getValue()).containsKey("1");
assertThat(so.getValue()).containsValue("2");
}
@Test
public void testFieldAccessor() {
RLiveObjectService service = redisson.getLiveObjectService();
TestClass myObject = new TestClass();
myObject = service.persist(myObject);
myObject.setValue("123345");
assertEquals("123345", myObject.get("value"));
myObject.set("value", "9999");
assertEquals("9999", myObject.get("value"));
assertEquals("9999", myObject.getValue());
try {
myObject.get("555555");
} catch (Exception e) {
assertTrue(e instanceof NoSuchFieldException);
}
try {
myObject.set("555555", "999");
} catch (Exception e) {
assertTrue(e instanceof NoSuchFieldException);
}
}
@Test
public void testCollectionRewrite() {
Customer c = new Customer("123");
c = redisson.getLiveObjectService().merge(c);
Order o1 = new Order(c);
o1 = redisson.getLiveObjectService().merge(o1);
assertThat(o1.getId()).isEqualTo(1);
c.getOrders().add(o1);
Order o2 = new Order(c);
o2 = redisson.getLiveObjectService().merge(o2);
assertThat(o2.getId()).isEqualTo(2);
c.getOrders().add(o2);
assertThat(c.getOrders().size()).isEqualTo(2);
assertThat(redisson.getKeys().count()).isEqualTo(7);
List<Order> list = new ArrayList<>();
Order o3 = new Order(c);
o3 = redisson.getLiveObjectService().merge(o3);
assertThat(o3.getId()).isEqualTo(3);
list.add(o3);
c.setOrders(list);
assertThat(c.getOrders().size()).isEqualTo(1);
}
@REntity
public static
|
ObjectWithList
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/xml/DefaultNamespaceHandlerResolver.java
|
{
"start": 2408,
"end": 5525
}
|
class ____ / instance. */
private volatile @Nullable Map<String, Object> handlerMappings;
/**
* Create a new {@code DefaultNamespaceHandlerResolver} using the
* default mapping file location.
* <p>This constructor will result in the thread context ClassLoader being used
* to load resources.
* @see #DEFAULT_HANDLER_MAPPINGS_LOCATION
*/
public DefaultNamespaceHandlerResolver() {
this(null, DEFAULT_HANDLER_MAPPINGS_LOCATION);
}
/**
* Create a new {@code DefaultNamespaceHandlerResolver} using the
* default mapping file location.
* @param classLoader the {@link ClassLoader} instance used to load mapping resources
* (may be {@code null}, in which case the thread context ClassLoader will be used)
* @see #DEFAULT_HANDLER_MAPPINGS_LOCATION
*/
public DefaultNamespaceHandlerResolver(@Nullable ClassLoader classLoader) {
this(classLoader, DEFAULT_HANDLER_MAPPINGS_LOCATION);
}
/**
* Create a new {@code DefaultNamespaceHandlerResolver} using the
* supplied mapping file location.
* @param classLoader the {@link ClassLoader} instance used to load mapping resources
* may be {@code null}, in which case the thread context ClassLoader will be used
* @param handlerMappingsLocation the mapping file location
*/
public DefaultNamespaceHandlerResolver(@Nullable ClassLoader classLoader, String handlerMappingsLocation) {
Assert.notNull(handlerMappingsLocation, "Handler mappings location must not be null");
this.classLoader = (classLoader != null ? classLoader : ClassUtils.getDefaultClassLoader());
this.handlerMappingsLocation = handlerMappingsLocation;
}
/**
* Locate the {@link NamespaceHandler} for the supplied namespace URI
* from the configured mappings.
* @param namespaceUri the relevant namespace URI
* @return the located {@link NamespaceHandler}, or {@code null} if none found
*/
@Override
public @Nullable NamespaceHandler resolve(String namespaceUri) {
Map<String, Object> handlerMappings = getHandlerMappings();
Object handlerOrClassName = handlerMappings.get(namespaceUri);
if (handlerOrClassName == null) {
return null;
}
else if (handlerOrClassName instanceof NamespaceHandler namespaceHandler) {
return namespaceHandler;
}
else {
String className = (String) handlerOrClassName;
try {
Class<?> handlerClass = ClassUtils.forName(className, this.classLoader);
if (!NamespaceHandler.class.isAssignableFrom(handlerClass)) {
throw new FatalBeanException("Class [" + className + "] for namespace [" + namespaceUri +
"] does not implement the [" + NamespaceHandler.class.getName() + "] interface");
}
NamespaceHandler namespaceHandler = (NamespaceHandler) BeanUtils.instantiateClass(handlerClass);
namespaceHandler.init();
handlerMappings.put(namespaceUri, namespaceHandler);
return namespaceHandler;
}
catch (ClassNotFoundException ex) {
throw new FatalBeanException("Could not find NamespaceHandler class [" + className +
"] for namespace [" + namespaceUri + "]", ex);
}
catch (LinkageError err) {
throw new FatalBeanException("Unresolvable
|
name
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/argumentselectiondefects/ArgumentSelectionDefectCheckerTest.java
|
{
"start": 2448,
"end": 3028
}
|
class ____ {
abstract void target(Object first, Object second);
void test(Object first, Object second) {
// BUG: Diagnostic contains: target(first, second)
// target(/* first= */second, /* second= */first)
target(second, first);
}
}
""")
.doTest();
}
@Test
public void argumentSelectionDefectChecker_findsSwap_withSwappedMatchingPairWithMethod() {
testHelper
.addSourceLines(
"Test.java",
"""
abstract
|
Test
|
java
|
alibaba__nacos
|
cmdb/src/main/java/com/alibaba/nacos/cmdb/memory/CmdbProvider.java
|
{
"start": 5517,
"end": 7037
}
|
class ____ implements Runnable {
@Override
public void run() {
Loggers.MAIN.debug("LABEL-TASK {}", "start dump.");
if (cmdbService == null) {
return;
}
try {
Map<String, Label> tmpLabelMap = new HashMap<>(16);
Set<String> labelNames = cmdbService.getLabelNames();
if (labelNames == null || labelNames.isEmpty()) {
Loggers.MAIN.warn("CMDB-LABEL-TASK {}", "load label names failed!");
} else {
for (String labelName : labelNames) {
// If get null label, it's still ok. We will try it later when we meet this label:
tmpLabelMap.put(labelName, cmdbService.getLabel(labelName));
}
if (Loggers.MAIN.isDebugEnabled()) {
Loggers.MAIN.debug("LABEL-TASK {}", "got label map:" + JacksonUtils.toJson(tmpLabelMap));
}
labelMap = tmpLabelMap;
}
} catch (Exception e) {
Loggers.MAIN.error("CMDB-LABEL-TASK {}", "dump failed!", e);
} finally {
CmdbExecutor.scheduleCmdbTask(this, switches.getLabelTaskInterval(), TimeUnit.SECONDS);
}
}
}
public
|
CmdbLabelTask
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/load/MultiTransformationTest.java
|
{
"start": 1150,
"end": 5418
}
|
class ____ {
@Rule public final KeyTester keyTester = new KeyTester();
@Mock private Transformation<Object> first;
@Mock private Transformation<Object> second;
@Mock private Resource<Object> initial;
@Mock private Resource<Object> firstTransformed;
@Mock private Resource<Object> secondTransformed;
private Application context;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
context = ApplicationProvider.getApplicationContext();
doAnswer(new Util.WriteDigest("first"))
.when(first)
.updateDiskCacheKey(any(MessageDigest.class));
doAnswer(new Util.WriteDigest("second"))
.when(second)
.updateDiskCacheKey(any(MessageDigest.class));
}
@Test
public void testAppliesTransformationsInOrder() {
final int width = 584;
final int height = 768;
MultiTransformation<Object> transformation = new MultiTransformation<>(first, second);
when(first.transform(anyContext(), eq(initial), eq(width), eq(height)))
.thenReturn(firstTransformed);
when(second.transform(anyContext(), eq(firstTransformed), eq(width), eq(height)))
.thenReturn(secondTransformed);
assertEquals(secondTransformed, transformation.transform(context, initial, width, height));
}
@Test
public void testInitialResourceIsNotRecycled() {
when(first.transform(anyContext(), anyResource(), anyInt(), anyInt()))
.thenReturn(firstTransformed);
MultiTransformation<Object> transformation = new MultiTransformation<>(first);
transformation.transform(context, initial, 123, 456);
verify(initial, never()).recycle();
}
@Test
public void testInitialResourceIsNotRecycledEvenIfReturnedByMultipleTransformations() {
when(first.transform(anyContext(), anyResource(), anyInt(), anyInt())).thenReturn(initial);
when(second.transform(anyContext(), anyResource(), anyInt(), anyInt())).thenReturn(initial);
MultiTransformation<Object> transformation = new MultiTransformation<>(first, second);
transformation.transform(context, initial, 1111, 2222);
verify(initial, never()).recycle();
}
@Test
public void
testInitialResourceIsNotRecycledIfReturnedByOneTransformationButNotByALaterTransformation() {
when(first.transform(anyContext(), anyResource(), anyInt(), anyInt())).thenReturn(initial);
when(second.transform(anyContext(), anyResource(), anyInt(), anyInt()))
.thenReturn(mockResource());
MultiTransformation<Object> transformation = new MultiTransformation<>(first, second);
transformation.transform(context, initial, 1, 2);
verify(initial, never()).recycle();
}
@Test
public void testFinalResourceIsNotRecycled() {
when(first.transform(anyContext(), anyResource(), anyInt(), anyInt()))
.thenReturn(firstTransformed);
MultiTransformation<Object> transformation = new MultiTransformation<>(first);
transformation.transform(context, mockResource(), 111, 222);
verify(firstTransformed, never()).recycle();
}
@Test
public void testIntermediateResourcesAreRecycled() {
when(first.transform(anyContext(), anyResource(), anyInt(), anyInt()))
.thenReturn(firstTransformed);
when(second.transform(anyContext(), anyResource(), anyInt(), anyInt()))
.thenReturn(secondTransformed);
MultiTransformation<Object> transformation = new MultiTransformation<>(first, second);
transformation.transform(context, mockResource(), 233, 454);
verify(firstTransformed).recycle();
}
@Test
public void testEquals() throws NoSuchAlgorithmException {
keyTester
.addEquivalenceGroup(new MultiTransformation<>(first), new MultiTransformation<>(first))
.addEquivalenceGroup(new MultiTransformation<>(second))
.addEquivalenceGroup(new MultiTransformation<>(first, second))
.addEquivalenceGroup(new MultiTransformation<>(second, first))
.addRegressionTest(
new MultiTransformation<>(first),
"a7937b64b8caa58f03721bb6bacf5c78cb235febe0e70b1b84cd99541461a08e")
.addRegressionTest(
new MultiTransformation<>(first, second),
"da83f63e1a473003712c18f5afc5a79044221943d1083c7c5a7ac7236d85e8d2")
.test();
}
}
|
MultiTransformationTest
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/validation/Constraint.java
|
{
"start": 1261,
"end": 1413
}
|
class ____ has a default constructor.
*
* @since 2.1
*/
@Documented
@Target(ElementType.ANNOTATION_TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @
|
that
|
java
|
apache__dubbo
|
dubbo-common/src/test/java/org/apache/dubbo/common/extension/ExtensionLoaderTest.java
|
{
"start": 36250,
"end": 36920
}
|
class ____ implements LoadingStrategy {
public DubboExternalLoadingStrategyTest(boolean overridden) {
this.overridden = overridden;
}
private boolean overridden;
@Override
public String directory() {
return "META-INF/dubbo/external/";
}
@Override
public boolean overridden() {
return this.overridden;
}
@Override
public int getPriority() {
return MAX_PRIORITY + 1;
}
}
/**
* The internal {@link LoadingStrategy}, which can set if it support overridden
*/
private static
|
DubboExternalLoadingStrategyTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/MappedSuperclassTest.java
|
{
"start": 3064,
"end": 3519
}
|
class ____ extends Account {
private BigDecimal creditLimit;
//Getters and setters are omitted for brevity
//end::entity-inheritance-mapped-superclass-example[]
public BigDecimal getCreditLimit() {
return creditLimit;
}
public void setCreditLimit(BigDecimal creditLimit) {
this.creditLimit = creditLimit;
}
//tag::entity-inheritance-mapped-superclass-example[]
}
//end::entity-inheritance-mapped-superclass-example[]
}
|
CreditAccount
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/UndertowEndpointBuilderFactory.java
|
{
"start": 71846,
"end": 72173
}
|
class ____ extends AbstractEndpointBuilder implements UndertowEndpointBuilder, AdvancedUndertowEndpointBuilder {
public UndertowEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new UndertowEndpointBuilderImpl(path);
}
}
|
UndertowEndpointBuilderImpl
|
java
|
apache__rocketmq
|
common/src/test/java/org/apache/rocketmq/common/producer/RecallMessageHandleTest.java
|
{
"start": 1088,
"end": 3092
}
|
class ____ {
@Test
public void testHandleInvalid() {
Assert.assertThrows(DecoderException.class, () -> {
RecallMessageHandle.decodeHandle("");
});
Assert.assertThrows(DecoderException.class, () -> {
RecallMessageHandle.decodeHandle(null);
});
Assert.assertThrows(DecoderException.class, () -> {
String invalidHandle = Base64.getUrlEncoder().encodeToString("v1 a b c".getBytes(StandardCharsets.UTF_8));
RecallMessageHandle.decodeHandle(invalidHandle);
});
Assert.assertThrows(DecoderException.class, () -> {
String invalidHandle = Base64.getUrlEncoder().encodeToString("v2 a b c d".getBytes(StandardCharsets.UTF_8));
RecallMessageHandle.decodeHandle(invalidHandle);
});
Assert.assertThrows(DecoderException.class, () -> {
String invalidHandle = "v1 a b c d";
RecallMessageHandle.decodeHandle(invalidHandle);
});
}
@Test
public void testEncodeAndDecodeV1() throws DecoderException {
String topic = "topic";
String brokerName = "broker-0";
String timestampStr = String.valueOf(System.currentTimeMillis());
String messageId = MessageClientIDSetter.createUniqID();
String handle = RecallMessageHandle.HandleV1.buildHandle(topic, brokerName, timestampStr, messageId);
RecallMessageHandle handleEntity = RecallMessageHandle.decodeHandle(handle);
Assert.assertTrue(handleEntity instanceof RecallMessageHandle.HandleV1);
RecallMessageHandle.HandleV1 handleV1 = (RecallMessageHandle.HandleV1) handleEntity;
Assert.assertEquals(handleV1.getVersion(), "v1");
Assert.assertEquals(handleV1.getTopic(), topic);
Assert.assertEquals(handleV1.getBrokerName(), brokerName);
Assert.assertEquals(handleV1.getTimestampStr(), timestampStr);
Assert.assertEquals(handleV1.getMessageId(), messageId);
}
}
|
RecallMessageHandleTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
|
{
"start": 58155,
"end": 58520
}
|
class ____ extends FinalTransition {
public AppKilledTransition() {
super(RMAppState.KILLED);
}
@Override
public void transition(RMAppImpl app, RMAppEvent event) {
app.diagnostics.append(event.getDiagnosticMsg());
super.transition(app, event);
RMAppImpl.auditLogKillEvent(event);
};
}
private static
|
AppKilledTransition
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/core/v2/event/service/ServiceEvent.java
|
{
"start": 1213,
"end": 2060
}
|
class ____ extends ServiceEvent {
private static final long serialVersionUID = 2123694271992630822L;
private final String changedType;
public ServiceChangedEvent(Service service, String changedType) {
this(service, changedType, false);
}
public ServiceChangedEvent(Service service, String changedType, boolean incrementRevision) {
super(service);
this.changedType = changedType;
service.renewUpdateTime();
if (incrementRevision) {
service.incrementRevision();
}
}
public String getChangedType() {
return changedType;
}
}
/**
* Service is subscribed by one client event.
*/
public static
|
ServiceChangedEvent
|
java
|
spring-projects__spring-boot
|
module/spring-boot-flyway/src/test/java/org/springframework/boot/flyway/autoconfigure/ResourceProviderCustomizerBeanRegistrationAotProcessorTests.java
|
{
"start": 2026,
"end": 4398
}
|
class ____ {
private final DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
private final ResourceProviderCustomizerBeanRegistrationAotProcessor processor = new ResourceProviderCustomizerBeanRegistrationAotProcessor();
@Test
void beanRegistrationAotProcessorIsRegistered() {
assertThat(AotServices.factories().load(BeanRegistrationAotProcessor.class))
.anyMatch(ResourceProviderCustomizerBeanRegistrationAotProcessor.class::isInstance);
}
@Test
void shouldIgnoreNonResourceProviderCustomizerBeans() {
RootBeanDefinition beanDefinition = new RootBeanDefinition(String.class);
this.beanFactory.registerBeanDefinition("test", beanDefinition);
BeanRegistrationAotContribution contribution = this.processor
.processAheadOfTime(RegisteredBean.of(this.beanFactory, "test"));
assertThat(contribution).isNull();
}
@Test
@CompileWithForkedClassLoader
void shouldReplaceResourceProviderCustomizer() {
compile(createContext(ResourceProviderCustomizerConfiguration.class), (freshContext) -> {
freshContext.refresh();
ResourceProviderCustomizer bean = freshContext.getBean(ResourceProviderCustomizer.class);
assertThat(bean).isInstanceOf(NativeImageResourceProviderCustomizer.class);
});
}
private GenericApplicationContext createContext(Class<?>... types) {
GenericApplicationContext context = new AnnotationConfigApplicationContext();
Arrays.stream(types).forEach((type) -> context.registerBean(type));
return context;
}
@SuppressWarnings("unchecked")
private void compile(GenericApplicationContext context, Consumer<GenericApplicationContext> freshContext) {
TestGenerationContext generationContext = new TestGenerationContext(TestTarget.class);
ClassName className = new ApplicationContextAotGenerator().processAheadOfTime(context, generationContext);
generationContext.writeGeneratedContent();
TestCompiler.forSystem().with(generationContext).compile((compiled) -> {
GenericApplicationContext freshApplicationContext = new GenericApplicationContext();
ApplicationContextInitializer<GenericApplicationContext> initializer = compiled
.getInstance(ApplicationContextInitializer.class, className.toString());
initializer.initialize(freshApplicationContext);
freshContext.accept(freshApplicationContext);
});
}
static
|
ResourceProviderCustomizerBeanRegistrationAotProcessorTests
|
java
|
apache__flink
|
flink-kubernetes/src/main/java/org/apache/flink/kubernetes/kubeclient/resources/KubernetesLeaderElector.java
|
{
"start": 8715,
"end": 8838
}
|
class ____ {
public abstract void isLeader();
public abstract void notLeader();
}
}
|
LeaderCallbackHandler
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnMissingBeanTests.java
|
{
"start": 34736,
"end": 34961
}
|
class ____ {
private final String value;
UnrelatedExampleBean(String value) {
this.value = value;
}
@Override
public String toString() {
return this.value;
}
}
@TestAnnotation
static
|
UnrelatedExampleBean
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java
|
{
"start": 130745,
"end": 130825
}
|
class ____ extends RuntimeException {
}
}
@Controller
static
|
NotFoundException
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityNameMismatch.java
|
{
"start": 1612,
"end": 2478
}
|
class ____ {
private static final Schema FIXED_4_ANOTHER_NAME = Schema.createFixed("AnotherName", null, null, 4);
public static Stream<Arguments> data() {
return Stream.of(Arguments.of(ENUM1_AB_SCHEMA, ENUM2_AB_SCHEMA, "expected: Enum2", "/name"),
Arguments.of(EMPTY_RECORD2, EMPTY_RECORD1, "expected: Record1", "/name"),
Arguments.of(FIXED_4_BYTES, FIXED_4_ANOTHER_NAME, "expected: AnotherName", "/name"),
Arguments.of(A_DINT_B_DENUM_1_RECORD1, A_DINT_B_DENUM_2_RECORD1, "expected: Enum2", "/fields/1/type/name"));
}
@ParameterizedTest
@MethodSource("data")
public void testNameMismatchSchemas(Schema reader, Schema writer, String details, String location) throws Exception {
validateIncompatibleSchemas(reader, writer, SchemaIncompatibilityType.NAME_MISMATCH, details, location);
}
}
|
TestSchemaCompatibilityNameMismatch
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/ContextCustomizerFactories.java
|
{
"start": 1057,
"end": 1699
}
|
class ____ configure which {@link ContextCustomizerFactory} implementations
* should be registered with the <em>Spring TestContext Framework</em>.
*
* <p>{@code @ContextCustomizerFactories} is used to register factories for a
* particular test class, its subclasses, and its nested classes. If you wish to
* register a factory globally, you should register it via the automatic discovery
* mechanism described in {@link ContextCustomizerFactory}.
*
* <p>This annotation may be used as a <em>meta-annotation</em> to create custom
* <em>composed annotations</em>. In addition, this annotation will be inherited
* from an enclosing test
|
to
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/ScopeModelAccessor.java
|
{
"start": 892,
"end": 949
}
|
interface ____ methods to get scope model.
*/
public
|
default
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/JoinTypeUtil.java
|
{
"start": 1026,
"end": 1739
}
|
class ____ {
/** Converts {@link JoinRelType} to {@link FlinkJoinType}. */
public static FlinkJoinType getFlinkJoinType(JoinRelType joinRelType) {
switch (joinRelType) {
case INNER:
return FlinkJoinType.INNER;
case LEFT:
return FlinkJoinType.LEFT;
case RIGHT:
return FlinkJoinType.RIGHT;
case FULL:
return FlinkJoinType.FULL;
case SEMI:
return FlinkJoinType.SEMI;
case ANTI:
return FlinkJoinType.ANTI;
default:
throw new IllegalArgumentException("invalid: " + joinRelType);
}
}
}
|
JoinTypeUtil
|
java
|
elastic__elasticsearch
|
x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java
|
{
"start": 470,
"end": 9111
}
|
class ____ implements ToXContentObject, Comparable<TopNFunction> {
private final String id;
private int rank;
private final int frameType;
private final boolean inline;
private final int addressOrLine;
private final String functionName;
private final String sourceFilename;
private final int sourceLine;
private final String exeFilename;
private long selfCount;
private long totalCount;
private double selfAnnualCO2Tons;
private double totalAnnualCO2Tons;
private double selfAnnualCostsUSD;
private double totalAnnualCostsUSD;
private SubGroup subGroups;
TopNFunction(
String id,
int frameType,
boolean inline,
int addressOrLine,
String functionName,
String sourceFilename,
int sourceLine,
String exeFilename
) {
this(
id,
0,
frameType,
inline,
addressOrLine,
functionName,
sourceFilename,
sourceLine,
exeFilename,
0,
0,
0.0d,
0.0d,
0.0d,
0.0d,
null
);
}
TopNFunction(
String id,
int rank,
int frameType,
boolean inline,
int addressOrLine,
String functionName,
String sourceFilename,
int sourceLine,
String exeFilename,
long selfCount,
long totalCount,
double selfAnnualCO2Tons,
double totalAnnualCO2Tons,
double selfAnnualCostsUSD,
double totalAnnualCostsUSD,
SubGroup subGroups
) {
this.id = id;
this.rank = rank;
this.frameType = frameType;
this.inline = inline;
this.addressOrLine = addressOrLine;
this.functionName = functionName;
this.sourceFilename = sourceFilename;
this.sourceLine = sourceLine;
this.exeFilename = exeFilename;
this.selfCount = selfCount;
this.totalCount = totalCount;
this.selfAnnualCO2Tons = selfAnnualCO2Tons;
this.totalAnnualCO2Tons = totalAnnualCO2Tons;
this.selfAnnualCostsUSD = selfAnnualCostsUSD;
this.totalAnnualCostsUSD = totalAnnualCostsUSD;
this.subGroups = subGroups;
}
public String getId() {
return this.id;
}
public void setRank(int rank) {
this.rank = rank;
}
public long getSelfCount() {
return selfCount;
}
public void addSelfCount(long selfCount) {
this.selfCount += selfCount;
}
public long getTotalCount() {
return totalCount;
}
public void addTotalCount(long totalCount) {
this.totalCount += totalCount;
}
public double getSelfAnnualCO2Tons() {
return selfAnnualCO2Tons;
}
public void addSelfAnnualCO2Tons(double co2Tons) {
this.selfAnnualCO2Tons += co2Tons;
}
public void addTotalAnnualCO2Tons(double co2Tons) {
this.totalAnnualCO2Tons += co2Tons;
}
public double getSelfAnnualCostsUSD() {
return selfAnnualCostsUSD;
}
public void addSelfAnnualCostsUSD(double costs) {
this.selfAnnualCostsUSD += costs;
}
public void addTotalAnnualCostsUSD(double costs) {
this.totalAnnualCostsUSD += costs;
}
public void addSubGroups(SubGroup subGroups) {
if (this.subGroups == null) {
this.subGroups = subGroups.copy();
} else {
this.subGroups.merge(subGroups);
}
}
public TopNFunction copy() {
return new TopNFunction(
id,
rank,
frameType,
inline,
addressOrLine,
functionName,
sourceFilename,
sourceLine,
exeFilename,
selfCount,
totalCount,
selfAnnualCO2Tons,
totalAnnualCO2Tons,
selfAnnualCostsUSD,
totalAnnualCostsUSD,
subGroups.copy()
);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("id", this.id);
builder.field("rank", this.rank);
builder.startObject("frame");
builder.field("frame_type", this.frameType);
builder.field("inline", this.inline);
builder.field("address_or_line", this.addressOrLine);
builder.field("function_name", this.functionName);
builder.field("file_name", this.sourceFilename);
builder.field("line_number", this.sourceLine);
builder.field("executable_file_name", this.exeFilename);
builder.endObject();
if (subGroups != null) {
builder.startObject("sub_groups");
subGroups.toXContent(builder, params);
builder.endObject();
}
builder.field("self_count", this.selfCount);
builder.field("total_count", this.totalCount);
builder.field("self_annual_co2_tons").rawValue(NumberUtils.doubleToString(selfAnnualCO2Tons));
builder.field("total_annual_co2_tons").rawValue(NumberUtils.doubleToString(totalAnnualCO2Tons));
builder.field("self_annual_costs_usd").rawValue(NumberUtils.doubleToString(selfAnnualCostsUSD));
builder.field("total_annual_costs_usd").rawValue(NumberUtils.doubleToString(totalAnnualCostsUSD));
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TopNFunction that = (TopNFunction) o;
return Objects.equals(id, that.id)
&& Objects.equals(rank, that.rank)
&& Objects.equals(frameType, that.frameType)
&& Objects.equals(inline, that.inline)
&& Objects.equals(addressOrLine, that.addressOrLine)
&& Objects.equals(functionName, that.functionName)
&& Objects.equals(sourceFilename, that.sourceFilename)
&& Objects.equals(sourceLine, that.sourceLine)
&& Objects.equals(exeFilename, that.exeFilename)
&& Objects.equals(selfCount, that.selfCount)
&& Objects.equals(totalCount, that.totalCount)
&& Objects.equals(selfAnnualCO2Tons, that.selfAnnualCO2Tons)
&& Objects.equals(totalAnnualCO2Tons, that.totalAnnualCO2Tons)
&& Objects.equals(selfAnnualCostsUSD, that.selfAnnualCostsUSD)
&& Objects.equals(totalAnnualCostsUSD, that.totalAnnualCostsUSD)
&& Objects.equals(subGroups, that.subGroups);
}
@Override
public int hashCode() {
return Objects.hash(
id,
rank,
frameType,
inline,
addressOrLine,
functionName,
sourceFilename,
sourceLine,
exeFilename,
selfCount,
totalCount,
selfAnnualCO2Tons,
totalAnnualCO2Tons,
selfAnnualCostsUSD,
totalAnnualCostsUSD,
subGroups
);
}
@Override
public String toString() {
return "TopNFunction{"
+ "id='"
+ id
+ '\''
+ ", rank="
+ rank
+ ", frameType="
+ frameType
+ ", inline="
+ inline
+ ", addressOrLine="
+ addressOrLine
+ ", functionName='"
+ functionName
+ '\''
+ ", sourceFilename='"
+ sourceFilename
+ '\''
+ ", sourceLine="
+ sourceLine
+ ", exeFilename='"
+ exeFilename
+ '\''
+ ", selfCount="
+ selfCount
+ ", totalCount="
+ totalCount
+ ", selfAnnualCO2Tons="
+ selfAnnualCO2Tons
+ ", totalAnnualCO2Tons="
+ totalAnnualCO2Tons
+ ", selfAnnualCostsUSD="
+ selfAnnualCostsUSD
+ ", totalAnnualCostsUSD="
+ totalAnnualCostsUSD
+ ", subGroups="
+ subGroups
+ '}';
}
@Override
public int compareTo(TopNFunction that) {
if (this.selfCount > that.selfCount) {
return 1;
}
if (this.selfCount < that.selfCount) {
return -1;
}
return this.id.compareTo(that.id);
}
}
|
TopNFunction
|
java
|
apache__kafka
|
test-common/test-common-runtime/src/main/java/org/apache/kafka/common/test/junit/ClusterTestExtensions.java
|
{
"start": 3678,
"end": 4446
}
|
class ____ {
* @ClusterTest(brokers = 1, controllers = 1, types = {Type.KRAFT, Type.CO_KRAFT})
* void someTest(ClusterInstance cluster) {
* assertTrue(condition)
* }
* }
* </pre>
*
* will generate two invocations of "someTest" (since two cluster types were specified). For each invocation, the test class
* SomeIntegrationTest will be instantiated, lifecycle methods (before/after) will be run, and "someTest" will be invoked.
*
* A special system property "kafka.cluster.test.repeat" can be used to cause repeated invocation of the tests.
*
* For example:
*
* <pre>
* ./gradlew -Pkafka.cluster.test.repeat=3 :core:test
* </pre>
*
* will cause all ClusterTest-s in the :core module to be invoked three times.
*/
public
|
SomeIntegrationTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/id/entities/Hotel.java
|
{
"start": 305,
"end": 563
}
|
class ____ {
@Id
private Long id;
private String name;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
Hotel
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FileWatchEndpointBuilderFactory.java
|
{
"start": 1474,
"end": 1604
}
|
interface ____ {
/**
* Builder for endpoint for the File Watch component.
*/
public
|
FileWatchEndpointBuilderFactory
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-gcp/src/main/java/org/apache/hadoop/fs/gs/GoogleCloudStorageClientWriteChannel.java
|
{
"start": 1432,
"end": 4292
}
|
class ____ implements WritableByteChannel {
private static final Logger LOG =
LoggerFactory.getLogger(GoogleCloudStorageClientWriteChannel.class);
private final StorageResourceId resourceId;
private WritableByteChannel writableByteChannel;
private GoogleCloudStorageItemInfo completedItemInfo = null;
GoogleCloudStorageClientWriteChannel(
final Storage storage,
final StorageResourceId resourceId,
final CreateFileOptions createOptions) throws IOException {
this.resourceId = resourceId;
BlobWriteSession blobWriteSession = getBlobWriteSession(storage, resourceId, createOptions);
try {
this.writableByteChannel = blobWriteSession.open();
} catch (StorageException e) {
throw new IOException(e);
}
}
private static BlobInfo getBlobInfo(final StorageResourceId resourceId,
final CreateFileOptions createOptions) {
BlobInfo blobInfo = BlobInfo.newBuilder(
BlobId.of(resourceId.getBucketName(), resourceId.getObjectName(),
resourceId.getGenerationId())).setContentType(createOptions.getContentType())
// .setMetadata(encodeMetadata(createOptions.getMetadata())) // TODO:
.build();
return blobInfo;
}
private static BlobWriteSession getBlobWriteSession(final Storage storage,
final StorageResourceId resourceId, final CreateFileOptions createOptions) {
return storage.blobWriteSession(getBlobInfo(resourceId, createOptions),
generateWriteOptions(createOptions));
}
private static BlobWriteOption[] generateWriteOptions(final CreateFileOptions createOptions) {
List<BlobWriteOption> blobWriteOptions = new ArrayList<>();
blobWriteOptions.add(BlobWriteOption.disableGzipContent());
blobWriteOptions.add(BlobWriteOption.generationMatch());
//TODO: Enable KMS and checksum
return blobWriteOptions.toArray(new BlobWriteOption[blobWriteOptions.size()]);
}
@Override
public boolean isOpen() {
return writableByteChannel != null && writableByteChannel.isOpen();
}
@Override
public void close() throws IOException {
try {
if (!isOpen()) {
return;
}
writableByteChannel.close();
} catch (Exception e) {
throw new IOException(
String.format("Upload failed for '%s'. reason=%s", resourceId, e.getMessage()), e);
} finally {
writableByteChannel = null;
}
}
private int writeInternal(final ByteBuffer byteBuffer) throws IOException {
int bytesWritten = writableByteChannel.write(byteBuffer);
LOG.trace("{} bytes were written out of provided buffer of capacity {}", bytesWritten,
byteBuffer.limit());
return bytesWritten;
}
@Override
public int write(final ByteBuffer src) throws IOException {
return writeInternal(src);
}
}
|
GoogleCloudStorageClientWriteChannel
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/pool/vendor/SybaseExceptionSorter.java
|
{
"start": 841,
"end": 1713
}
|
class ____ implements ExceptionSorter, Serializable {
private static final long serialVersionUID = 2742592563671255116L;
public SybaseExceptionSorter() {
this.configFromProperties(System.getProperties());
}
public boolean isExceptionFatal(SQLException e) {
if (e instanceof SQLRecoverableException) {
return true;
}
boolean result = false;
String errorText = e.getMessage();
if (errorText == null) {
return false;
}
errorText = errorText.toUpperCase();
if ((errorText.contains("JZ0C0")) || // ERR_CONNECTION_DEAD
(errorText.contains("JZ0C1")) // ERR_IOE_KILLED_CONNECTION
) {
result = true;
}
return result;
}
public void configFromProperties(Properties properties) {
}
}
|
SybaseExceptionSorter
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/impl/FederationStateStoreBaseTest.java
|
{
"start": 6980,
"end": 51277
}
|
class ____ {
private static final MonotonicClock CLOCK = new MonotonicClock();
private FederationStateStore stateStore;
private static final int NUM_APPS_10 = 10;
private static final int NUM_APPS_20 = 20;
protected abstract FederationStateStore createStateStore();
protected abstract void checkRouterMasterKey(DelegationKey delegationKey,
RouterMasterKey routerMasterKey) throws YarnException, IOException, SQLException;
protected abstract void checkRouterStoreToken(RMDelegationTokenIdentifier identifier,
RouterStoreToken token) throws YarnException, IOException, SQLException;
private Configuration conf;
@BeforeEach
public void before() throws IOException, YarnException {
stateStore = createStateStore();
stateStore.init(conf);
}
@AfterEach
public void after() throws Exception {
testDeleteStateStore();
testDeletePolicyStore();
stateStore.close();
}
// Test FederationMembershipStateStore
@Test
public void testRegisterSubCluster() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
SubClusterInfo subClusterInfo = createSubClusterInfo(subClusterId);
long previousTimeStamp =
Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
SubClusterRegisterResponse result = stateStore.registerSubCluster(
SubClusterRegisterRequest.newInstance(subClusterInfo));
long currentTimeStamp =
Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
assertNotNull(result);
assertEquals(subClusterInfo, querySubClusterInfo(subClusterId));
// The saved heartbeat is between the old one and the current timestamp
assertTrue(querySubClusterInfo(subClusterId)
.getLastHeartBeat() <= currentTimeStamp);
assertTrue(querySubClusterInfo(subClusterId)
.getLastHeartBeat() >= previousTimeStamp);
}
@Test
public void testDeregisterSubCluster() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
registerSubCluster(createSubClusterInfo(subClusterId));
SubClusterDeregisterRequest deregisterRequest = SubClusterDeregisterRequest
.newInstance(subClusterId, SubClusterState.SC_UNREGISTERED);
stateStore.deregisterSubCluster(deregisterRequest);
assertEquals(SubClusterState.SC_UNREGISTERED,
querySubClusterInfo(subClusterId).getState());
}
@Test
public void testDeregisterSubClusterUnknownSubCluster() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
SubClusterDeregisterRequest deregisterRequest = SubClusterDeregisterRequest
.newInstance(subClusterId, SubClusterState.SC_UNREGISTERED);
LambdaTestUtils.intercept(YarnException.class,
"SubCluster SC not found", () -> stateStore.deregisterSubCluster(deregisterRequest));
}
@Test
public void testGetSubClusterInfo() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
SubClusterInfo subClusterInfo = createSubClusterInfo(subClusterId);
registerSubCluster(subClusterInfo);
GetSubClusterInfoRequest request =
GetSubClusterInfoRequest.newInstance(subClusterId);
assertEquals(subClusterInfo,
stateStore.getSubCluster(request).getSubClusterInfo());
}
@Test
public void testGetSubClusterInfoUnknownSubCluster() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
GetSubClusterInfoRequest request =
GetSubClusterInfoRequest.newInstance(subClusterId);
GetSubClusterInfoResponse response = stateStore.getSubCluster(request);
assertNull(response);
}
@Test
public void testGetAllSubClustersInfo() throws Exception {
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
SubClusterInfo subClusterInfo1 = createSubClusterInfo(subClusterId1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
SubClusterInfo subClusterInfo2 = createSubClusterInfo(subClusterId2);
stateStore.registerSubCluster(
SubClusterRegisterRequest.newInstance(subClusterInfo1));
stateStore.registerSubCluster(
SubClusterRegisterRequest.newInstance(subClusterInfo2));
stateStore.subClusterHeartbeat(SubClusterHeartbeatRequest
.newInstance(subClusterId1, SubClusterState.SC_RUNNING, "capability"));
stateStore.subClusterHeartbeat(SubClusterHeartbeatRequest.newInstance(
subClusterId2, SubClusterState.SC_UNHEALTHY, "capability"));
List<SubClusterInfo> subClustersActive =
stateStore.getSubClusters(GetSubClustersInfoRequest.newInstance(true))
.getSubClusters();
List<SubClusterInfo> subClustersAll =
stateStore.getSubClusters(GetSubClustersInfoRequest.newInstance(false))
.getSubClusters();
// SC1 is the only active
assertEquals(1, subClustersActive.size());
SubClusterInfo sc1 = subClustersActive.get(0);
assertEquals(subClusterId1, sc1.getSubClusterId());
// SC1 and SC2 are the SubCluster present into the StateStore
assertEquals(2, subClustersAll.size());
assertTrue(subClustersAll.contains(sc1));
subClustersAll.remove(sc1);
SubClusterInfo sc2 = subClustersAll.get(0);
assertEquals(subClusterId2, sc2.getSubClusterId());
}
@Test
public void testSubClusterHeartbeat() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
registerSubCluster(createSubClusterInfo(subClusterId));
long previousHeartBeat =
querySubClusterInfo(subClusterId).getLastHeartBeat();
SubClusterHeartbeatRequest heartbeatRequest = SubClusterHeartbeatRequest
.newInstance(subClusterId, SubClusterState.SC_RUNNING, "capability");
stateStore.subClusterHeartbeat(heartbeatRequest);
long currentTimeStamp =
Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
assertEquals(SubClusterState.SC_RUNNING,
querySubClusterInfo(subClusterId).getState());
// The saved heartbeat is between the old one and the current timestamp
assertTrue(querySubClusterInfo(subClusterId)
.getLastHeartBeat() <= currentTimeStamp);
assertTrue(querySubClusterInfo(subClusterId)
.getLastHeartBeat() >= previousHeartBeat);
}
@Test
public void testSubClusterHeartbeatUnknownSubCluster() throws Exception {
SubClusterId subClusterId = SubClusterId.newInstance("SC");
SubClusterHeartbeatRequest heartbeatRequest = SubClusterHeartbeatRequest
.newInstance(subClusterId, SubClusterState.SC_RUNNING, "capability");
LambdaTestUtils.intercept(YarnException.class,
"SubCluster SC does not exist; cannot heartbeat",
() -> stateStore.subClusterHeartbeat(heartbeatRequest));
}
// Test FederationApplicationHomeSubClusterStore
@Test
public void testAddApplicationHomeSubCluster() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId = SubClusterId.newInstance("SC");
ApplicationHomeSubCluster ahsc =
ApplicationHomeSubCluster.newInstance(appId, subClusterId);
AddApplicationHomeSubClusterRequest request =
AddApplicationHomeSubClusterRequest.newInstance(ahsc);
AddApplicationHomeSubClusterResponse response =
stateStore.addApplicationHomeSubCluster(request);
assertEquals(subClusterId, response.getHomeSubCluster());
assertEquals(subClusterId, queryApplicationHomeSC(appId));
}
@Test
public void testAddApplicationHomeSubClusterAppAlreadyExists()
throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
addApplicationHomeSC(appId, subClusterId1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ApplicationHomeSubCluster ahsc2 =
ApplicationHomeSubCluster.newInstance(appId, subClusterId2);
AddApplicationHomeSubClusterResponse response =
stateStore.addApplicationHomeSubCluster(
AddApplicationHomeSubClusterRequest.newInstance(ahsc2));
assertEquals(subClusterId1, response.getHomeSubCluster());
assertEquals(subClusterId1, queryApplicationHomeSC(appId));
}
@Test
public void testAddApplicationHomeSubClusterAppAlreadyExistsInTheSameSC()
throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
addApplicationHomeSC(appId, subClusterId1);
ApplicationHomeSubCluster ahsc2 =
ApplicationHomeSubCluster.newInstance(appId, subClusterId1);
AddApplicationHomeSubClusterResponse response =
stateStore.addApplicationHomeSubCluster(
AddApplicationHomeSubClusterRequest.newInstance(ahsc2));
assertEquals(subClusterId1, response.getHomeSubCluster());
assertEquals(subClusterId1, queryApplicationHomeSC(appId));
}
@Test
public void testDeleteApplicationHomeSubCluster() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId = SubClusterId.newInstance("SC");
addApplicationHomeSC(appId, subClusterId);
DeleteApplicationHomeSubClusterRequest delRequest =
DeleteApplicationHomeSubClusterRequest.newInstance(appId);
DeleteApplicationHomeSubClusterResponse response =
stateStore.deleteApplicationHomeSubCluster(delRequest);
assertNotNull(response);
try {
queryApplicationHomeSC(appId);
fail();
} catch (FederationStateStoreException e) {
assertTrue(e.getMessage()
.startsWith("Application " + appId + " does not exist"));
}
}
@Test
public void testDeleteApplicationHomeSubClusterUnknownApp() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
DeleteApplicationHomeSubClusterRequest delRequest =
DeleteApplicationHomeSubClusterRequest.newInstance(appId);
try {
stateStore.deleteApplicationHomeSubCluster(delRequest);
fail();
} catch (FederationStateStoreException e) {
assertTrue(e.getMessage()
.startsWith("Application " + appId.toString() + " does not exist"));
}
}
@Test
public void testGetApplicationHomeSubCluster() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId = SubClusterId.newInstance("SC");
addApplicationHomeSC(appId, subClusterId);
GetApplicationHomeSubClusterRequest getRequest =
GetApplicationHomeSubClusterRequest.newInstance(appId);
GetApplicationHomeSubClusterResponse result =
stateStore.getApplicationHomeSubCluster(getRequest);
assertEquals(appId,
result.getApplicationHomeSubCluster().getApplicationId());
assertEquals(subClusterId,
result.getApplicationHomeSubCluster().getHomeSubCluster());
}
@Test
public void testGetApplicationHomeSubClusterUnknownApp() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
GetApplicationHomeSubClusterRequest request =
GetApplicationHomeSubClusterRequest.newInstance(appId);
try {
stateStore.getApplicationHomeSubCluster(request);
fail();
} catch (FederationStateStoreException e) {
assertTrue(e.getMessage()
.startsWith("Application " + appId.toString() + " does not exist"));
}
}
@Test
public void testGetApplicationsHomeSubCluster() throws Exception {
ApplicationId appId1 = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
ApplicationHomeSubCluster ahsc1 =
ApplicationHomeSubCluster.newInstance(appId1, subClusterId1);
ApplicationId appId2 = ApplicationId.newInstance(1, 2);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ApplicationHomeSubCluster ahsc2 =
ApplicationHomeSubCluster.newInstance(appId2, subClusterId2);
addApplicationHomeSC(appId1, subClusterId1);
addApplicationHomeSC(appId2, subClusterId2);
GetApplicationsHomeSubClusterRequest getRequest =
GetApplicationsHomeSubClusterRequest.newInstance();
GetApplicationsHomeSubClusterResponse result =
stateStore.getApplicationsHomeSubCluster(getRequest);
assertEquals(2, result.getAppsHomeSubClusters().size());
assertTrue(result.getAppsHomeSubClusters().contains(ahsc1));
assertTrue(result.getAppsHomeSubClusters().contains(ahsc2));
}
@Test
public void testGetApplicationsHomeSubClusterEmpty() throws Exception {
LambdaTestUtils.intercept(YarnException.class,
"Missing getApplicationsHomeSubCluster request",
() -> stateStore.getApplicationsHomeSubCluster(null));
}
@Test
public void testGetApplicationsHomeSubClusterFilter() throws Exception {
// Add ApplicationHomeSC - SC1
long now = Time.now();
Set<ApplicationHomeSubCluster> appHomeSubClusters = new HashSet<>();
for (int i = 0; i < NUM_APPS_10; i++) {
ApplicationId appId = ApplicationId.newInstance(now, i);
SubClusterId subClusterId = SubClusterId.newInstance("SC1");
addApplicationHomeSC(appId, subClusterId);
ApplicationHomeSubCluster ahsc =
ApplicationHomeSubCluster.newInstance(appId, subClusterId);
appHomeSubClusters.add(ahsc);
}
// Add ApplicationHomeSC - SC2
for (int i = 10; i < NUM_APPS_20; i++) {
ApplicationId appId = ApplicationId.newInstance(now, i);
SubClusterId subClusterId = SubClusterId.newInstance("SC2");
addApplicationHomeSC(appId, subClusterId);
}
GetApplicationsHomeSubClusterRequest getRequest =
GetApplicationsHomeSubClusterRequest.newInstance();
getRequest.setSubClusterId(SubClusterId.newInstance("SC1"));
GetApplicationsHomeSubClusterResponse result =
stateStore.getApplicationsHomeSubCluster(getRequest);
assertNotNull(result);
List<ApplicationHomeSubCluster> items = result.getAppsHomeSubClusters();
assertNotNull(items);
assertEquals(10, items.size());
for (ApplicationHomeSubCluster item : items) {
appHomeSubClusters.contains(item);
assertTrue(appHomeSubClusters.contains(item));
}
}
@Test
public void testGetApplicationsHomeSubClusterLimit() throws Exception {
// Add ApplicationHomeSC - SC1
long now = Time.now();
for (int i = 0; i < 50; i++) {
ApplicationId appId = ApplicationId.newInstance(now, i);
SubClusterId subClusterId = SubClusterId.newInstance("SC1");
addApplicationHomeSC(appId, subClusterId);
}
GetApplicationsHomeSubClusterRequest getRequest =
GetApplicationsHomeSubClusterRequest.newInstance();
getRequest.setSubClusterId(SubClusterId.newInstance("SC1"));
GetApplicationsHomeSubClusterResponse result =
stateStore.getApplicationsHomeSubCluster(getRequest);
assertNotNull(result);
// Write 50 records, but get 10 records because the maximum number is limited to 10
List<ApplicationHomeSubCluster> items = result.getAppsHomeSubClusters();
assertNotNull(items);
assertEquals(10, items.size());
GetApplicationsHomeSubClusterRequest getRequest1 =
GetApplicationsHomeSubClusterRequest.newInstance();
getRequest1.setSubClusterId(SubClusterId.newInstance("SC2"));
GetApplicationsHomeSubClusterResponse result1 =
stateStore.getApplicationsHomeSubCluster(getRequest1);
assertNotNull(result1);
// SC2 data does not exist, so the number of returned records is 0
List<ApplicationHomeSubCluster> items1 = result1.getAppsHomeSubClusters();
assertNotNull(items1);
assertEquals(0, items1.size());
}
@Test
public void testUpdateApplicationHomeSubCluster() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
addApplicationHomeSC(appId, subClusterId1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ApplicationHomeSubCluster ahscUpdate =
ApplicationHomeSubCluster.newInstance(appId, subClusterId2);
UpdateApplicationHomeSubClusterRequest updateRequest =
UpdateApplicationHomeSubClusterRequest.newInstance(ahscUpdate);
UpdateApplicationHomeSubClusterResponse response =
stateStore.updateApplicationHomeSubCluster(updateRequest);
assertNotNull(response);
assertEquals(subClusterId2, queryApplicationHomeSC(appId));
}
@Test
public void testUpdateApplicationHomeSubClusterUnknownApp() throws Exception {
ApplicationId appId = ApplicationId.newInstance(1, 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
ApplicationHomeSubCluster ahsc =
ApplicationHomeSubCluster.newInstance(appId, subClusterId1);
UpdateApplicationHomeSubClusterRequest updateRequest =
UpdateApplicationHomeSubClusterRequest.newInstance(ahsc);
try {
stateStore.updateApplicationHomeSubCluster((updateRequest));
fail();
} catch (FederationStateStoreException e) {
assertTrue(e.getMessage()
.startsWith("Application " + appId.toString() + " does not exist"));
}
}
// Test FederationPolicyStore
@Test
public void testSetPolicyConfiguration() throws Exception {
SetSubClusterPolicyConfigurationRequest request =
SetSubClusterPolicyConfigurationRequest
.newInstance(createSCPolicyConf("Queue", "PolicyType"));
SetSubClusterPolicyConfigurationResponse result =
stateStore.setPolicyConfiguration(request);
assertNotNull(result);
assertEquals(createSCPolicyConf("Queue", "PolicyType"),
queryPolicy("Queue"));
}
@Test
public void testSetPolicyConfigurationUpdateExisting() throws Exception {
setPolicyConf("Queue", "PolicyType1");
SetSubClusterPolicyConfigurationRequest request2 =
SetSubClusterPolicyConfigurationRequest
.newInstance(createSCPolicyConf("Queue", "PolicyType2"));
SetSubClusterPolicyConfigurationResponse result =
stateStore.setPolicyConfiguration(request2);
assertNotNull(result);
assertEquals(createSCPolicyConf("Queue", "PolicyType2"),
queryPolicy("Queue"));
}
@Test
public void testGetPolicyConfiguration() throws Exception {
setPolicyConf("Queue", "PolicyType");
GetSubClusterPolicyConfigurationRequest getRequest =
GetSubClusterPolicyConfigurationRequest.newInstance("Queue");
GetSubClusterPolicyConfigurationResponse result =
stateStore.getPolicyConfiguration(getRequest);
assertNotNull(result);
assertEquals(createSCPolicyConf("Queue", "PolicyType"),
result.getPolicyConfiguration());
}
@Test
public void testGetPolicyConfigurationUnknownQueue() throws Exception {
GetSubClusterPolicyConfigurationRequest request =
GetSubClusterPolicyConfigurationRequest.newInstance("Queue");
GetSubClusterPolicyConfigurationResponse response =
stateStore.getPolicyConfiguration(request);
assertNull(response);
}
@Test
public void testGetPoliciesConfigurations() throws Exception {
setPolicyConf("Queue1", "PolicyType1");
setPolicyConf("Queue2", "PolicyType2");
GetSubClusterPoliciesConfigurationsResponse response =
stateStore.getPoliciesConfigurations(
GetSubClusterPoliciesConfigurationsRequest.newInstance());
assertNotNull(response);
assertNotNull(response.getPoliciesConfigs());
assertEquals(2, response.getPoliciesConfigs().size());
assertTrue(response.getPoliciesConfigs()
.contains(createSCPolicyConf("Queue1", "PolicyType1")));
assertTrue(response.getPoliciesConfigs()
.contains(createSCPolicyConf("Queue2", "PolicyType2")));
}
// Convenience methods
SubClusterInfo createSubClusterInfo(SubClusterId subClusterId) {
String amRMAddress = "1.2.3.4:1";
String clientRMAddress = "1.2.3.4:2";
String rmAdminAddress = "1.2.3.4:3";
String webAppAddress = "1.2.3.4:4";
return SubClusterInfo.newInstance(subClusterId, amRMAddress,
clientRMAddress, rmAdminAddress, webAppAddress, SubClusterState.SC_NEW,
CLOCK.getTime(), "capability");
}
private SubClusterPolicyConfiguration createSCPolicyConf(String queueName,
String policyType) {
ByteBuffer bb = ByteBuffer.allocate(100);
bb.put((byte) 0x02);
return SubClusterPolicyConfiguration.newInstance(queueName, policyType, bb);
}
void addApplicationHomeSC(ApplicationId appId,
SubClusterId subClusterId) throws YarnException {
ApplicationHomeSubCluster ahsc =
ApplicationHomeSubCluster.newInstance(appId, subClusterId);
AddApplicationHomeSubClusterRequest request =
AddApplicationHomeSubClusterRequest.newInstance(ahsc);
stateStore.addApplicationHomeSubCluster(request);
}
void addApplicationHomeSC(ApplicationId appId, SubClusterId subClusterId,
ApplicationSubmissionContext submissionContext) throws YarnException {
long createTime = Time.now();
ApplicationHomeSubCluster ahsc = ApplicationHomeSubCluster.newInstance(
appId, createTime, subClusterId, submissionContext);
AddApplicationHomeSubClusterRequest request =
AddApplicationHomeSubClusterRequest.newInstance(ahsc);
stateStore.addApplicationHomeSubCluster(request);
}
private void setPolicyConf(String queue, String policyType)
throws YarnException {
SetSubClusterPolicyConfigurationRequest request =
SetSubClusterPolicyConfigurationRequest
.newInstance(createSCPolicyConf(queue, policyType));
stateStore.setPolicyConfiguration(request);
}
private void registerSubCluster(SubClusterInfo subClusterInfo)
throws YarnException {
stateStore.registerSubCluster(
SubClusterRegisterRequest.newInstance(subClusterInfo));
}
SubClusterInfo querySubClusterInfo(SubClusterId subClusterId)
throws YarnException {
GetSubClusterInfoRequest request =
GetSubClusterInfoRequest.newInstance(subClusterId);
return stateStore.getSubCluster(request).getSubClusterInfo();
}
SubClusterId queryApplicationHomeSC(ApplicationId appId)
throws YarnException {
GetApplicationHomeSubClusterRequest request =
GetApplicationHomeSubClusterRequest.newInstance(appId);
GetApplicationHomeSubClusterResponse response =
stateStore.getApplicationHomeSubCluster(request);
return response.getApplicationHomeSubCluster().getHomeSubCluster();
}
private SubClusterPolicyConfiguration queryPolicy(String queue)
throws YarnException {
GetSubClusterPolicyConfigurationRequest request =
GetSubClusterPolicyConfigurationRequest.newInstance(queue);
GetSubClusterPolicyConfigurationResponse result =
stateStore.getPolicyConfiguration(request);
return result.getPolicyConfiguration();
}
  /** Injects the configuration used to set up the store under test. */
  protected void setConf(Configuration conf) {
    this.conf = conf;
  }
  /** Returns the configuration previously supplied via {@link #setConf}. */
  protected Configuration getConf() {
    return conf;
  }
  /** Returns the state store instance under test. */
  protected FederationStateStore getStateStore() {
    return stateStore;
  }
SubClusterId queryReservationHomeSC(ReservationId reservationId)
throws YarnException {
GetReservationHomeSubClusterRequest request =
GetReservationHomeSubClusterRequest.newInstance(reservationId);
GetReservationHomeSubClusterResponse response =
stateStore.getReservationHomeSubCluster(request);
return response.getReservationHomeSubCluster().getHomeSubCluster();
}
@Test
public void testAddReservationHomeSubCluster() throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
SubClusterId subClusterId = SubClusterId.newInstance("SC");
ReservationHomeSubCluster reservationHomeSubCluster =
ReservationHomeSubCluster.newInstance(reservationId, subClusterId);
AddReservationHomeSubClusterRequest request =
AddReservationHomeSubClusterRequest.newInstance(reservationHomeSubCluster);
AddReservationHomeSubClusterResponse response =
stateStore.addReservationHomeSubCluster(request);
assertEquals(subClusterId, response.getHomeSubCluster());
assertEquals(subClusterId, queryReservationHomeSC(reservationId));
}
private void addReservationHomeSC(ReservationId reservationId, SubClusterId subClusterId)
throws YarnException {
ReservationHomeSubCluster reservationHomeSubCluster =
ReservationHomeSubCluster.newInstance(reservationId, subClusterId);
AddReservationHomeSubClusterRequest request =
AddReservationHomeSubClusterRequest.newInstance(reservationHomeSubCluster);
stateStore.addReservationHomeSubCluster(request);
}
@Test
public void testAddReservationHomeSubClusterReservationAlreadyExists() throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
addReservationHomeSC(reservationId, subClusterId1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ReservationHomeSubCluster reservationHomeSubCluster2 =
ReservationHomeSubCluster.newInstance(reservationId, subClusterId2);
AddReservationHomeSubClusterRequest request2 =
AddReservationHomeSubClusterRequest.newInstance(reservationHomeSubCluster2);
AddReservationHomeSubClusterResponse response =
stateStore.addReservationHomeSubCluster(request2);
assertNotNull(response);
assertEquals(subClusterId1, response.getHomeSubCluster());
assertEquals(subClusterId1, queryReservationHomeSC(reservationId));
}
@Test
public void testAddReservationHomeSubClusterAppAlreadyExistsInTheSameSC()
throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
addReservationHomeSC(reservationId, subClusterId1);
ReservationHomeSubCluster reservationHomeSubCluster2 =
ReservationHomeSubCluster.newInstance(reservationId, subClusterId1);
AddReservationHomeSubClusterRequest request2 =
AddReservationHomeSubClusterRequest.newInstance(reservationHomeSubCluster2);
AddReservationHomeSubClusterResponse response =
stateStore.addReservationHomeSubCluster(request2);
assertNotNull(response);
assertEquals(subClusterId1, response.getHomeSubCluster());
assertEquals(subClusterId1, queryReservationHomeSC(reservationId));
}
@Test
public void testDeleteReservationHomeSubCluster() throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC");
addReservationHomeSC(reservationId, subClusterId1);
DeleteReservationHomeSubClusterRequest delReservationRequest =
DeleteReservationHomeSubClusterRequest.newInstance(reservationId);
DeleteReservationHomeSubClusterResponse delReservationResponse =
stateStore.deleteReservationHomeSubCluster(delReservationRequest);
assertNotNull(delReservationResponse);
LambdaTestUtils.intercept(YarnException.class,
"Reservation " + reservationId + " does not exist",
() -> queryReservationHomeSC(reservationId));
}
@Test
public void testDeleteReservationHomeSubClusterUnknownApp() throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
DeleteReservationHomeSubClusterRequest delReservationRequest =
DeleteReservationHomeSubClusterRequest.newInstance(reservationId);
LambdaTestUtils.intercept(YarnException.class,
"Reservation " + reservationId + " does not exist",
() -> stateStore.deleteReservationHomeSubCluster(delReservationRequest));
}
@Test
public void testUpdateReservationHomeSubCluster() throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC");
addReservationHomeSC(reservationId, subClusterId1);
SubClusterId subClusterId2 = SubClusterId.newInstance("SC2");
ReservationHomeSubCluster reservationHomeSubCluster =
ReservationHomeSubCluster.newInstance(reservationId, subClusterId2);
UpdateReservationHomeSubClusterRequest updateReservationRequest =
UpdateReservationHomeSubClusterRequest.newInstance(reservationHomeSubCluster);
UpdateReservationHomeSubClusterResponse updateReservationResponse =
stateStore.updateReservationHomeSubCluster(updateReservationRequest);
assertNotNull(updateReservationResponse);
assertEquals(subClusterId2, queryReservationHomeSC(reservationId));
}
@Test
public void testUpdateReservationHomeSubClusterUnknownApp() throws Exception {
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
SubClusterId subClusterId1 = SubClusterId.newInstance("SC1");
ReservationHomeSubCluster reservationHomeSubCluster =
ReservationHomeSubCluster.newInstance(reservationId, subClusterId1);
UpdateReservationHomeSubClusterRequest updateReservationRequest =
UpdateReservationHomeSubClusterRequest.newInstance(reservationHomeSubCluster);
LambdaTestUtils.intercept(YarnException.class,
"Reservation " + reservationId + " does not exist",
() -> stateStore.updateReservationHomeSubCluster(updateReservationRequest));
}
@Test
public void testStoreNewMasterKey() throws Exception {
// store delegation key;
DelegationKey key = new DelegationKey(1234, 4321, "keyBytes".getBytes());
Set<DelegationKey> keySet = new HashSet<>();
keySet.add(key);
RouterMasterKey routerMasterKey = RouterMasterKey.newInstance(key.getKeyId(),
ByteBuffer.wrap(key.getEncodedKey()), key.getExpiryDate());
RouterMasterKeyRequest routerMasterKeyRequest =
RouterMasterKeyRequest.newInstance(routerMasterKey);
RouterMasterKeyResponse response = stateStore.storeNewMasterKey(routerMasterKeyRequest);
assertNotNull(response);
RouterMasterKey routerMasterKeyResp = response.getRouterMasterKey();
assertNotNull(routerMasterKeyResp);
assertEquals(routerMasterKey.getKeyId(), routerMasterKeyResp.getKeyId());
assertEquals(routerMasterKey.getKeyBytes(), routerMasterKeyResp.getKeyBytes());
assertEquals(routerMasterKey.getExpiryDate(), routerMasterKeyResp.getExpiryDate());
checkRouterMasterKey(key, routerMasterKey);
}
@Test
public void testGetMasterKeyByDelegationKey() throws YarnException, IOException {
// store delegation key;
DelegationKey key = new DelegationKey(5678, 8765, "keyBytes".getBytes());
Set<DelegationKey> keySet = new HashSet<>();
keySet.add(key);
RouterMasterKey routerMasterKey = RouterMasterKey.newInstance(key.getKeyId(),
ByteBuffer.wrap(key.getEncodedKey()), key.getExpiryDate());
RouterMasterKeyRequest routerMasterKeyRequest =
RouterMasterKeyRequest.newInstance(routerMasterKey);
RouterMasterKeyResponse response = stateStore.storeNewMasterKey(routerMasterKeyRequest);
assertNotNull(response);
RouterMasterKeyResponse routerMasterKeyResponse =
stateStore.getMasterKeyByDelegationKey(routerMasterKeyRequest);
assertNotNull(routerMasterKeyResponse);
RouterMasterKey routerMasterKeyResp = routerMasterKeyResponse.getRouterMasterKey();
assertNotNull(routerMasterKeyResp);
assertEquals(routerMasterKey.getKeyId(), routerMasterKeyResp.getKeyId());
assertEquals(routerMasterKey.getKeyBytes(), routerMasterKeyResp.getKeyBytes());
assertEquals(routerMasterKey.getExpiryDate(), routerMasterKeyResp.getExpiryDate());
}
@Test
public void testRemoveStoredMasterKey() throws YarnException, IOException {
// store delegation key;
DelegationKey key = new DelegationKey(1234, 4321, "keyBytes".getBytes());
Set<DelegationKey> keySet = new HashSet<>();
keySet.add(key);
RouterMasterKey routerMasterKey = RouterMasterKey.newInstance(key.getKeyId(),
ByteBuffer.wrap(key.getEncodedKey()), key.getExpiryDate());
RouterMasterKeyRequest routerMasterKeyRequest =
RouterMasterKeyRequest.newInstance(routerMasterKey);
RouterMasterKeyResponse response = stateStore.storeNewMasterKey(routerMasterKeyRequest);
assertNotNull(response);
RouterMasterKeyResponse masterKeyResponse =
stateStore.removeStoredMasterKey(routerMasterKeyRequest);
assertNotNull(masterKeyResponse);
RouterMasterKey routerMasterKeyResp = masterKeyResponse.getRouterMasterKey();
assertEquals(routerMasterKey.getKeyId(), routerMasterKeyResp.getKeyId());
assertEquals(routerMasterKey.getKeyBytes(), routerMasterKeyResp.getKeyBytes());
assertEquals(routerMasterKey.getExpiryDate(), routerMasterKeyResp.getExpiryDate());
}
@Test
public void testStoreNewToken() throws IOException, YarnException, SQLException {
// prepare parameters
RMDelegationTokenIdentifier identifier = new RMDelegationTokenIdentifier(
new Text("owner1"), new Text("renewer1"), new Text("realuser1"));
int sequenceNumber = 1;
identifier.setSequenceNumber(sequenceNumber);
Long renewDate = Time.now();
String tokenInfo = "tokenInfo";
// store new rm-token
RouterStoreToken storeToken = RouterStoreToken.newInstance(identifier, renewDate, tokenInfo);
RouterRMTokenRequest request = RouterRMTokenRequest.newInstance(storeToken);
RouterRMTokenResponse routerRMTokenResponse = stateStore.storeNewToken(request);
// Verify the returned result to ensure that the returned Response is not empty
// and the returned result is consistent with the input parameters.
assertNotNull(routerRMTokenResponse);
RouterStoreToken storeTokenResp = routerRMTokenResponse.getRouterStoreToken();
assertNotNull(storeTokenResp);
assertEquals(storeToken.getRenewDate(), storeTokenResp.getRenewDate());
assertEquals(storeToken.getTokenIdentifier(), storeTokenResp.getTokenIdentifier());
assertEquals(storeToken.getTokenInfo(), storeTokenResp.getTokenInfo());
checkRouterStoreToken(identifier, storeTokenResp);
}
@Test
public void testUpdateStoredToken() throws IOException, YarnException, SQLException {
// prepare saveToken parameters
RMDelegationTokenIdentifier identifier = new RMDelegationTokenIdentifier(
new Text("owner2"), new Text("renewer2"), new Text("realuser2"));
int sequenceNumber = 2;
String tokenInfo = "tokenInfo";
identifier.setSequenceNumber(sequenceNumber);
Long renewDate = Time.now();
// store new rm-token
RouterStoreToken storeToken = RouterStoreToken.newInstance(identifier, renewDate, tokenInfo);
RouterRMTokenRequest request = RouterRMTokenRequest.newInstance(storeToken);
RouterRMTokenResponse routerRMTokenResponse = stateStore.storeNewToken(request);
assertNotNull(routerRMTokenResponse);
// prepare updateToken parameters
Long renewDate2 = Time.now();
String tokenInfo2 = "tokenInfo2";
// update rm-token
RouterStoreToken updateToken = RouterStoreToken.newInstance(identifier, renewDate2, tokenInfo2);
RouterRMTokenRequest updateTokenRequest = RouterRMTokenRequest.newInstance(updateToken);
RouterRMTokenResponse updateTokenResponse = stateStore.updateStoredToken(updateTokenRequest);
assertNotNull(updateTokenResponse);
RouterStoreToken updateTokenResp = updateTokenResponse.getRouterStoreToken();
assertNotNull(updateTokenResp);
assertEquals(updateToken.getRenewDate(), updateTokenResp.getRenewDate());
assertEquals(updateToken.getTokenIdentifier(), updateTokenResp.getTokenIdentifier());
assertEquals(updateToken.getTokenInfo(), updateTokenResp.getTokenInfo());
checkRouterStoreToken(identifier, updateTokenResp);
}
@Test
public void testRemoveStoredToken() throws IOException, YarnException {
// prepare saveToken parameters
RMDelegationTokenIdentifier identifier = new RMDelegationTokenIdentifier(
new Text("owner3"), new Text("renewer3"), new Text("realuser3"));
int sequenceNumber = 3;
identifier.setSequenceNumber(sequenceNumber);
Long renewDate = Time.now();
String tokenInfo = "tokenInfo";
// store new rm-token
RouterStoreToken storeToken = RouterStoreToken.newInstance(identifier, renewDate, tokenInfo);
RouterRMTokenRequest request = RouterRMTokenRequest.newInstance(storeToken);
RouterRMTokenResponse routerRMTokenResponse = stateStore.storeNewToken(request);
assertNotNull(routerRMTokenResponse);
// remove rm-token
RouterRMTokenResponse removeTokenResponse = stateStore.removeStoredToken(request);
assertNotNull(removeTokenResponse);
RouterStoreToken removeTokenResp = removeTokenResponse.getRouterStoreToken();
assertNotNull(removeTokenResp);
assertEquals(removeTokenResp.getRenewDate(), storeToken.getRenewDate());
assertEquals(removeTokenResp.getTokenIdentifier(), storeToken.getTokenIdentifier());
}
@Test
public void testGetTokenByRouterStoreToken() throws IOException, YarnException, SQLException {
// prepare saveToken parameters
RMDelegationTokenIdentifier identifier = new RMDelegationTokenIdentifier(
new Text("owner4"), new Text("renewer4"), new Text("realuser4"));
int sequenceNumber = 4;
identifier.setSequenceNumber(sequenceNumber);
Long renewDate = Time.now();
String tokenInfo = "tokenInfo";
// store new rm-token
RouterStoreToken storeToken = RouterStoreToken.newInstance(identifier, renewDate, tokenInfo);
RouterRMTokenRequest request = RouterRMTokenRequest.newInstance(storeToken);
RouterRMTokenResponse routerRMTokenResponse = stateStore.storeNewToken(request);
assertNotNull(routerRMTokenResponse);
// getTokenByRouterStoreToken
RouterRMTokenResponse getRouterRMTokenResp = stateStore.getTokenByRouterStoreToken(request);
assertNotNull(getRouterRMTokenResp);
RouterStoreToken getStoreTokenResp = getRouterRMTokenResp.getRouterStoreToken();
assertNotNull(getStoreTokenResp);
assertEquals(getStoreTokenResp.getRenewDate(), storeToken.getRenewDate());
assertEquals(storeToken.getTokenInfo(), getStoreTokenResp.getTokenInfo());
checkRouterStoreToken(identifier, getStoreTokenResp);
}
  /** The store must report version 1.1 as its current version. */
  @Test
  public void testGetCurrentVersion() {
    Version version = stateStore.getCurrentVersion();
    assertEquals(1, version.getMajorVersion());
    assertEquals(1, version.getMinorVersion());
  }
  /** After storing the version, the reported current version stays 1.1. */
  @Test
  public void testStoreVersion() throws Exception {
    stateStore.storeVersion();
    Version version = stateStore.getCurrentVersion();
    assertEquals(1, version.getMajorVersion());
    assertEquals(1, version.getMinorVersion());
  }
  /** A stored version must load back as 1.1. */
  @Test
  public void testLoadVersion() throws Exception {
    stateStore.storeVersion();
    Version version = stateStore.loadVersion();
    assertEquals(1, version.getMajorVersion());
    assertEquals(1, version.getMinorVersion());
  }
  /** checkVersion must complete without throwing. */
  @Test
  public void testCheckVersion() throws Exception {
    stateStore.checkVersion();
  }
@Test
public void testGetApplicationHomeSubClusterWithContext() throws Exception {
FederationStateStore federationStateStore = this.getStateStore();
ApplicationId appId = ApplicationId.newInstance(1, 3);
SubClusterId subClusterId = SubClusterId.newInstance("SC");
ApplicationSubmissionContext context =
ApplicationSubmissionContext.newInstance(appId, "test", "default",
Priority.newInstance(0), null, true, true,
2, Resource.newInstance(10, 2), "test");
addApplicationHomeSC(appId, subClusterId, context);
GetApplicationHomeSubClusterRequest getRequest =
GetApplicationHomeSubClusterRequest.newInstance(appId, true);
GetApplicationHomeSubClusterResponse result =
federationStateStore.getApplicationHomeSubCluster(getRequest);
ApplicationHomeSubCluster applicationHomeSubCluster = result.getApplicationHomeSubCluster();
assertEquals(appId, applicationHomeSubCluster.getApplicationId());
assertEquals(subClusterId, applicationHomeSubCluster.getHomeSubCluster());
assertEquals(context, applicationHomeSubCluster.getApplicationSubmissionContext());
}
  // NOTE(review): every other test-prefixed method in this class carries
  // @Test, but this one does not, so JUnit never runs it directly. Confirm
  // whether that is intentional (e.g. subclasses invoke it themselves) or a
  // missing annotation.
  public void testDeleteStateStore() throws Exception {
    // Step1. We clean the StateStore.
    FederationStateStore federationStateStore = this.getStateStore();
    federationStateStore.deleteStateStore();
    // Step2. When we query the sub-cluster information, it should not exist.
    GetSubClustersInfoRequest request = GetSubClustersInfoRequest.newInstance(true);
    List<SubClusterInfo> subClustersActive = stateStore.getSubClusters(request).getSubClusters();
    assertNotNull(subClustersActive);
    assertEquals(0, subClustersActive.size());
    // Step3. When we query the applications' information, it should not exist.
    GetApplicationsHomeSubClusterRequest getRequest =
        GetApplicationsHomeSubClusterRequest.newInstance();
    GetApplicationsHomeSubClusterResponse result =
        stateStore.getApplicationsHomeSubCluster(getRequest);
    assertNotNull(result);
    List<ApplicationHomeSubCluster> appsHomeSubClusters = result.getAppsHomeSubClusters();
    assertNotNull(appsHomeSubClusters);
    assertEquals(0, appsHomeSubClusters.size());
  }
@Test
public void testDeletePoliciesConfigurations() throws Exception {
// Step1. We initialize the policy of the queue
FederationStateStore federationStateStore = this.getStateStore();
setPolicyConf("Queue1", "PolicyType1");
setPolicyConf("Queue2", "PolicyType2");
setPolicyConf("Queue3", "PolicyType3");
List<String> queues = new ArrayList<>();
queues.add("Queue1");
queues.add("Queue2");
queues.add("Queue3");
GetSubClusterPoliciesConfigurationsRequest policyRequest =
GetSubClusterPoliciesConfigurationsRequest.newInstance();
GetSubClusterPoliciesConfigurationsResponse response =
stateStore.getPoliciesConfigurations(policyRequest);
// Step2. Confirm that the initialized queue policy meets expectations.
assertNotNull(response);
List<SubClusterPolicyConfiguration> policiesConfigs = response.getPoliciesConfigs();
for (SubClusterPolicyConfiguration policyConfig : policiesConfigs) {
assertTrue(queues.contains(policyConfig.getQueue()));
}
// Step3. Delete the policy of queue (Queue1, Queue2).
List<String> deleteQueues = new ArrayList<>();
deleteQueues.add("Queue1");
deleteQueues.add("Queue2");
DeleteSubClusterPoliciesConfigurationsRequest deleteRequest =
DeleteSubClusterPoliciesConfigurationsRequest.newInstance(deleteQueues);
federationStateStore.deletePoliciesConfigurations(deleteRequest);
// Step4. Confirm that the queue has been deleted,
// that is, all currently returned queues do not exist in the deletion list.
GetSubClusterPoliciesConfigurationsRequest policyRequest2 =
GetSubClusterPoliciesConfigurationsRequest.newInstance();
GetSubClusterPoliciesConfigurationsResponse response2 =
stateStore.getPoliciesConfigurations(policyRequest2);
assertNotNull(response2);
List<SubClusterPolicyConfiguration> policiesConfigs2 = response2.getPoliciesConfigs();
for (SubClusterPolicyConfiguration policyConfig : policiesConfigs2) {
assertFalse(deleteQueues.contains(policyConfig.getQueue()));
}
}
@Test
public void testDeletePolicyStore() throws Exception {
// Step1. We delete all Policies Configurations.
FederationStateStore federationStateStore = this.getStateStore();
DeletePoliciesConfigurationsRequest request =
DeletePoliciesConfigurationsRequest.newInstance();
DeletePoliciesConfigurationsResponse response =
federationStateStore.deleteAllPoliciesConfigurations(request);
assertNotNull(response);
// Step2. We check the Policies size, the size should be 0 at this time.
GetSubClusterPoliciesConfigurationsRequest request1 =
GetSubClusterPoliciesConfigurationsRequest.newInstance();
GetSubClusterPoliciesConfigurationsResponse response1 =
stateStore.getPoliciesConfigurations(request1);
assertNotNull(response1);
List<SubClusterPolicyConfiguration> policiesConfigs =
response1.getPoliciesConfigs();
assertNotNull(policiesConfigs);
assertEquals(0, policiesConfigs.size());
}
}
|
FederationStateStoreBaseTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/cache/annotation/CacheEvict.java
|
{
"start": 1608,
"end": 6061
}
|
interface ____ {
/**
* Alias for {@link #cacheNames}.
* <p>Intended to be used when no other attributes are needed, for example:
* {@code @CacheEvict("books")}.
*/
@AliasFor("cacheNames")
String[] value() default {};
/**
* Names of the caches to use for the cache eviction operation.
* <p>Names may be used to determine the target cache (or caches), matching
* the qualifier value or bean name of a specific bean definition.
* @since 4.2
* @see #value
* @see CacheConfig#cacheNames
*/
@AliasFor("value")
String[] cacheNames() default {};
/**
* Spring Expression Language (SpEL) expression for computing the key dynamically.
* <p>Default is {@code ""}, meaning all method parameters are considered as a key,
* unless a custom {@link #keyGenerator} has been set.
* <p>The SpEL expression evaluates against a dedicated context that provides the
* following meta-data:
* <ul>
* <li>{@code #result} for a reference to the result of the method invocation, which
* can only be used if {@link #beforeInvocation()} is {@code false}. For supported
* wrappers such as {@code Optional}, {@code #result} refers to the actual object,
* not the wrapper</li>
* <li>{@code #root.method}, {@code #root.target}, and {@code #root.caches} for
* references to the {@link java.lang.reflect.Method method}, target object, and
* affected cache(s) respectively.</li>
* <li>Shortcuts for the method name ({@code #root.methodName}) and target class
* ({@code #root.targetClass}) are also available.
* <li>Method arguments can be accessed by index. For instance the second argument
* can be accessed via {@code #root.args[1]}, {@code #p1} or {@code #a1}. Arguments
* can also be accessed by name if that information is available.</li>
* </ul>
*/
String key() default "";
/**
* The bean name of the custom {@link org.springframework.cache.interceptor.KeyGenerator}
* to use.
* <p>Mutually exclusive with the {@link #key} attribute.
* @see CacheConfig#keyGenerator
*/
String keyGenerator() default "";
/**
* The bean name of the custom {@link org.springframework.cache.CacheManager} to use to
* create a default {@link org.springframework.cache.interceptor.CacheResolver} if none
* is set already.
* <p>Mutually exclusive with the {@link #cacheResolver} attribute.
* @see org.springframework.cache.interceptor.SimpleCacheResolver
* @see CacheConfig#cacheManager
*/
String cacheManager() default "";
/**
* The bean name of the custom {@link org.springframework.cache.interceptor.CacheResolver}
* to use.
* @see CacheConfig#cacheResolver
*/
String cacheResolver() default "";
/**
* Spring Expression Language (SpEL) expression used for making the cache
* eviction operation conditional. Evict that cache if the condition evaluates
* to {@code true}.
* <p>Default is {@code ""}, meaning the cache eviction is always performed.
* <p>The SpEL expression evaluates against a dedicated context that provides the
* following meta-data:
* <ul>
* <li>{@code #root.method}, {@code #root.target}, and {@code #root.caches} for
* references to the {@link java.lang.reflect.Method method}, target object, and
* affected cache(s) respectively.</li>
* <li>Shortcuts for the method name ({@code #root.methodName}) and target class
* ({@code #root.targetClass}) are also available.
* <li>Method arguments can be accessed by index. For instance the second argument
* can be accessed via {@code #root.args[1]}, {@code #p1} or {@code #a1}. Arguments
* can also be accessed by name if that information is available.</li>
* </ul>
*/
String condition() default "";
/**
* Whether all the entries inside the cache(s) are removed.
* <p>By default, only the value under the associated key is removed.
* <p>Note that setting this parameter to {@code true} and specifying a
* {@link #key} is not allowed.
*/
boolean allEntries() default false;
/**
* Whether the eviction should occur before the method is invoked.
* <p>Setting this attribute to {@code true}, causes the eviction to
* occur irrespective of the method outcome (i.e., whether it threw an
* exception or not).
* <p>Defaults to {@code false}, meaning that the cache eviction operation
* will occur <em>after</em> the advised method is invoked successfully (i.e.
* only if the invocation did not throw an exception).
*/
boolean beforeInvocation() default false;
}
|
CacheEvict
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/NumberUtils.java
|
{
"start": 9295,
"end": 11330
}
|
class ____ not be null");
DecimalFormat decimalFormat = null;
boolean resetBigDecimal = false;
if (numberFormat instanceof DecimalFormat dc) {
decimalFormat = dc;
if (BigDecimal.class == targetClass && !decimalFormat.isParseBigDecimal()) {
decimalFormat.setParseBigDecimal(true);
resetBigDecimal = true;
}
}
try {
Number number = numberFormat.parse(StringUtils.trimAllWhitespace(text));
return convertNumberToTargetClass(number, targetClass);
}
catch (ParseException ex) {
throw new IllegalArgumentException("Could not parse number: " + ex.getMessage());
}
finally {
if (resetBigDecimal) {
decimalFormat.setParseBigDecimal(false);
}
}
}
else {
return parseNumber(text, targetClass);
}
}
/**
* Determine whether the given {@code value} String indicates a hex number,
* i.e. needs to be passed into {@code Integer.decode} instead of
* {@code Integer.valueOf}, etc.
*/
private static boolean isHexNumber(String value) {
int index = (value.startsWith("-") ? 1 : 0);
return (value.startsWith("0x", index) || value.startsWith("0X", index) || value.startsWith("#", index));
}
/**
* Decode a {@link java.math.BigInteger} from the supplied {@link String} value.
* <p>Supports decimal, hex, and octal notation.
* @see BigInteger#BigInteger(String, int)
*/
private static BigInteger decodeBigInteger(String value) {
int radix = 10;
int index = 0;
boolean negative = false;
// Handle minus sign, if present.
if (value.startsWith("-")) {
negative = true;
index++;
}
// Handle radix specifier, if present.
if (value.startsWith("0x", index) || value.startsWith("0X", index)) {
index += 2;
radix = 16;
}
else if (value.startsWith("#", index)) {
index++;
radix = 16;
}
else if (value.startsWith("0", index) && value.length() > 1 + index) {
index++;
radix = 8;
}
BigInteger result = new BigInteger(value.substring(index), radix);
return (negative ? result.negate() : result);
}
}
|
must
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultComponentResolver.java
|
{
"start": 1341,
"end": 2990
}
|
class ____ implements ComponentResolver {
public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/component/";
private static final Logger LOG = LoggerFactory.getLogger(DefaultComponentResolver.class);
private FactoryFinder factoryFinder;
@Override
public Component resolveComponent(String name, CamelContext context) {
// not in registry then use component factory
Class<?> type;
try {
type = findComponent(name, context);
if (type == null) {
// not found
return null;
}
} catch (Exception e) {
throw new IllegalArgumentException("Invalid URI, no Component registered for scheme: " + name, e);
}
if (getLog().isDebugEnabled()) {
getLog().debug("Found component: {} via type: {} via: {}{}", name, type.getName(), factoryFinder.getResourcePath(),
name);
}
// create the component
if (Component.class.isAssignableFrom(type)) {
return (Component) context.getInjector().newInstance(type, false);
} else {
throw new IllegalArgumentException("Type is not a Component implementation. Found: " + type.getName());
}
}
private Class<?> findComponent(String name, CamelContext context) {
if (factoryFinder == null) {
factoryFinder = context.getCamelContextExtension().getFactoryFinder(RESOURCE_PATH);
}
return factoryFinder.findClass(name).orElse(null);
}
protected Logger getLog() {
return LOG;
}
}
|
DefaultComponentResolver
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
|
{
"start": 3033,
"end": 3074
}
|
enum ____ { SUCCEED, FAIL }
static
|
Counter
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/ioc/validation/Person.java
|
{
"start": 825,
"end": 1181
}
|
class ____ {
private String name;
@Min(18)
private int age;
@NotBlank
public String getName() {
return name;
}
public int getAge() {
return age;
}
public void setName(String name) {
this.name = name;
}
public void setAge(int age) {
this.age = age;
}
}
// end::class[]
|
Person
|
java
|
apache__flink
|
flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/cache/AbstractDownloadCache.java
|
{
"start": 1501,
"end": 1641
}
|
class ____ the download and caching
* of files and provides hooks for encoding/decoding a time-to-live into the file name.
*/
abstract
|
handles
|
java
|
quarkusio__quarkus
|
extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java
|
{
"start": 6977,
"end": 7536
}
|
class ____ implements FlywayExecutor.Command<BaselineResult> {
BaselineCommand(Flyway flyway) {
this.flyway = flyway;
}
final Flyway flyway;
@Override
public BaselineResult execute(CompositeMigrationResolver cmr, SchemaHistory schemaHistory, Database d,
Schema defaultSchema, Schema[] s, CallbackExecutor ce, StatementInterceptor si) {
if (!schemaHistory.exists()) {
return flyway.baseline();
}
return null;
}
}
}
|
BaselineCommand
|
java
|
bumptech__glide
|
integration/concurrent/src/main/java/com/bumptech/glide/integration/concurrent/GlideFutures.java
|
{
"start": 2019,
"end": 7179
}
|
interface ____<T> {
void act(T resource);
}
/**
* Submits the provided request, performs the provided {@code action} and returns a {@link
* ListenableFuture} that can be used to cancel the request or monitor its status.
*
* <p>Cancellation is best effort and may result in some resources not being returned back to
* Glide's pool. In particular, if the request is cancelled after the resource is loaded by Glide,
* but before {@code action} is run on {@code executor}, the resource will not be returned. We
* have the unfortunate choice between unsafely returning resources to the pool immediately when
* cancel is called while they may still be in use via {@link
* com.google.common.util.concurrent.ClosingFuture} or occasionally failing to return resources to
* the pool. Because failing to return resources to the pool is inefficient, but safe, that's the
* route we've chosen. A more sophisticated implementation may allow us to avoid the resource
* inefficiency.
*
* <p>If you do not need to interact with resource, use {@link #preload(RequestManager,
* RequestBuilder, Executor)}. {@code preload} is more efficient because it knows that the
* resource is never used and can always clear the resource immediately on cancellation, unlike
* this method.
*
* <p>An example usage:
*
* <pre>{@code
* ListenableFuture<Void> future =
* submitAndExecute(
* requestManager,
* requestBuilder,
* (bitmap) -> doSomethingWithBitmap(bitmap),
* backgroundExecutor);
* ;
* }</pre>
*
* @param <T> The type of resource that will be loaded (Bitmap, Drawable, etc).
*/
public static <T> ListenableFuture<Void> submitAndExecute(
final RequestManager requestManager,
RequestBuilder<T> requestBuilder,
final ResourceConsumer<T> action,
Executor executor) {
// If the request completes normally, then the target is cleared and the resource is returned.
// If the request fails while loading the image, there's no need to clear.
// If the request fails while calling the action, the target is cleared and the resource is
// returned.
// If the request is cancelled before the resource is loaded, then the resource is returned
// If the request is cancelled after the resource is loaded but before the transform runs,
// then the resource is dropped (but not leaked)
// If the request is cancelled after the transform method starts, then the resource is returned.
return FluentFuture.from(submitInternal(requestBuilder))
.transform(
new Function<TargetAndResult<T>, Void>() {
@Override
public Void apply(TargetAndResult<T> targetAndResult) {
try {
action.act(targetAndResult.result);
} finally {
requestManager.clear(targetAndResult.target);
}
return null;
}
},
executor);
}
/**
* Convert a pending load request into a ListenableFuture.
*
* <p>Sample code:
*
* <pre>{@code
* ListenableFuture<File> image =
* GlideFutures.submit(requestManager.asFile().load(url));
* }</pre>
*
* @param requestBuilder A request builder for the resource to load. It must be tied to an
* application Glide instance, and must not have a listener set.
* @param <T> The type of resource that will be loaded (Bitmap, Drawable, etc).
*/
public static <T> ListenableFuture<T> submit(final RequestBuilder<T> requestBuilder) {
return transformFromTargetAndResult(submitInternal(requestBuilder));
}
private static <T> ListenableFuture<T> transformFromTargetAndResult(
ListenableFuture<TargetAndResult<T>> future) {
return Futures.transform(
future,
new Function<TargetAndResult<T>, T>() {
@Override
public T apply(TargetAndResult<T> input) {
return input.result;
}
},
Executors.directExecutor());
}
private static <T> ListenableFuture<TargetAndResult<T>> submitInternal(
final RequestBuilder<T> requestBuilder) {
return CallbackToFutureAdapter.getFuture(
new Resolver<TargetAndResult<T>>() {
// Only used for toString
@SuppressWarnings("FutureReturnValueIgnored")
@Override
public Object attachCompleter(@NonNull Completer<TargetAndResult<T>> completer) {
GlideLoadingListener<T> listener = new GlideLoadingListener<>(completer);
final FutureTarget<T> futureTarget = requestBuilder.addListener(listener).submit();
completer.addCancellationListener(
new Runnable() {
@Override
public void run() {
futureTarget.cancel(/* mayInterruptIfRunning= */ true);
}
},
MoreExecutors.directExecutor());
return futureTarget;
}
});
}
/** Listener to convert Glide load results into ListenableFutures. */
private static final
|
ResourceConsumer
|
java
|
netty__netty
|
testsuite/src/main/java/io/netty/testsuite/transport/DefaultEventLoopTest.java
|
{
"start": 2787,
"end": 3714
}
|
class ____ extends SingleThreadEventLoop {
SuspendableDefaultEventLoop(EventLoopGroup parent, Executor executor) {
super(parent, executor, true, true, DEFAULT_MAX_PENDING_TASKS,
RejectedExecutionHandlers.reject());
}
@Override
protected void run() {
for (;;) {
Runnable task = takeTask();
if (task != null) {
runTask(task);
updateLastExecutionTime();
}
// Check if a suspend is requested and we have no more tasks. If so,
// exit the run() method to allow the suspension to complete.
if (canSuspend()) {
break;
}
if (confirmShutdown()) {
break;
}
}
}
}
private static final
|
SuspendableDefaultEventLoop
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/filter/ContentCachingResponseWrapperTests.java
|
{
"start": 13510,
"end": 13608
}
|
interface ____ {
void invoke(HttpServletResponse response, String contentType);
}
}
|
SetContentType
|
java
|
apache__camel
|
components/camel-box/camel-box-api/src/main/java/org/apache/camel/component/box/api/BoxHelper.java
|
{
"start": 899,
"end": 3461
}
|
class ____ {
public static final String BASE_ERROR_MESSAGE = "Box API returned the error code %d%n%n%s";
public static final String MISSING_LISTENER = "Parameter 'listener' is null: will not listen for events";
public static final String COLLABORATION_ID = "collaborationId";
public static final String FOLDER_ID = "folderId";
public static final String MESSAGE = "message";
public static final String FILE_ID = "fileId";
public static final String COMMENT_ID = "commentId";
public static final String INFO = "info";
public static final String PARENT_FOLDER_ID = "parentFolderId";
public static final String CONTENT = "content";
public static final String FILE_NAME = "fileName";
public static final String FILE_CONTENT = "fileContent";
public static final String VERSION = "version";
public static final String METADATA = "metadata";
public static final String SIZE = "size";
public static final String DESTINATION_FOLDER_ID = "destinationFolderId";
public static final String NEW_FOLDER_NAME = "newFolderName";
public static final String ACCESS = "access";
public static final String FOLDER_NAME = "folderName";
public static final String PATH = "path";
public static final String GROUP_ID = "groupId";
public static final String GROUP_INFO = "groupInfo";
public static final String USER_ID = "userId";
public static final String GROUP_MEMBERSHIP_ID = "groupMembershipId";
public static final String QUERY = "query";
public static final String ACTION = "action";
public static final String DUE_AT = "dueAt";
public static final String TASK_ID = "taskId";
public static final String ASSIGN_TO = "assignTo";
public static final String TASK_ASSIGNMENT_ID = "taskAssignmentId";
public static final String LOGIN = "login";
public static final String NAME = "name";
public static final String EMAIL = "email";
public static final String EMAIL_ALIAS_ID = "emailAliasId";
public static final String SOURCE_USER_ID = "sourceUserId";
public static final String COLLABORATOR = "collaborator";
public static final String ROLE = "role";
private BoxHelper() {
}
static String buildBoxApiErrorMessage(BoxAPIException e) {
return String.format(BASE_ERROR_MESSAGE, e.getResponseCode(), e.getResponse());
}
static <T> void notNull(T value, String name) {
if (value == null) {
throw new IllegalArgumentException("Parameter '" + name + "' cannot be null");
}
}
}
|
BoxHelper
|
java
|
netty__netty
|
handler/src/test/java/io/netty/handler/ssl/ParameterizedSslHandlerTest.java
|
{
"start": 3520,
"end": 33386
}
|
class ____ {
private static final String PARAMETERIZED_NAME = "{index}: clientProvider={0}, {index}: serverProvider={1}";
static Collection<Object[]> data() {
List<SslProvider> providers = new ArrayList<SslProvider>(3);
if (OpenSsl.isAvailable()) {
providers.add(SslProvider.OPENSSL);
providers.add(SslProvider.OPENSSL_REFCNT);
}
providers.add(SslProvider.JDK);
List<Object[]> params = new ArrayList<Object[]>();
for (SslProvider cp: providers) {
for (SslProvider sp: providers) {
params.add(new Object[] { cp, sp });
}
}
return params;
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 48000, unit = TimeUnit.MILLISECONDS)
public void testCompositeBufSizeEstimationGuaranteesSynchronousWrite(
SslProvider clientProvider, SslProvider serverProvider)
throws CertificateException, SSLException, ExecutionException, InterruptedException {
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
true, true, true);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
true, true, false);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
true, false, true);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
true, false, false);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
false, true, true);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
false, true, false);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
false, false, true);
compositeBufSizeEstimationGuaranteesSynchronousWrite(serverProvider, clientProvider,
false, false, false);
}
private static void compositeBufSizeEstimationGuaranteesSynchronousWrite(
SslProvider serverProvider, SslProvider clientProvider,
final boolean serverDisableWrapSize,
final boolean letHandlerCreateServerEngine, final boolean letHandlerCreateClientEngine)
throws CertificateException, SSLException, ExecutionException, InterruptedException {
SelfSignedCertificate ssc = CachedSelfSignedCertificate.getCachedCertificate();
final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
.sslProvider(serverProvider)
.build();
final SslContext sslClientCtx = SslContextBuilder.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.sslProvider(clientProvider).build();
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
Channel sc = null;
Channel cc = null;
try {
final Promise<Void> donePromise = group.next().newPromise();
// The goal is to provide the SSLEngine with many ByteBuf components to ensure that the overhead for wrap
// is correctly accounted for on each component.
final int numComponents = 150;
// This is the TLS packet size. The goal is to divide the maximum amount of application data that can fit
// into a single TLS packet into many components to ensure the overhead is correctly taken into account.
final int desiredBytes = 16384;
final int singleComponentSize = desiredBytes / numComponents;
final int expectedBytes = numComponents * singleComponentSize;
sc = new ServerBootstrap()
.group(group)
.channel(NioServerSocketChannel.class)
.childHandler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
final SslHandler handler = letHandlerCreateServerEngine
? sslServerCtx.newHandler(ch.alloc())
: new SslHandler(sslServerCtx.newEngine(ch.alloc()));
if (serverDisableWrapSize) {
handler.setWrapDataSize(-1);
}
ch.pipeline().addLast(handler);
ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
private boolean sentData;
private Throwable writeCause;
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
if (evt instanceof SslHandshakeCompletionEvent) {
SslHandshakeCompletionEvent sslEvt = (SslHandshakeCompletionEvent) evt;
if (sslEvt.isSuccess()) {
CompositeByteBuf content = ctx.alloc().compositeDirectBuffer(numComponents);
for (int i = 0; i < numComponents; ++i) {
ByteBuf buf = ctx.alloc().directBuffer(singleComponentSize);
buf.writerIndex(buf.writerIndex() + singleComponentSize);
content.addComponent(true, buf);
}
ctx.writeAndFlush(content).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
writeCause = future.cause();
if (writeCause == null) {
sentData = true;
}
}
});
} else {
donePromise.tryFailure(sslEvt.cause());
}
}
ctx.fireUserEventTriggered(evt);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
donePromise.tryFailure(new IllegalStateException("server exception sentData: " +
sentData + " writeCause: " + writeCause, cause));
}
@Override
public void channelInactive(ChannelHandlerContext ctx) {
donePromise.tryFailure(new IllegalStateException("server closed sentData: " +
sentData + " writeCause: " + writeCause));
}
});
}
}).bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
cc = new Bootstrap()
.group(group)
.channel(NioSocketChannel.class)
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
if (letHandlerCreateClientEngine) {
ch.pipeline().addLast(sslClientCtx.newHandler(ch.alloc()));
} else {
ch.pipeline().addLast(new SslHandler(sslClientCtx.newEngine(ch.alloc())));
}
ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
private int bytesSeen;
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof ByteBuf) {
bytesSeen += ((ByteBuf) msg).readableBytes();
if (bytesSeen == expectedBytes) {
donePromise.trySuccess(null);
}
}
ReferenceCountUtil.release(msg);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
if (evt instanceof SslHandshakeCompletionEvent) {
SslHandshakeCompletionEvent sslEvt = (SslHandshakeCompletionEvent) evt;
if (!sslEvt.isSuccess()) {
donePromise.tryFailure(sslEvt.cause());
}
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
donePromise.tryFailure(new IllegalStateException("client exception. bytesSeen: " +
bytesSeen, cause));
}
@Override
public void channelInactive(ChannelHandlerContext ctx) {
donePromise.tryFailure(new IllegalStateException("client closed. bytesSeen: " +
bytesSeen));
}
});
}
}).connect(sc.localAddress()).syncUninterruptibly().channel();
donePromise.get();
} finally {
if (cc != null) {
cc.close().syncUninterruptibly();
}
if (sc != null) {
sc.close().syncUninterruptibly();
}
group.shutdownGracefully();
ReferenceCountUtil.release(sslServerCtx);
ReferenceCountUtil.release(sslClientCtx);
}
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testAlertProducedAndSend(SslProvider clientProvider, SslProvider serverProvider) throws Exception {
SelfSignedCertificate ssc = CachedSelfSignedCertificate.getCachedCertificate();
final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
.sslProvider(serverProvider)
.trustManager(new SimpleTrustManagerFactory() {
@Override
protected void engineInit(KeyStore keyStore) { }
@Override
protected void engineInit(ManagerFactoryParameters managerFactoryParameters) { }
@Override
protected TrustManager[] engineGetTrustManagers() {
return new TrustManager[] { new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] x509Certificates, String s)
throws CertificateException {
// Fail verification which should produce an alert that is send back to the client.
throw new CertificateException();
}
@Override
public void checkServerTrusted(X509Certificate[] x509Certificates, String s) {
// NOOP
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return EmptyArrays.EMPTY_X509_CERTIFICATES;
}
} };
}
}).clientAuth(ClientAuth.REQUIRE).build();
final SslContext sslClientCtx = SslContextBuilder.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.keyManager(ResourcesUtil.getFile(getClass(), "test.crt"),
ResourcesUtil.getFile(getClass(), "test_unencrypted.pem"))
.sslProvider(clientProvider).build();
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
Channel sc = null;
Channel cc = null;
try {
final Promise<Void> promise = group.next().newPromise();
sc = new ServerBootstrap()
.group(group)
.channel(NioServerSocketChannel.class)
.childHandler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
ch.pipeline().addLast(sslServerCtx.newHandler(ch.alloc()));
ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
// Just trigger a close
ctx.close();
}
});
}
}).bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
cc = new Bootstrap()
.group(group)
.channel(NioSocketChannel.class)
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
ch.pipeline().addLast(sslClientCtx.newHandler(ch.alloc()));
ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
if (cause.getCause() instanceof SSLException) {
// We received the alert and so produce an SSLException.
promise.trySuccess(null);
}
}
});
}
}).connect(sc.localAddress()).syncUninterruptibly().channel();
promise.syncUninterruptibly();
} finally {
if (cc != null) {
cc.close().syncUninterruptibly();
}
if (sc != null) {
sc.close().syncUninterruptibly();
}
group.shutdownGracefully();
ReferenceCountUtil.release(sslServerCtx);
ReferenceCountUtil.release(sslClientCtx);
}
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testCloseNotify(SslProvider clientProvider, SslProvider serverProvider) throws Exception {
testCloseNotify(clientProvider, serverProvider, 5000, false);
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testCloseNotifyReceivedTimeout(SslProvider clientProvider, SslProvider serverProvider)
throws Exception {
testCloseNotify(clientProvider, serverProvider, 100, true);
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testCloseNotifyNotWaitForResponse(SslProvider clientProvider, SslProvider serverProvider)
throws Exception {
testCloseNotify(clientProvider, serverProvider, 0, false);
}
private void testCloseNotify(SslProvider clientProvider, SslProvider serverProvider,
final long closeNotifyReadTimeout, final boolean timeout) throws Exception {
SelfSignedCertificate ssc = CachedSelfSignedCertificate.getCachedCertificate();
final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
.sslProvider(serverProvider)
// Use TLSv1.2 as we depend on the fact that the handshake
// is done in an extra round trip in the test which
// is not true in TLSv1.3
.protocols(SslProtocols.TLS_v1_2)
.build();
final SslContext sslClientCtx = SslContextBuilder.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.sslProvider(clientProvider)
// Use TLSv1.2 as we depend on the fact that the handshake
// is done in an extra round trip in the test which
// is not true in TLSv1.3
.protocols(SslProtocols.TLS_v1_2)
.build();
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
Channel sc = null;
Channel cc = null;
try {
final Promise<Channel> clientPromise = group.next().newPromise();
final Promise<Channel> serverPromise = group.next().newPromise();
sc = new ServerBootstrap()
.group(group)
.channel(NioServerSocketChannel.class)
.childHandler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
SslHandler handler = sslServerCtx.newHandler(ch.alloc());
handler.setCloseNotifyReadTimeoutMillis(closeNotifyReadTimeout);
PromiseNotifier.cascade(handler.sslCloseFuture(), serverPromise);
handler.handshakeFuture().addListener(new FutureListener<Channel>() {
@Override
public void operationComplete(Future<Channel> future) {
if (!future.isSuccess()) {
// Something bad happened during handshake fail the promise!
serverPromise.tryFailure(future.cause());
}
}
});
ch.pipeline().addLast(handler);
}
}).bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
cc = new Bootstrap()
.group(group)
.channel(NioSocketChannel.class)
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) throws Exception {
final AtomicBoolean closeSent = new AtomicBoolean();
if (timeout) {
ch.pipeline().addFirst(new ChannelInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (closeSent.get()) {
// Drop data on the floor so we will get a timeout while waiting for the
// close_notify.
ReferenceCountUtil.release(msg);
} else {
super.channelRead(ctx, msg);
}
}
});
}
SslHandler handler = sslClientCtx.newHandler(ch.alloc());
handler.setCloseNotifyReadTimeoutMillis(closeNotifyReadTimeout);
PromiseNotifier.cascade(handler.sslCloseFuture(), clientPromise);
handler.handshakeFuture().addListener(new FutureListener<Channel>() {
@Override
public void operationComplete(Future<Channel> future) {
if (future.isSuccess()) {
closeSent.compareAndSet(false, true);
future.getNow().close();
} else {
// Something bad happened during handshake fail the promise!
clientPromise.tryFailure(future.cause());
}
}
});
ch.pipeline().addLast(handler);
}
}).connect(sc.localAddress()).syncUninterruptibly().channel();
serverPromise.awaitUninterruptibly();
clientPromise.awaitUninterruptibly();
// Server always received the close_notify as the client triggers the close sequence.
assertTrue(serverPromise.isSuccess());
// Depending on if we wait for the response or not the promise will be failed or not.
if (closeNotifyReadTimeout > 0 && !timeout) {
assertTrue(clientPromise.isSuccess());
} else {
assertFalse(clientPromise.isSuccess());
}
} finally {
if (cc != null) {
cc.close().syncUninterruptibly();
}
if (sc != null) {
sc.close().syncUninterruptibly();
}
group.shutdownGracefully();
ReferenceCountUtil.release(sslServerCtx);
ReferenceCountUtil.release(sslClientCtx);
}
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void reentryOnHandshakeCompleteNioChannel(SslProvider clientProvider, SslProvider serverProvider)
throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
try {
Class<? extends ServerChannel> serverClass = NioServerSocketChannel.class;
Class<? extends Channel> clientClass = NioSocketChannel.class;
SocketAddress bindAddress = new InetSocketAddress(0);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, false, false);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, false, true);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, true, false);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, true, true);
} finally {
group.shutdownGracefully();
}
}
@ParameterizedTest(name = PARAMETERIZED_NAME)
@MethodSource("data")
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void reentryOnHandshakeCompleteLocalChannel(SslProvider clientProvider, SslProvider serverProvider)
throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(LocalIoHandler.newFactory());
try {
Class<? extends ServerChannel> serverClass = LocalServerChannel.class;
Class<? extends Channel> clientClass = LocalChannel.class;
SocketAddress bindAddress = new LocalAddress(String.valueOf(
ThreadLocalRandom.current().nextLong()));
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, false, false);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, false, true);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, true, false);
reentryOnHandshakeComplete(clientProvider, serverProvider, group, bindAddress,
serverClass, clientClass, true, true);
} finally {
group.shutdownGracefully();
}
}
private void reentryOnHandshakeComplete(SslProvider clientProvider, SslProvider serverProvider,
EventLoopGroup group, SocketAddress bindAddress,
Class<? extends ServerChannel> serverClass,
Class<? extends Channel> clientClass, boolean serverAutoRead,
boolean clientAutoRead) throws Exception {
SelfSignedCertificate ssc = CachedSelfSignedCertificate.getCachedCertificate();
final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
.sslProvider(serverProvider)
.build();
final SslContext sslClientCtx = SslContextBuilder.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.sslProvider(clientProvider)
.build();
Channel sc = null;
Channel cc = null;
try {
final String expectedContent = "HelloWorld";
final CountDownLatch serverLatch = new CountDownLatch(1);
final CountDownLatch clientLatch = new CountDownLatch(1);
final StringBuilder serverQueue = new StringBuilder(expectedContent.length());
final StringBuilder clientQueue = new StringBuilder(expectedContent.length());
sc = new ServerBootstrap()
.group(group)
.channel(serverClass)
.childOption(ChannelOption.AUTO_READ, serverAutoRead)
.childHandler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) {
ch.pipeline().addLast(disableHandshakeTimeout(sslServerCtx.newHandler(ch.alloc())));
ch.pipeline().addLast(new ReentryWriteSslHandshakeHandler(expectedContent, serverQueue,
serverLatch));
}
}).bind(bindAddress).syncUninterruptibly().channel();
cc = new Bootstrap()
.group(group)
.channel(clientClass)
.option(ChannelOption.AUTO_READ, clientAutoRead)
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) {
ch.pipeline().addLast(disableHandshakeTimeout(sslClientCtx.newHandler(ch.alloc())));
ch.pipeline().addLast(new ReentryWriteSslHandshakeHandler(expectedContent, clientQueue,
clientLatch));
}
}).connect(sc.localAddress()).syncUninterruptibly().channel();
serverLatch.await();
assertEquals(expectedContent, serverQueue.toString());
clientLatch.await();
assertEquals(expectedContent, clientQueue.toString());
} finally {
if (cc != null) {
cc.close().syncUninterruptibly();
}
if (sc != null) {
sc.close().syncUninterruptibly();
}
ReferenceCountUtil.release(sslServerCtx);
ReferenceCountUtil.release(sslClientCtx);
}
}
private static SslHandler disableHandshakeTimeout(SslHandler handler) {
handler.setHandshakeTimeoutMillis(0);
return handler;
}
private static final
|
ParameterizedSslHandlerTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LastDoubleByTimestampAggregator.java
|
{
"start": 1799,
"end": 4167
}
|
class ____ {
public static String describe() {
return "last_double_by_timestamp";
}
public static LongDoubleState initSingle(DriverContext driverContext) {
return new LongDoubleState(0, 0);
}
public static void first(LongDoubleState current, double value, long timestamp) {
current.v1(timestamp);
current.v2(value);
}
public static void combine(LongDoubleState current, double value, long timestamp) {
if (timestamp > current.v1()) {
current.v1(timestamp);
current.v2(value);
}
}
public static void combineIntermediate(LongDoubleState current, long timestamp, double value, boolean seen) {
if (seen) {
if (current.seen()) {
combine(current, value, timestamp);
} else {
first(current, value, timestamp);
current.seen(true);
}
}
}
public static Block evaluateFinal(LongDoubleState current, DriverContext ctx) {
return ctx.blockFactory().newConstantDoubleBlockWith(current.v2(), 1);
}
public static GroupingState initGrouping(DriverContext driverContext) {
return new GroupingState(driverContext.bigArrays());
}
public static void combine(GroupingState current, int groupId, double value, long timestamp) {
current.collectValue(groupId, timestamp, value);
}
public static void combineIntermediate(
GroupingState current,
int groupId,
LongBlock timestamps, // stylecheck
DoubleBlock values,
int otherPosition
) {
// TODO seen should probably be part of the intermediate representation
int valueCount = values.getValueCount(otherPosition);
if (valueCount > 0) {
long timestamp = timestamps.getLong(timestamps.getFirstValueIndex(otherPosition));
int firstIndex = values.getFirstValueIndex(otherPosition);
for (int i = 0; i < valueCount; i++) {
current.collectValue(groupId, timestamp, values.getDouble(firstIndex + i));
}
}
}
public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) {
return state.evaluateFinal(selected, ctx);
}
public static final
|
LastDoubleByTimestampAggregator
|
java
|
elastic__elasticsearch
|
qa/smoke-test-http/src/internalClusterTest/java/org/elasticsearch/http/RolloverRestCancellationIT.java
|
{
"start": 864,
"end": 1371
}
|
class ____ extends BlockedSearcherRestCancellationTestCase {
public void testRolloverRestCancellation() throws Exception {
assertAcked(
prepareCreate("test-000001").addAlias(new Alias("test-alias").writeIndex(true))
.setSettings(Settings.builder().put(BLOCK_SEARCHER_SETTING.getKey(), true))
);
ensureGreen("test-000001");
runTest(new Request(HttpPost.METHOD_NAME, "test-alias/_rollover"), RolloverAction.NAME);
}
}
|
RolloverRestCancellationIT
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java
|
{
"start": 1307,
"end": 3151
}
|
class ____
extends AtomicReferenceArrayAssertBaseTest {
private ObjectArrays arraysBefore;
private RecursiveComparisonConfiguration recursiveComparisonConfiguration = new RecursiveComparisonConfiguration();
@BeforeEach
void before() {
arraysBefore = getArrays(assertions);
}
@Override
protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
return assertions.usingRecursiveFieldByFieldElementComparator(recursiveComparisonConfiguration);
}
@Override
protected void verify_internal_effects() {
then(arraysBefore).isNotSameAs(getArrays(assertions));
then(getArrays(assertions).getComparisonStrategy()).isInstanceOf(ComparatorBasedComparisonStrategy.class);
then(getObjects(assertions).getComparisonStrategy()).isInstanceOf(AtomicReferenceArrayElementComparisonStrategy.class);
ConfigurableRecursiveFieldByFieldComparator expectedComparator = new ConfigurableRecursiveFieldByFieldComparator(recursiveComparisonConfiguration);
then(getArrays(assertions).getComparator()).isEqualTo(expectedComparator);
then(getObjects(assertions).getComparisonStrategy()).extracting("elementComparator").isEqualTo(expectedComparator);
}
@Test
void should_be_able_to_use_specific_RecursiveComparisonConfiguration_when_using_recursive_field_by_field_element_comparator() {
// GIVEN
Foo actual = new Foo("1", new Bar(1));
Foo other = new Foo("2", new Bar(1));
RecursiveComparisonConfiguration configuration = new RecursiveComparisonConfiguration();
configuration.ignoreFields("id");
// WHEN/THEN
then(atomicArrayOf(actual)).usingRecursiveFieldByFieldElementComparator(configuration)
.contains(other);
}
public static
|
AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test
|
java
|
quarkusio__quarkus
|
integration-tests/spring-web/src/main/java/io/quarkus/it/spring/web/BookController.java
|
{
"start": 224,
"end": 410
}
|
class ____ {
@GetMapping(produces = MediaType.APPLICATION_XML_VALUE, path = "/book")
public Book someBook() {
return new Book("Guns germs and steel");
}
}
|
BookController
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/OtherPersonResource.java
|
{
"start": 212,
"end": 419
}
|
class ____ extends AbstractPersonResource {
@GET
@DisableSecureSerialization
@Path("no-security")
public Person nonSecurityPerson() {
return abstractPerson();
}
}
|
OtherPersonResource
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/async/AbstractAsynchronousOperationHandlersTest.java
|
{
"start": 10542,
"end": 11742
}
|
class ____
extends AsynchronousOperationTriggerMessageHeaders<
EmptyRequestBody, EmptyMessageParameters> {
static final TestingTriggerMessageHeaders INSTANCE = new TestingTriggerMessageHeaders();
private TestingTriggerMessageHeaders() {}
@Override
public HttpResponseStatus getResponseStatusCode() {
return HttpResponseStatus.OK;
}
@Override
public String getDescription() {
return "";
}
@Override
protected String getAsyncOperationDescription() {
return "";
}
@Override
public Class<EmptyRequestBody> getRequestClass() {
return EmptyRequestBody.class;
}
@Override
public EmptyMessageParameters getUnresolvedMessageParameters() {
return EmptyMessageParameters.getInstance();
}
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.POST;
}
@Override
public String getTargetRestEndpointURL() {
return "barfoo";
}
}
private static final
|
TestingTriggerMessageHeaders
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/operators/AsyncKeyedProcessOperator.java
|
{
"start": 2460,
"end": 5788
}
|
class ____<K, IN, OUT>
extends AbstractAsyncStateUdfStreamOperator<OUT, KeyedProcessFunction<K, IN, OUT>>
implements OneInputStreamOperator<IN, OUT>, Triggerable<K, VoidNamespace> {
private static final long serialVersionUID = 1L;
// Shared timestamp variable for collector, context and onTimerContext.
private transient DeclaredVariable<Long> sharedTimestamp;
private transient TimestampedCollectorWithDeclaredVariable<OUT> collector;
private transient ContextImpl context;
private transient OnTimerContextImpl onTimerContext;
private transient ThrowingConsumer<IN, Exception> processor;
private transient ThrowingConsumer<Long, Exception> timerProcessor;
public AsyncKeyedProcessOperator(KeyedProcessFunction<K, IN, OUT> function) {
super(function);
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void open() throws Exception {
super.open();
sharedTimestamp =
declarationContext.declareVariable(
LongSerializer.INSTANCE,
"_AsyncKeyedProcessOperator$sharedTimestamp",
null);
collector = new TimestampedCollectorWithDeclaredVariable<>(output, sharedTimestamp);
InternalTimerService<VoidNamespace> internalTimerService =
getInternalTimerService("user-timers", VoidNamespaceSerializer.INSTANCE, this);
TimerService timerService = new SimpleTimerService(internalTimerService);
context = new ContextImpl(userFunction, timerService, sharedTimestamp);
onTimerContext = new OnTimerContextImpl(userFunction, timerService, declarationContext);
if (userFunction instanceof DeclaringAsyncKeyedProcessFunction) {
DeclaringAsyncKeyedProcessFunction declaringFunction =
(DeclaringAsyncKeyedProcessFunction) userFunction;
declaringFunction.declareVariables(declarationContext);
processor = declaringFunction.declareProcess(declarationContext, context, collector);
timerProcessor =
declaringFunction.declareOnTimer(declarationContext, onTimerContext, collector);
} else {
processor = (in) -> userFunction.processElement(in, context, collector);
timerProcessor = (in) -> userFunction.onTimer(in, onTimerContext, collector);
}
}
@Override
public void onEventTime(InternalTimer<K, VoidNamespace> timer) throws Exception {
collector.setAbsoluteTimestamp(timer.getTimestamp());
invokeUserFunction(TimeDomain.EVENT_TIME, timer);
}
@Override
public void onProcessingTime(InternalTimer<K, VoidNamespace> timer) throws Exception {
collector.eraseTimestamp();
invokeUserFunction(TimeDomain.PROCESSING_TIME, timer);
}
@Override
public void processElement(StreamRecord<IN> element) throws Exception {
collector.setTimestamp(element);
processor.accept(element.getValue());
}
private void invokeUserFunction(TimeDomain timeDomain, InternalTimer<K, VoidNamespace> timer)
throws Exception {
onTimerContext.setTime(timer.getTimestamp(), timeDomain);
timerProcessor.accept(timer.getTimestamp());
}
private
|
AsyncKeyedProcessOperator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/constraint/ForeignKeyNoConstraintTest.java
|
{
"start": 2594,
"end": 2931
}
|
class ____ {
@Id
private Integer id;
private String value;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
@Entity(name = "Post")
public static
|
VehicleNumber
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.