language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java | {
"start": 3335,
"end": 13053
} | class ____ extends AbstractBWCWireSerializationTestCase<Request> {
@Override
protected Request createTestInstance() {
var request = randomBoolean()
? Request.forIngestDocs(
randomAlphaOfLength(10),
Stream.generate(InferModelActionRequestTests::randomMap).limit(randomInt(10)).collect(Collectors.toList()),
randomInferenceConfigUpdate(),
randomBoolean(),
TimeValue.timeValueMillis(randomLongBetween(1, 2048))
)
: Request.forTextInput(
randomAlphaOfLength(10),
randomInferenceConfigUpdate(),
Arrays.asList(generateRandomStringArray(3, 5, false)),
randomBoolean(),
TimeValue.timeValueMillis(randomLongBetween(1, 2048))
);
request.setHighPriority(randomBoolean());
if (randomBoolean()) {
request.setPrefixType(randomFrom(TrainedModelPrefixStrings.PrefixType.values()));
}
request.setChunked(randomBoolean());
return request;
}
@Override
protected Request mutateInstance(Request instance) {
var modelId = instance.getId();
var objectsToInfer = instance.getObjectsToInfer();
var highPriority = instance.isHighPriority();
var textInput = instance.getTextInput();
var update = instance.getUpdate();
var previouslyLicensed = instance.isPreviouslyLicensed();
var timeout = instance.getInferenceTimeout();
var prefixType = instance.getPrefixType();
var chunked = instance.isChunked();
int change = randomIntBetween(0, 8);
switch (change) {
case 0:
modelId = modelId + "foo";
break;
case 1:
var newDocs = new ArrayList<>(objectsToInfer);
newDocs.add(randomMap());
objectsToInfer = newDocs;
break;
case 2:
highPriority = highPriority == false;
break;
case 3:
var newInput = new ArrayList<>(textInput == null ? List.of() : textInput);
newInput.add((randomAlphaOfLength(4)));
textInput = newInput;
break;
case 4:
var newUpdate = randomInferenceConfigUpdate();
while (newUpdate.getName().equals(update.getName())) {
newUpdate = randomInferenceConfigUpdate();
}
update = newUpdate;
break;
case 5:
previouslyLicensed = previouslyLicensed == false;
break;
case 6:
timeout = TimeValue.timeValueSeconds(timeout.getSeconds() - 1);
break;
case 7:
prefixType = TrainedModelPrefixStrings.PrefixType.values()[(prefixType.ordinal() + 1) % TrainedModelPrefixStrings.PrefixType
.values().length];
break;
case 8:
chunked = chunked == false;
break;
default:
throw new IllegalStateException();
}
var r = new Request(modelId, update, objectsToInfer, textInput, timeout, previouslyLicensed);
r.setHighPriority(highPriority);
r.setPrefixType(prefixType);
r.setChunked(chunked);
return r;
}
public static InferenceConfigUpdate randomInferenceConfigUpdate() {
return randomFrom(
ClassificationConfigUpdateTests.randomClassificationConfigUpdate(),
EmptyConfigUpdateTests.testInstance(),
FillMaskConfigUpdateTests.randomUpdate(),
NerConfigUpdateTests.randomUpdate(),
PassThroughConfigUpdateTests.randomUpdate(),
QuestionAnsweringConfigUpdateTests.randomUpdate(),
RegressionConfigUpdateTests.randomRegressionConfigUpdate(),
ResultsFieldUpdateTests.randomUpdate(),
TextClassificationConfigUpdateTests.randomUpdate(),
TextEmbeddingConfigUpdateTests.randomUpdate(),
TextExpansionConfigUpdateTests.randomUpdate(),
TextSimilarityConfigUpdateTests.randomUpdate(),
TokenizationConfigUpdateTests.randomUpdate(),
ZeroShotClassificationConfigUpdateTests.randomUpdate()
);
}
public static InferenceConfigUpdate mutateInferenceConfigUpdate(InferenceConfigUpdate currentUpdate, TransportVersion version) {
InferenceConfigUpdate adjustedUpdate;
if (currentUpdate instanceof NlpConfigUpdate nlpConfigUpdate) {
if (nlpConfigUpdate instanceof TextClassificationConfigUpdate update) {
adjustedUpdate = TextClassificationConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof TextEmbeddingConfigUpdate update) {
adjustedUpdate = TextEmbeddingConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof NerConfigUpdate update) {
adjustedUpdate = NerConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof FillMaskConfigUpdate update) {
adjustedUpdate = FillMaskConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof ZeroShotClassificationConfigUpdate update) {
adjustedUpdate = ZeroShotClassificationConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof PassThroughConfigUpdate update) {
adjustedUpdate = PassThroughConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof QuestionAnsweringConfigUpdate update) {
adjustedUpdate = QuestionAnsweringConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof TextExpansionConfigUpdate update) {
adjustedUpdate = TextExpansionConfigUpdateTests.mutateForVersion(update, version);
} else if (nlpConfigUpdate instanceof TextSimilarityConfigUpdate update) {
adjustedUpdate = TextSimilarityConfigUpdateTests.mutateForVersion(update, version);
} else {
throw new IllegalArgumentException("Unknown update [" + currentUpdate.getName() + "]");
}
} else {
adjustedUpdate = currentUpdate;
}
return adjustedUpdate;
}
private static Map<String, Object> randomMap() {
return Stream.generate(() -> randomAlphaOfLength(10))
.limit(randomInt(10))
.collect(Collectors.toMap(Function.identity(), (v) -> randomAlphaOfLength(10)));
}
@Override
protected Writeable.Reader<Request> instanceReader() {
return Request::new;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables());
return new NamedWriteableRegistry(entries);
}
@Override
protected Request mutateInstanceForVersion(Request instance, TransportVersion version) {
InferenceConfigUpdate adjustedUpdate = mutateInferenceConfigUpdate(instance.getUpdate(), version);
if (version.before(TransportVersions.V_8_3_0)) {
return new Request(
instance.getId(),
adjustedUpdate,
instance.getObjectsToInfer(),
null,
TimeValue.MAX_VALUE,
instance.isPreviouslyLicensed()
);
} else if (version.before(TransportVersions.V_8_7_0)) {
return new Request(
instance.getId(),
adjustedUpdate,
instance.getObjectsToInfer(),
null,
instance.getInferenceTimeout(),
instance.isPreviouslyLicensed()
);
} else if (version.before(TransportVersions.V_8_8_0)) {
var r = new Request(
instance.getId(),
adjustedUpdate,
instance.getObjectsToInfer(),
instance.getTextInput(),
instance.getInferenceTimeout(),
instance.isPreviouslyLicensed()
);
r.setHighPriority(false);
return r;
} else if (version.before(TransportVersions.V_8_12_0)) {
var r = new Request(
instance.getId(),
adjustedUpdate,
instance.getObjectsToInfer(),
instance.getTextInput(),
instance.getInferenceTimeout(),
instance.isPreviouslyLicensed()
);
r.setHighPriority(instance.isHighPriority());
r.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE);
return r;
} else if (version.before(TransportVersions.V_8_15_0)) {
var r = new Request(
instance.getId(),
adjustedUpdate,
instance.getObjectsToInfer(),
instance.getTextInput(),
instance.getInferenceTimeout(),
instance.isPreviouslyLicensed()
);
r.setHighPriority(instance.isHighPriority());
r.setPrefixType(instance.getPrefixType());
r.setChunked(false); // r.setChunked(instance.isChunked()); for the next version
return r;
}
return instance;
}
}
| InferModelActionRequestTests |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/model/Decorator.java | {
"start": 837,
"end": 5466
} | class ____ extends GeneratedTypeBuilder<Builder> {
private TypeElement mapperElement;
private DecoratedWithGem decorator;
private boolean hasDelegateConstructor;
private String implName;
private String implPackage;
private boolean suppressGeneratorTimestamp;
private Set<Annotation> customAnnotations;
public Builder() {
super( Builder.class );
}
public Builder mapperElement(TypeElement mapperElement) {
this.mapperElement = mapperElement;
return this;
}
public Builder decoratedWith(DecoratedWithGem decoratedGem) {
this.decorator = decoratedGem;
return this;
}
public Builder hasDelegateConstructor(boolean hasDelegateConstructor) {
this.hasDelegateConstructor = hasDelegateConstructor;
return this;
}
public Builder implName(String implName) {
this.implName = "default".equals( implName ) ? Mapper.DEFAULT_IMPLEMENTATION_CLASS : implName;
return this;
}
public Builder implPackage(String implPackage) {
this.implPackage = "default".equals( implPackage ) ? Mapper.DEFAULT_IMPLEMENTATION_PACKAGE : implPackage;
return this;
}
public Builder suppressGeneratorTimestamp(boolean suppressGeneratorTimestamp) {
this.suppressGeneratorTimestamp = suppressGeneratorTimestamp;
return this;
}
public Builder additionalAnnotations(Set<Annotation> customAnnotations) {
this.customAnnotations = customAnnotations;
return this;
}
public Decorator build() {
String implementationName = implName.replace( Mapper.CLASS_NAME_PLACEHOLDER,
Mapper.getFlatName( mapperElement ) );
Type decoratorType = typeFactory.getType( decorator.value().get() );
DecoratorConstructor decoratorConstructor = new DecoratorConstructor(
implementationName,
implementationName + "_",
hasDelegateConstructor );
Type mapperType = typeFactory.getType( mapperElement );
String elementPackage = mapperType.getPackageName();
String packageName = implPackage.replace( Mapper.PACKAGE_NAME_PLACEHOLDER, elementPackage );
return new Decorator(
typeFactory,
packageName,
implementationName,
decoratorType,
mapperType,
methods,
options,
versionInformation,
suppressGeneratorTimestamp,
Accessibility.fromModifiers( mapperElement.getModifiers() ),
extraImportedTypes,
decoratorConstructor,
customAnnotations
);
}
}
private final Type decoratorType;
private final Type mapperType;
@SuppressWarnings( "checkstyle:parameternumber" )
private Decorator(TypeFactory typeFactory, String packageName, String name, Type decoratorType,
Type mapperType,
List<MappingMethod> methods,
Options options, VersionInformation versionInformation,
boolean suppressGeneratorTimestamp,
Accessibility accessibility, SortedSet<Type> extraImports,
DecoratorConstructor decoratorConstructor,
Set<Annotation> customAnnotations) {
super(
typeFactory,
packageName,
name,
decoratorType,
methods,
Arrays.asList( new Field( mapperType, "delegate", true ) ),
options,
versionInformation,
suppressGeneratorTimestamp,
accessibility,
extraImports,
decoratorConstructor
);
this.decoratorType = decoratorType;
this.mapperType = mapperType;
// Add custom annotations
if ( customAnnotations != null ) {
customAnnotations.forEach( this::addAnnotation );
}
}
@Override
public SortedSet<Type> getImportTypes() {
SortedSet<Type> importTypes = super.getImportTypes();
// DecoratorType needs special handling in case it is nested
// calling addIfImportRequired is not the most correct approach since it would
// lead to checking if the type is to be imported and that would be false
// since the Decorator is a nested | Builder |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/typeutils/InternalTypeInfo.java | {
"start": 2729,
"end": 8018
} | class ____<T> extends TypeInformation<T> implements DataTypeQueryable {
private static final String FORMAT = "%s(%s, %s)";
private final LogicalType type;
private final Class<T> typeClass;
private final TypeSerializer<T> typeSerializer;
private InternalTypeInfo(
LogicalType type, Class<T> typeClass, TypeSerializer<T> typeSerializer) {
this.type = Preconditions.checkNotNull(type);
this.typeClass = Preconditions.checkNotNull(typeClass);
this.typeSerializer = Preconditions.checkNotNull(typeSerializer);
}
/**
* Creates type information for a {@link LogicalType} that is represented by internal data
* structures.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public static <T> InternalTypeInfo<T> of(LogicalType type) {
final Class<?> typeClass = LogicalTypeUtils.toInternalConversionClass(type);
final TypeSerializer<?> serializer = InternalSerializers.create(type);
return (InternalTypeInfo<T>) new InternalTypeInfo(type, typeClass, serializer);
}
/** Creates type information for a {@link RowType} represented by internal data structures. */
public static InternalTypeInfo<RowData> of(RowType type) {
return of((LogicalType) type);
}
/** Creates type information for {@link RowType} represented by internal data structures. */
public static InternalTypeInfo<RowData> ofFields(LogicalType... fieldTypes) {
return of(RowType.of(fieldTypes));
}
/** Creates type information for {@link RowType} represented by internal data structures. */
public static InternalTypeInfo<RowData> ofFields(
LogicalType[] fieldTypes, String[] fieldNames) {
return of(RowType.of(fieldTypes, fieldNames));
}
// --------------------------------------------------------------------------------------------
// Internal methods for common tasks
// --------------------------------------------------------------------------------------------
public LogicalType toLogicalType() {
return type;
}
public TypeSerializer<T> toSerializer() {
return typeSerializer;
}
public RowType toRowType() {
return (RowType) type;
}
public RowDataSerializer toRowSerializer() {
return (RowDataSerializer) typeSerializer;
}
/**
* @deprecated {@link TypeInformation} should just be a thin wrapper of a serializer. This
* method only exists for legacy code. It is recommended to use the {@link RowType} instead
* for logical operations.
*/
@Deprecated
public LogicalType[] toRowFieldTypes() {
return toRowType().getFields().stream()
.map(RowType.RowField::getType)
.toArray(LogicalType[]::new);
}
/**
* @deprecated {@link TypeInformation} should just be a thin wrapper of a serializer. This
* method only exists for legacy code. It is recommended to use the {@link RowType} instead
* for logical operations.
*/
@Deprecated
public String[] toRowFieldNames() {
return toRowType().getFields().stream()
.map(RowType.RowField::getName)
.toArray(String[]::new);
}
/**
* @deprecated {@link TypeInformation} should just be a thin wrapper of a serializer. This
* method only exists for legacy code. It is recommended to use the {@link RowType} instead
* for logical operations.
*/
@Deprecated
public int toRowSize() {
return toRowType().getFieldCount();
}
// --------------------------------------------------------------------------------------------
@Override
public DataType getDataType() {
return DataTypeUtils.toInternalDataType(type);
}
// --------------------------------------------------------------------------------------------
@Override
public boolean isBasicType() {
return false;
}
@Override
public boolean isTupleType() {
return false;
}
@Override
public int getArity() {
return 1;
}
@Override
public int getTotalFields() {
return 1;
}
@Override
public Class<T> getTypeClass() {
return typeClass;
}
@Override
public boolean isKeyType() {
return false;
}
@Override
public TypeSerializer<T> createSerializer(SerializerConfig config) {
return typeSerializer;
}
@Override
public String toString() {
return String.format(
FORMAT,
type.asSummaryString(),
typeClass.getName(),
typeSerializer.getClass().getName());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final InternalTypeInfo<?> that = (InternalTypeInfo<?>) o;
return typeSerializer.equals(that.typeSerializer);
}
@Override
public int hashCode() {
return Objects.hash(typeSerializer);
}
@Override
public boolean canEqual(Object obj) {
return obj instanceof InternalTypeInfo;
}
}
| InternalTypeInfo |
java | quarkusio__quarkus | integration-tests/command-mode/src/main/java/org/acme/Main.java | {
"start": 246,
"end": 610
} | class ____ implements QuarkusApplication {
@Override
public int run(String... args) throws Exception {
System.out.println("ARGS: " + Arrays.asList(args));
final Path path = Paths.get("target/done.txt");
if (Files.exists(path)) {
Files.delete(path);
}
Files.createFile(path);
return 10;
}
}
| Main |
java | apache__camel | components/camel-huawei/camel-huaweicloud-frs/src/test/java/org/apache/camel/component/huaweicloud/frs/mock/FaceVerificationWithImageUrlAndMockClientTest.java | {
"start": 1474,
"end": 3794
} | class ____ extends CamelTestSupport {
TestConfiguration testConfiguration = new TestConfiguration();
@BindToRegistry("frsClient")
FrsClientMock frsClient = new FrsClientMock(null);
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:trigger_route")
.setProperty(FaceRecognitionProperties.FACE_IMAGE_URL,
constant(testConfiguration.getProperty("imageUrl")))
.setProperty(FaceRecognitionProperties.ANOTHER_FACE_IMAGE_URL,
constant(testConfiguration.getProperty("anotherImageUrl")))
.to("hwcloud-frs:faceVerification?"
+ "accessKey=" + testConfiguration.getProperty("accessKey")
+ "&secretKey=" + testConfiguration.getProperty("secretKey")
+ "&projectId=" + testConfiguration.getProperty("projectId")
+ "®ion=" + testConfiguration.getProperty("region")
+ "&ignoreSslVerification=true"
+ "&frsClient=#frsClient")
.log("perform faceVerification successfully")
.to("mock:perform_face_verification_result");
}
};
}
/**
* use imageUrl to perform faceVerification
*
* @throws Exception
*/
@Test
public void testFaceVerification() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:perform_face_verification_result");
mock.expectedMinimumMessageCount(1);
template.sendBody("direct:trigger_route", "");
Exchange responseExchange = mock.getExchanges().get(0);
mock.assertIsSatisfied();
assertTrue(responseExchange.getIn().getBody() instanceof CompareFaceByUrlResponse);
CompareFaceByUrlResponse response = (CompareFaceByUrlResponse) responseExchange.getIn().getBody();
assertEquals(MockResult.getCompareFaceResult(), response.getImage1Face());
assertEquals(MockResult.getCompareFaceResult(), response.getImage2Face());
assertEquals(1.0, response.getSimilarity());
}
}
| FaceVerificationWithImageUrlAndMockClientTest |
java | apache__dubbo | dubbo-test/dubbo-test-spring/src/main/java/org/apache/dubbo/test/spring/SpringXmlConfigTest.java | {
"start": 1648,
"end": 3506
} | class ____ {
private static ClassPathXmlApplicationContext providerContext;
@BeforeAll
public static void beforeAll() {
DubboBootstrap.reset();
}
@AfterAll
public static void afterAll() {
DubboBootstrap.reset();
providerContext.close();
}
private void startProvider() {
providerContext = new ClassPathXmlApplicationContext("/spring/dubbo-demo-provider.xml");
}
@Test
public void test() {
SysProps.setProperty(SHUTDOWN_WAIT_KEY, "2000");
// start provider context
startProvider();
// start consumer context
ClassPathXmlApplicationContext applicationContext = null;
try {
applicationContext = new ClassPathXmlApplicationContext("/spring/dubbo-demo.xml");
GreetingService greetingService = applicationContext.getBean("greetingService", GreetingService.class);
String greeting = greetingService.hello();
Assertions.assertEquals(greeting, "Greetings!");
DemoService demoService = applicationContext.getBean("demoService", DemoService.class);
String sayHelloResult = demoService.sayHello("dubbo");
Assertions.assertTrue(sayHelloResult.startsWith("Hello dubbo"), sayHelloResult);
RestDemoService restDemoService = applicationContext.getBean("restDemoService", RestDemoService.class);
String resetHelloResult = restDemoService.sayHello("dubbo");
Assertions.assertEquals("Hello, dubbo", resetHelloResult);
// check initialization customizer
MockSpringInitCustomizer.checkCustomizer(applicationContext);
} finally {
SysProps.clear();
if (applicationContext != null) {
applicationContext.close();
}
}
}
}
| SpringXmlConfigTest |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/ingest/GeoGridProcessor.java | {
"start": 13104,
"end": 13779
} | enum ____ {
GEOHASH,
GEOTILE,
GEOHEX;
public static TileFieldType parse(String value, String tag) {
EnumSet<TileFieldType> validValues = EnumSet.allOf(TileFieldType.class);
try {
return valueOf(value.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
throw ConfigurationUtils.newConfigurationException(
TYPE,
tag,
"tile_type",
"illegal value [" + value + "], valid values are " + Arrays.toString(validValues.toArray())
);
}
}
}
}
| TileFieldType |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchRequestBuilder.java | {
"start": 503,
"end": 814
} | class ____ extends ActionRequestBuilder<ActivateWatchRequest, ActivateWatchResponse> {
public ActivateWatchRequestBuilder(ElasticsearchClient client, String id, boolean activate) {
super(client, ActivateWatchAction.INSTANCE, new ActivateWatchRequest(id, activate));
}
}
| ActivateWatchRequestBuilder |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/FluxOnErrorReturnStressTest.java | {
"start": 2312,
"end": 3302
} | class ____ {
final Throwable ERROR = new IllegalStateException("expected");
final StressSubscriber<Integer> subscriber = new StressSubscriber<>(0L);
final FluxOnErrorReturn.ReturnSubscriber<Integer> test = new FluxOnErrorReturn.ReturnSubscriber<>(subscriber, null, 100, true);
final StressSubscription<Integer> topmost = new StressSubscription<>(test);
{
test.onSubscribe(topmost);
}
@Actor
public void error() {
test.onError(ERROR);
}
@Actor
public void request1() {
subscriber.request(1);
}
@Actor
public void request2() {
subscriber.request(2);
}
@Actor
public void request3() {
subscriber.request(3);
}
@Arbiter
public void arbiter(IIIII_Result r) {
r.r1 = subscriber.onNextCalls.get();
r.r2 = subscriber.onCompleteCalls.get();
r.r3 = subscriber.onErrorCalls.get();
r.r4 = (int) topmost.requested;
r.r5 = topmost.cancelled.get() ? 1 : 0;
}
}
} | ErrorFallbackVsRequestStressTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticBeanWithStereotypeTest.java | {
"start": 3406,
"end": 4280
} | class ____ implements AnnotationsTransformer {
@Override
public boolean appliesTo(AnnotationTarget.Kind kind) {
return kind == AnnotationTarget.Kind.CLASS;
}
@Override
public void transform(TransformationContext transformationContext) {
if (transformationContext.getTarget().asClass().name()
.equals(DotName.createSimple(ToBeStereotype.class.getName()))) {
transformationContext.transform()
.add(ApplicationScoped.class)
.add(SimpleBinding.class)
.add(Named.class)
.add(Alternative.class)
.add(Priority.class, AnnotationValue.createIntegerValue("value", 11))
.done();
}
}
}
static | MyAnnotationTrasnformer |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/filter/annotation/FilterAnnotationsTests.java | {
"start": 4063,
"end": 4184
} | class ____ {
}
@Filters(@Filter(type = FilterType.REGEX, pattern = ".*ExampleWithoutAnnotation"))
static | FilterByAspectJ |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/InlineQueryResultMpeg4Gif.java | {
"start": 2084,
"end": 6490
} | class ____ {
private String id;
private InlineKeyboardMarkup replyMarkup;
private String mpeg4Url;
private String mpeg4Width;
private Integer mpeg4Height;
private Integer mpeg4Duration;
private String thumbUrl;
private String title;
private String caption;
private String parseMode;
private InputMessageContent inputMessageContext;
private Builder() {
}
public Builder id(String id) {
this.id = id;
return this;
}
public Builder replyMarkup(InlineKeyboardMarkup replyMarkup) {
this.replyMarkup = replyMarkup;
return this;
}
public Builder mpeg4Url(String url) {
this.mpeg4Url = url;
return this;
}
public Builder mpeg4Width(String width) {
this.mpeg4Width = width;
return this;
}
public Builder mpeg4Height(Integer height) {
this.mpeg4Height = height;
return this;
}
public Builder mpeg4Duration(Integer duration) {
this.mpeg4Duration = duration;
return this;
}
public Builder thumbUrl(String thumbUrl) {
this.thumbUrl = thumbUrl;
return this;
}
public Builder title(String title) {
this.title = title;
return this;
}
public Builder caption(String caption) {
this.caption = caption;
return this;
}
public Builder parseMode(String parseMode) {
this.parseMode = parseMode;
return this;
}
public Builder inputMessageContext(InputMessageContent inputMessageContext) {
this.inputMessageContext = inputMessageContext;
return this;
}
public InlineQueryResultMpeg4Gif build() {
InlineQueryResultMpeg4Gif inlineQueryResultMpeg4Gif = new InlineQueryResultMpeg4Gif();
inlineQueryResultMpeg4Gif.setType(TYPE);
inlineQueryResultMpeg4Gif.setId(id);
inlineQueryResultMpeg4Gif.setReplyMarkup(replyMarkup);
inlineQueryResultMpeg4Gif.mpeg4Width = this.mpeg4Width;
inlineQueryResultMpeg4Gif.mpeg4Height = this.mpeg4Height;
inlineQueryResultMpeg4Gif.mpeg4Url = this.mpeg4Url;
inlineQueryResultMpeg4Gif.mpeg4Duration = this.mpeg4Duration;
inlineQueryResultMpeg4Gif.caption = this.caption;
inlineQueryResultMpeg4Gif.parseMode = this.parseMode;
inlineQueryResultMpeg4Gif.inputMessageContext = this.inputMessageContext;
inlineQueryResultMpeg4Gif.thumbUrl = this.thumbUrl;
inlineQueryResultMpeg4Gif.title = this.title;
return inlineQueryResultMpeg4Gif;
}
}
public String getMpeg4Url() {
return mpeg4Url;
}
public String getMpeg4Width() {
return mpeg4Width;
}
public Integer getMpeg4Height() {
return mpeg4Height;
}
public Integer getMpeg4Duration() {
return mpeg4Duration;
}
public String getThumbUrl() {
return thumbUrl;
}
public String getTitle() {
return title;
}
public String getCaption() {
return caption;
}
public String getParseMode() {
return parseMode;
}
public InputMessageContent getInputMessageContext() {
return inputMessageContext;
}
public void setMpeg4Url(String mpeg4Url) {
this.mpeg4Url = mpeg4Url;
}
public void setMpeg4Width(String mpeg4Width) {
this.mpeg4Width = mpeg4Width;
}
public void setMpeg4Height(Integer mpeg4Height) {
this.mpeg4Height = mpeg4Height;
}
public void setMpeg4Duration(Integer mpeg4Duration) {
this.mpeg4Duration = mpeg4Duration;
}
public void setThumbUrl(String thumbUrl) {
this.thumbUrl = thumbUrl;
}
public void setTitle(String title) {
this.title = title;
}
public void setCaption(String caption) {
this.caption = caption;
}
public void setParseMode(String parseMode) {
this.parseMode = parseMode;
}
public void setInputMessageContext(InputMessageContent inputMessageContext) {
this.inputMessageContext = inputMessageContext;
}
}
| Builder |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/chunking/EmbeddingRequestChunker.java | {
"start": 9144,
"end": 12999
} | class ____ implements ActionListener<InferenceServiceResults> {
private BatchRequest request;
DebatchingListener(BatchRequest request) {
this.request = request;
}
@Override
public void onResponse(InferenceServiceResults inferenceServiceResults) {
if (inferenceServiceResults instanceof EmbeddingResults<?> == false) {
onFailure(unexpectedResultTypeException(inferenceServiceResults.getWriteableName()));
return;
}
@SuppressWarnings("unchecked")
EmbeddingResults<E> embeddingResults = (EmbeddingResults<E>) inferenceServiceResults;
if (embeddingResults.embeddings().size() != request.requests.size()) {
onFailure(numResultsDoesntMatchException(embeddingResults.embeddings().size(), request.requests.size()));
return;
}
for (int i = 0; i < embeddingResults.embeddings().size(); i++) {
E newEmbedding = embeddingResults.embeddings().get(i);
resultEmbeddings.get(request.requests().get(i).inputIndex())
.updateAndGet(
request.requests().get(i).chunkIndex(),
oldEmbedding -> oldEmbedding == null ? newEmbedding : oldEmbedding.merge(newEmbedding)
);
}
request = null;
if (resultCount.incrementAndGet() == batchRequests.size()) {
sendFinalResponse();
}
}
private ElasticsearchStatusException numResultsDoesntMatchException(int numResults, int numRequests) {
return new ElasticsearchStatusException(
"Error the number of embedding responses [{}] does not equal the number of requests [{}]",
RestStatus.INTERNAL_SERVER_ERROR,
numResults,
numRequests
);
}
private ElasticsearchStatusException unexpectedResultTypeException(String resultType) {
return new ElasticsearchStatusException(
"Unexpected inference result type [{}], expected [EmbeddingResults]",
RestStatus.INTERNAL_SERVER_ERROR,
resultType
);
}
@Override
public void onFailure(Exception e) {
for (Request request : request.requests) {
resultsErrors.set(request.inputIndex(), e);
}
this.request = null;
if (resultCount.incrementAndGet() == batchRequests.size()) {
sendFinalResponse();
}
}
}
private void sendFinalResponse() {
var response = new ArrayList<ChunkedInference>(resultEmbeddings.size());
for (int i = 0; i < resultEmbeddings.size(); i++) {
if (resultsErrors.get(i) != null) {
response.add(new ChunkedInferenceError(resultsErrors.get(i)));
resultsErrors.set(i, null);
} else {
response.add(mergeResultsWithInputs(i));
}
}
finalListener.onResponse(response);
}
private ChunkedInference mergeResultsWithInputs(int inputIndex) {
List<Integer> startOffsets = resultOffsetStarts.get(inputIndex);
List<Integer> endOffsets = resultOffsetEnds.get(inputIndex);
AtomicReferenceArray<E> embeddings = resultEmbeddings.get(inputIndex);
List<EmbeddingResults.Chunk> chunks = new ArrayList<>();
for (int i = 0; i < embeddings.length(); i++) {
ChunkedInference.TextOffset offset = new ChunkedInference.TextOffset(startOffsets.get(i), endOffsets.get(i));
chunks.add(new EmbeddingResults.Chunk(embeddings.get(i), offset));
}
return new ChunkedInferenceEmbedding(chunks);
}
}
| DebatchingListener |
java | apache__camel | test-infra/camel-test-infra-weaviate/src/main/java/org/apache/camel/test/infra/weaviate/common/WeaviateProperties.java | {
"start": 871,
"end": 1261
} | class ____ {
public static final String WEAVIATE_ENDPOINT_URL = "weaviate.endpoint.url";
public static final String WEAVIATE_ENDPOINT_HOST = "weaviate.endpoint.host";
public static final String WEAVIATE_ENDPOINT_PORT = "weaviate.endpoint.port";
public static final String WEAVIATE_CONTAINER = "weaviate.container";
private WeaviateProperties() {
}
}
| WeaviateProperties |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/ClassTemplateInvocationTests.java | {
"start": 5567,
"end": 55457
} | class ____ extends AbstractJupiterTestEngineTests {
@ParameterizedTest
@ValueSource(strings = { //
"class:%s", //
"uid:[engine:junit-jupiter]/[class-template:%s]" //
})
void executesClassTemplateClassTwice(String selectorIdentifierTemplate) {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
TwoInvocationsTestCase.class.getName());
var invocationId1 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var invocation1MethodAId = invocationId1.append(TestMethodTestDescriptor.SEGMENT_TYPE, "a()");
var invocation1NestedClassId = invocationId1.append(NestedClassTestDescriptor.SEGMENT_TYPE, "NestedTestCase");
var invocation1NestedMethodBId = invocation1NestedClassId.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var invocation2MethodAId = invocationId2.append(TestMethodTestDescriptor.SEGMENT_TYPE, "a()");
var invocation2NestedClassId = invocationId2.append(NestedClassTestDescriptor.SEGMENT_TYPE, "NestedTestCase");
var invocation2NestedMethodBId = invocation2NestedClassId.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var results = executeTests(DiscoverySelectors.parse(
selectorIdentifierTemplate.formatted(TwoInvocationsTestCase.class.getName())).orElseThrow());
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId1)), displayName("[1] A of TwoInvocationsTestCase"),
legacyReportingName("%s[1]".formatted(TwoInvocationsTestCase.class.getName()))), //
event(container(uniqueId(invocationId1)), started()), //
event(dynamicTestRegistered(uniqueId(invocation1MethodAId))), //
event(dynamicTestRegistered(uniqueId(invocation1NestedClassId))), //
event(dynamicTestRegistered(uniqueId(invocation1NestedMethodBId))), //
event(test(uniqueId(invocation1MethodAId)), started()), //
event(test(uniqueId(invocation1MethodAId)), finishedSuccessfully()), //
event(container(uniqueId(invocation1NestedClassId)), started()), //
event(test(uniqueId(invocation1NestedMethodBId)), started()), //
event(test(uniqueId(invocation1NestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(invocation1NestedClassId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of TwoInvocationsTestCase"),
legacyReportingName("%s[2]".formatted(TwoInvocationsTestCase.class.getName()))), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(invocation2MethodAId))), //
event(dynamicTestRegistered(uniqueId(invocation2NestedClassId))), //
event(dynamicTestRegistered(uniqueId(invocation2NestedMethodBId))), //
event(test(uniqueId(invocation2MethodAId)), started()), //
event(test(uniqueId(invocation2MethodAId)), finishedSuccessfully()), //
event(container(uniqueId(invocation2NestedClassId)), started()), //
event(test(uniqueId(invocation2NestedMethodBId)), started()), //
event(test(uniqueId(invocation2NestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(invocation2NestedClassId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void classTemplateAnnotationIsInherited() {
var results = executeTestsForClass(InheritedTwoInvocationsTestCase.class);
results.allEvents().assertStatistics(stats -> stats.started(12).succeeded(12));
}
@Test
void executesOnlySelectedMethodsDeclaredInClassTemplate() {
var results = executeTests(selectMethod(TwoInvocationsTestCase.class, "a"));
results.testEvents() //
.assertStatistics(stats -> stats.started(2).succeeded(2)) //
.assertEventsMatchLoosely(event(test(displayName("a()")), finishedSuccessfully()));
}
@Test
void executesOnlySelectedMethodsDeclaredInNestedClassOfClassTemplate() {
var results = executeTests(selectNestedMethod(List.of(TwoInvocationsTestCase.class),
TwoInvocationsTestCase.NestedTestCase.class, "b"));
results.testEvents().assertStatistics(stats -> stats.started(2).succeeded(2)) //
.assertEventsMatchLoosely(event(test(displayName("b()")), finishedSuccessfully()));
}
@Test
void executesOnlyTestsPassingPostDiscoveryFilter() {
var results = executeTests(request -> request //
.selectors(selectClass(TwoInvocationsTestCase.class)) //
.filters(includeTags("nested")));
results.testEvents().assertStatistics(stats -> stats.started(2).succeeded(2)) //
.assertEventsMatchLoosely(event(test(displayName("b()")), finishedSuccessfully()));
}
@Test
void prunesEmptyNestedTestClasses() {
var results = executeTests(request -> request //
.selectors(selectClass(TwoInvocationsTestCase.class)) //
.filters(excludeTags("nested")));
results.containerEvents().assertThatEvents() //
.noneMatch(container(TwoInvocationsTestCase.NestedTestCase.class.getSimpleName())::matches);
results.testEvents().assertStatistics(stats -> stats.started(2).succeeded(2)) //
.assertEventsMatchLoosely(event(test(displayName("a()")), finishedSuccessfully()));
}
@Test
void executesNestedClassTemplateClassTwiceWithClassSelectorForEnclosingClass() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classId = engineId.append(ClassTestDescriptor.SEGMENT_TYPE,
NestedClassTemplateWithTwoInvocationsTestCase.class.getName());
var methodAId = classId.append(TestMethodTestDescriptor.SEGMENT_TYPE, "a()");
var nestedClassTemplateId = classId.append(ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE,
"NestedTestCase");
var invocationId1 = nestedClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var invocation1NestedMethodBId = invocationId1.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var invocationId2 = nestedClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var invocation2NestedMethodBId = invocationId2.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var results = executeTestsForClass(NestedClassTemplateWithTwoInvocationsTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classId)), started()), //
event(test(uniqueId(methodAId)), started()), //
event(test(uniqueId(methodAId)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId1)), displayName("[1] A of NestedTestCase"),
legacyReportingName(
"%s[1]".formatted(NestedClassTemplateWithTwoInvocationsTestCase.NestedTestCase.class.getName()))), //
event(container(uniqueId(invocationId1)), started()), //
event(dynamicTestRegistered(uniqueId(invocation1NestedMethodBId))), //
event(test(uniqueId(invocation1NestedMethodBId)), started()), //
event(test(uniqueId(invocation1NestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of NestedTestCase"),
legacyReportingName(
"%s[2]".formatted(NestedClassTemplateWithTwoInvocationsTestCase.NestedTestCase.class.getName()))), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(invocation2NestedMethodBId))), //
event(test(uniqueId(invocation2NestedMethodBId)), started()), //
event(test(uniqueId(invocation2NestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassTemplateId)), finishedSuccessfully()), //
event(container(uniqueId(classId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void executesNestedClassTemplateClassTwiceWithNestedClassSelector() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classId = engineId.append(ClassTestDescriptor.SEGMENT_TYPE,
NestedClassTemplateWithTwoInvocationsTestCase.class.getName());
var nestedClassTemplateId = classId.append(ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE,
"NestedTestCase");
var invocationId1 = nestedClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var invocation1NestedMethodBId = invocationId1.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var invocationId2 = nestedClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var invocation2NestedMethodBId = invocationId2.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var results = executeTestsForClass(NestedClassTemplateWithTwoInvocationsTestCase.NestedTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classId)), started()), //
event(container(uniqueId(nestedClassTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId1)), displayName("[1] A of NestedTestCase")), //
event(container(uniqueId(invocationId1)), started()), //
event(dynamicTestRegistered(uniqueId(invocation1NestedMethodBId))), //
event(test(uniqueId(invocation1NestedMethodBId)), started()), //
event(test(uniqueId(invocation1NestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of NestedTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(invocation2NestedMethodBId))), //
event(test(uniqueId(invocation2NestedMethodBId)), started()), //
event(test(uniqueId(invocation2NestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassTemplateId)), finishedSuccessfully()), //
event(container(uniqueId(classId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void executesNestedClassTemplatesTwiceEach() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var outerClassTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
TwoTimesTwoInvocationsTestCase.class.getName());
var outerInvocation1Id = outerClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var outerInvocation1NestedClassTemplateId = outerInvocation1Id.append(
ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE, "NestedTestCase");
var outerInvocation1InnerInvocation1Id = outerInvocation1NestedClassTemplateId.append(
ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var outerInvocation1InnerInvocation1NestedMethodId = outerInvocation1InnerInvocation1Id.append(
TestMethodTestDescriptor.SEGMENT_TYPE, "test()");
var outerInvocation1InnerInvocation2Id = outerInvocation1NestedClassTemplateId.append(
ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var outerInvocation1InnerInvocation2NestedMethodId = outerInvocation1InnerInvocation2Id.append(
TestMethodTestDescriptor.SEGMENT_TYPE, "test()");
var outerInvocation2Id = outerClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var outerInvocation2NestedClassTemplateId = outerInvocation2Id.append(
ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE, "NestedTestCase");
var outerInvocation2InnerInvocation1Id = outerInvocation2NestedClassTemplateId.append(
ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var outerInvocation2InnerInvocation1NestedMethodId = outerInvocation2InnerInvocation1Id.append(
TestMethodTestDescriptor.SEGMENT_TYPE, "test()");
var outerInvocation2InnerInvocation2Id = outerInvocation2NestedClassTemplateId.append(
ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var outerInvocation2InnerInvocation2NestedMethodId = outerInvocation2InnerInvocation2Id.append(
TestMethodTestDescriptor.SEGMENT_TYPE, "test()");
var results = executeTestsForClass(TwoTimesTwoInvocationsTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(outerClassTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation1Id)),
displayName("[1] A of TwoTimesTwoInvocationsTestCase")), //
event(container(uniqueId(outerInvocation1Id)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation1NestedClassTemplateId))), //
event(container(uniqueId(outerInvocation1NestedClassTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation1InnerInvocation1Id)),
displayName("[1] A of NestedTestCase")), //
event(container(uniqueId(outerInvocation1InnerInvocation1Id)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation1InnerInvocation1NestedMethodId))), //
event(test(uniqueId(outerInvocation1InnerInvocation1NestedMethodId)), started()), //
event(test(uniqueId(outerInvocation1InnerInvocation1NestedMethodId)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation1InnerInvocation1Id)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(outerInvocation1InnerInvocation2Id)),
displayName("[2] B of NestedTestCase")), //
event(container(uniqueId(outerInvocation1InnerInvocation2Id)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation1InnerInvocation2NestedMethodId))), //
event(test(uniqueId(outerInvocation1InnerInvocation2NestedMethodId)), started()), //
event(test(uniqueId(outerInvocation1InnerInvocation2NestedMethodId)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation1InnerInvocation2Id)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation1NestedClassTemplateId)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation1Id)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(outerInvocation2Id)),
displayName("[2] B of TwoTimesTwoInvocationsTestCase")), //
event(container(uniqueId(outerInvocation2Id)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation2NestedClassTemplateId))), //
event(container(uniqueId(outerInvocation2NestedClassTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation1Id)),
displayName("[1] A of NestedTestCase")), //
event(container(uniqueId(outerInvocation2InnerInvocation1Id)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation1NestedMethodId))), //
event(test(uniqueId(outerInvocation2InnerInvocation1NestedMethodId)), started()), //
event(test(uniqueId(outerInvocation2InnerInvocation1NestedMethodId)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation2InnerInvocation1Id)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation2Id)),
displayName("[2] B of NestedTestCase")), //
event(container(uniqueId(outerInvocation2InnerInvocation2Id)), started()), //
event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation2NestedMethodId))), //
event(test(uniqueId(outerInvocation2InnerInvocation2NestedMethodId)), started()), //
event(test(uniqueId(outerInvocation2InnerInvocation2NestedMethodId)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation2InnerInvocation2Id)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation2NestedClassTemplateId)), finishedSuccessfully()), //
event(container(uniqueId(outerInvocation2Id)), finishedSuccessfully()), //
event(container(uniqueId(outerClassTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void invocationContextProviderCanRegisterAdditionalExtensions() {
var results = executeTestsForClass(AdditionalExtensionRegistrationTestCase.class);
results.testEvents().assertStatistics(stats -> stats.started(2).succeeded(2));
}
@Test
void eachInvocationHasSeparateExtensionContext() {
var results = executeTestsForClass(SeparateExtensionContextTestCase.class);
results.testEvents().assertStatistics(stats -> stats.started(2).succeeded(2));
}
@Test
void supportsTestTemplateMethodsInsideClassTemplateClasses() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
CombinationWithTestTemplateTestCase.class.getName());
var invocationId1 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var testTemplateId1 = invocationId1.append(TestTemplateTestDescriptor.SEGMENT_TYPE, "test(int)");
var testTemplate1InvocationId1 = testTemplateId1.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE,
"#1");
var testTemplate1InvocationId2 = testTemplateId1.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE,
"#2");
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var testTemplateId2 = invocationId2.append(TestTemplateTestDescriptor.SEGMENT_TYPE, "test(int)");
var testTemplate2InvocationId1 = testTemplateId2.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE,
"#1");
var testTemplate2InvocationId2 = testTemplateId2.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE,
"#2");
var results = executeTestsForClass(CombinationWithTestTemplateTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId1)),
displayName("[1] A of CombinationWithTestTemplateTestCase")), //
event(container(uniqueId(invocationId1)), started()), //
event(dynamicTestRegistered(uniqueId(testTemplateId1))), //
event(container(uniqueId(testTemplateId1)), started()), //
event(dynamicTestRegistered(uniqueId(testTemplate1InvocationId1))), //
event(test(uniqueId(testTemplate1InvocationId1)), started()), //
event(test(uniqueId(testTemplate1InvocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(testTemplate1InvocationId2))), //
event(test(uniqueId(testTemplate1InvocationId2)), started()), //
event(test(uniqueId(testTemplate1InvocationId2)), finishedSuccessfully()), //
event(container(uniqueId(testTemplateId1)), finishedSuccessfully()), //
event(container(uniqueId(invocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(invocationId2)),
displayName("[2] B of CombinationWithTestTemplateTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(testTemplateId2))), //
event(container(uniqueId(testTemplateId2)), started()), //
event(dynamicTestRegistered(uniqueId(testTemplate2InvocationId1))), //
event(test(uniqueId(testTemplate2InvocationId1)), started()), //
event(test(uniqueId(testTemplate2InvocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(testTemplate2InvocationId2))), //
event(test(uniqueId(testTemplate2InvocationId2)), started()), //
event(test(uniqueId(testTemplate2InvocationId2)), finishedSuccessfully()), //
event(container(uniqueId(testTemplateId2)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void testTemplateInvocationInsideClassTemplateClassCanBeSelectedByUniqueId() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
CombinationWithTestTemplateTestCase.class.getName());
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var testTemplateId2 = invocationId2.append(TestTemplateTestDescriptor.SEGMENT_TYPE, "test(int)");
var testTemplate2InvocationId2 = testTemplateId2.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE,
"#2");
var results = executeTests(selectUniqueId(testTemplate2InvocationId2));
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId2)),
displayName("[2] B of CombinationWithTestTemplateTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(testTemplateId2))), //
event(container(uniqueId(testTemplateId2)), started()), //
event(dynamicTestRegistered(uniqueId(testTemplate2InvocationId2))), //
event(test(uniqueId(testTemplate2InvocationId2)), started()), //
event(test(uniqueId(testTemplate2InvocationId2)), finishedSuccessfully()), //
event(container(uniqueId(testTemplateId2)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void supportsTestFactoryMethodsInsideClassTemplateClasses() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
CombinationWithTestFactoryTestCase.class.getName());
var invocationId1 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
var testFactoryId1 = invocationId1.append(TestFactoryTestDescriptor.SEGMENT_TYPE, "test()");
var testFactory1DynamicTestId1 = testFactoryId1.append(TestFactoryTestDescriptor.DYNAMIC_TEST_SEGMENT_TYPE,
"#1");
var testFactory1DynamicTestId2 = testFactoryId1.append(TestFactoryTestDescriptor.DYNAMIC_TEST_SEGMENT_TYPE,
"#2");
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var testFactoryId2 = invocationId2.append(TestFactoryTestDescriptor.SEGMENT_TYPE, "test()");
var testFactory2DynamicTestId1 = testFactoryId2.append(TestFactoryTestDescriptor.DYNAMIC_TEST_SEGMENT_TYPE,
"#1");
var testFactory2DynamicTestId2 = testFactoryId2.append(TestFactoryTestDescriptor.DYNAMIC_TEST_SEGMENT_TYPE,
"#2");
var results = executeTestsForClass(CombinationWithTestFactoryTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId1)),
displayName("[1] A of CombinationWithTestFactoryTestCase")), //
event(container(uniqueId(invocationId1)), started()), //
event(dynamicTestRegistered(uniqueId(testFactoryId1))), //
event(container(uniqueId(testFactoryId1)), started()), //
event(dynamicTestRegistered(uniqueId(testFactory1DynamicTestId1))), //
event(test(uniqueId(testFactory1DynamicTestId1)), started()), //
event(test(uniqueId(testFactory1DynamicTestId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(testFactory1DynamicTestId2))), //
event(test(uniqueId(testFactory1DynamicTestId2)), started()), //
event(test(uniqueId(testFactory1DynamicTestId2)), finishedSuccessfully()), //
event(container(uniqueId(testFactoryId1)), finishedSuccessfully()), //
event(container(uniqueId(invocationId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(invocationId2)),
displayName("[2] B of CombinationWithTestFactoryTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(testFactoryId2))), //
event(container(uniqueId(testFactoryId2)), started()), //
event(dynamicTestRegistered(uniqueId(testFactory2DynamicTestId1))), //
event(test(uniqueId(testFactory2DynamicTestId1)), started()), //
event(test(uniqueId(testFactory2DynamicTestId1)), finishedSuccessfully()), //
event(dynamicTestRegistered(uniqueId(testFactory2DynamicTestId2))), //
event(test(uniqueId(testFactory2DynamicTestId2)), started()), //
event(test(uniqueId(testFactory2DynamicTestId2)), finishedSuccessfully()), //
event(container(uniqueId(testFactoryId2)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void specificDynamicTestInsideClassTemplateClassCanBeSelectedByUniqueId() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
CombinationWithTestFactoryTestCase.class.getName());
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var testFactoryId2 = invocationId2.append(TestFactoryTestDescriptor.SEGMENT_TYPE, "test()");
var testFactory2DynamicTestId2 = testFactoryId2.append(TestFactoryTestDescriptor.DYNAMIC_TEST_SEGMENT_TYPE,
"#2");
var results = executeTests(selectUniqueId(testFactory2DynamicTestId2));
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId2)),
displayName("[2] B of CombinationWithTestFactoryTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(testFactoryId2))), //
event(container(uniqueId(testFactoryId2)), started()), //
event(dynamicTestRegistered(uniqueId(testFactory2DynamicTestId2))), //
event(test(uniqueId(testFactory2DynamicTestId2)), started()), //
event(test(uniqueId(testFactory2DynamicTestId2)), finishedSuccessfully()), //
event(container(uniqueId(testFactoryId2)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void failsIfProviderReturnsZeroInvocationContextWithoutOptIn() {
var results = executeTestsForClass(InvalidZeroInvocationTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(InvalidZeroInvocationTestCase.class), started()), //
event(container(InvalidZeroInvocationTestCase.class),
finishedWithFailure(
message("Provider [Ext] did not provide any invocation contexts, but was expected to do so. "
+ "You may override mayReturnZeroClassTemplateInvocationContexts() to allow this."))), //
event(engine(), finishedSuccessfully()));
}
@Test
void succeedsIfProviderReturnsZeroInvocationContextWithOptIn() {
var results = executeTestsForClass(ValidZeroInvocationTestCase.class);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(ValidZeroInvocationTestCase.class), started()), //
event(container(ValidZeroInvocationTestCase.class), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@ParameterizedTest
@ValueSource(classes = { NoProviderRegisteredTestCase.class, NoSupportingProviderRegisteredTestCase.class })
void failsIfNoSupportingProviderIsRegistered(Class<?> testClass) {
var results = executeTestsForClass(testClass);
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(testClass), started()), //
event(container(testClass),
finishedWithFailure(
message("You must register at least one ClassTemplateInvocationContextProvider that supports "
+ "@ClassTemplate class [" + testClass.getName() + "]"))), //
event(engine(), finishedSuccessfully()));
}
@Test
void classTemplateInvocationCanBeSelectedByUniqueId() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
TwoInvocationsTestCase.class.getName());
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var methodAId = invocationId2.append(TestMethodTestDescriptor.SEGMENT_TYPE, "a()");
var nestedClassId = invocationId2.append(NestedClassTestDescriptor.SEGMENT_TYPE, "NestedTestCase");
var nestedMethodBId = nestedClassId.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var results = executeTests(selectUniqueId(invocationId2));
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of TwoInvocationsTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(methodAId))), //
event(dynamicTestRegistered(uniqueId(nestedClassId))), //
event(dynamicTestRegistered(uniqueId(nestedMethodBId))), //
event(test(uniqueId(methodAId)), started()), //
event(test(uniqueId(methodAId)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassId)), started()), //
event(test(uniqueId(nestedMethodBId)), started()), //
event(test(uniqueId(nestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void classTemplateInvocationCanBeSelectedByIteration() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
TwoInvocationsTestCase.class.getName());
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var methodAId = invocationId2.append(TestMethodTestDescriptor.SEGMENT_TYPE, "a()");
var nestedClassId = invocationId2.append(NestedClassTestDescriptor.SEGMENT_TYPE, "NestedTestCase");
var nestedMethodBId = nestedClassId.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
var results = executeTests(selectIteration(selectClass(TwoInvocationsTestCase.class), 1));
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of TwoInvocationsTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(methodAId))), //
event(dynamicTestRegistered(uniqueId(nestedClassId))), //
event(dynamicTestRegistered(uniqueId(nestedMethodBId))), //
event(test(uniqueId(methodAId)), started()), //
event(test(uniqueId(methodAId)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassId)), started()), //
event(test(uniqueId(nestedMethodBId)), started()), //
event(test(uniqueId(nestedMethodBId)), finishedSuccessfully()), //
event(container(uniqueId(nestedClassId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@ParameterizedTest
@ValueSource(strings = { //
"class:org.junit.jupiter.engine.ClassTemplateInvocationTests$TwoInvocationsTestCase", //
"uid:[engine:junit-jupiter]/[class-template:org.junit.jupiter.engine.ClassTemplateInvocationTests$TwoInvocationsTestCase]" //
})
void executesAllInvocationsForRedundantSelectors(String classTemplateSelectorIdentifier) {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
TwoInvocationsTestCase.class.getName());
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var results = executeTests(selectUniqueId(invocationId2),
DiscoverySelectors.parse(classTemplateSelectorIdentifier).orElseThrow());
results.testEvents().assertStatistics(stats -> stats.started(4).succeeded(4));
}
@Test
void methodInClassTemplateInvocationCanBeSelectedByUniqueId() {
var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
TwoInvocationsTestCase.class.getName());
var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
var methodAId = invocationId2.append(TestMethodTestDescriptor.SEGMENT_TYPE, "a()");
var results = executeTests(selectUniqueId(methodAId));
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(uniqueId(classTemplateId)), started()), //
event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of TwoInvocationsTestCase")), //
event(container(uniqueId(invocationId2)), started()), //
event(dynamicTestRegistered(uniqueId(methodAId))), //
event(test(uniqueId(methodAId)), started()), //
event(test(uniqueId(methodAId)), finishedSuccessfully()), //
event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
	@Test
	void nestedMethodInClassTemplateInvocationCanBeSelectedByUniqueId() {
		// Like the test above, but the selected method lives in a @Nested class inside
		// the class template invocation, adding one more segment to the unique ID.
		var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
		var classTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
			TwoInvocationsTestCase.class.getName());
		var invocationId2 = classTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
		var nestedClassId = invocationId2.append(NestedClassTestDescriptor.SEGMENT_TYPE, "NestedTestCase");
		var nestedMethodBId = nestedClassId.append(TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
		var results = executeTests(selectUniqueId(nestedMethodBId));
		// Invocation #2, the nested class, and method b() are registered dynamically
		// and executed; nothing from invocation #1 shows up.
		results.allEvents().assertEventsMatchExactly( //
			event(engine(), started()), //
			event(container(uniqueId(classTemplateId)), started()), //
			event(dynamicTestRegistered(uniqueId(invocationId2)), displayName("[2] B of TwoInvocationsTestCase")), //
			event(container(uniqueId(invocationId2)), started()), //
			event(dynamicTestRegistered(uniqueId(nestedClassId))), //
			event(dynamicTestRegistered(uniqueId(nestedMethodBId))), //
			event(container(uniqueId(nestedClassId)), started()), //
			event(test(uniqueId(nestedMethodBId)), started()), //
			event(test(uniqueId(nestedMethodBId)), finishedSuccessfully()), //
			event(container(uniqueId(nestedClassId)), finishedSuccessfully()), //
			event(container(uniqueId(invocationId2)), finishedSuccessfully()), //
			event(container(uniqueId(classTemplateId)), finishedSuccessfully()), //
			event(engine(), finishedSuccessfully()));
	}
	@Test
	void nestedClassTemplateInvocationCanBeSelectedByUniqueId() {
		// Class templates can be nested: the selected method lives in invocation #2 of
		// a nested class template, which itself lives in invocation #2 of the outer one.
		var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
		var outerClassTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
			TwoTimesTwoInvocationsWithMultipleMethodsTestCase.class.getName());
		var outerInvocation2Id = outerClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
		var outerInvocation2NestedClassTemplateId = outerInvocation2Id.append(
			ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE, "NestedTestCase");
		var outerInvocation2InnerInvocation2Id = outerInvocation2NestedClassTemplateId.append(
			ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
		var outerInvocation2InnerInvocation2NestedMethodId = outerInvocation2InnerInvocation2Id.append(
			TestMethodTestDescriptor.SEGMENT_TYPE, "b()");
		var results = executeTests(selectUniqueId(outerInvocation2InnerInvocation2NestedMethodId));
		// Exactly one path through the nested templates executes: outer invocation #2,
		// inner invocation #2, method b(). All other invocations/methods are pruned.
		results.allEvents().assertEventsMatchExactly( //
			event(engine(), started()), //
			event(container(uniqueId(outerClassTemplateId)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2Id)),
				displayName("[2] B of TwoTimesTwoInvocationsWithMultipleMethodsTestCase")), //
			event(container(uniqueId(outerInvocation2Id)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2NestedClassTemplateId))), //
			event(container(uniqueId(outerInvocation2NestedClassTemplateId)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation2Id)),
				displayName("[2] B of NestedTestCase")), //
			event(container(uniqueId(outerInvocation2InnerInvocation2Id)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation2NestedMethodId))), //
			event(test(uniqueId(outerInvocation2InnerInvocation2NestedMethodId)), started()), //
			event(test(uniqueId(outerInvocation2InnerInvocation2NestedMethodId)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation2InnerInvocation2Id)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation2NestedClassTemplateId)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation2Id)), finishedSuccessfully()), //
			event(container(uniqueId(outerClassTemplateId)), finishedSuccessfully()), //
			event(engine(), finishedSuccessfully()));
	}
	@Test
	void nestedClassTemplateInvocationCanBeSelectedByIteration() {
		// Selecting iteration index 1 (zero-based -> "#2") of the NESTED class template
		// restricts the inner invocations to #2, but every OUTER invocation still runs.
		var engineId = UniqueId.forEngine(JupiterEngineDescriptor.ENGINE_ID);
		var outerClassTemplateId = engineId.append(ClassTemplateTestDescriptor.STANDALONE_CLASS_SEGMENT_TYPE,
			TwoTimesTwoInvocationsTestCase.class.getName());
		var outerInvocation1Id = outerClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
		var outerInvocation1NestedClassTemplateId = outerInvocation1Id.append(
			ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE, "NestedTestCase");
		var outerInvocation1InnerInvocation2Id = outerInvocation1NestedClassTemplateId.append(
			ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
		var outerInvocation1InnerInvocation2NestedMethodId = outerInvocation1InnerInvocation2Id.append(
			TestMethodTestDescriptor.SEGMENT_TYPE, "test()");
		var outerInvocation2Id = outerClassTemplateId.append(ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
		var outerInvocation2NestedClassTemplateId = outerInvocation2Id.append(
			ClassTemplateTestDescriptor.NESTED_CLASS_SEGMENT_TYPE, "NestedTestCase");
		var outerInvocation2InnerInvocation2Id = outerInvocation2NestedClassTemplateId.append(
			ClassTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
		var outerInvocation2InnerInvocation2NestedMethodId = outerInvocation2InnerInvocation2Id.append(
			TestMethodTestDescriptor.SEGMENT_TYPE, "test()");
		var results = executeTests(selectIteration(selectNestedClass(List.of(TwoTimesTwoInvocationsTestCase.class),
			TwoTimesTwoInvocationsTestCase.NestedTestCase.class), 1));
		results.allEvents().assertEventsMatchExactly( //
			event(engine(), started()), //
			event(container(uniqueId(outerClassTemplateId)), started()), //
			// --- outer invocation #1: only inner invocation #2 executes ---
			event(dynamicTestRegistered(uniqueId(outerInvocation1Id)),
				displayName("[1] A of TwoTimesTwoInvocationsTestCase")), //
			event(container(uniqueId(outerInvocation1Id)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation1NestedClassTemplateId))), //
			event(container(uniqueId(outerInvocation1NestedClassTemplateId)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation1InnerInvocation2Id)),
				displayName("[2] B of NestedTestCase")), //
			event(container(uniqueId(outerInvocation1InnerInvocation2Id)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation1InnerInvocation2NestedMethodId))), //
			event(test(uniqueId(outerInvocation1InnerInvocation2NestedMethodId)), started()), //
			event(test(uniqueId(outerInvocation1InnerInvocation2NestedMethodId)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation1InnerInvocation2Id)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation1NestedClassTemplateId)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation1Id)), finishedSuccessfully()), //
			// --- outer invocation #2: same shape, again only inner invocation #2 ---
			event(dynamicTestRegistered(uniqueId(outerInvocation2Id)),
				displayName("[2] B of TwoTimesTwoInvocationsTestCase")), //
			event(container(uniqueId(outerInvocation2Id)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2NestedClassTemplateId))), //
			event(container(uniqueId(outerInvocation2NestedClassTemplateId)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation2Id)),
				displayName("[2] B of NestedTestCase")), //
			event(container(uniqueId(outerInvocation2InnerInvocation2Id)), started()), //
			event(dynamicTestRegistered(uniqueId(outerInvocation2InnerInvocation2NestedMethodId))), //
			event(test(uniqueId(outerInvocation2InnerInvocation2NestedMethodId)), started()), //
			event(test(uniqueId(outerInvocation2InnerInvocation2NestedMethodId)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation2InnerInvocation2Id)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation2NestedClassTemplateId)), finishedSuccessfully()), //
			event(container(uniqueId(outerInvocation2Id)), finishedSuccessfully()), //
			event(container(uniqueId(outerClassTemplateId)), finishedSuccessfully()), //
			event(engine(), finishedSuccessfully()));
	}
	@Test
	void executesLifecycleCallbacksInNestedClassTemplates() {
		// Verifies the exact lifecycle-callback ordering for a 2x2 nested class
		// template (two outer invocations, each running two nested invocations with
		// two tests each). Note that the nested class's @BeforeAll/@AfterAll run once
		// per OUTER invocation, not once overall.
		var results = executeTestsForClass(TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase.class);
		results.containerEvents().assertStatistics(stats -> stats.started(10).succeeded(10));
		results.testEvents().assertStatistics(stats -> stats.started(8).succeeded(8));
		// @formatter:off
		assertThat(allReportEntryValues(results)).containsExactly(
				"beforeAll: TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase",
				// outer invocation #1
				"beforeClassTemplateInvocation: TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase",
				"beforeAll: NestedTestCase",
				// inner invocation #1
				"beforeClassTemplateInvocation: NestedTestCase",
				"beforeEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test1 [NestedTestCase]",
				"test1",
				"afterEach: test1 [NestedTestCase]",
				"afterEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [NestedTestCase]",
				"test2",
				"afterEach: test2 [NestedTestCase]",
				"afterEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"afterClassTemplateInvocation: NestedTestCase",
				// inner invocation #2
				"beforeClassTemplateInvocation: NestedTestCase",
				"beforeEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test1 [NestedTestCase]",
				"test1",
				"afterEach: test1 [NestedTestCase]",
				"afterEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [NestedTestCase]",
				"test2",
				"afterEach: test2 [NestedTestCase]",
				"afterEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"afterClassTemplateInvocation: NestedTestCase",
				"afterAll: NestedTestCase",
				"afterClassTemplateInvocation: TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase",
				// outer invocation #2 (identical shape to #1)
				"beforeClassTemplateInvocation: TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase",
				"beforeAll: NestedTestCase",
				"beforeClassTemplateInvocation: NestedTestCase",
				"beforeEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test1 [NestedTestCase]",
				"test1",
				"afterEach: test1 [NestedTestCase]",
				"afterEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [NestedTestCase]",
				"test2",
				"afterEach: test2 [NestedTestCase]",
				"afterEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"afterClassTemplateInvocation: NestedTestCase",
				"beforeClassTemplateInvocation: NestedTestCase",
				"beforeEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test1 [NestedTestCase]",
				"test1",
				"afterEach: test1 [NestedTestCase]",
				"afterEach: test1 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"beforeEach: test2 [NestedTestCase]",
				"test2",
				"afterEach: test2 [NestedTestCase]",
				"afterEach: test2 [TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase]",
				"afterClassTemplateInvocation: NestedTestCase",
				"afterAll: NestedTestCase",
				"afterClassTemplateInvocation: TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase",
				"afterAll: TwoTimesTwoInvocationsWithLifecycleCallbacksTestCase"
		);
		// @formatter:on
	}
@Test
void guaranteesWrappingBehaviorForCallbacks() {
var results = executeTestsForClass(CallbackWrappingBehaviorTestCase.class);
results.containerEvents().assertStatistics(stats -> stats.started(4).succeeded(4));
results.testEvents().assertStatistics(stats -> stats.started(2).succeeded(2));
// @formatter:off
assertThat(allReportEntryValues(results)).containsExactly(
"1st -> beforeClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"2nd -> beforeClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"test",
"2nd -> afterClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"1st -> afterClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"1st -> beforeClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"2nd -> beforeClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"test",
"2nd -> afterClassTemplateInvocation: CallbackWrappingBehaviorTestCase",
"1st -> afterClassTemplateInvocation: CallbackWrappingBehaviorTestCase"
);
// @formatter:on
}
	@Test
	void propagatesExceptionsFromCallbacks() {
		var results = executeTestsForClass(CallbackExceptionBehaviorTestCase.class);
		results.allEvents().assertStatistics(stats -> stats.started(4).failed(2).succeeded(2));
		// Both invocations fail the same way: the primary failure comes from the "2nd"
		// afterClassTemplateInvocation callback, with the "1st" before- and after-callback
		// exceptions attached as suppressed exceptions (in that order).
		results.containerEvents().assertThatEvents() //
			.haveExactly(2, finishedWithFailure( //
				message("2nd -> afterClassTemplateInvocation: CallbackExceptionBehaviorTestCase"), //
				suppressed(0, message("1st -> beforeClassTemplateInvocation: CallbackExceptionBehaviorTestCase")), //
				suppressed(1, message("1st -> afterClassTemplateInvocation: CallbackExceptionBehaviorTestCase"))));
		// Only these three callbacks ever ran; the "2nd" before-callback is skipped
		// because the "1st" before-callback threw first.
		assertThat(allReportEntryValues(results).distinct()) //
			.containsExactly("1st -> beforeClassTemplateInvocation: CallbackExceptionBehaviorTestCase", //
				"2nd -> afterClassTemplateInvocation: CallbackExceptionBehaviorTestCase", //
				"1st -> afterClassTemplateInvocation: CallbackExceptionBehaviorTestCase");
	}
@Test
void templateWithPreparations() {
var results = executeTestsForClass(ClassTemplateWithPreparationsTestCase.class);
results.allEvents().assertStatistics(stats -> stats.started(6).succeeded(6));
assertTrue(CustomCloseableResource.closed, "resource in store was closed");
}
@Test
void propagatesTagsFromEnclosingClassesToNestedClassTemplates() {
var request = defaultRequest() //
.selectors(selectClass(NestedClassTemplateWithTagOnEnclosingClassTestCase.class)) //
.build();
var engineDescriptor = discoverTestsWithoutIssues(request);
var classDescriptor = getOnlyElement(engineDescriptor.getChildren());
var nestedClassTemplateDescriptor = getOnlyElement(classDescriptor.getChildren());
assertThat(classDescriptor.getTags()).extracting(TestTag::getName) //
.containsExactly("top-level");
assertThat(nestedClassTemplateDescriptor.getTags()).extracting(TestTag::getName) //
.containsExactlyInAnyOrder("top-level", "nested");
}
@Test
void ignoresComposedAnnotations() {
var request = defaultRequest() //
.selectors(selectClass(ParameterizedClass.class)) //
.build();
var engineDescriptor = discoverTestsWithoutIssues(request);
assertThat(engineDescriptor.getDescendants()).isEmpty();
}
// -------------------------------------------------------------------
private static Stream<String> allReportEntryValues(EngineExecutionResults results) {
return results.allEvents().reportingEntryPublished() //
.map(event -> event.getRequiredPayload(ReportEntry.class)) //
.map(ReportEntry::getKeyValuePairs) //
.map(Map::values) //
.flatMap(Collection::stream);
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ClassTemplate
@ExtendWith(TwoInvocationsClassTemplateInvocationContextProvider.class)
static | ClassTemplateInvocationTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FallThroughTest.java | {
"start": 881,
"end": 1250
} | class ____ {
private final CompilationTestHelper testHelper =
CompilationTestHelper.newInstance(FallThrough.class, getClass());
@Test
public void positive() {
testHelper
.addSourceLines(
"FallThroughPositiveCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
public | FallThroughTest |
java | netty__netty | handler/src/test/java/io/netty/handler/ssl/OpenSslServerContextTest.java | {
"start": 772,
"end": 1118
} | class ____ extends SslContextTest {
@BeforeAll
public static void checkOpenSsl() {
OpenSsl.ensureAvailability();
}
@Override
protected SslContext newSslContext(File crtFile, File keyFile, String pass) throws SSLException {
return new OpenSslServerContext(crtFile, keyFile, pass);
}
}
| OpenSslServerContextTest |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/context/ShutdownEndpoint.java | {
"start": 1506,
"end": 2490
} | class ____ implements ApplicationContextAware {
@Nullable private ConfigurableApplicationContext context;
@WriteOperation
public ShutdownDescriptor shutdown() {
if (this.context == null) {
return ShutdownDescriptor.NO_CONTEXT;
}
try {
return ShutdownDescriptor.DEFAULT;
}
finally {
Thread thread = new Thread(this::performShutdown);
thread.setContextClassLoader(getClass().getClassLoader());
thread.start();
}
}
private void performShutdown() {
try {
Thread.sleep(500L);
}
catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
Assert.state(this.context != null, "'context' must not be null");
this.context.close();
}
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
if (context instanceof ConfigurableApplicationContext configurableContext) {
this.context = configurableContext;
}
}
/**
* Description of the shutdown.
*/
public static | ShutdownEndpoint |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/reflect/FieldUtils.java | {
"start": 2506,
"end": 3071
} | class ____ {@code null}.
* @since 3.2
*/
public static List<Field> getAllFieldsList(final Class<?> cls) {
Objects.requireNonNull(cls, "cls");
final List<Field> allFields = new ArrayList<>();
Class<?> currentClass = cls;
while (currentClass != null) {
Collections.addAll(allFields, currentClass.getDeclaredFields());
currentClass = currentClass.getSuperclass();
}
return allFields;
}
/**
* Gets an accessible {@link Field} by name respecting scope. Only the specified | is |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/GraphQLConfigMappingTest.java | {
"start": 572,
"end": 3223
} | class ____ extends AbstractGraphQLTest {
private static final Logger LOG = Logger.getLogger(GraphQLConfigMappingTest.class);
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(TestResource.class, TestPojo.class, TestRandom.class, TestGenericsPojo.class,
BusinessException.class, TestUnion.class, TestUnionMember.class)
.addAsResource(new StringAsset(getPropertyAsString(configuration())), "application.properties")
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
@Test
public void testBusinessError() {
String pingRequest = "{\n" +
" businesserror {\n" +
" message\n" +
" }\n" +
"}";
RestAssured.given().when()
.accept(MEDIATYPE_JSON)
.contentType(MEDIATYPE_JSON)
.queryParam(QUERY, pingRequest)
.get("/graphql")
.then()
.assertThat()
.statusCode(200)
.and()
.body(CoreMatchers.containsString("Some invalid case"),
CoreMatchers.containsString("io.quarkus.smallrye.graphql.deployment.BusinessException"), // exception
CoreMatchers.containsString("business")); // code
}
@Test
public void testSystemError() {
String pingRequest = "{\n" +
" systemserror {\n" +
" message\n" +
" }\n" +
"}";
RestAssured.given().when()
.accept(MEDIATYPE_JSON)
.contentType(MEDIATYPE_JSON)
.queryParam(QUERY, pingRequest)
.get("/graphql")
.then()
.assertThat()
.statusCode(200)
.and()
.body(CoreMatchers.containsString("O gats, daar is 'n probleem !"), // custom message
CoreMatchers.containsString("java.lang.RuntimeException")); // exception
}
private static Map<String, String> configuration() {
Map<String, String> m = new HashMap<>();
m.put("quarkus.smallrye-graphql.error-extension-fields",
"exception,classification,code,description,validationErrorType,queryPath");
m.put("quarkus.smallrye-graphql.default-error-message", "O gats, daar is 'n probleem !");
m.put("quarkus.smallrye-graphql.events.enabled", "true");
return m;
}
}
| GraphQLConfigMappingTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java | {
"start": 4570,
"end": 4811
} | enum ____ {
LAUNCH,
RELAUNCH
}
/**
* Environment for Applications.
*
* Some of the environment variables for applications are <em>final</em>
* i.e. they cannot be modified by the applications.
*/
| ContainerLaunchType |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java | {
"start": 1551,
"end": 5361
} | class ____ extends TextWriterImageVisitor {
final private LinkedList<ImageElement> elemQ = new LinkedList<ImageElement>();
private int numBlocks;
private String perms;
private int replication;
private String username;
private String group;
private long filesize;
private String modTime;
private String path;
private String linkTarget;
private boolean inInode = false;
final private StringBuilder sb = new StringBuilder();
final private Formatter formatter = new Formatter(sb);
public LsImageVisitor(String filename) throws IOException {
super(filename);
}
public LsImageVisitor(String filename, boolean printToScreen) throws IOException {
super(filename, printToScreen);
}
/**
* Start a new line of output, reset values.
*/
private void newLine() {
numBlocks = 0;
perms = username = group = path = linkTarget = "";
filesize = 0l;
replication = 0;
inInode = true;
}
/**
* All the values have been gathered. Print them to the console in an
* ls-style format.
*/
private final static int widthRepl = 2;
private final static int widthUser = 8;
private final static int widthGroup = 10;
private final static int widthSize = 10;
private final static int widthMod = 10;
private final static String lsStr = " %" + widthRepl + "s %" + widthUser +
"s %" + widthGroup + "s %" + widthSize +
"d %" + widthMod + "s %s";
private void printLine() throws IOException {
sb.append(numBlocks < 0 ? "d" : "-");
sb.append(perms);
if (0 != linkTarget.length()) {
path = path + " -> " + linkTarget;
}
formatter.format(lsStr, replication > 0 ? replication : "-",
username, group, filesize, modTime, path);
sb.append("\n");
write(sb.toString());
sb.setLength(0); // clear string builder
inInode = false;
}
@Override
void start() throws IOException {}
@Override
void finish() throws IOException {
super.finish();
}
@Override
void finishAbnormally() throws IOException {
System.out.println("Input ended unexpectedly.");
super.finishAbnormally();
}
@Override
void leaveEnclosingElement() throws IOException {
ImageElement elem = elemQ.pop();
if(elem == ImageElement.INODE)
printLine();
}
// Maintain state of location within the image tree and record
// values needed to display the inode in ls-style format.
@Override
void visit(ImageElement element, String value) throws IOException {
if(inInode) {
switch(element) {
case INODE_PATH:
if(value.equals("")) path = "/";
else path = value;
break;
case PERMISSION_STRING:
perms = value;
break;
case REPLICATION:
replication = Integer.parseInt(value);
break;
case USER_NAME:
username = value;
break;
case GROUP_NAME:
group = value;
break;
case NUM_BYTES:
filesize += Long.parseLong(value);
break;
case MODIFICATION_TIME:
modTime = value;
break;
case SYMLINK:
linkTarget = value;
break;
default:
// This is OK. We're not looking for all the values.
break;
}
}
}
@Override
void visitEnclosingElement(ImageElement element) throws IOException {
elemQ.push(element);
if(element == ImageElement.INODE)
newLine();
}
@Override
void visitEnclosingElement(ImageElement element,
ImageElement key, String value) throws IOException {
elemQ.push(element);
if(element == ImageElement.INODE)
newLine();
else if (element == ImageElement.BLOCKS)
numBlocks = Integer.parseInt(value);
}
}
| LsImageVisitor |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/builder/MapperBuilderAssistant.java | {
"start": 2200,
"end": 19870
} | class ____ extends BaseBuilder {
private String currentNamespace;
private final String resource;
private Cache currentCache;
private boolean unresolvedCacheRef; // issue #676
public MapperBuilderAssistant(Configuration configuration, String resource) {
super(configuration);
ErrorContext.instance().resource(resource);
this.resource = resource;
}
public String getCurrentNamespace() {
return currentNamespace;
}
public void setCurrentNamespace(String currentNamespace) {
if (currentNamespace == null) {
throw new BuilderException("The mapper element requires a namespace attribute to be specified.");
}
if (this.currentNamespace != null && !this.currentNamespace.equals(currentNamespace)) {
throw new BuilderException(
"Wrong namespace. Expected '" + this.currentNamespace + "' but found '" + currentNamespace + "'.");
}
this.currentNamespace = currentNamespace;
}
public String applyCurrentNamespace(String base, boolean isReference) {
if (base == null) {
return null;
}
if (isReference) {
// is it qualified with any namespace yet?
if (base.contains(".")) {
return base;
}
} else {
// is it qualified with this namespace yet?
if (base.startsWith(currentNamespace + ".")) {
return base;
}
if (base.contains(".")) {
throw new BuilderException("Dots are not allowed in element names, please remove it from " + base);
}
}
return currentNamespace + "." + base;
}
public Cache useCacheRef(String namespace) {
if (namespace == null) {
throw new BuilderException("cache-ref element requires a namespace attribute.");
}
try {
unresolvedCacheRef = true;
Cache cache = configuration.getCache(namespace);
if (cache == null) {
throw new IncompleteElementException("No cache for namespace '" + namespace + "' could be found.");
}
currentCache = cache;
unresolvedCacheRef = false;
return cache;
} catch (IllegalArgumentException e) {
throw new IncompleteElementException("No cache for namespace '" + namespace + "' could be found.", e);
}
}
public Cache useNewCache(Class<? extends Cache> typeClass, Class<? extends Cache> evictionClass, Long flushInterval,
Integer size, boolean readWrite, boolean blocking, Properties props) {
Cache cache = new CacheBuilder(currentNamespace).implementation(valueOrDefault(typeClass, PerpetualCache.class))
.addDecorator(valueOrDefault(evictionClass, LruCache.class)).clearInterval(flushInterval).size(size)
.readWrite(readWrite).blocking(blocking).properties(props).build();
configuration.addCache(cache);
currentCache = cache;
return cache;
}
public ParameterMap addParameterMap(String id, Class<?> parameterClass, List<ParameterMapping> parameterMappings) {
id = applyCurrentNamespace(id, false);
ParameterMap parameterMap = new ParameterMap.Builder(configuration, id, parameterClass, parameterMappings).build();
configuration.addParameterMap(parameterMap);
return parameterMap;
}
public ParameterMapping buildParameterMapping(Class<?> parameterType, String property, Class<?> javaType,
JdbcType jdbcType, String resultMap, ParameterMode parameterMode, Class<? extends TypeHandler<?>> typeHandler,
Integer numericScale) {
resultMap = applyCurrentNamespace(resultMap, true);
// Class parameterType = parameterMapBuilder.type();
Class<?> javaTypeClass = resolveParameterJavaType(parameterType, property, javaType, jdbcType);
TypeHandler<?> typeHandlerInstance = resolveTypeHandler(javaTypeClass, jdbcType, typeHandler);
return new ParameterMapping.Builder(configuration, property, javaTypeClass).jdbcType(jdbcType)
.resultMapId(resultMap).mode(parameterMode).numericScale(numericScale).typeHandler(typeHandlerInstance).build();
}
public ResultMap addResultMap(String id, Class<?> type, String extend, Discriminator discriminator,
List<ResultMapping> resultMappings, Boolean autoMapping) {
id = applyCurrentNamespace(id, false);
extend = applyCurrentNamespace(extend, true);
if (extend != null) {
if (!configuration.hasResultMap(extend)) {
throw new IncompleteElementException("Could not find a parent resultMap with id '" + extend + "'");
}
ResultMap resultMap = configuration.getResultMap(extend);
List<ResultMapping> extendedResultMappings = new ArrayList<>(resultMap.getResultMappings());
extendedResultMappings.removeAll(resultMappings);
// Remove parent constructor if this resultMap declares a constructor.
boolean declaresConstructor = false;
for (ResultMapping resultMapping : resultMappings) {
if (resultMapping.getFlags().contains(ResultFlag.CONSTRUCTOR)) {
declaresConstructor = true;
break;
}
}
if (declaresConstructor) {
extendedResultMappings.removeIf(resultMapping -> resultMapping.getFlags().contains(ResultFlag.CONSTRUCTOR));
}
resultMappings.addAll(extendedResultMappings);
}
ResultMap resultMap = new ResultMap.Builder(configuration, id, type, resultMappings, autoMapping)
.discriminator(discriminator).build();
configuration.addResultMap(resultMap);
return resultMap;
}
public Discriminator buildDiscriminator(Class<?> resultType, String column, Class<?> javaType, JdbcType jdbcType,
Class<? extends TypeHandler<?>> typeHandler, Map<String, String> discriminatorMap) {
ResultMapping resultMapping = buildResultMapping(resultType, null, column, javaType, jdbcType, null, null, null,
null, typeHandler, new ArrayList<>(), null, null, false);
Map<String, String> namespaceDiscriminatorMap = new HashMap<>();
for (Map.Entry<String, String> e : discriminatorMap.entrySet()) {
String resultMap = e.getValue();
resultMap = applyCurrentNamespace(resultMap, true);
namespaceDiscriminatorMap.put(e.getKey(), resultMap);
}
return new Discriminator.Builder(configuration, resultMapping, namespaceDiscriminatorMap).build();
}
public MappedStatement addMappedStatement(String id, SqlSource sqlSource, StatementType statementType,
SqlCommandType sqlCommandType, Integer fetchSize, Integer timeout, String parameterMap, Class<?> parameterType,
String resultMap, Class<?> resultType, ResultSetType resultSetType, boolean flushCache, boolean useCache,
boolean resultOrdered, KeyGenerator keyGenerator, String keyProperty, String keyColumn, String databaseId,
LanguageDriver lang, String resultSets, boolean dirtySelect, ParamNameResolver paramNameResolver) {
if (unresolvedCacheRef) {
throw new IncompleteElementException("Cache-ref not yet resolved");
}
id = applyCurrentNamespace(id, false);
MappedStatement.Builder statementBuilder = new MappedStatement.Builder(configuration, id, sqlSource, sqlCommandType)
.resource(resource).fetchSize(fetchSize).timeout(timeout).statementType(statementType)
.keyGenerator(keyGenerator).keyProperty(keyProperty).keyColumn(keyColumn).databaseId(databaseId).lang(lang)
.resultOrdered(resultOrdered).resultSets(resultSets)
.resultMaps(getStatementResultMaps(resultMap, resultType, id)).resultSetType(resultSetType)
.flushCacheRequired(flushCache).useCache(useCache).cache(currentCache).dirtySelect(dirtySelect)
.paramNameResolver(paramNameResolver);
ParameterMap statementParameterMap = getStatementParameterMap(parameterMap, parameterType, id);
if (statementParameterMap != null) {
statementBuilder.parameterMap(statementParameterMap);
}
MappedStatement statement = statementBuilder.build();
configuration.addMappedStatement(statement);
return statement;
}
/**
* Backward compatibility signature 'addMappedStatement'.
*
* @param id
* the id
* @param sqlSource
* the sql source
* @param statementType
* the statement type
* @param sqlCommandType
* the sql command type
* @param fetchSize
* the fetch size
* @param timeout
* the timeout
* @param parameterMap
* the parameter map
* @param parameterType
* the parameter type
* @param resultMap
* the result map
* @param resultType
* the result type
* @param resultSetType
* the result set type
* @param flushCache
* the flush cache
* @param useCache
* the use cache
* @param resultOrdered
* the result ordered
* @param keyGenerator
* the key generator
* @param keyProperty
* the key property
* @param keyColumn
* the key column
* @param databaseId
* the database id
* @param lang
* the lang
*
* @return the mapped statement
*/
public MappedStatement addMappedStatement(String id, SqlSource sqlSource, StatementType statementType,
SqlCommandType sqlCommandType, Integer fetchSize, Integer timeout, String parameterMap, Class<?> parameterType,
String resultMap, Class<?> resultType, ResultSetType resultSetType, boolean flushCache, boolean useCache,
boolean resultOrdered, KeyGenerator keyGenerator, String keyProperty, String keyColumn, String databaseId,
LanguageDriver lang, String resultSets) {
return addMappedStatement(id, sqlSource, statementType, sqlCommandType, fetchSize, timeout, parameterMap,
parameterType, resultMap, resultType, resultSetType, flushCache, useCache, resultOrdered, keyGenerator,
keyProperty, keyColumn, databaseId, lang, null, false, null);
}
public MappedStatement addMappedStatement(String id, SqlSource sqlSource, StatementType statementType,
SqlCommandType sqlCommandType, Integer fetchSize, Integer timeout, String parameterMap, Class<?> parameterType,
String resultMap, Class<?> resultType, ResultSetType resultSetType, boolean flushCache, boolean useCache,
boolean resultOrdered, KeyGenerator keyGenerator, String keyProperty, String keyColumn, String databaseId,
LanguageDriver lang) {
return addMappedStatement(id, sqlSource, statementType, sqlCommandType, fetchSize, timeout, parameterMap,
parameterType, resultMap, resultType, resultSetType, flushCache, useCache, resultOrdered, keyGenerator,
keyProperty, keyColumn, databaseId, lang, null);
}
/**
 * Returns {@code value} unless it is {@code null}, in which case {@code defaultValue}
 * is returned instead. Both arguments may be {@code null}.
 */
private <T> T valueOrDefault(T value, T defaultValue) {
  return value != null ? value : defaultValue;
}
/**
 * Resolves the parameter map for a statement: either an explicitly referenced map
 * (looked up by namespace-qualified id) or an inline map synthesized from the
 * parameter type. Returns {@code null} when neither is given.
 */
private ParameterMap getStatementParameterMap(String parameterMapName, Class<?> parameterTypeClass,
    String statementId) {
  String resolvedName = applyCurrentNamespace(parameterMapName, true);
  if (resolvedName != null) {
    try {
      return configuration.getParameterMap(resolvedName);
    } catch (IllegalArgumentException e) {
      // Surface a resolvable-later error so incomplete elements can be retried.
      throw new IncompleteElementException("Could not find parameter map " + resolvedName, e);
    }
  }
  if (parameterTypeClass == null) {
    return null;
  }
  // No explicit map: build an empty inline parameter map keyed off the statement id.
  List<ParameterMapping> parameterMappings = new ArrayList<>();
  return new ParameterMap.Builder(configuration, statementId + "-Inline", parameterTypeClass, parameterMappings)
      .build();
}
/**
 * Resolves the result maps for a statement. A non-null {@code resultMap} is treated as a
 * comma-separated list of result map ids (each qualified with the current namespace);
 * otherwise, if {@code resultType} is given, a single empty inline result map is built.
 */
private List<ResultMap> getStatementResultMaps(String resultMap, Class<?> resultType, String statementId) {
  String resolvedResultMap = applyCurrentNamespace(resultMap, true);
  List<ResultMap> statementResultMaps = new ArrayList<>();
  if (resolvedResultMap != null) {
    for (String resultMapName : resolvedResultMap.split(",")) {
      try {
        // Lookup uses the trimmed id; the error message keeps the raw token as written.
        statementResultMaps.add(configuration.getResultMap(resultMapName.trim()));
      } catch (IllegalArgumentException e) {
        throw new IncompleteElementException(
            "Could not find result map '" + resultMapName + "' referenced from '" + statementId + "'", e);
      }
    }
  } else if (resultType != null) {
    // No explicit map: synthesize an inline result map keyed off the statement id.
    statementResultMaps.add(
        new ResultMap.Builder(configuration, statementId + "-Inline", resultType, new ArrayList<>(), null).build());
  }
  return statementResultMaps;
}
/**
 * Builds a {@link ResultMapping} for one property of the given result type, resolving the
 * property's Java type via its setter, instantiating the type handler, and parsing composite
 * column names when a nested select or foreign column is present.
 *
 * @return the result mapping
 */
public ResultMapping buildResultMapping(Class<?> resultType, String property, String column, Class<?> javaType,
    JdbcType jdbcType, String nestedSelect, String nestedResultMap, String notNullColumn, String columnPrefix,
    Class<? extends TypeHandler<?>> typeHandler, List<ResultFlag> flags, String resultSet, String foreignColumn,
    boolean lazy) {
  Entry<Type, Class<?>> setterType = resolveSetterType(resultType, property, javaType);
  TypeHandler<?> typeHandlerInstance = resolveTypeHandler(setterType.getKey(), jdbcType, typeHandler);
  // Composite columns ("{prop=col,...}") only apply when a nested select or foreign column is set.
  boolean hasNestedSelect = nestedSelect != null && !nestedSelect.isEmpty();
  boolean hasForeignColumn = foreignColumn != null && !foreignColumn.isEmpty();
  List<ResultMapping> composites = hasNestedSelect || hasForeignColumn
      ? parseCompositeColumnName(column)
      : Collections.emptyList();
  ResultMapping.Builder builder = new ResultMapping.Builder(configuration, property, column, setterType.getValue());
  builder.jdbcType(jdbcType);
  builder.nestedQueryId(applyCurrentNamespace(nestedSelect, true));
  builder.nestedResultMapId(applyCurrentNamespace(nestedResultMap, true));
  builder.resultSet(resultSet);
  builder.typeHandler(typeHandlerInstance);
  builder.flags(flags == null ? new ArrayList<>() : flags);
  builder.composites(composites);
  builder.notNullColumns(parseMultipleColumnNames(notNullColumn));
  builder.columnPrefix(columnPrefix);
  builder.foreignColumn(foreignColumn);
  builder.lazy(lazy);
  return builder.build();
}
/**
 * Backward compatibility signature of {@code buildResultMapping}: omits the result set,
 * foreign column and laziness attributes, defaulting laziness to the configuration's
 * global lazy-loading setting.
 *
 * @param resultType
 *          the result type
 * @param property
 *          the property
 * @param column
 *          the column
 * @param javaType
 *          the java type
 * @param jdbcType
 *          the jdbc type
 * @param nestedSelect
 *          the nested select
 * @param nestedResultMap
 *          the nested result map
 * @param notNullColumn
 *          the not null column
 * @param columnPrefix
 *          the column prefix
 * @param typeHandler
 *          the type handler
 * @param flags
 *          the flags
 *
 * @return the result mapping
 */
public ResultMapping buildResultMapping(Class<?> resultType, String property, String column, Class<?> javaType,
    JdbcType jdbcType, String nestedSelect, String nestedResultMap, String notNullColumn, String columnPrefix,
    Class<? extends TypeHandler<?>> typeHandler, List<ResultFlag> flags) {
  return buildResultMapping(resultType, property, column, javaType, jdbcType, nestedSelect, nestedResultMap,
      notNullColumn, columnPrefix, typeHandler, flags, null, null, configuration.isLazyLoadingEnabled());
}
/**
 * Gets the language driver instance registered for the given driver class.
 * Pure delegation to the configuration.
 *
 * @param langClass
 *          the lang class
 *
 * @return the language driver
 *
 * @deprecated Use {@link Configuration#getLanguageDriver(Class)}
 */
@Deprecated
public LanguageDriver getLanguageDriver(Class<? extends LanguageDriver> langClass) {
  return configuration.getLanguageDriver(langClass);
}
/**
 * Splits a column-name attribute into individual column names. A value containing a comma
 * is tokenized on "{}, " delimiters (so "{col1, col2}" yields col1 and col2); a value with
 * no comma is taken as a single column name verbatim. A null input yields an empty set.
 */
private Set<String> parseMultipleColumnNames(String columnName) {
  Set<String> columns = new HashSet<>();
  if (columnName == null) {
    return columns;
  }
  if (columnName.indexOf(',') == -1) {
    columns.add(columnName);
    return columns;
  }
  StringTokenizer tokenizer = new StringTokenizer(columnName, "{}, ", false);
  while (tokenizer.hasMoreTokens()) {
    columns.add(tokenizer.nextToken());
  }
  return columns;
}
/**
 * Parses a composite column attribute such as "{prop1=col1,prop2=col2}" into one
 * {@link ResultMapping} per property/column pair. Input that contains neither '=' nor ','
 * (or is null) is not composite and yields an empty list.
 */
private List<ResultMapping> parseCompositeColumnName(String columnName) {
  List<ResultMapping> composites = new ArrayList<>();
  boolean looksComposite = columnName != null && (columnName.indexOf('=') > -1 || columnName.indexOf(',') > -1);
  if (!looksComposite) {
    return composites;
  }
  // Tokens alternate: property, column, property, column, ...
  StringTokenizer tokenizer = new StringTokenizer(columnName, "{}=, ", false);
  while (tokenizer.hasMoreTokens()) {
    String property = tokenizer.nextToken();
    String column = tokenizer.nextToken();
    composites.add(new ResultMapping.Builder(configuration, property, column, (TypeHandler<?>) null).build());
  }
  return composites;
}
/**
 * Resolves the (generic type, raw class) pair for a property's setter. An explicit
 * {@code javaType} wins outright; otherwise the setter type is looked up reflectively,
 * falling back to {@code Object} when the property is null or the type is unresolvable.
 */
private Entry<Type, Class<?>> resolveSetterType(Class<?> resultType, String property, Class<?> javaType) {
  if (javaType != null) {
    return Map.entry(javaType, javaType);
  }
  if (property == null) {
    return Map.entry(Object.class, Object.class);
  }
  MetaClass metaResultType = MetaClass.forClass(resultType, configuration.getReflectorFactory());
  try {
    return metaResultType.getGenericSetterType(property);
  } catch (Exception e) {
    // Not all property types are resolvable; deliberately fall back to Object.
    return Map.entry(Object.class, Object.class);
  }
}
/**
 * Resolves the Java type of a parameter property when none was declared: CURSOR jdbc types
 * map to {@link ResultSet}, Map-typed parameters default to {@code Object}, and everything
 * else is resolved from the property's getter; {@code Object} is the final fallback.
 */
private Class<?> resolveParameterJavaType(Class<?> resultType, String property, Class<?> javaType,
    JdbcType jdbcType) {
  Class<?> resolved = javaType;
  if (resolved == null) {
    if (JdbcType.CURSOR.equals(jdbcType)) {
      resolved = ResultSet.class;
    } else if (Map.class.isAssignableFrom(resultType)) {
      // Map parameters carry no static property types.
      resolved = Object.class;
    } else {
      resolved = MetaClass.forClass(resultType, configuration.getReflectorFactory()).getGetterType(property);
    }
  }
  return resolved == null ? Object.class : resolved;
}
}
| MapperBuilderAssistant |
java | playframework__playframework | core/play/src/main/java/play/mvc/WebSocket.java | {
"start": 674,
"end": 3772
} | class ____ {
/**
* Invoke the WebSocket.
*
* @param request The request for the WebSocket.
* @return A future of either a result to reject the WebSocket connection with, or a Flow to
* handle the WebSocket.
*/
public abstract CompletionStage<F.Either<Result, Flow<Message, Message, ?>>> apply(
Http.RequestHeader request);
/** Acceptor for WebSockets to directly handle Play's Message objects. */
public static final MappedWebSocketAcceptor<Message, Message> Message =
new WebSocket.MappedWebSocketAcceptor<>(
Scala.partialFunction(message -> F.Either.Left(message)), Function.identity());
/** Acceptor for text WebSockets. */
public static final MappedWebSocketAcceptor<String, String> Text =
new MappedWebSocketAcceptor<>(
Scala.partialFunction(
message -> {
if (message instanceof Message.Text) {
return F.Either.Left(((Message.Text) message).data());
} else if (message instanceof Message.Binary) {
return F.Either.Right(
new Message.Close(
CloseCodes.Unacceptable(), "This websocket only accepts text frames"));
} else {
throw Scala.noMatch();
}
}),
Message.Text::new);
/** Acceptor for binary WebSockets. */
public static final MappedWebSocketAcceptor<ByteString, ByteString> Binary =
new MappedWebSocketAcceptor<>(
Scala.partialFunction(
message -> {
if (message instanceof Message.Binary) {
return F.Either.Left(((Message.Binary) message).data());
} else if (message instanceof Message.Text) {
return F.Either.Right(
new Message.Close(
CloseCodes.Unacceptable(), "This websocket only accepts binary frames"));
} else {
throw Scala.noMatch();
}
}),
Message.Binary::new);
/** Acceptor for JSON WebSockets. */
public static final MappedWebSocketAcceptor<JsonNode, JsonNode> Json =
new MappedWebSocketAcceptor<>(
Scala.partialFunction(
message -> {
try {
if (message instanceof Message.Binary) {
return F.Either.Left(
play.libs.Json.parse(((Message.Binary) message).data().asInputStream()));
} else if (message instanceof Message.Text) {
return F.Either.Left(play.libs.Json.parse(((Message.Text) message).data()));
}
} catch (RuntimeException e) {
return F.Either.Right(
new Message.Close(CloseCodes.Unacceptable(), "Unable to parse JSON message"));
}
throw Scala.noMatch();
}),
json -> new Message.Text(play.libs.Json.stringify(json)));
/**
* Acceptor for JSON WebSockets.
*
* @param in The | WebSocket |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/idclass/IdClassWithSuperclassTest.java | {
"start": 1673,
"end": 2021
} | class ____ {
@Id
private Integer parentId;
@Id
private Integer childId;
public MyEntity() {
}
public MyEntity(Integer parentId, Integer childId) {
this.parentId = parentId;
this.childId = childId;
}
public Integer getParentId() {
return parentId;
}
public Integer getChildId() {
return childId;
}
}
}
| MyEntity |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/BaseSyncParams.java | {
"start": 680,
"end": 1537
} | class ____<T> implements SyncArgs<T> {
private SyncMode syncMode = SyncMode.AUTO;
private SyncFailureMode syncFailureMode = SyncFailureMode.LOG_WARNING;
private long syncTimeout = 1000;
@Override
public T syncMode(SyncMode syncMode) {
this.syncMode = syncMode;
return (T) this;
}
@Override
public T syncFailureMode(SyncFailureMode syncFailureMode) {
this.syncFailureMode = syncFailureMode;
return (T) this;
}
@Override
public T syncTimeout(Duration timeout) {
this.syncTimeout = timeout.toMillis();
return (T) this;
}
public SyncMode getSyncMode() {
return syncMode;
}
public SyncFailureMode getSyncFailureMode() {
return syncFailureMode;
}
public long getSyncTimeout() {
return syncTimeout;
}
}
| BaseSyncParams |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallCheckerTest.java | {
"start": 24145,
"end": 24481
} | class ____ {
void f(Thread t) {
// BUG: Diagnostic contains: start
t.run();
}
}
""")
.doTest();
}
@Test
public void negative_threadSuperRun() {
testHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/bitset/BitSetMutabilityPlan.java | {
"start": 377,
"end": 975
} | class ____ implements MutabilityPlan<BitSet> {
/**
* Singleton access
*/
public static final BitSetMutabilityPlan INSTANCE = new BitSetMutabilityPlan();
@Override
public boolean isMutable() {
return true;
}
@Override
public BitSet deepCopy(BitSet value) {
return BitSet.valueOf(value.toByteArray());
}
@Override
public Serializable disassemble(BitSet value, SharedSessionContract session) {
return value.toByteArray();
}
@Override
public BitSet assemble(Serializable cached, SharedSessionContract session) {
return BitSet.valueOf((byte[]) cached);
}
}
| BitSetMutabilityPlan |
java | quarkusio__quarkus | integration-tests/logging-panache/src/test/java/io/quarkus/logging/LoggingWithPanacheDevModeTest.java | {
"start": 473,
"end": 1602
} | class ____ {
@RegisterExtension
static final QuarkusDevModeTest TEST = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar.addClasses(LoggingEndpoint.class))
.setLogRecordPredicate(record -> "io.quarkus.logging.LoggingEndpoint".equals(record.getLoggerName()));
@Test
public void testRepositoryIsReloaded() {
Formatter formatter = new PatternFormatter("[%p] %m");
{
when().get("/logging").then().body(is("hello"));
List<String> lines = TEST.getLogRecords().stream().map(formatter::format).collect(Collectors.toList());
assertThat(lines).containsExactly("[INFO] hello");
TEST.clearLogRecords();
}
TEST.modifySourceFile("LoggingEndpoint.java", s -> s.replace("hello", "hi"));
{
when().get("/logging").then().body(is("hi"));
List<String> lines = TEST.getLogRecords().stream().map(formatter::format).collect(Collectors.toList());
assertThat(lines).containsExactly("[INFO] hi");
TEST.clearLogRecords();
}
}
}
| LoggingWithPanacheDevModeTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/RMContainerTokenSecretManager.java | {
"start": 5702,
"end": 8413
} | class ____ extends TimerTask {
@Override
public void run() {
// Activation will happen after an absolute time interval. It will be good
// if we can force activation after an NM updates and acknowledges a
// roll-over. But that is only possible when we move to per-NM keys. TODO:
activateNextMasterKey();
}
}
@VisibleForTesting
public Token createContainerToken(ContainerId containerId,
int containerVersion, NodeId nodeId, String appSubmitter,
Resource capability, Priority priority, long createTime) {
return createContainerToken(containerId, containerVersion, nodeId,
appSubmitter, capability, priority, createTime,
null, null, ContainerType.TASK,
ExecutionType.GUARANTEED, -1, null);
}
/**
* Helper function for creating ContainerTokens.
*
* @param containerId Container Id
* @param containerVersion Container version
* @param nodeId Node Id
* @param appSubmitter App Submitter
* @param capability Capability
* @param priority Priority
* @param createTime Create Time
* @param logAggregationContext Log Aggregation Context
* @param nodeLabelExpression Node Label Expression
* @param containerType Container Type
* @param execType Execution Type
* @param allocationRequestId allocationRequestId
* @param allocationTags allocation Tags
* @return the container-token
*/
public Token createContainerToken(ContainerId containerId,
int containerVersion, NodeId nodeId, String appSubmitter,
Resource capability, Priority priority, long createTime,
LogAggregationContext logAggregationContext, String nodeLabelExpression,
ContainerType containerType, ExecutionType execType,
long allocationRequestId, Set<String> allocationTags) {
byte[] password;
ContainerTokenIdentifier tokenIdentifier;
long expiryTimeStamp =
System.currentTimeMillis() + containerTokenExpiryInterval;
// Lock so that we use the same MasterKey's keyId and its bytes
this.readLock.lock();
try {
tokenIdentifier =
new ContainerTokenIdentifier(containerId, containerVersion,
nodeId.toString(), appSubmitter, capability, expiryTimeStamp,
this.currentMasterKey.getMasterKey().getKeyId(),
ResourceManager.getClusterTimeStamp(), priority, createTime,
logAggregationContext, nodeLabelExpression, containerType,
execType, allocationRequestId, allocationTags);
password = this.createPassword(tokenIdentifier);
} finally {
this.readLock.unlock();
}
return BuilderUtils.newContainerToken(nodeId, password, tokenIdentifier);
}
} | NextKeyActivator |
java | spring-projects__spring-boot | module/spring-boot-session-jdbc/src/main/java/org/springframework/boot/session/jdbc/autoconfigure/JdbcSessionAutoConfiguration.java | {
"start": 4746,
"end": 4975
} | class ____ extends OnDatabaseInitializationCondition {
OnJdbcSessionDatasourceInitializationCondition() {
super("Jdbc Session", "spring.session.jdbc.initialize-schema");
}
}
}
| OnJdbcSessionDatasourceInitializationCondition |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/ShutdownableService.java | {
"start": 1225,
"end": 1456
} | interface ____ extends Service {
/**
* Shutdown the service, which means it cannot be started again.
*
* @throws RuntimeCamelException is thrown if shutdown failed
*/
void shutdown();
}
| ShutdownableService |
java | elastic__elasticsearch | libs/exponential-histogram/src/main/java/org/elasticsearch/exponentialhistogram/ExponentialHistogramMerger.java | {
"start": 1541,
"end": 12883
} | class ____ implements Accountable, Releasable {
private static final long BASE_SIZE = RamUsageEstimator.shallowSizeOfInstance(ExponentialHistogramMerger.class) + DownscaleStats.SIZE;
// Our algorithm is not in-place, therefore we use two histograms and ping-pong between them
@Nullable
private FixedCapacityExponentialHistogram result;
@Nullable
private FixedCapacityExponentialHistogram buffer;
private final int bucketLimit;
private final int maxScale;
private final DownscaleStats downscaleStats;
private final ExponentialHistogramCircuitBreaker circuitBreaker;
private boolean closed = false;
/**
* Creates a new instance with the specified bucket limit.
*
* @param bucketLimit the maximum number of buckets the result histogram is allowed to have, must be at least 4
* @param circuitBreaker the circuit breaker to use to limit memory allocations
*/
public static ExponentialHistogramMerger create(int bucketLimit, ExponentialHistogramCircuitBreaker circuitBreaker) {
circuitBreaker.adjustBreaker(BASE_SIZE);
boolean success = false;
try {
ExponentialHistogramMerger result = new ExponentialHistogramMerger(bucketLimit, circuitBreaker);
success = true;
return result;
} finally {
if (success == false) {
circuitBreaker.adjustBreaker(-BASE_SIZE);
}
}
}
private ExponentialHistogramMerger(int bucketLimit, ExponentialHistogramCircuitBreaker circuitBreaker) {
this(bucketLimit, ExponentialHistogram.MAX_SCALE, circuitBreaker);
}
// Only intended for testing, using this in production means an unnecessary reduction of precision
private ExponentialHistogramMerger(int bucketLimit, int maxScale, ExponentialHistogramCircuitBreaker circuitBreaker) {
// We need at least four buckets to represent any possible distribution
if (bucketLimit < 4) {
throw new IllegalArgumentException("The bucket limit must be at least 4");
}
this.bucketLimit = bucketLimit;
this.maxScale = maxScale;
this.circuitBreaker = circuitBreaker;
downscaleStats = new DownscaleStats();
}
public static ExponentialHistogramMerger createWithMaxScale(
int bucketLimit,
int maxScale,
ExponentialHistogramCircuitBreaker circuitBreaker
) {
circuitBreaker.adjustBreaker(BASE_SIZE);
return new ExponentialHistogramMerger(bucketLimit, maxScale, circuitBreaker);
}
@Override
public void close() {
if (closed) {
assert false : "ExponentialHistogramMerger closed multiple times";
} else {
closed = true;
if (result != null) {
result.close();
result = null;
}
if (buffer != null) {
buffer.close();
buffer = null;
}
circuitBreaker.adjustBreaker(-BASE_SIZE);
}
}
@Override
public long ramBytesUsed() {
long size = BASE_SIZE;
if (result != null) {
size += result.ramBytesUsed();
}
if (buffer != null) {
size += buffer.ramBytesUsed();
}
return size;
}
/**
* Returns the merged histogram and clears this merger.
* The caller takes ownership of the returned histogram and must ensure that {@link #close()} is called.
*
* @return the merged histogram
*/
public ReleasableExponentialHistogram getAndClear() {
assert closed == false : "ExponentialHistogramMerger already closed";
ReleasableExponentialHistogram retVal = (result == null) ? ReleasableExponentialHistogram.empty() : result;
result = null;
return retVal;
}
/**
* Gets the current merged histogram without clearing this merger.
* Note that the ownership of the returned histogram remains with this merger,
* so the caller must not close it.
* The returned histogram is only valid until the next call to {@link #add(ExponentialHistogram)}, or until the merger is closed.
*
* @return the current merged histogram
*/
public ExponentialHistogram get() {
assert closed == false : "ExponentialHistogramMerger already closed";
return (result == null) ? ExponentialHistogram.empty() : result;
}
/**
* Merges the given histogram into the current result, not upscaling it.
* This should be used when merging intermediate results to prevent accumulating errors.
*
* @param toAdd the histogram to merge
*/
public void addWithoutUpscaling(ExponentialHistogram toAdd) {
add(toAdd, false);
}
/**
* Merges the given histogram into the current result. The histogram might be upscaled if needed.
*
* @param toAdd the histogram to merge
*/
public void add(ExponentialHistogram toAdd) {
add(toAdd, true);
}
// This algorithm is very efficient if B has roughly as many buckets as A.
// However, if B is much smaller we still have to iterate over all buckets of A.
// This can be optimized by buffering the buckets of small histograms and only merging them when we have enough buckets.
// The buffered histogram buckets would first be merged with each other, and then be merged with accumulator.
//
// However, benchmarks of a PoC implementation have shown that this only brings significant improvements
// if the accumulator size is 500+ and the merged histograms are smaller than 50 buckets
// and otherwise slows down the merging.
// It would be possible to only enable the buffering for small histograms,
// but the optimization seems not worth the added complexity at this point.
private void add(ExponentialHistogram toAdd, boolean allowUpscaling) {
ExponentialHistogram a = result == null ? ExponentialHistogram.empty() : result;
ExponentialHistogram b = toAdd;
CopyableBucketIterator posBucketsA = a.positiveBuckets().iterator();
CopyableBucketIterator negBucketsA = a.negativeBuckets().iterator();
CopyableBucketIterator posBucketsB = b.positiveBuckets().iterator();
CopyableBucketIterator negBucketsB = b.negativeBuckets().iterator();
ZeroBucket zeroBucket = a.zeroBucket().merge(b.zeroBucket());
zeroBucket = zeroBucket.collapseOverlappingBucketsForAll(posBucketsA, negBucketsA, posBucketsB, negBucketsB);
if (buffer == null) {
buffer = FixedCapacityExponentialHistogram.create(bucketLimit, circuitBreaker);
}
buffer.setZeroBucket(zeroBucket);
buffer.setSum(a.sum() + b.sum());
buffer.setMin(nanAwareAggregate(a.min(), b.min(), Math::min));
buffer.setMax(nanAwareAggregate(a.max(), b.max(), Math::max));
// We attempt to bring everything to the scale of A.
// This might involve increasing the scale for B, which would increase its indices.
// We need to ensure that we do not exceed MAX_INDEX / MIN_INDEX in this case.
int targetScale = Math.min(maxScale, a.scale());
if (allowUpscaling == false) {
targetScale = Math.min(targetScale, b.scale());
}
if (targetScale > b.scale()) {
if (negBucketsB.hasNext()) {
long smallestIndex = negBucketsB.peekIndex();
OptionalLong maximumIndex = b.negativeBuckets().maxBucketIndex();
assert maximumIndex.isPresent()
: "We checked that the negative bucket range is not empty, therefore the maximum index should be present";
int maxScaleIncrease = Math.min(getMaximumScaleIncrease(smallestIndex), getMaximumScaleIncrease(maximumIndex.getAsLong()));
targetScale = Math.min(targetScale, b.scale() + maxScaleIncrease);
}
if (posBucketsB.hasNext()) {
long smallestIndex = posBucketsB.peekIndex();
OptionalLong maximumIndex = b.positiveBuckets().maxBucketIndex();
assert maximumIndex.isPresent()
: "We checked that the positive bucket range is not empty, therefore the maximum index should be present";
int maxScaleIncrease = Math.min(getMaximumScaleIncrease(smallestIndex), getMaximumScaleIncrease(maximumIndex.getAsLong()));
targetScale = Math.min(targetScale, b.scale() + maxScaleIncrease);
}
}
// Now we are sure that everything fits numerically into targetScale.
// However, we might exceed our limit for the total number of buckets.
// Therefore, we try the merge optimistically. If we fail, we reduce the target scale to make everything fit.
MergingBucketIterator positiveMerged = new MergingBucketIterator(posBucketsA.copy(), posBucketsB.copy(), targetScale);
MergingBucketIterator negativeMerged = new MergingBucketIterator(negBucketsA.copy(), negBucketsB.copy(), targetScale);
buffer.resetBuckets(targetScale);
downscaleStats.reset();
int overflowCount = putBuckets(buffer, negativeMerged, false, downscaleStats);
overflowCount += putBuckets(buffer, positiveMerged, true, downscaleStats);
if (overflowCount > 0) {
// UDD-sketch approach: decrease the scale and retry.
int reduction = downscaleStats.getRequiredScaleReductionToReduceBucketCountBy(overflowCount);
targetScale -= reduction;
buffer.resetBuckets(targetScale);
positiveMerged = new MergingBucketIterator(posBucketsA, posBucketsB, targetScale);
negativeMerged = new MergingBucketIterator(negBucketsA, negBucketsB, targetScale);
overflowCount = putBuckets(buffer, negativeMerged, false, null);
overflowCount += putBuckets(buffer, positiveMerged, true, null);
assert overflowCount == 0 : "Should never happen, the histogram should have had enough space";
}
FixedCapacityExponentialHistogram temp = result;
result = buffer;
buffer = temp;
}
private static int putBuckets(
FixedCapacityExponentialHistogram output,
BucketIterator buckets,
boolean isPositive,
DownscaleStats downscaleStats
) {
boolean collectDownScaleStatsOnNext = false;
long prevIndex = 0;
int overflowCount = 0;
while (buckets.hasNext()) {
long idx = buckets.peekIndex();
if (collectDownScaleStatsOnNext) {
downscaleStats.add(prevIndex, idx);
} else {
collectDownScaleStatsOnNext = downscaleStats != null;
}
if (output.tryAddBucket(idx, buckets.peekCount(), isPositive) == false) {
overflowCount++;
}
prevIndex = idx;
buckets.advance();
}
return overflowCount;
}
private static double nanAwareAggregate(double a, double b, DoubleBinaryOperator aggregator) {
if (Double.isNaN(a)) {
return b;
}
if (Double.isNaN(b)) {
return a;
}
return aggregator.applyAsDouble(a, b);
}
}
| ExponentialHistogramMerger |
java | redisson__redisson | redisson/src/test/java/org/redisson/transaction/RedissonTransactionalMapCacheTest.java | {
"start": 329,
"end": 2778
} | class ____ extends RedissonBaseTransactionalMapTest {
@Test
public void testSyncWait() {
String mapCacheName = "map";
String dataKey = "key";
Config redisConfig = new Config();
redisConfig.useReplicatedServers()
.addNodeAddress(redisson.getConfig().useSingleServer().getAddress());
RedissonClient client = Redisson.create(redisConfig);
RTransaction transaction = client.createTransaction(TransactionOptions.defaults());
RMapCache<String, String> cache = transaction.getMapCache(mapCacheName);
cache.putIfAbsent(dataKey, "foo", 1000, TimeUnit.MILLISECONDS);
transaction.commit();
RTransaction transaction2 = client.createTransaction(TransactionOptions.defaults());
RMapCache<String, String> cache2 = transaction2.getMapCache(mapCacheName);
cache2.putIfAbsent(dataKey, "bar", 1000, TimeUnit.MILLISECONDS);
transaction2.commit();
}
@Test
public void testPutIfAbsentTTL() throws InterruptedException {
RMapCache<Object, Object> m = redisson.getMapCache("test");
m.put("1", "2");
m.put("3", "4");
RTransaction transaction = redisson.createTransaction(TransactionOptions.defaults());
RMapCache<Object, Object> map = transaction.getMapCache("test");
assertThat(map.putIfAbsent("3", "2", 1, TimeUnit.SECONDS)).isEqualTo("4");
assertThat(map.putIfAbsent("5", "6", 3, TimeUnit.SECONDS)).isNull();
assertThat(map.putIfAbsent("5", "7", 1, TimeUnit.SECONDS)).isEqualTo("6");
assertThat(m.get("3")).isEqualTo("4");
assertThat(m.size()).isEqualTo(2);
transaction.commit();
assertThat(m.get("1")).isEqualTo("2");
assertThat(m.get("3")).isEqualTo("4");
assertThat(m.get("5")).isEqualTo("6");
Thread.sleep(1500);
assertThat(m.get("3")).isEqualTo("4");
assertThat(m.get("5")).isEqualTo("6");
Thread.sleep(1500);
assertThat(m.get("3")).isEqualTo("4");
assertThat(m.get("5")).isNull();
}
@Override
protected RMap<String, String> getMap() {
return redisson.getMapCache("test");
}
@Override
protected RMap<String, String> getTransactionalMap(RTransaction transaction) {
return transaction.getMapCache("test");
}
}
| RedissonTransactionalMapCacheTest |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorFactory.java | {
"start": 1052,
"end": 2971
} | class ____ extends MultiValuesSourceAggregatorFactory {
private final boolean includeSort;
private final SortOrder sortOrder;
private final int size;
GeoLineAggregatorFactory(
String name,
Map<String, ValuesSourceConfig> configs,
DocValueFormat format,
AggregationContext aggregationContext,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metaData,
boolean includeSort,
SortOrder sortOrder,
int size
) throws IOException {
super(name, configs, format, aggregationContext, parent, subFactoriesBuilder, metaData);
this.includeSort = includeSort;
this.sortOrder = sortOrder;
this.size = size;
}
@Override
protected Aggregator createUnmapped(Aggregator parent, Map<String, Object> metaData) throws IOException {
return new GeoLineAggregator.Empty(name, context, parent, metaData, includeSort, sortOrder, size);
}
@Override
protected Aggregator doCreateInternal(
Map<String, ValuesSourceConfig> configs,
DocValueFormat format,
Aggregator parent,
CardinalityUpperBound cardinality,
Map<String, Object> metaData
) throws IOException {
GeoLineMultiValuesSource valuesSources = new GeoLineMultiValuesSource(configs);
if (context.isInSortOrderExecutionRequired()) {
return new GeoLineAggregator.TimeSeries(name, valuesSources, context, parent, metaData, includeSort, sortOrder, size);
} else {
return new GeoLineAggregator.Normal(name, valuesSources, context, parent, metaData, includeSort, sortOrder, size);
}
}
@Override
public String getStatsSubtype() {
return configs.get(GeoLineAggregationBuilder.POINT_FIELD.getPreferredName()).valueSourceType().typeName();
}
}
| GeoLineAggregatorFactory |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 3818,
"end": 5363
} | class ____ extends Baz {",
" private final long buh;",
"",
" AutoValue_Baz(long buh) {",
" this.buh = buh;",
" }",
"",
" @Override public long buh() {",
" return buh;",
" }",
"",
" @Override public String toString() {",
" return \"Baz{\"",
" + \"buh=\" + buh",
" + \"}\";",
" }",
"",
" @Override public boolean equals(Object o) {",
" if (o == this) {",
" return true;",
" }",
" if (o instanceof Baz) {",
" Baz that = (Baz) o;",
" return this.buh == that.buh();",
" }",
" return false;",
" }",
"",
" @Override public int hashCode() {",
" int h$ = 1;",
" h$ *= 1000003;",
" h$ ^= (int) ((buh >>> 32) ^ buh);",
" return h$;",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor())
.withOptions("-A" + Nullables.NULLABLE_OPTION + "=")
.compile(javaFileObject);
assertThat(compilation)
.generatedSourceFile("foo.bar.AutoValue_Baz")
.hasSourceEquivalentTo(expectedOutput);
}
@Test
public void importTwoWays() {
// Test that referring to the same | AutoValue_Baz |
java | apache__camel | components/camel-cxf/camel-cxf-spring-rest/src/test/java/org/apache/camel/component/cxf/spring/AbstractSpringBeanTestSupport.java | {
"start": 1063,
"end": 1461
} | class ____ {
protected ClassPathXmlApplicationContext ctx;
protected abstract String[] getApplicationContextFiles();
@BeforeEach
public void setUp() throws Exception {
ctx = new ClassPathXmlApplicationContext(getApplicationContextFiles());
}
@AfterEach
public void tearDown() throws Exception {
IOHelper.close(ctx);
}
}
| AbstractSpringBeanTestSupport |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/google/BiMapTestSuiteBuilder.java | {
"start": 2878,
"end": 6944
} | enum ____ implements Feature<Void> {
INVERSE;
@Override
public Set<Feature<? super Void>> getImpliedFeatures() {
return emptySet();
}
}
@Override
protected List<TestSuite> createDerivedSuites(
FeatureSpecificTestSuiteBuilder<
?, ? extends OneSizeTestContainerGenerator<BiMap<K, V>, Entry<K, V>>>
parentBuilder) {
List<TestSuite> derived = super.createDerivedSuites(parentBuilder);
// TODO(cpovirk): consider using this approach (derived suites instead of extension) in
// ListTestSuiteBuilder, etc.?
derived.add(
MapTestSuiteBuilder.using(new MapGenerator<K, V>(parentBuilder.getSubjectGenerator()))
.withFeatures(parentBuilder.getFeatures())
.named(parentBuilder.getName() + " [Map]")
.suppressing(parentBuilder.getSuppressedTests())
.suppressing(SetCreationTester.class.getMethods())
// BiMap.entrySet() duplicate-handling behavior is too confusing for SetCreationTester
.withSetUp(parentBuilder.getSetUp())
.withTearDown(parentBuilder.getTearDown())
.createTestSuite());
/*
* TODO(cpovirk): the Map tests duplicate most of this effort by using a
* CollectionTestSuiteBuilder on values(). It would be nice to avoid that
*/
derived.add(
SetTestSuiteBuilder.using(
new BiMapValueSetGenerator<K, V>(parentBuilder.getSubjectGenerator()))
.withFeatures(computeValuesSetFeatures(parentBuilder.getFeatures()))
.named(parentBuilder.getName() + " values [Set]")
.suppressing(parentBuilder.getSuppressedTests())
.suppressing(SetCreationTester.class.getMethods())
// BiMap.values() duplicate-handling behavior is too confusing for SetCreationTester
.withSetUp(parentBuilder.getSetUp())
.withTearDown(parentBuilder.getTearDown())
.createTestSuite());
if (!parentBuilder.getFeatures().contains(NoRecurse.INVERSE)) {
derived.add(
BiMapTestSuiteBuilder.using(
new InverseBiMapGenerator<K, V>(parentBuilder.getSubjectGenerator()))
.withFeatures(computeInverseFeatures(parentBuilder.getFeatures()))
.named(parentBuilder.getName() + " inverse")
.suppressing(parentBuilder.getSuppressedTests())
.withSetUp(parentBuilder.getSetUp())
.withTearDown(parentBuilder.getTearDown())
.createTestSuite());
}
return derived;
}
private static Set<Feature<?>> computeInverseFeatures(Set<Feature<?>> mapFeatures) {
Set<Feature<?>> inverseFeatures = new HashSet<>(mapFeatures);
boolean nullKeys = inverseFeatures.remove(MapFeature.ALLOWS_NULL_KEYS);
boolean nullValues = inverseFeatures.remove(MapFeature.ALLOWS_NULL_VALUES);
if (nullKeys) {
inverseFeatures.add(MapFeature.ALLOWS_NULL_VALUES);
}
if (nullValues) {
inverseFeatures.add(MapFeature.ALLOWS_NULL_KEYS);
}
inverseFeatures.add(NoRecurse.INVERSE);
inverseFeatures.remove(CollectionFeature.KNOWN_ORDER);
inverseFeatures.add(MapFeature.REJECTS_DUPLICATES_AT_CREATION);
return inverseFeatures;
}
// TODO(lowasser): can we eliminate the duplication from MapTestSuiteBuilder here?
private static Set<Feature<?>> computeValuesSetFeatures(Set<Feature<?>> mapFeatures) {
Set<Feature<?>> valuesCollectionFeatures = computeCommonDerivedCollectionFeatures(mapFeatures);
valuesCollectionFeatures.add(CollectionFeature.ALLOWS_NULL_QUERIES);
if (mapFeatures.contains(MapFeature.ALLOWS_NULL_VALUES)) {
valuesCollectionFeatures.add(CollectionFeature.ALLOWS_NULL_VALUES);
}
valuesCollectionFeatures.add(CollectionFeature.REJECTS_DUPLICATES_AT_CREATION);
return valuesCollectionFeatures;
}
private static Set<Feature<?>> computeCommonDerivedCollectionFeatures(
Set<Feature<?>> mapFeatures) {
return MapTestSuiteBuilder.computeCommonDerivedCollectionFeatures(mapFeatures);
}
}
| NoRecurse |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java | {
"start": 1183,
"end": 6159
} | class ____ extends AsyncRetryDuringSnapshotActionStep {
public static final String NAME = "delete";
private static final Logger logger = LogManager.getLogger(DeleteStep.class);
private static final BiFunction<String, LifecycleExecutionState, String> DEFAULT_TARGET_INDEX_NAME_SUPPLIER = (
indexName,
lifecycleState) -> indexName;
private final BiFunction<String, LifecycleExecutionState, String> targetIndexNameSupplier;
private final boolean indexSurvives;
/**
* Use this constructor to delete the index that ILM is currently operating on.
*/
public DeleteStep(StepKey key, StepKey nextStepKey, Client client) {
this(key, nextStepKey, client, DEFAULT_TARGET_INDEX_NAME_SUPPLIER, false);
}
/**
* Use this constructor to delete a specific index, potentially different from the one that ILM is currently operating on. The parameter
* {@code indexSurvives} indicates whether the index that ILM runs on will survive (i.e. not get deleted) this step.
* Look at the callers of {@link AsyncActionStep#indexSurvives()} for more details.
*/
public DeleteStep(
StepKey key,
StepKey nextStepKey,
Client client,
BiFunction<String, LifecycleExecutionState, String> targetIndexNameSupplier,
boolean indexSurvives
) {
super(key, nextStepKey, client);
this.targetIndexNameSupplier = targetIndexNameSupplier;
this.indexSurvives = indexSurvives;
}
@Override
public void performDuringNoSnapshot(IndexMetadata indexMetadata, ProjectMetadata currentProject, ActionListener<Void> listener) {
String policyName = indexMetadata.getLifecyclePolicyName();
String indexName = targetIndexNameSupplier.apply(indexMetadata.getIndex().getName(), indexMetadata.getLifecycleExecutionState());
IndexAbstraction indexAbstraction = currentProject.getIndicesLookup().get(indexName);
assert indexAbstraction != null : "invalid cluster metadata. index [" + indexName + "] was not found";
DataStream dataStream = indexAbstraction.getParentDataStream();
if (dataStream != null) {
Index failureStoreWriteIndex = dataStream.getWriteFailureIndex();
boolean isFailureStoreWriteIndex = failureStoreWriteIndex != null && indexName.equals(failureStoreWriteIndex.getName());
// using index name equality across this if/else branch as the UUID of the index might change via restoring a data stream
// with one index from snapshot
if (dataStream.getIndices().size() == 1
&& isFailureStoreWriteIndex == false
&& dataStream.getWriteIndex().getName().equals(indexName)) {
// This is the last backing index in the data stream, and it's being deleted because the policy doesn't have a rollover
// phase. The entire stream needs to be deleted, because we can't have an empty list of data stream backing indices.
// We do this even if there are multiple failure store indices because otherwise we would never delete the index.
DeleteDataStreamAction.Request deleteReq = new DeleteDataStreamAction.Request(
MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT,
dataStream.getName()
);
getClient(currentProject.id()).execute(
DeleteDataStreamAction.INSTANCE,
deleteReq,
listener.delegateFailureAndWrap((l, response) -> l.onResponse(null))
);
return;
} else if (isFailureStoreWriteIndex || dataStream.getWriteIndex().getName().equals(indexName)) {
String errorMessage = Strings.format(
"index [%s] is the%s write index for data stream [%s]. "
+ "stopping execution of lifecycle [%s] as a data stream's write index cannot be deleted. manually rolling over the"
+ " index will resume the execution of the policy as the index will not be the data stream's write index anymore",
indexName,
isFailureStoreWriteIndex ? " failure store" : "",
dataStream.getName(),
policyName
);
logger.debug(errorMessage);
listener.onFailure(new IllegalStateException(errorMessage));
return;
}
}
getClient(currentProject.id()).admin()
.indices()
.delete(
new DeleteIndexRequest(indexName).masterNodeTimeout(TimeValue.MAX_VALUE),
listener.delegateFailureAndWrap((l, response) -> l.onResponse(null))
);
}
@Override
public boolean indexSurvives() {
return indexSurvives;
}
@Override
public boolean isRetryable() {
return true;
}
}
| DeleteStep |
java | apache__hadoop | hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/IdentifierResolver.java | {
"start": 1303,
"end": 4024
} | class ____ {
// note that the identifiers are case insensitive
public static final String TEXT_ID = "text";
public static final String RAW_BYTES_ID = "rawbytes";
public static final String TYPED_BYTES_ID = "typedbytes";
public static final String KEY_ONLY_TEXT_ID = "keyonlytext";
private Class<? extends InputWriter> inputWriterClass = null;
private Class<? extends OutputReader> outputReaderClass = null;
private Class outputKeyClass = null;
private Class outputValueClass = null;
/**
* Resolves a given identifier. This method has to be called before calling
* any of the getters.
*/
public void resolve(String identifier) {
if (identifier.equalsIgnoreCase(RAW_BYTES_ID)) {
setInputWriterClass(RawBytesInputWriter.class);
setOutputReaderClass(RawBytesOutputReader.class);
setOutputKeyClass(BytesWritable.class);
setOutputValueClass(BytesWritable.class);
} else if (identifier.equalsIgnoreCase(TYPED_BYTES_ID)) {
setInputWriterClass(TypedBytesInputWriter.class);
setOutputReaderClass(TypedBytesOutputReader.class);
setOutputKeyClass(TypedBytesWritable.class);
setOutputValueClass(TypedBytesWritable.class);
} else if (identifier.equalsIgnoreCase(KEY_ONLY_TEXT_ID)) {
setInputWriterClass(KeyOnlyTextInputWriter.class);
setOutputReaderClass(KeyOnlyTextOutputReader.class);
setOutputKeyClass(Text.class);
setOutputValueClass(NullWritable.class);
} else { // assume TEXT_ID
setInputWriterClass(TextInputWriter.class);
setOutputReaderClass(TextOutputReader.class);
setOutputKeyClass(Text.class);
setOutputValueClass(Text.class);
}
}
/**
* Returns the resolved {@link InputWriter} class.
*/
public Class<? extends InputWriter> getInputWriterClass() {
return inputWriterClass;
}
/**
* Returns the resolved {@link OutputReader} class.
*/
public Class<? extends OutputReader> getOutputReaderClass() {
return outputReaderClass;
}
/**
* Returns the resolved output key class.
*/
public Class getOutputKeyClass() {
return outputKeyClass;
}
/**
* Returns the resolved output value class.
*/
public Class getOutputValueClass() {
return outputValueClass;
}
/**
* Sets the {@link InputWriter} class.
*/
protected void setInputWriterClass(Class<? extends InputWriter>
inputWriterClass) {
this.inputWriterClass = inputWriterClass;
}
/**
* Sets the {@link OutputReader} class.
*/
protected void setOutputReaderClass(Class<? extends OutputReader>
outputReaderClass) {
this.outputReaderClass = outputReaderClass;
}
/**
* Sets the output key | IdentifierResolver |
java | quarkusio__quarkus | extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/InternalError.java | {
"start": 42,
"end": 371
} | class ____ extends RuntimeException {
public InternalError() {
}
public InternalError(String message) {
super(message);
}
public InternalError(String message, Throwable cause) {
super(message, cause);
}
public InternalError(Throwable cause) {
super(cause);
}
}
| InternalError |
java | elastic__elasticsearch | x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/ScalarFunction.java | {
"start": 935,
"end": 1440
} | class ____ extends Function {
/**
* Limit for the BytesRef return of functions.
* <p>
* To be used when there's no CircuitBreaking, as an arbitrary measure to limit memory usage.
* </p>
*/
public static final long MAX_BYTES_REF_RESULT_SIZE = MB.toBytes(1);
protected ScalarFunction(Source source) {
super(source, emptyList());
}
protected ScalarFunction(Source source, List<Expression> fields) {
super(source, fields);
}
}
| ScalarFunction |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/PredicateSpecificationUnitTests.java | {
"start": 5407,
"end": 5610
} | class ____ implements Serializable, PredicateSpecification<Object> {
@Override
public Predicate toPredicate(From<?, Object> root, CriteriaBuilder cb) {
return null;
}
}
}
| SerializableSpecification |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/MultipleOutputs.java | {
"start": 4211,
"end": 4958
} | class ____ implements
* Reducer<WritableComparable, Writable> {
* private MultipleOutputs mos;
*
* public void configure(JobConf conf) {
* ...
* mos = new MultipleOutputs(conf);
* }
*
* public void reduce(WritableComparable key, Iterator<Writable> values,
* OutputCollector output, Reporter reporter)
* throws IOException {
* ...
* mos.getCollector("text", reporter).collect(key, new Text("Hello"));
* mos.getCollector("seq", "A", reporter).collect(key, new Text("Bye"));
* mos.getCollector("seq", "B", reporter).collect(key, new Text("Chau"));
* ...
* }
*
* public void close() throws IOException {
* mos.close();
* ...
* }
*
* }
* </pre>
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public | MOReduce |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java | {
"start": 6609,
"end": 9254
} | class ____ {
final MeanMetric queryMetric = new MeanMetric();
final MeanMetric fetchMetric = new MeanMetric();
/* We store scroll statistics in microseconds because with nanoseconds we run the risk of overflowing the total stats if there are
* many scrolls. For example, on a system with 2^24 scrolls that have been executed, each executing for 2^10 seconds, then using
* nanoseconds would require a numeric representation that can represent at least 2^24 * 2^10 * 10^9 > 2^24 * 2^10 * 2^29 = 2^63
* which exceeds the largest value that can be represented by a long. By using microseconds, we enable capturing one-thousand
* times as many scrolls (i.e., billions of scrolls which at one per second would take 32 years to occur), or scrolls that execute
* for one-thousand times as long (i.e., scrolls that execute for almost twelve days on average).
*/
final MeanMetric scrollMetric = new MeanMetric();
final MeanMetric suggestMetric = new MeanMetric();
final CounterMetric queryCurrent = new CounterMetric();
final CounterMetric fetchCurrent = new CounterMetric();
final CounterMetric scrollCurrent = new CounterMetric();
final CounterMetric suggestCurrent = new CounterMetric();
final CounterMetric queryFailure = new CounterMetric();
final CounterMetric fetchFailure = new CounterMetric();
final ExponentiallyWeightedMovingRate recentSearchLoad;
StatsHolder(SearchStatsSettings searchStatsSettings) {
double lambdaInInverseNanos = Math.log(2.0) / searchStatsSettings.getRecentReadLoadHalfLifeForNewShards().nanos();
this.recentSearchLoad = new ExponentiallyWeightedMovingRate(lambdaInInverseNanos, System.nanoTime());
}
SearchStats.Stats stats() {
return new SearchStats.Stats(
queryMetric.count(),
TimeUnit.NANOSECONDS.toMillis(queryMetric.sum()),
queryCurrent.count(),
queryFailure.count(),
fetchMetric.count(),
TimeUnit.NANOSECONDS.toMillis(fetchMetric.sum()),
fetchCurrent.count(),
fetchFailure.count(),
scrollMetric.count(),
TimeUnit.MICROSECONDS.toMillis(scrollMetric.sum()),
scrollCurrent.count(),
suggestMetric.count(),
TimeUnit.NANOSECONDS.toMillis(suggestMetric.sum()),
suggestCurrent.count(),
recentSearchLoad.getRate(System.nanoTime())
);
}
}
}
| StatsHolder |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java | {
"start": 1939,
"end": 2414
} | class ____ built-in Hadoop Delegation Token
* functionality.
* <p>
* The authentication mechanisms supported by default are Hadoop Simple
* authentication (also known as pseudo authentication) and Kerberos SPNEGO
* authentication.
* <p>
* Additional authentication mechanisms can be supported via {@link
* DelegationTokenAuthenticator} implementations.
* <p>
* The default {@link DelegationTokenAuthenticator} is the {@link
* KerberosDelegationTokenAuthenticator} | with |
java | quarkusio__quarkus | integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/TestEndpoint.java | {
"start": 43542,
"end": 64155
} | enum ____ {
Iterable,
Variadic,
Stream;
}
private void testPersistDao(PersistTest persistTest) {
Person person1 = new Person();
person1.name = "stef1";
Person person2 = new Person();
person2.name = "stef2";
assertFalse(person1.isPersistent());
assertFalse(person2.isPersistent());
switch (persistTest) {
case Iterable:
personDao.persist(Arrays.asList(person1, person2));
break;
case Stream:
personDao.persist(Stream.of(person1, person2));
break;
case Variadic:
personDao.persist(person1, person2);
break;
}
assertTrue(person1.isPersistent());
assertTrue(person2.isPersistent());
}
private Person makeSavedPersonDao(String suffix) {
Person person = new Person();
person.name = "stef" + suffix;
person.status = Status.LIVING;
person.address = new Address("stef street");
addressDao.persist(person.address);
personDao.persist(person);
return person;
}
private Person makeSavedPersonDao() {
Person person = makeSavedPersonDao("");
Dog dog = new Dog("octave", "dalmatian");
dog.owner = person;
person.dogs.add(dog);
dogDao.persist(dog);
return person;
}
private void testPaging(PanacheQuery<Person> query) {
// No paging allowed until a page is setup
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.firstPage(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.previousPage(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.nextPage(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.lastPage(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.hasNextPage(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.hasPreviousPage(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.page(),
"UnsupportedOperationException should have thrown");
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.pageCount(),
"UnsupportedOperationException should have thrown");
// ints
List<Person> persons = query.page(0, 3).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef0", persons.get(0).name);
Assertions.assertEquals("stef1", persons.get(1).name);
Assertions.assertEquals("stef2", persons.get(2).name);
persons = query.page(1, 3).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef3", persons.get(0).name);
Assertions.assertEquals("stef4", persons.get(1).name);
Assertions.assertEquals("stef5", persons.get(2).name);
persons = query.page(2, 3).list();
Assertions.assertEquals(1, persons.size());
Assertions.assertEquals("stef6", persons.get(0).name);
persons = query.page(2, 4).list();
Assertions.assertEquals(0, persons.size());
// page
Page page = new Page(3);
persons = query.page(page).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef0", persons.get(0).name);
Assertions.assertEquals("stef1", persons.get(1).name);
Assertions.assertEquals("stef2", persons.get(2).name);
page = page.next();
persons = query.page(page).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef3", persons.get(0).name);
Assertions.assertEquals("stef4", persons.get(1).name);
Assertions.assertEquals("stef5", persons.get(2).name);
page = page.next();
persons = query.page(page).list();
Assertions.assertEquals(1, persons.size());
Assertions.assertEquals("stef6", persons.get(0).name);
page = page.next();
persons = query.page(page).list();
Assertions.assertEquals(0, persons.size());
// query paging
page = new Page(3);
persons = query.page(page).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef0", persons.get(0).name);
Assertions.assertEquals("stef1", persons.get(1).name);
Assertions.assertEquals("stef2", persons.get(2).name);
assertTrue(query.hasNextPage());
assertFalse(query.hasPreviousPage());
persons = query.nextPage().list();
Assertions.assertEquals(1, query.page().index);
Assertions.assertEquals(3, query.page().size);
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef3", persons.get(0).name);
Assertions.assertEquals("stef4", persons.get(1).name);
Assertions.assertEquals("stef5", persons.get(2).name);
assertTrue(query.hasNextPage());
assertTrue(query.hasPreviousPage());
persons = query.nextPage().list();
Assertions.assertEquals(1, persons.size());
Assertions.assertEquals("stef6", persons.get(0).name);
assertFalse(query.hasNextPage());
assertTrue(query.hasPreviousPage());
persons = query.nextPage().list();
Assertions.assertEquals(0, persons.size());
Assertions.assertEquals(7, query.count());
Assertions.assertEquals(3, query.pageCount());
// mix page with range
persons = query.page(0, 3).range(0, 1).list();
Assertions.assertEquals(2, persons.size());
Assertions.assertEquals("stef0", persons.get(0).name);
Assertions.assertEquals("stef1", persons.get(1).name);
}
private void testRange(PanacheQuery<Person> query) {
List<Person> persons = query.range(0, 2).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef0", persons.get(0).name);
Assertions.assertEquals("stef1", persons.get(1).name);
Assertions.assertEquals("stef2", persons.get(2).name);
persons = query.range(3, 5).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef3", persons.get(0).name);
Assertions.assertEquals("stef4", persons.get(1).name);
Assertions.assertEquals("stef5", persons.get(2).name);
persons = query.range(6, 8).list();
Assertions.assertEquals(1, persons.size());
Assertions.assertEquals("stef6", persons.get(0).name);
persons = query.range(8, 12).list();
Assertions.assertEquals(0, persons.size());
// mix range with page
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).nextPage());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).previousPage());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).pageCount());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).lastPage());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).firstPage());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).hasPreviousPage());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).hasNextPage());
Assertions.assertThrows(UnsupportedOperationException.class, () -> query.range(0, 2).page());
// this is valid as we switch from range to page
persons = query.range(0, 2).page(0, 3).list();
Assertions.assertEquals(3, persons.size());
Assertions.assertEquals("stef0", persons.get(0).name);
Assertions.assertEquals("stef1", persons.get(1).name);
Assertions.assertEquals("stef2", persons.get(2).name);
}
@GET
@Path("accessors")
public String testAccessors() throws NoSuchMethodException, SecurityException {
checkMethod(AccessorEntity.class, "getString", String.class);
checkMethod(AccessorEntity.class, "isBool", boolean.class);
checkMethod(AccessorEntity.class, "getC", char.class);
checkMethod(AccessorEntity.class, "getS", short.class);
checkMethod(AccessorEntity.class, "getI", int.class);
checkMethod(AccessorEntity.class, "getL", long.class);
checkMethod(AccessorEntity.class, "getF", float.class);
checkMethod(AccessorEntity.class, "getD", double.class);
checkMethod(AccessorEntity.class, "getT", Object.class);
checkMethod(AccessorEntity.class, "getT2", Object.class);
checkMethod(AccessorEntity.class, "setString", void.class, String.class);
checkMethod(AccessorEntity.class, "setBool", void.class, boolean.class);
checkMethod(AccessorEntity.class, "setC", void.class, char.class);
checkMethod(AccessorEntity.class, "setS", void.class, short.class);
checkMethod(AccessorEntity.class, "setI", void.class, int.class);
checkMethod(AccessorEntity.class, "setL", void.class, long.class);
checkMethod(AccessorEntity.class, "setF", void.class, float.class);
checkMethod(AccessorEntity.class, "setD", void.class, double.class);
checkMethod(AccessorEntity.class, "setT", void.class, Object.class);
checkMethod(AccessorEntity.class, "setT2", void.class, Object.class);
try {
checkMethod(AccessorEntity.class, "getTrans2", Object.class);
Assertions.fail("transient field should have no getter: trans2");
} catch (NoSuchMethodException x) {
}
try {
checkMethod(AccessorEntity.class, "setTrans2", void.class, Object.class);
Assertions.fail("transient field should have no setter: trans2");
} catch (NoSuchMethodException x) {
}
// Now check that accessors are called
AccessorEntity entity = new AccessorEntity();
@SuppressWarnings("unused")
byte b = entity.b;
Assertions.assertEquals(1, entity.getBCalls);
entity.i = 2;
Assertions.assertEquals(1, entity.setICalls);
Object trans = entity.trans;
Assertions.assertEquals(0, entity.getTransCalls);
entity.trans = trans;
Assertions.assertEquals(0, entity.setTransCalls);
// accessors inside the entity itself
entity.method();
Assertions.assertEquals(2, entity.getBCalls);
Assertions.assertEquals(2, entity.setICalls);
return "OK";
}
private void checkMethod(Class<?> klass, String name, Class<?> returnType, Class<?>... params)
throws NoSuchMethodException, SecurityException {
Method method = klass.getMethod(name, params);
Assertions.assertEquals(returnType, method.getReturnType());
}
@GET
@Path("model1")
@Transactional
public String testModel1() {
Assertions.assertEquals(0, Person.count());
Person person = makeSavedPerson("");
SelfDirtinessTracker trackingPerson = (SelfDirtinessTracker) person;
String[] dirtyAttributes = trackingPerson.$$_hibernate_getDirtyAttributes();
Assertions.assertEquals(0, dirtyAttributes.length);
person.name = "1";
dirtyAttributes = trackingPerson.$$_hibernate_getDirtyAttributes();
Assertions.assertEquals(1, dirtyAttributes.length);
Assertions.assertEquals(1, Person.count());
return "OK";
}
@GET
@Path("model2")
@Transactional
public String testModel2() {
Assertions.assertEquals(1, Person.count());
Person person = Person.findAll().firstResult();
Assertions.assertEquals("1", person.name);
person.name = "2";
return "OK";
}
@GET
@Path("projection")
@Transactional
public String testProjection() {
Assertions.assertEquals(1, Person.count());
PersonName person = Person.findAll().project(PersonName.class).firstResult();
Assertions.assertEquals("2", person.name);
person = Person.find("name", "2").project(PersonName.class).firstResult();
Assertions.assertEquals("2", person.name);
person = Person.find("name = ?1", "2").project(PersonName.class).firstResult();
Assertions.assertEquals("2", person.name);
person = Person.find(String.format(
"select uniqueName, name%sfrom io.quarkus.it.panache.defaultpu.Person%swhere name = ?1",
LINE_SEPARATOR, LINE_SEPARATOR), "2")
.project(PersonName.class)
.firstResult();
Assertions.assertEquals("2", person.name);
person = Person.find("name = :name", Parameters.with("name", "2")).project(PersonName.class).firstResult();
Assertions.assertEquals("2", person.name);
person = Person.find("#Person.getByName", Parameters.with("name", "2")).project(PersonName.class).firstResult();
Assertions.assertEquals("2", person.name);
PanacheQuery<PersonName> query = Person.findAll().project(PersonName.class).page(0, 2);
Assertions.assertEquals(1, query.list().size());
query.nextPage();
Assertions.assertEquals(0, query.list().size());
Assertions.assertEquals(1, Person.findAll().project(PersonName.class).count());
Person owner = makeSavedPerson();
DogDto dogDto = Dog.findAll().project(DogDto.class).firstResult();
Assertions.assertEquals("stef", dogDto.ownerName);
owner.delete();
CatOwner catOwner = new CatOwner("Julie");
catOwner.persist();
Cat bubulle = new Cat("Bubulle", catOwner);
bubulle.weight = 8.5d;
bubulle.persist();
CatDto catDto = Cat.findAll().project(CatDto.class).firstResult();
Assertions.assertEquals("Julie", catDto.ownerName);
CatProjectionBean fieldsProjection = Cat.find("select c.name, c.owner.name as ownerName from Cat c")
.project(CatProjectionBean.class).firstResult();
Assertions.assertEquals("Julie", fieldsProjection.getOwnerName());
fieldsProjection = Cat.find("#Cat.NameAndOwnerName")
.project(CatProjectionBean.class).firstResult();
Assertions.assertEquals("Julie", fieldsProjection.getOwnerName());
PanacheQueryException exception = Assertions.assertThrows(PanacheQueryException.class,
() -> Cat.find("select new FakeClass('fake_cat', 'fake_owner', 12.5 from Cat c)")
.project(CatProjectionBean.class).firstResult());
Assertions.assertTrue(
exception.getMessage().startsWith("Unable to perform a projection on a 'select [distinct]? new' query"));
CatProjectionBean constantProjection = Cat.find("select 'fake_cat', 'fake_owner', 12.5D from Cat c")
.project(CatProjectionBean.class).firstResult();
Assertions.assertEquals("fake_cat", constantProjection.getName());
Assertions.assertEquals("fake_owner", constantProjection.getOwnerName());
Assertions.assertEquals(12.5d, constantProjection.getWeight());
PanacheQuery<CatProjectionBean> projectionQuery = Cat
// The spaces at the beginning are intentional
.find(" SELECT c.name, cast(null as string), SUM(c.weight) from Cat c where name = :name group by name ",
Parameters.with("name", bubulle.name))
.project(CatProjectionBean.class);
CatProjectionBean aggregationProjection = projectionQuery.firstResult();
Assertions.assertEquals(bubulle.name, aggregationProjection.getName());
Assertions.assertNull(aggregationProjection.getOwnerName());
Assertions.assertEquals(bubulle.weight, aggregationProjection.getWeight());
long count = projectionQuery.count();
Assertions.assertEquals(1L, count);
PanacheQuery<CatProjectionBean> projectionDistinctQuery = Cat
// The spaces at the beginning are intentional
.find(" SELECT disTINct c.name, cast(null as string), SUM(c.weight) from Cat c where name = :name group by name ",
Parameters.with("name", bubulle.name))
.project(CatProjectionBean.class);
CatProjectionBean aggregationDistinctProjection = projectionDistinctQuery.singleResult();
Assertions.assertEquals(bubulle.name, aggregationDistinctProjection.getName());
Assertions.assertNull(aggregationDistinctProjection.getOwnerName());
Assertions.assertEquals(bubulle.weight, aggregationDistinctProjection.getWeight());
long countDistinct = projectionDistinctQuery.count();
Assertions.assertEquals(1L, countDistinct);
// We are checking that not everything gets lowercased
PanacheQuery<CatProjectionBean> letterCaseQuery = Cat
// The spaces at the beginning are intentional
.find(" SELECT disTINct 'GARFIELD', 'JoN ArBuCkLe' from Cat c where name = :NamE group by name ",
Parameters.with("NamE", bubulle.name))
.project(CatProjectionBean.class);
CatProjectionBean catView = letterCaseQuery.firstResult();
// Must keep the letter case
Assertions.assertEquals("GARFIELD", catView.getName());
Assertions.assertEquals("JoN ArBuCkLe", catView.getOwnerName());
Cat.deleteAll();
CatOwner.deleteAll();
return "OK";
}
@GET
@Path("projection-nested")
@Transactional
public String testNestedProjection() {
Person person = new Person();
person.name = "2n";
person.uniqueName = "2n";
person.address = new Address("street 2");
person.persist();
PersonDTO personDTO = Person.find(
"select uniqueName, name, " +
" new io.quarkus.it.panache.defaultpu.PersonDTO$AddressDTO(address.street)," +
" new io.quarkus.it.panache.defaultpu.PersonDTO$DescriptionDTO(description.size, description.weight)," +
" description.size" +
" from Person2 where name = ?1",
"2n")
.project(PersonDTO.class)
.firstResult();
person.delete();
Assertions.assertEquals("2n", personDTO.name);
Assertions.assertEquals("street 2", personDTO.address.street);
person = new Person();
person.name = "3";
person.uniqueName = "3";
person.address = new Address("street 3");
person.address.persist();
person.description = new PersonDescription();
person.description.weight = 75;
person.description.size = 170;
person.persist();
personDTO = Person.find(" name = ?1", "3")
.project(PersonDTO.class)
.firstResult();
person.delete();
Assertions.assertEquals("3", personDTO.name);
Assertions.assertEquals("street 3", personDTO.address.street);
Assertions.assertEquals(170, personDTO.directHeight);
Assertions.assertEquals(170, personDTO.description.height);
Assertions.assertEquals("Height: 170, weight: 75", personDTO.description.getDescription());
Person hum = new Person();
hum.name = "hum";
hum.uniqueName = "hum";
Dog kit = new Dog("kit", "bulldog");
hum.dogs.add(kit);
kit.owner = hum;
hum.persist();
DogDto2 dogDto2 = Dog.find(" name = ?1", "kit")
.project(DogDto2.class)
.firstResult();
hum.delete();
Assertions.assertEquals("kit", dogDto2.name);
Assertions.assertEquals("hum", dogDto2.owner.name);
return "OK";
}
@GET
@Path("projection-constructor-annotation")
@Transactional(dontRollbackOn = SemanticException.class)
public String testProjectedConstructor() {
Assertions.assertEquals(1, Person.count());
//Test | PersistTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/ams/ApplicationMasterServiceProcessor.java | {
"start": 1588,
"end": 3162
} | interface ____ {
/**
* Initialize with and ApplicationMasterService Context as well as the
* next processor in the chain.
* @param amsContext AMSContext.
* @param nextProcessor next ApplicationMasterServiceProcessor
*/
void init(ApplicationMasterServiceContext amsContext,
ApplicationMasterServiceProcessor nextProcessor);
/**
* Register AM attempt.
* @param applicationAttemptId applicationAttemptId.
* @param request Register Request.
* @param response Register Response.
* @throws IOException IOException.
* @throws YarnException in critical situation where invalid
* profiles/resources are added.
*/
void registerApplicationMaster(ApplicationAttemptId applicationAttemptId,
RegisterApplicationMasterRequest request,
RegisterApplicationMasterResponse response)
throws IOException, YarnException;
/**
* Allocate call.
* @param appAttemptId appAttemptId.
* @param request Allocate Request.
* @param response Allocate Response.
* @throws YarnException YarnException.
*/
void allocate(ApplicationAttemptId appAttemptId,
AllocateRequest request, AllocateResponse response) throws YarnException;
/**
* Finish AM.
* @param applicationAttemptId applicationAttemptId.
* @param request Finish AM Request.
* @param response Finish AM Response.
*/
void finishApplicationMaster(
ApplicationAttemptId applicationAttemptId,
FinishApplicationMasterRequest request,
FinishApplicationMasterResponse response);
}
| ApplicationMasterServiceProcessor |
java | apache__flink | flink-python/src/main/java/org/apache/flink/client/python/PythonShellParser.java | {
"start": 1209,
"end": 12011
} | class ____ {
private static final Option OPTION_HELP =
Option.builder("h")
.required(false)
.longOpt("help")
.desc("Show the help message with descriptions of all options.")
.build();
private static final Option OPTION_JM_MEMORY =
Option.builder("jm")
.required(false)
.longOpt("jobManagerMemory")
.hasArg()
.desc("Memory for JobManager Container with optional unit (default: MB)")
.build();
private static final Option OPTION_NAME =
Option.builder("nm")
.required(false)
.longOpt("name")
.hasArg()
.desc("Set a custom name for the application on YARN")
.build();
private static final Option OPTION_QUEUE =
Option.builder("qu")
.required(false)
.longOpt("queue")
.hasArg()
.desc("Specify YARN queue.")
.build();
private static final Option OPTION_SLOTS =
Option.builder("s")
.required(false)
.longOpt("slots")
.hasArg()
.desc("Number of slots per TaskManager")
.build();
private static final Option OPTION_TM_MEMORY =
Option.builder("tm")
.required(false)
.longOpt("taskManagerMemory")
.hasArg()
.desc("Memory per TaskManager Container with optional unit (default: MB)")
.build();
// cluster types
private static final String LOCAL_RUN = "local";
private static final String REMOTE_RUN = "remote";
private static final String YARN_RUN = "yarn";
// Options that will be used in mini cluster.
private static final Options LOCAL_OPTIONS = getLocalOptions(new Options());
// Options that will be used in remote cluster.
private static final Options REMOTE_OPTIONS = getRemoteOptions(new Options());
// Options that will be used in yarn cluster.
private static final Options YARN_OPTIONS = getYarnOptions(new Options());
public static void main(String[] args) {
if (args.length < 1) {
printError("You should specify cluster type or -h | --help option");
System.exit(1);
}
String command = args[0];
List<String> commandOptions = null;
try {
switch (command) {
case LOCAL_RUN:
commandOptions = parseLocal(args);
break;
case REMOTE_RUN:
commandOptions = parseRemote(args);
break;
case YARN_RUN:
commandOptions = parseYarn(args);
break;
case "-h":
case "--help":
printHelp();
break;
default:
printError(
String.format(
"\"%s\" is not a valid cluster type or -h | --help option.\n",
command));
System.exit(1);
}
if (commandOptions != null) {
for (String option : commandOptions) {
System.out.print(option);
System.out.print('\0');
}
}
} catch (Throwable e) {
printError("Error while running the command.");
e.printStackTrace();
System.exit(1);
}
}
private static void buildGeneralOptions(Options options) {
options.addOption(OPTION_HELP);
}
private static Options getLocalOptions(Options options) {
buildGeneralOptions(options);
return options;
}
private static Options getRemoteOptions(Options options) {
buildGeneralOptions(options);
return options;
}
private static Options getYarnOptions(Options options) {
buildGeneralOptions(options);
options.addOption(OPTION_JM_MEMORY);
options.addOption(OPTION_NAME);
options.addOption(OPTION_QUEUE);
options.addOption(OPTION_SLOTS);
options.addOption(OPTION_TM_MEMORY);
return options;
}
/**
* Prints the error message and help for the client.
*
* @param msg error message
*/
private static void printError(String msg) {
System.err.println(msg);
System.err.println(
"Valid cluster type are \"local\", \"remote <hostname> <portnumber>\", \"yarn\".");
System.err.println();
System.err.println("Specify the help option (-h or --help) to get help on the command.");
}
/** Prints the help for the client. */
private static void printHelp() {
System.out.print("Flink Python Shell\n");
System.out.print("Usage: pyflink-shell.sh [local|remote|yarn] [options] <args>...\n");
System.out.print('\n');
printLocalHelp();
printRemoteHelp();
printYarnHelp();
System.out.println("-h | --help");
System.out.println(" Prints this usage text");
System.exit(0);
}
private static void printYarnHelp() {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(5);
formatter.setWidth(80);
System.out.println("Command: yarn [options]");
System.out.println("Starts Flink Python shell connecting to a yarn cluster");
formatter.printHelp(" ", YARN_OPTIONS);
}
private static void printRemoteHelp() {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(5);
formatter.setWidth(80);
System.out.println("Command: remote [options] <host> <port>");
System.out.println("Starts Flink Python shell connecting to a remote cluster");
System.out.println(" <host>");
System.out.println(" Remote host name as string");
System.out.println(" <port>");
System.out.println(" Remote port as integer");
System.out.println();
formatter.printHelp(" ", REMOTE_OPTIONS);
}
private static void printLocalHelp() {
HelpFormatter formatter = new HelpFormatter();
formatter.setLeftPadding(5);
formatter.setWidth(80);
System.out.println("Command: local [options]");
System.out.println("Starts Flink Python shell with a local Flink cluster");
formatter.printHelp(" ", LOCAL_OPTIONS);
}
/**
* Constructs yarn options. The python shell option will add prefix 'y' to align yarn options in
* `flink run`.
*
* @param options Options that will be used in `flink run`.
* @param yarnOption Python shell yarn options.
* @param commandLine Parsed Python shell parser options.
*/
private static void constructYarnOption(
List<String> options, Option yarnOption, CommandLine commandLine) {
if (commandLine.hasOption(yarnOption.getOpt())) {
options.add("-y" + yarnOption.getOpt());
options.add(commandLine.getOptionValue(yarnOption.getOpt()));
}
}
/**
* Parses Python shell yarn options and transfer to yarn options which will be used in `flink
* run` to submit flink job.
*
* @param args Python shell yarn options.
* @return Yarn options usrd in `flink run`.
*/
static List<String> parseYarn(String[] args) {
String[] params = new String[args.length - 1];
System.arraycopy(args, 1, params, 0, params.length);
CommandLine commandLine = parse(YARN_OPTIONS, params);
if (commandLine.hasOption(OPTION_HELP.getOpt())) {
printYarnHelp();
System.exit(0);
}
List<String> options = new ArrayList<>();
options.add(args[0]);
options.add("-m");
options.add("yarn-cluster");
constructYarnOption(options, OPTION_JM_MEMORY, commandLine);
constructYarnOption(options, OPTION_NAME, commandLine);
constructYarnOption(options, OPTION_QUEUE, commandLine);
constructYarnOption(options, OPTION_SLOTS, commandLine);
constructYarnOption(options, OPTION_TM_MEMORY, commandLine);
return options;
}
/**
* Parses Python shell options and transfer to options which will be used in `flink run -m
* ${jobmanager_address}` to submit flink job in a remote jobmanager. The Python shell options
* "remote ${hostname} ${portnumber}" will be transferred to "-m ${hostname}:${portnumber}".
*
* @param args Python shell options.
* @return Options used in `flink run`.
*/
static List<String> parseRemote(String[] args) {
if (args.length < 3) {
System.err.println("Specifies the <hostname> <portnumber> in 'remote' mode");
printRemoteHelp();
System.exit(0);
}
String[] params = new String[args.length - 3];
System.arraycopy(args, 3, params, 0, params.length);
CommandLine commandLine = parse(REMOTE_OPTIONS, params);
if (commandLine.hasOption(OPTION_HELP.getOpt())) {
printRemoteHelp();
System.exit(0);
}
String host = args[1];
String port = args[2];
List<String> options = new ArrayList<>();
options.add(args[0]);
options.add("-m");
options.add(host + ":" + port);
return options;
}
/**
* Parses Python shell options and transfer to options which will be used in `java` to exec a
* flink job in local mini cluster.
*
* @param args Python shell options.
* @return Options used in `java` run.
*/
static List<String> parseLocal(String[] args) {
String[] params = new String[args.length - 1];
System.arraycopy(args, 1, params, 0, params.length);
CommandLine commandLine = parse(LOCAL_OPTIONS, params);
if (commandLine.hasOption(OPTION_HELP.getOpt())) {
printLocalHelp();
System.exit(0);
}
List<String> options = new ArrayList<>();
options.add("local");
return options;
}
private static CommandLine parse(Options options, String[] args) {
final DefaultParser parser = new DefaultParser();
try {
return parser.parse(options, args, true);
} catch (ParseException e) {
throw new RuntimeException("Parser parses options failed.", e);
}
}
}
| PythonShellParser |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/exc/MissingInjectableValueExcepion.java | {
"start": 177,
"end": 1156
} | class ____
extends DatabindException
{
private static final long serialVersionUID = 1L;
protected final Object _valueId;
protected final BeanProperty _forProperty;
protected final Object _beanInstance;
protected MissingInjectableValueExcepion(JsonParser p, String msg,
Object valueId, BeanProperty forProperty, Object beanInstance)
{
super(p, msg);
_valueId = valueId;
_forProperty = forProperty;
_beanInstance = beanInstance;
}
public static MissingInjectableValueExcepion from(JsonParser p, String msg,
Object valueId, BeanProperty forProperty, Object beanInstance)
{
return new MissingInjectableValueExcepion(p, msg, valueId, forProperty, beanInstance);
}
public Object getValueId() { return _valueId; }
public BeanProperty getForProperty() { return _forProperty; }
public Object getBeanInstance() { return _beanInstance; }
}
| MissingInjectableValueExcepion |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableDoOnLifecycle.java | {
"start": 942,
"end": 1649
} | class ____<T> extends AbstractFlowableWithUpstream<T, T> {
private final Consumer<? super Subscription> onSubscribe;
private final LongConsumer onRequest;
private final Action onCancel;
public FlowableDoOnLifecycle(Flowable<T> source, Consumer<? super Subscription> onSubscribe,
LongConsumer onRequest, Action onCancel) {
super(source);
this.onSubscribe = onSubscribe;
this.onRequest = onRequest;
this.onCancel = onCancel;
}
@Override
protected void subscribeActual(Subscriber<? super T> s) {
source.subscribe(new SubscriptionLambdaSubscriber<>(s, onSubscribe, onRequest, onCancel));
}
static final | FlowableDoOnLifecycle |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/RuleNotRun.java | {
"start": 2321,
"end": 5976
} | class ____ extends BugChecker implements CompilationUnitTreeMatcher {
@Override
public Description matchCompilationUnit(CompilationUnitTree tree, VisitorState state) {
HashMap<VarSymbol, Tree> rules = new HashMap<>(findRules(state));
if (rules.isEmpty()) {
return NO_MATCH;
}
new TreePathScanner<Void, Void>() {
@Override
public Void visitMemberSelect(MemberSelectTree memberSelect, Void unused) {
handle(memberSelect);
return super.visitMemberSelect(memberSelect, null);
}
@Override
public Void visitIdentifier(IdentifierTree identifier, Void unused) {
handle(identifier);
return super.visitIdentifier(identifier, null);
}
private void handle(Tree tree) {
if (getSymbol(tree) instanceof VarSymbol varSymbol) {
// If the reference leaks anywhere, it might be being run, e.g. via RuleChain.
// _Most_ uses of rules just call methods on the rule, so this heuristic hopefully won't
// miss too many true positives.
if (!(getCurrentPath().getParentPath().getLeaf() instanceof MemberSelectTree)) {
rules.remove(varSymbol);
}
}
}
}.scan(state.getPath(), null);
for (Tree ruleTree : rules.values()) {
var fix = SuggestedFix.builder();
String rule = SuggestedFixes.qualifyType(state, fix, "org.junit.Rule");
state.reportMatch(
describeMatch(ruleTree, fix.prefixWith(ruleTree, format("@%s ", rule)).build()));
}
return NO_MATCH;
}
private ImmutableMap<VarSymbol, Tree> findRules(VisitorState state) {
ImmutableMap.Builder<VarSymbol, Tree> rules = ImmutableMap.builder();
new SuppressibleTreePathScanner<Void, Void>(state) {
@Override
public Void visitClass(ClassTree tree, Void unused) {
if (!JUnitMatchers.isJUnit4TestClass.matches(tree, state)) {
return null;
}
for (Tree m : tree.getMembers()) {
if (m instanceof VariableTree vt) {
scan(vt, null);
}
}
return null;
}
@Override
public Void visitVariable(VariableTree tree, Void unused) {
VarSymbol symbol = getSymbol(tree);
if (isSubtype(symbol.type, TEST_RULE.get(state), state)
&& STOP_ANNOTATIONS.stream()
.noneMatch(anno -> hasDirectAnnotationWithSimpleName(symbol, anno))
// Heuristic: rules should be public. If it's not, and is unused, we should pick it up
// via unused analysis anyway.
&& !symbol.isPrivate()
&& !ignoreBasedOnInitialiser(tree.getInitializer(), state)) {
rules.put(symbol, tree);
}
return null;
}
}.scan(state.getPath().getCompilationUnit(), null);
return rules.buildOrThrow();
}
private static final ImmutableSet<String> STOP_ANNOTATIONS =
ImmutableSet.of(
// keep-sorted start
"ClassRule", //
"Inject",
"Rule",
"TightRule"
// keep-sorted end
);
private static boolean ignoreBasedOnInitialiser(Tree tree, VisitorState state) {
if (tree == null) {
return false;
}
AtomicBoolean matched = new AtomicBoolean();
new TreeScanner<Void, Void>() {
@Override
public Void visitMethodInvocation(MethodInvocationTree tree, Void unused) {
return super.visitMethodInvocation(tree, null);
}
}.scan(tree, null);
return matched.get();
}
private static final Supplier<Type> TEST_RULE =
memoize(state -> state.getTypeFromString("org.junit.rules.TestRule"));
}
| RuleNotRun |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2800/Issue2903.java | {
"start": 3147,
"end": 3244
} | class ____ {
public java.time.LocalDate createTime;
}
public static | LoginRequestDTO2 |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/dataview/MapViewSpec.java | {
"start": 1240,
"end": 2748
} | class ____ extends DataViewSpec {
private final boolean containsNullKey;
private final @Nullable TypeSerializer<?> keySerializer;
private final @Nullable TypeSerializer<?> valueSerializer;
public MapViewSpec(String stateId, int fieldIndex, DataType dataType, boolean containsNullKey) {
this(stateId, fieldIndex, dataType, containsNullKey, null, null);
}
@Deprecated
public MapViewSpec(
String stateId,
int fieldIndex,
DataType dataType,
boolean containsNullKey,
TypeSerializer<?> keySerializer,
TypeSerializer<?> valueSerializer) {
super(stateId, fieldIndex, dataType);
this.containsNullKey = containsNullKey;
this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer;
}
public DataType getKeyDataType() {
final KeyValueDataType mapDataType = (KeyValueDataType) getDataType();
return mapDataType.getKeyDataType();
}
public DataType getValueDataType() {
final KeyValueDataType mapDataType = (KeyValueDataType) getDataType();
return mapDataType.getValueDataType();
}
public Optional<TypeSerializer<?>> getKeySerializer() {
return Optional.ofNullable(keySerializer);
}
public Optional<TypeSerializer<?>> getValueSerializer() {
return Optional.ofNullable(valueSerializer);
}
public boolean containsNullKey() {
return containsNullKey;
}
}
| MapViewSpec |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/table/SqlShowTablesConverter.java | {
"start": 1428,
"end": 3313
} | class ____ extends AbstractSqlShowConverter<SqlShowTables> {
@Override
public Operation getOperationWithoutPrep(
SqlShowTables sqlShowCall,
@Nullable String catalogName,
@Nullable String databaseName,
@Nullable ShowLikeOperator likeOp) {
switch (sqlShowCall.getTableKind()) {
case MATERIALIZED_TABLE:
return new ShowMaterializedTablesOperation(catalogName, databaseName, likeOp);
case TABLE:
return new ShowTablesOperation(catalogName, databaseName, likeOp);
case VIEW:
return new ShowViewsOperation(catalogName, databaseName, likeOp);
default:
throw new ValidationException(
"Not supported table kind " + sqlShowCall.getTableKind() + " yet");
}
}
@Override
public Operation getOperation(
SqlShowTables sqlShowCall,
@Nullable String catalogName,
@Nullable String databaseName,
String prep,
@Nullable ShowLikeOperator likeOp) {
switch (sqlShowCall.getTableKind()) {
case MATERIALIZED_TABLE:
return new ShowMaterializedTablesOperation(catalogName, databaseName, prep, likeOp);
case TABLE:
return new ShowTablesOperation(catalogName, databaseName, prep, likeOp);
case VIEW:
return new ShowViewsOperation(catalogName, databaseName, prep, likeOp);
default:
throw new ValidationException(
"Not supported table kind " + sqlShowCall.getTableKind() + " yet");
}
}
@Override
public Operation convertSqlNode(SqlShowTables sqlShowTables, ConvertContext context) {
return convertShowOperation(sqlShowTables, context);
}
}
| SqlShowTablesConverter |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/handler/HandlerFunctions.java | {
"start": 2199,
"end": 6271
} | class ____ {
private static final Log log = LogFactory.getLog(GatewayMvcClassPathWarningAutoConfiguration.class);
private HandlerFunctions() {
}
public static HandlerFunction<ServerResponse> fn(String functionName) {
Assert.hasText(functionName, "'functionName' must not be empty");
return request -> {
FunctionCatalog functionCatalog = MvcUtils.getApplicationContext(request).getBean(FunctionCatalog.class);
String expandedFunctionName = MvcUtils.expand(request, functionName);
FunctionInvocationWrapper function;
Object body = null;
if (expandedFunctionName.contains("/")) {
String[] functionBodySplit = expandedFunctionName.split("/");
function = functionCatalog.lookup(functionBodySplit[0],
request.headers().accept().stream().map(MimeType::toString).toArray(String[]::new));
if (function != null && function.isSupplier()) {
log.warn("Supplier must not have any arguments. Supplier: '" + function.getFunctionDefinition()
+ "' has '" + functionBodySplit[1] + "' as an argument which is ignored.");
}
body = functionBodySplit[1];
}
else {
function = functionCatalog.lookup(expandedFunctionName,
request.headers().accept().stream().map(MimeType::toString).toArray(String[]::new));
}
/*
* If function can not be found in the current runtime, we will default to
* RoutingFunction which has additional logic to determine the function to
* invoke.
*/
Map<String, String> additionalRequestHeaders = new HashMap<>();
if (function == null) {
additionalRequestHeaders.put(FunctionProperties.FUNCTION_DEFINITION, expandedFunctionName);
function = functionCatalog.lookup(RoutingFunction.FUNCTION_NAME,
request.headers().accept().stream().map(MimeType::toString).toArray(String[]::new));
}
if (function != null) {
if (body == null) {
body = function.isSupplier() ? null : request.body(function.getRawInputType());
}
return processRequest(request, function, body, false, Collections.emptyList(), Collections.emptyList(),
additionalRequestHeaders);
}
return ServerResponse.notFound().build();
};
}
public static HandlerFunction<ServerResponse> stream(String bindingName) {
Assert.hasText(bindingName, "'bindingName' must not be empty");
// TODO: validate bindingName
return request -> {
String expandedBindingName = MvcUtils.expand(request, bindingName);
StreamOperations streamOps = MvcUtils.getApplicationContext(request).getBean(StreamOperations.class);
byte[] body = request.body(byte[].class);
MessageHeaders messageHeaders = FunctionHandlerHeaderUtils
.fromHttp(FunctionHandlerHeaderUtils.sanitize(request.headers().asHttpHeaders()));
boolean send = streamOps.send(expandedBindingName, MessageBuilder.createMessage(body, messageHeaders));
if (send) {
return ServerResponse.accepted().build();
}
return ServerResponse.badRequest().build();
};
}
// for properties
public static HandlerFunction<ServerResponse> forward(RouteProperties routeProperties) {
Objects.requireNonNull(routeProperties.getUri(), "routeProperties uri must not be null");
return forward(routeProperties.getUri().getPath());
}
public static HandlerFunction<ServerResponse> forward(String path) {
// ok() is wrong, but can be overridden by the forwarded request.
return request -> GatewayServerResponse.ok().build((httpServletRequest, httpServletResponse) -> {
try {
String expandedFallback = MvcUtils.expand(request, path);
request.servletRequest()
.getServletContext()
.getRequestDispatcher(expandedFallback)
.forward(httpServletRequest, httpServletResponse);
return null;
}
catch (ServletException | IOException e) {
throw new RuntimeException(e);
}
});
}
public static HandlerFunction<ServerResponse> https() {
return http();
}
public static HandlerFunction<ServerResponse> http() {
return new LookupProxyExchangeHandlerFunction();
}
public static HandlerFunction<ServerResponse> no() {
return http();
}
static | HandlerFunctions |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/utils/ConnLabelsUtilsTest.java | {
"start": 995,
"end": 2265
} | class ____ {
@Test
void testParsePropertyValue2Map() {
Properties properties = new Properties();
String property = "property";
String rawValue = "k1 = v1, k2 = v2";
properties.put(property, rawValue);
String property1 = "property2";
String rawValue1 = "k11=v11, kk2";
properties.put(property1, rawValue1);
Map<String, String> m = ConnLabelsUtils.parsePropertyValue2Map(properties, property);
assertEquals(2, m.size());
assertEquals("v1", m.get("k1"));
assertEquals("v2", m.get("k2"));
Map<String, String> m1 = ConnLabelsUtils.parsePropertyValue2Map(properties, property1);
assertEquals(1, m1.size());
assertEquals("v11", m1.get("k11"));
assertNull(m1.get("kk2"));
m = ConnLabelsUtils.mergeMapByOrder(m, m1);
assertEquals(3, m.size());
assertEquals("v1", m.get("k1"));
assertEquals("v2", m.get("k2"));
assertEquals("v11", m.get("k11"));
m = ConnLabelsUtils.addPrefixForEachKey(m, "test_prefix");
assertEquals(3, m.size());
m.forEach((k, v) -> {
assertTrue(k.startsWith("test_prefix"));
});
}
}
| ConnLabelsUtilsTest |
java | quarkusio__quarkus | extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientRuntimeConfig.java | {
"start": 9541,
"end": 11165
} | interface ____ {
// @formatter:off
/**
* Sets the host name/port to connect to. Each one is separated by a semicolon (eg. hostA:11222;hostB:11222).
*/
// @formatter:on
String hosts();
// @formatter:off
/**
* Sets client intelligence used by authentication
* Available values:
* * `BASIC` - Means that the client doesn't handle server topology changes and therefore will only use the list
* of servers supplied at configuration time.
* * `TOPOLOGY_AWARE` - Use this provider if you don't want the client to present any certificates to the
* remote TLS host.
* * `HASH_DISTRIBUTION_AWARE` - Like `TOPOLOGY_AWARE` but with the additional advantage that each request
* involving keys will be routed to the server who is the primary owner which improves performance
* greatly. This is the default.
*/
// @formatter:on
@WithDefault("HASH_DISTRIBUTION_AWARE")
Optional<ClientIntelligence> clientIntelligence();
// @formatter:off
/**
* Enables or disables Protobuf generated schemas upload to the backup.
* Set it to 'false' when you need to handle the lifecycle of the Protobuf Schemas on Server side yourself.
* Default is 'true'.
* This setting will be ignored if the Global Setting is set up to false.
*/
// @formatter:on
@WithDefault("true")
Optional<Boolean> useSchemaRegistration();
}
}
| BackupClusterConfig |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/package-info.java | {
"start": 966,
"end": 1742
} | interface ____
* {@link org.hibernate.query.sqm.internal.SqmCriteriaNodeBuilder}.
* It instantiates SQM nodes and arranges them into SQM tree using the
* standard operations for building a JPA criteria query.
*
* <h3>Transforming SQM to SQL</h3>
*
* The package {@link org.hibernate.sql.ast} defines an AST representing
* SQL. To generate SQL from SQM, we must transform the SQM tree to a
* SQL AST tree. This process is described
* {@linkplain org.hibernate.query.hql here}, and is handled by a
* {@link org.hibernate.query.sqm.sql.internal.StandardSqmTranslator}
* and a {@link org.hibernate.sql.ast.SqlAstTranslator}.
*
* @apiNote This entire package is in an incubating state.
*/
@Incubating
package org.hibernate.query.sqm;
import org.hibernate.Incubating;
| is |
java | redisson__redisson | redisson-hibernate/redisson-hibernate-5/src/main/java/org/redisson/hibernate/strategy/ReadWriteCollectionRegionAccessStrategy.java | {
"start": 1187,
"end": 2089
} | class ____ extends AbstractReadWriteAccessStrategy implements CollectionRegionAccessStrategy {
public ReadWriteCollectionRegionAccessStrategy(Settings settings, GeneralDataRegion region,
RMapCache<Object, Object> mapCache) {
super(settings, region, mapCache);
}
@Override
public CollectionRegion getRegion() {
return (CollectionRegion) region;
}
@Override
public Object generateCacheKey(Object id, CollectionPersister persister, SessionFactoryImplementor factory, String tenantIdentifier) {
return ((RedissonCollectionRegion)region).getCacheKeysFactory().createCollectionKey( id, persister, factory, tenantIdentifier );
}
@Override
public Object getCacheKeyId(Object cacheKey) {
return ((RedissonCollectionRegion)region).getCacheKeysFactory().getCollectionId(cacheKey);
}
}
| ReadWriteCollectionRegionAccessStrategy |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SampleLongAggregatorFunction.java | {
"start": 1042,
"end": 6081
} | class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("sample", ElementType.BYTES_REF) );
private final DriverContext driverContext;
private final SampleLongAggregator.SingleState state;
private final List<Integer> channels;
private final int limit;
public SampleLongAggregatorFunction(DriverContext driverContext, List<Integer> channels,
SampleLongAggregator.SingleState state, int limit) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
this.limit = limit;
}
public static SampleLongAggregatorFunction create(DriverContext driverContext,
List<Integer> channels, int limit) {
return new SampleLongAggregatorFunction(driverContext, channels, SampleLongAggregator.initSingle(driverContext.bigArrays(), limit), limit);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
LongBlock valueBlock = page.getBlock(channels.get(0));
LongVector valueVector = valueBlock.asVector();
if (valueVector == null) {
addRawBlock(valueBlock, mask);
return;
}
addRawVector(valueVector, mask);
}
private void addRawInputNotMasked(Page page) {
LongBlock valueBlock = page.getBlock(channels.get(0));
LongVector valueVector = valueBlock.asVector();
if (valueVector == null) {
addRawBlock(valueBlock);
return;
}
addRawVector(valueVector);
}
private void addRawVector(LongVector valueVector) {
for (int valuesPosition = 0; valuesPosition < valueVector.getPositionCount(); valuesPosition++) {
long valueValue = valueVector.getLong(valuesPosition);
SampleLongAggregator.combine(state, valueValue);
}
}
private void addRawVector(LongVector valueVector, BooleanVector mask) {
for (int valuesPosition = 0; valuesPosition < valueVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
long valueValue = valueVector.getLong(valuesPosition);
SampleLongAggregator.combine(state, valueValue);
}
}
private void addRawBlock(LongBlock valueBlock) {
for (int p = 0; p < valueBlock.getPositionCount(); p++) {
int valueValueCount = valueBlock.getValueCount(p);
if (valueValueCount == 0) {
continue;
}
int valueStart = valueBlock.getFirstValueIndex(p);
int valueEnd = valueStart + valueValueCount;
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
long valueValue = valueBlock.getLong(valueOffset);
SampleLongAggregator.combine(state, valueValue);
}
}
}
private void addRawBlock(LongBlock valueBlock, BooleanVector mask) {
for (int p = 0; p < valueBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int valueValueCount = valueBlock.getValueCount(p);
if (valueValueCount == 0) {
continue;
}
int valueStart = valueBlock.getFirstValueIndex(p);
int valueEnd = valueStart + valueValueCount;
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
long valueValue = valueBlock.getLong(valueOffset);
SampleLongAggregator.combine(state, valueValue);
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block sampleUncast = page.getBlock(channels.get(0));
if (sampleUncast.areAllValuesNull()) {
return;
}
BytesRefBlock sample = (BytesRefBlock) sampleUncast;
assert sample.getPositionCount() == 1;
BytesRef sampleScratch = new BytesRef();
SampleLongAggregator.combineIntermediate(state, sample);
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = SampleLongAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| SampleLongAggregatorFunction |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableCache.java | {
"start": 1185,
"end": 11226
} | class ____<T> extends AbstractFlowableWithUpstream<T, T>
implements FlowableSubscriber<T> {
/**
* The subscription to the source should happen at most once.
*/
final AtomicBoolean once;
/**
* The number of items per cached nodes.
*/
final int capacityHint;
/**
* The current known array of subscriber state to notify.
*/
final AtomicReference<CacheSubscription<T>[]> subscribers;
/**
* A shared instance of an empty array of subscribers to avoid creating
* a new empty array when all subscribers cancel.
*/
@SuppressWarnings("rawtypes")
static final CacheSubscription[] EMPTY = new CacheSubscription[0];
/**
* A shared instance indicating the source has no more events and there
* is no need to remember subscribers anymore.
*/
@SuppressWarnings("rawtypes")
static final CacheSubscription[] TERMINATED = new CacheSubscription[0];
/**
* The total number of elements in the list available for reads.
*/
volatile long size;
/**
* The starting point of the cached items.
*/
final Node<T> head;
/**
* The current tail of the linked structure holding the items.
*/
Node<T> tail;
/**
* How many items have been put into the tail node so far.
*/
int tailOffset;
/**
* If {@link #subscribers} is {@link #TERMINATED}, this holds the terminal error if not null.
*/
Throwable error;
/**
* True if the source has terminated.
*/
volatile boolean done;
/**
* Constructs an empty, non-connected cache.
* @param source the source to subscribe to for the first incoming subscriber
* @param capacityHint the number of items expected (reduce allocation frequency)
*/
@SuppressWarnings("unchecked")
public FlowableCache(Flowable<T> source, int capacityHint) {
super(source);
this.capacityHint = capacityHint;
this.once = new AtomicBoolean();
Node<T> n = new Node<>(capacityHint);
this.head = n;
this.tail = n;
this.subscribers = new AtomicReference<>(EMPTY);
}
@Override
protected void subscribeActual(Subscriber<? super T> t) {
CacheSubscription<T> consumer = new CacheSubscription<>(t, this);
t.onSubscribe(consumer);
add(consumer);
if (!once.get() && once.compareAndSet(false, true)) {
source.subscribe(this);
} else {
replay(consumer);
}
}
/**
* Check if this cached observable is connected to its source.
* @return true if already connected
*/
/* public */boolean isConnected() {
return once.get();
}
/**
* Returns true if there are observers subscribed to this observable.
* @return true if the cache has Subscribers
*/
/* public */ boolean hasSubscribers() {
return subscribers.get().length != 0;
}
/**
* Returns the number of events currently cached.
* @return the number of currently cached event count
*/
/* public */ long cachedEventCount() {
return size;
}
/**
* Atomically adds the consumer to the {@link #subscribers} copy-on-write array
* if the source has not yet terminated.
* @param consumer the consumer to add
*/
void add(CacheSubscription<T> consumer) {
for (;;) {
CacheSubscription<T>[] current = subscribers.get();
if (current == TERMINATED) {
return;
}
int n = current.length;
@SuppressWarnings("unchecked")
CacheSubscription<T>[] next = new CacheSubscription[n + 1];
System.arraycopy(current, 0, next, 0, n);
next[n] = consumer;
if (subscribers.compareAndSet(current, next)) {
return;
}
}
}
/**
* Atomically removes the consumer from the {@link #subscribers} copy-on-write array.
* @param consumer the consumer to remove
*/
@SuppressWarnings("unchecked")
void remove(CacheSubscription<T> consumer) {
for (;;) {
CacheSubscription<T>[] current = subscribers.get();
int n = current.length;
if (n == 0) {
return;
}
int j = -1;
for (int i = 0; i < n; i++) {
if (current[i] == consumer) {
j = i;
break;
}
}
if (j < 0) {
return;
}
CacheSubscription<T>[] next;
if (n == 1) {
next = EMPTY;
} else {
next = new CacheSubscription[n - 1];
System.arraycopy(current, 0, next, 0, j);
System.arraycopy(current, j + 1, next, j, n - j - 1);
}
if (subscribers.compareAndSet(current, next)) {
return;
}
}
}
/**
* Replays the contents of this cache to the given consumer based on its
* current state and number of items requested by it.
* @param consumer the consumer to continue replaying items to
*/
void replay(CacheSubscription<T> consumer) {
// make sure there is only one replay going on at a time
if (consumer.getAndIncrement() != 0) {
return;
}
// see if there were more replay request in the meantime
int missed = 1;
// read out state into locals upfront to avoid being re-read due to volatile reads
long index = consumer.index;
int offset = consumer.offset;
Node<T> node = consumer.node;
AtomicLong requested = consumer.requested;
Subscriber<? super T> downstream = consumer.downstream;
int capacity = capacityHint;
for (;;) {
// first see if the source has terminated, read order matters!
boolean sourceDone = done;
// and if the number of items is the same as this consumer has received
boolean empty = size == index;
// if the source is done and we have all items so far, terminate the consumer
if (sourceDone && empty) {
// release the node object to avoid leaks through retained consumers
consumer.node = null;
// if error is not null then the source failed
Throwable ex = error;
if (ex != null) {
downstream.onError(ex);
} else {
downstream.onComplete();
}
return;
}
// there are still items not sent to the consumer
if (!empty) {
// see how many items the consumer has requested in total so far
long consumerRequested = requested.get();
// MIN_VALUE indicates a cancelled consumer, we stop replaying
if (consumerRequested == Long.MIN_VALUE) {
// release the node object to avoid leaks through retained consumers
consumer.node = null;
return;
}
// if the consumer has requested more and there is more, we will emit an item
if (consumerRequested != index) {
// if the offset in the current node has reached the node capacity
if (offset == capacity) {
// switch to the subsequent node
node = node.next;
// reset the in-node offset
offset = 0;
}
// emit the cached item
downstream.onNext(node.values[offset]);
// move the node offset forward
offset++;
// move the total consumed item count forward
index++;
// retry for the next item/terminal event if any
continue;
}
}
// commit the changed references back
consumer.index = index;
consumer.offset = offset;
consumer.node = node;
// release the changes and see if there were more replay request in the meantime
missed = consumer.addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
@Override
public void onSubscribe(Subscription s) {
s.request(Long.MAX_VALUE);
}
@Override
public void onNext(T t) {
int tailOffset = this.tailOffset;
// if the current tail node is full, create a fresh node
if (tailOffset == capacityHint) {
Node<T> n = new Node<>(tailOffset);
n.values[0] = t;
this.tailOffset = 1;
tail.next = n;
tail = n;
} else {
tail.values[tailOffset] = t;
this.tailOffset = tailOffset + 1;
}
size++;
for (CacheSubscription<T> consumer : subscribers.get()) {
replay(consumer);
}
}
@SuppressWarnings("unchecked")
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
error = t;
done = true;
for (CacheSubscription<T> consumer : subscribers.getAndSet(TERMINATED)) {
replay(consumer);
}
}
@SuppressWarnings("unchecked")
@Override
public void onComplete() {
done = true;
for (CacheSubscription<T> consumer : subscribers.getAndSet(TERMINATED)) {
replay(consumer);
}
}
/**
* Hosts the downstream consumer and its current requested and replay states.
* {@code this} holds the work-in-progress counter for the serialized replay.
* @param <T> the value type
*/
static final | FlowableCache |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java | {
"start": 5990,
"end": 6445
} | class ____ the underlying {@link Type} which hierarchy is to
* be be registered for reflection.
*
* @param clazz the Class which hierarchy is to be registered for reflection
* @return a new {@link Builder} instance, initialized from the specified Class
*/
public static Builder builder(Class<?> clazz) {
return builder(clazz.getName());
}
/**
* Creates a new {@link Builder} instance, using the specified | for |
java | spring-projects__spring-boot | module/spring-boot-cloudfoundry/src/main/java/org/springframework/boot/cloudfoundry/autoconfigure/actuate/endpoint/EndpointCloudFoundryExtension.java | {
"start": 1578,
"end": 1713
} | class ____ the endpoint to extend
*/
@AliasFor(annotation = EndpointExtension.class, attribute = "endpoint")
Class<?> endpoint();
}
| of |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/nestedsourceproperties/NestedSourcePropertiesTest.java | {
"start": 1484,
"end": 6643
} | class ____ {
@RegisterExtension
final GeneratedSource generatedSource = new GeneratedSource();
@ProcessorTest
@WithClasses({ ArtistToChartEntry.class })
public void shouldGenerateImplementationForPropertyNamesOnly() {
generatedSource.addComparisonToFixtureFor( ArtistToChartEntry.class );
Studio studio = new Studio();
studio.setName( "Abbey Road" );
studio.setCity( "London" );
Label label = new Label();
label.setStudio( studio );
label.setName( "EMY" );
Artist artist = new Artist();
artist.setName( "The Beatles" );
artist.setLabel( label );
Song song = new Song();
song.setArtist( artist );
song.setTitle( "A Hard Day's Night" );
ChartEntry chartEntry = ArtistToChartEntry.MAPPER.map( song );
assertThat( chartEntry ).isNotNull();
assertThat( chartEntry.getArtistName() ).isEqualTo( "The Beatles" );
assertThat( chartEntry.getChartName() ).isNull();
assertThat( chartEntry.getCity() ).isEqualTo( "London" );
assertThat( chartEntry.getPosition() ).isEqualTo( 0 );
assertThat( chartEntry.getRecordedAt() ).isEqualTo( "Abbey Road" );
assertThat( chartEntry.getSongTitle() ).isEqualTo( "A Hard Day's Night" );
}
@ProcessorTest
@WithClasses({ ArtistToChartEntry.class })
public void shouldGenerateImplementationForMultipleParam() {
Studio studio = new Studio();
studio.setName( "Abbey Road" );
studio.setCity( "London" );
Label label = new Label();
label.setStudio( studio );
label.setName( "EMY" );
Artist artist = new Artist();
artist.setName( "The Beatles" );
artist.setLabel( label );
Song song = new Song();
song.setArtist( artist );
song.setTitle( "A Hard Day's Night" );
Chart chart = new Chart();
chart.setName( "Billboard" );
chart.setType( "record-sales" );
ChartEntry chartEntry = ArtistToChartEntry.MAPPER.map( chart, song, 1 );
assertThat( chartEntry ).isNotNull();
assertThat( chartEntry.getArtistName() ).isEqualTo( "The Beatles" );
assertThat( chartEntry.getChartName() ).isEqualTo( "Billboard" );
assertThat( chartEntry.getCity() ).isEqualTo( "London" );
assertThat( chartEntry.getPosition() ).isEqualTo( 1 );
assertThat( chartEntry.getRecordedAt() ).isEqualTo( "Abbey Road" );
assertThat( chartEntry.getSongTitle() ).isEqualTo( "A Hard Day's Night" );
}
@ProcessorTest
@WithClasses({ ArtistToChartEntry.class })
public void shouldPickPropertyNameOverParameterName() {
Chart chart = new Chart();
chart.setName( "Billboard" );
chart.setType( "record-sales" );
ChartEntry chartEntry = ArtistToChartEntry.MAPPER.map( chart );
assertThat( chartEntry ).isNotNull();
assertThat( chartEntry.getArtistName() ).isNull();
assertThat( chartEntry.getChartName() ).isEqualTo( "Billboard" );
assertThat( chartEntry.getCity() ).isNull();
assertThat( chartEntry.getPosition() ).isEqualTo( 0 );
assertThat( chartEntry.getRecordedAt() ).isNull();
assertThat( chartEntry.getSongTitle() ).isNull();
}
@ProcessorTest
@WithClasses({ ArtistToChartEntryAdder.class, ChartPositions.class, AdderUsageObserver.class })
public void shouldUseAddAsTargetAccessor() {
AdderUsageObserver.setUsed( false );
Song song = new Song();
song.setPositions( Arrays.asList( 3, 5 ) );
Chart chart = new Chart();
chart.setSong( song );
ChartPositions positions = ArtistToChartEntryAdder.MAPPER.map( chart );
assertThat( positions ).isNotNull();
assertThat( positions.getPositions() ).containsExactly( 3L, 5L );
assertThat( AdderUsageObserver.isUsed() ).isTrue();
}
@ProcessorTest
@WithClasses({ ArtistToChartEntryGetter.class, ChartPositions.class, AdderUsageObserver.class })
public void shouldUseGetAsTargetAccessor() {
AdderUsageObserver.setUsed( false );
Song song = new Song();
song.setPositions( Arrays.asList( 3, 5 ) );
Chart chart = new Chart();
chart.setSong( song );
ChartPositions positions = ArtistToChartEntryGetter.MAPPER.map( chart );
assertThat( positions ).isNotNull();
assertThat( positions.getPositions() ).containsExactly( 3L, 5L );
assertThat( AdderUsageObserver.isUsed() ).isFalse();
}
@ProcessorTest
@IssueKey("838")
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ArtistToChartEntryErroneous.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 34,
message = "ArtistToChartEntryErroneous.ChartPosition does not have an accessible constructor.")
}
)
@WithClasses({ ArtistToChartEntryErroneous.class })
public void inverseShouldRaiseErrorForNotAccessibleConstructor() {
}
}
| NestedSourcePropertiesTest |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/autoscope/AutoScopeBuildItemTest.java | {
"start": 4550,
"end": 4830
} | class ____ {
private String id;
public String ping() {
return id;
}
@PostConstruct
void init() {
id = UUID.randomUUID().toString();
}
}
// add @Singleton and make it unremovable
static | SimpleBean |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/ClassUtilsTests.java | {
"start": 22905,
"end": 28668
} | class ____ {
@Test
void publicMethodInPublicClass() throws Exception {
Class<?> originalType = String.class;
Method originalMethod = originalType.getDeclaredMethod("getBytes");
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(originalType);
assertThat(interfaceMethod).isSameAs(originalMethod);
assertNotInterfaceMethod(interfaceMethod);
assertPubliclyAccessible(interfaceMethod);
}
@Test
void publicMethodInNonPublicInterface() throws Exception {
Class<?> originalType = PrivateInterface.class;
Method originalMethod = originalType.getDeclaredMethod("getMessage");
// Prerequisites for this use case:
assertPublic(originalMethod);
assertNotPublic(originalMethod.getDeclaringClass());
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod).isSameAs(originalMethod);
assertInterfaceMethod(interfaceMethod);
assertNotPubliclyAccessible(interfaceMethod);
}
@Test
void publicInterfaceMethodInPublicClass() throws Exception {
Class<?> originalType = ArrayList.class;
Method originalMethod = originalType.getDeclaredMethod("size");
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(List.class);
assertThat(interfaceMethod.getName()).isEqualTo("size");
assertThat(interfaceMethod.getParameterTypes()).isEmpty();
assertInterfaceMethod(interfaceMethod);
assertPubliclyAccessible(interfaceMethod);
}
@Test
void publicInterfaceMethodDeclaredInNonPublicClassWithLateBindingOfClassMethodToSubclassDeclaredInterface() throws Exception {
HashMap<String, String> hashMap = new HashMap<>();
// Returns a package-private java.util.HashMap.KeyIterator which extends java.util.HashMap.HashIterator
// which declares hasNext(), even though HashIterator does not implement Iterator. Rather, KeyIterator
// implements HashIterator.
Iterator<String> iterator = hashMap.keySet().iterator();
Class<?> targetClass = iterator.getClass();
// Prerequisites for this use case:
assertNotPublic(targetClass);
Method originalMethod = targetClass.getMethod("hasNext");
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, targetClass);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(Iterator.class);
assertThat(interfaceMethod.getName()).isEqualTo("hasNext");
assertThat(interfaceMethod.getParameterTypes()).isEmpty();
assertInterfaceMethod(interfaceMethod);
assertPubliclyAccessible(interfaceMethod);
}
@Test
void privateSubclassOverridesPropertyInPublicInterface() throws Exception {
Method originalMethod = PrivateSubclass.class.getDeclaredMethod("getText");
// Prerequisite: type must not be public for this use case.
assertNotPublic(originalMethod.getDeclaringClass());
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(PublicInterface.class);
assertThat(interfaceMethod.getName()).isEqualTo("getText");
assertThat(interfaceMethod.getParameterTypes()).isEmpty();
assertInterfaceMethod(interfaceMethod);
assertPubliclyAccessible(interfaceMethod);
}
@Test
void privateSubclassOverridesPropertyInPrivateInterface() throws Exception {
Method originalMethod = PrivateSubclass.class.getDeclaredMethod("getMessage");
// Prerequisite: type must not be public for this use case.
assertNotPublic(originalMethod.getDeclaringClass());
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(PrivateInterface.class);
assertThat(interfaceMethod.getName()).isEqualTo("getMessage");
assertThat(interfaceMethod.getParameterTypes()).isEmpty();
assertInterfaceMethod(interfaceMethod);
assertNotPubliclyAccessible(interfaceMethod);
}
@Test
void packagePrivateSubclassOverridesMethodInPublicInterface() throws Exception {
List<String> unmodifiableList = Collections.unmodifiableList(Arrays.asList("foo", "bar"));
Class<?> targetClass = unmodifiableList.getClass();
// Prerequisites for this use case:
assertNotPublic(targetClass);
Method originalMethod = targetClass.getMethod("contains", Object.class);
// Prerequisite: type must not be public for this use case.
assertNotPublic(originalMethod.getDeclaringClass());
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(Collection.class);
assertThat(interfaceMethod.getName()).isEqualTo("contains");
assertThat(interfaceMethod.getParameterTypes()).containsExactly(Object.class);
assertInterfaceMethod(interfaceMethod);
assertPubliclyAccessible(interfaceMethod);
}
@Test
void privateSubclassOverridesMethodInPrivateInterface() throws Exception {
Method originalMethod = PrivateSubclass.class.getMethod("greet", String.class);
// Prerequisite: type must not be public for this use case.
assertNotPublic(originalMethod.getDeclaringClass());
Method interfaceMethod = ClassUtils.getInterfaceMethodIfPossible(originalMethod, null);
assertThat(interfaceMethod.getDeclaringClass()).isEqualTo(PrivateInterface.class);
assertThat(interfaceMethod.getName()).isEqualTo("greet");
assertThat(interfaceMethod.getParameterTypes()).containsExactly(String.class);
assertInterfaceMethod(interfaceMethod);
assertNotPubliclyAccessible(interfaceMethod);
}
}
@Nested // gh-33216
| GetInterfaceMethodTests |
java | apache__kafka | server/src/test/java/org/apache/kafka/server/metrics/ClientMetricsTestUtils.java | {
"start": 4633,
"end": 5046
} | class ____ implements ClientTelemetryReceiver {
public int exportMetricsInvokedCount = 0;
public List<ByteBuffer> metricsData = new ArrayList<>();
public void exportMetrics(AuthorizableRequestContext context, ClientTelemetryPayload payload) {
exportMetricsInvokedCount += 1;
metricsData.add(payload.data());
}
}
public static | TestClientMetricsReceiver |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Aws2KmsComponentBuilderFactory.java | {
"start": 14170,
"end": 17969
} | class ____
extends AbstractComponentBuilder<KMS2Component>
implements Aws2KmsComponentBuilder {
@Override
protected KMS2Component buildConcreteComponent() {
return new KMS2Component();
}
private org.apache.camel.component.aws2.kms.KMS2Configuration getOrCreateConfiguration(KMS2Component component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.aws2.kms.KMS2Configuration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "configuration": ((KMS2Component) component).setConfiguration((org.apache.camel.component.aws2.kms.KMS2Configuration) value); return true;
case "lazyStartProducer": ((KMS2Component) component).setLazyStartProducer((boolean) value); return true;
case "operation": getOrCreateConfiguration((KMS2Component) component).setOperation((org.apache.camel.component.aws2.kms.KMS2Operations) value); return true;
case "overrideEndpoint": getOrCreateConfiguration((KMS2Component) component).setOverrideEndpoint((boolean) value); return true;
case "pojoRequest": getOrCreateConfiguration((KMS2Component) component).setPojoRequest((boolean) value); return true;
case "region": getOrCreateConfiguration((KMS2Component) component).setRegion((java.lang.String) value); return true;
case "uriEndpointOverride": getOrCreateConfiguration((KMS2Component) component).setUriEndpointOverride((java.lang.String) value); return true;
case "autowiredEnabled": ((KMS2Component) component).setAutowiredEnabled((boolean) value); return true;
case "kmsClient": getOrCreateConfiguration((KMS2Component) component).setKmsClient((software.amazon.awssdk.services.kms.KmsClient) value); return true;
case "proxyHost": getOrCreateConfiguration((KMS2Component) component).setProxyHost((java.lang.String) value); return true;
case "proxyPort": getOrCreateConfiguration((KMS2Component) component).setProxyPort((java.lang.Integer) value); return true;
case "proxyProtocol": getOrCreateConfiguration((KMS2Component) component).setProxyProtocol((software.amazon.awssdk.core.Protocol) value); return true;
case "accessKey": getOrCreateConfiguration((KMS2Component) component).setAccessKey((java.lang.String) value); return true;
case "profileCredentialsName": getOrCreateConfiguration((KMS2Component) component).setProfileCredentialsName((java.lang.String) value); return true;
case "secretKey": getOrCreateConfiguration((KMS2Component) component).setSecretKey((java.lang.String) value); return true;
case "sessionToken": getOrCreateConfiguration((KMS2Component) component).setSessionToken((java.lang.String) value); return true;
case "trustAllCertificates": getOrCreateConfiguration((KMS2Component) component).setTrustAllCertificates((boolean) value); return true;
case "useDefaultCredentialsProvider": getOrCreateConfiguration((KMS2Component) component).setUseDefaultCredentialsProvider((boolean) value); return true;
case "useProfileCredentialsProvider": getOrCreateConfiguration((KMS2Component) component).setUseProfileCredentialsProvider((boolean) value); return true;
case "useSessionCredentials": getOrCreateConfiguration((KMS2Component) component).setUseSessionCredentials((boolean) value); return true;
default: return false;
}
}
}
} | Aws2KmsComponentBuilderImpl |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/annotation/JmsListenerConfigurer.java | {
"start": 1403,
"end": 2017
} | interface ____ {
/**
* Callback allowing a {@link org.springframework.jms.config.JmsListenerEndpointRegistry
* JmsListenerEndpointRegistry} and specific {@link org.springframework.jms.config.JmsListenerEndpoint
* JmsListenerEndpoint} instances to be registered against the given
* {@link JmsListenerEndpointRegistrar}. The default
* {@link org.springframework.jms.config.JmsListenerContainerFactory JmsListenerContainerFactory}
* can also be customized.
* @param registrar the registrar to be configured
*/
void configureJmsListeners(JmsListenerEndpointRegistrar registrar);
}
| JmsListenerConfigurer |
java | apache__camel | dsl/camel-jbang/camel-jbang-plugin-generate/src/main/java/org/apache/camel/dsl/jbang/core/commands/generate/CodeSchemaGenerator.java | {
"start": 11003,
"end": 11055
} | class ____.
*
* @param classLoader The | loader |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/client/DefaultBulkApiClient.java | {
"start": 2786,
"end": 21627
} | class ____ extends AbstractClientBase implements BulkApiClient {
private static final String TOKEN_HEADER = "X-SFDC-Session";
private static final ContentType DEFAULT_ACCEPT_TYPE = ContentType.XML;
private JAXBContext context;
private ObjectFactory objectFactory;
public DefaultBulkApiClient(String version, SalesforceSession session, SalesforceHttpClient httpClient,
SalesforceLoginConfig loginConfig) throws SalesforceException {
super(version, session, httpClient, loginConfig);
try {
context = JAXBContext.newInstance(JobInfo.class.getPackage().getName(), getClass().getClassLoader());
} catch (JAXBException e) {
String msg = "Error loading Bulk API DTOs: " + e.getMessage();
throw new IllegalArgumentException(msg, e);
}
this.objectFactory = new ObjectFactory();
}
@Override
public void createJob(JobInfo request, Map<String, List<String>> headers, final JobInfoResponseCallback callback) {
// clear system fields if set
sanitizeJobRequest(request);
final Request post = getRequest(HttpMethod.POST, jobUrl(null), headers);
try {
marshalRequest(objectFactory.createJobInfo(request), post, APPLICATION_XML_UTF8);
} catch (Exception e) {
callback.onResponse(null, Collections.emptyMap(), new SalesforceException(e));
return;
}
// make the call and parse the result in callback
doHttpRequest(post, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
JobInfo value = null;
if (response != null) {
try {
value = unmarshalResponse(response, post, JobInfo.class);
} catch (SalesforceException e) {
ex = e;
}
}
callback.onResponse(value, headers, ex);
}
});
}
// reset read only fields
private void sanitizeJobRequest(JobInfo request) {
request.setApexProcessingTime(null);
request.setApiActiveProcessingTime(null);
request.setApiVersion(null);
request.setCreatedById(null);
request.setCreatedDate(null);
request.setId(null);
request.setNumberBatchesCompleted(null);
request.setNumberBatchesFailed(null);
request.setNumberBatchesInProgress(null);
request.setNumberBatchesQueued(null);
request.setNumberBatchesTotal(null);
request.setNumberRecordsFailed(null);
request.setNumberRecordsProcessed(null);
request.setNumberRetries(null);
request.setState(null);
request.setSystemModstamp(null);
request.setSystemModstamp(null);
}
@Override
public void getJob(String jobId, Map<String, List<String>> headers, final JobInfoResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, jobUrl(jobId), headers);
// make the call and parse the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
JobInfo value = null;
try {
value = unmarshalResponse(response, get, JobInfo.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value, headers, ex);
}
});
}
@Override
public void closeJob(String jobId, Map<String, List<String>> headers, final JobInfoResponseCallback callback) {
final JobInfo request = new JobInfo();
request.setState(JobStateEnum.CLOSED);
final Request post = getRequest(HttpMethod.POST, jobUrl(jobId), headers);
try {
marshalRequest(objectFactory.createJobInfo(request), post, APPLICATION_XML_UTF8);
} catch (SalesforceException e) {
callback.onResponse(null, Collections.emptyMap(), e);
return;
}
// make the call and parse the result
doHttpRequest(post, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
JobInfo value = null;
try {
value = unmarshalResponse(response, post, JobInfo.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value, headers, ex);
}
});
}
@Override
public void abortJob(String jobId, Map<String, List<String>> headers, final JobInfoResponseCallback callback) {
final JobInfo request = new JobInfo();
request.setState(JobStateEnum.ABORTED);
final Request post = getRequest(HttpMethod.POST, jobUrl(jobId), headers);
try {
marshalRequest(objectFactory.createJobInfo(request), post, APPLICATION_XML_UTF8);
} catch (SalesforceException e) {
callback.onResponse(null, Collections.emptyMap(), e);
return;
}
// make the call and parse the result
doHttpRequest(post, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
JobInfo value = null;
try {
value = unmarshalResponse(response, post, JobInfo.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value, headers, ex);
}
});
}
@Override
public void createBatch(
InputStream batchStream, String jobId, ContentType contentTypeEnum, Map<String, List<String>> headers,
final BatchInfoResponseCallback callback) {
final Request post = getRequest(HttpMethod.POST, batchUrl(jobId, null), headers);
post.body(new InputStreamRequestContent(batchStream));
post.headers(mutable -> mutable.add(
new HttpField(
HttpHeader.CONTENT_TYPE,
getContentType(contentTypeEnum) + ";charset=" + StandardCharsets.UTF_8.name())));
// make the call and parse the result
doHttpRequest(post, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
BatchInfo value = null;
try {
value = unmarshalResponse(response, post, BatchInfo.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value, headers, ex);
}
});
}
@Override
public void getBatch(
String jobId, String batchId, Map<String, List<String>> headers, final BatchInfoResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, batchUrl(jobId, batchId), headers);
// make the call and parse the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
BatchInfo value = null;
try {
value = unmarshalResponse(response, get, BatchInfo.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value, headers, ex);
}
});
}
@Override
public void getAllBatches(String jobId, Map<String, List<String>> headers, final BatchInfoListResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, batchUrl(jobId, null), headers);
// make the call and parse the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
BatchInfoList value = null;
try {
value = unmarshalResponse(response, get, BatchInfoList.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value != null ? value.getBatchInfo() : null, headers, ex);
}
});
}
@Override
public void getRequest(
String jobId, String batchId, Map<String, List<String>> headers, final StreamResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, batchRequestUrl(jobId, batchId, null), headers);
// make the call and parse the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
callback.onResponse(response, headers, ex);
}
});
}
@Override
public void getResults(
String jobId, String batchId, Map<String, List<String>> headers, final StreamResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, batchResultUrl(jobId, batchId, null), headers);
// make the call and return the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
callback.onResponse(response, headers, ex);
}
});
}
@Override
public void createBatchQuery(
String jobId, String soqlQuery, ContentType jobContentType, Map<String, List<String>> headers,
final BatchInfoResponseCallback callback) {
final Request post = getRequest(HttpMethod.POST, batchUrl(jobId, null), headers);
final byte[] queryBytes;
try {
queryBytes = soqlQuery.getBytes(StandardCharsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
callback.onResponse(null, Collections.emptyMap(),
new SalesforceException("Unexpected exception: " + e.getMessage(), e));
return;
}
post.body(new BytesRequestContent(queryBytes));
post.headers(mutable -> mutable.add(
new HttpField(
HttpHeader.CONTENT_TYPE,
getContentType(jobContentType) + ";charset=" + StandardCharsets.UTF_8.name())));
// make the call and parse the result
doHttpRequest(post, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
BatchInfo value = null;
try {
value = unmarshalResponse(response, post, BatchInfo.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value, headers, ex);
}
});
}
@Override
public void getQueryResultIds(
String jobId, String batchId, Map<String, List<String>> headers, final QueryResultIdsCallback callback) {
final Request get = getRequest(HttpMethod.GET, batchResultUrl(jobId, batchId, null), headers);
// make the call and parse the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
QueryResultList value = null;
try {
value = unmarshalResponse(response, get, QueryResultList.class);
} catch (SalesforceException e) {
ex = e;
}
callback.onResponse(value != null ? Collections.unmodifiableList(value.getResult()) : null, headers, ex);
}
});
}
@Override
public void getQueryResult(
String jobId, String batchId, String resultId, Map<String, List<String>> headers,
final StreamResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, batchResultUrl(jobId, batchId, resultId), headers);
// make the call and parse the result
doHttpRequest(get, new ClientResponseCallback() {
@Override
public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
callback.onResponse(response, headers, ex);
}
});
}
@Override
protected void setAccessToken(Request request) {
// Replace token
request.headers(headers -> headers.put(TOKEN_HEADER, accessToken));
}
@Override
protected void doHttpRequest(Request request, ClientResponseCallback callback) {
// set access token for all requests
setAccessToken(request);
// set default charset
request.headers(headers -> headers.add(HttpHeader.ACCEPT_CHARSET, StandardCharsets.UTF_8.name()));
// TODO check if this is really needed or not, since SF response content
// type seems fixed
// check if the default accept content type must be used
if (!request.getHeaders().contains(HttpHeader.ACCEPT)) {
final String contentType = getContentType(DEFAULT_ACCEPT_TYPE);
request.headers(headers -> headers.add(HttpHeader.ACCEPT, contentType));
// request content type and charset is set by the request entity
}
super.doHttpRequest(request, callback);
}
private static String getContentType(ContentType type) {
String result = null;
switch (type) {
case CSV:
result = "text/csv";
break;
case XML:
result = "application/xml";
break;
case ZIP_CSV:
case ZIP_XML:
result = type.toString().toLowerCase().replace('_', '/');
break;
default:
break;
}
return result;
}
@Override
protected SalesforceException createRestException(Response response, InputStream responseContent) {
// this must be of type Error
try {
final Error error = unmarshalResponse(responseContent, response.getRequest(), Error.class);
final RestError restError = new RestError();
restError.setErrorCode(error.getExceptionCode());
restError.setMessage(error.getExceptionMessage());
return new SalesforceException(Arrays.asList(restError), response.getStatus());
} catch (SalesforceException e) {
String msg = "Error un-marshaling Salesforce Error: " + e.getMessage();
return new SalesforceException(msg, e);
}
}
private <T> T unmarshalResponse(InputStream response, Request request, Class<T> resultClass) throws SalesforceException {
try {
Unmarshaller unmarshaller = context.createUnmarshaller();
// Disable XXE
SAXParserFactory spf = SAXParserFactory.newInstance();
spf.setNamespaceAware(true);
try {
spf.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, Boolean.TRUE);
spf.setFeature("http://xml.org/sax/features/external-general-entities", false);
spf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
spf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
} catch (ParserConfigurationException | SAXException ex) {
// LOG.debug("Error setting feature on parser: " +
// ex.getMessage());
}
Source xmlSource = new SAXSource(spf.newSAXParser().getXMLReader(), new InputSource(response));
JAXBElement<T> result = unmarshaller.unmarshal(xmlSource, resultClass);
return result.getValue();
} catch (JAXBException | SAXException | ParserConfigurationException e) {
throw new SalesforceException(
String.format("Error unmarshaling response {%s:%s} : %s", request.getMethod(), request.getURI(),
e.getMessage()),
e);
} catch (Exception e) {
throw new SalesforceException(
String.format("Error unmarshaling response for {%s:%s} : %s", request.getMethod(), request.getURI(),
e.getMessage()),
e);
}
}
private void marshalRequest(Object input, Request request, String contentType) throws SalesforceException {
try {
Marshaller marshaller = context.createMarshaller();
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
marshaller.marshal(input, byteStream);
request.body(new BytesRequestContent(contentType, byteStream.toByteArray()));
} catch (Exception e) {
throw new SalesforceException(
String.format("Error marshaling request for {%s:%s} : %s", request.getMethod(), request.getURI(),
e.getMessage()),
e);
}
}
private String jobUrl(String jobId) {
if (jobId != null) {
return super.instanceUrl + "/services/async/" + version + "/job/" + jobId;
} else {
return super.instanceUrl + "/services/async/" + version + "/job";
}
}
private String batchUrl(String jobId, String batchId) {
if (batchId != null) {
return jobUrl(jobId) + "/batch/" + batchId;
} else {
return jobUrl(jobId) + "/batch";
}
}
private String batchResultUrl(String jobId, String batchId, String resultId) {
if (resultId != null) {
return batchUrl(jobId, batchId) + "/result/" + resultId;
} else {
return batchUrl(jobId, batchId) + "/result";
}
}
private String batchRequestUrl(String jobId, String batchId, String requestId) {
if (requestId != null) {
return batchUrl(jobId, batchId) + "/request/" + requestId;
} else {
return batchUrl(jobId, batchId) + "/request";
}
}
}
| DefaultBulkApiClient |
java | greenrobot__greendao | tests/DaoTestBase/src/main/java/org/greenrobot/greendao/daotest/TreeEntityDao.java | {
"start": 664,
"end": 933
} | class ____ extends AbstractDao<TreeEntity, Long> {
public static final String TABLENAME = "TREE_ENTITY";
/**
* Properties of entity TreeEntity.<br/>
* Can be used for QueryBuilder and for referencing column names.
*/
public static | TreeEntityDao |
java | apache__camel | components/camel-cxf/camel-cxf-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/simplebinding/CxfRsConsumerSimpleBindingImplTest.java | {
"start": 2087,
"end": 5398
} | class ____ extends CamelTestSupport {
private static final String PORT_PATH = CXFTestSupport.getPort1() + "/CxfRsConsumerTest";
private static final String CXF_RS_ENDPOINT_URI = "cxfrs://http://localhost:" + PORT_PATH
+ "/rest?resourceClasses=org.apache.camel.component.cxf.jaxrs.simplebinding.testbean.CustomerServiceImpl&bindingStyle=SimpleConsumer";
private JAXBContext jaxb;
private CloseableHttpClient httpclient;
@Override
public void setupResources() throws Exception {
httpclient = HttpClientBuilder.create().build();
jaxb = JAXBContext.newInstance(CustomerList.class, Customer.class, Order.class, Product.class);
}
@Override
public void cleanupResources() throws Exception {
httpclient.close();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() {
from(CXF_RS_ENDPOINT_URI)
.recipientList(simple("direct:${header.operationName}"));
from("direct:getCustomer").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
assertEquals("123", exchange.getIn().getHeader("id"));
exchange.getMessage().setBody(new Customer(123, "Raul"));
exchange.getMessage().setHeader(Exchange.HTTP_RESPONSE_CODE, 200);
}
});
from("direct:newCustomer").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Customer c = exchange.getIn().getBody(Customer.class);
assertNotNull(c);
assertEquals(123, c.getId());
assertEquals(12, exchange.getIn().getHeader("age"));
exchange.getMessage().setHeader(Exchange.HTTP_RESPONSE_CODE, 200);
}
});
}
};
}
@Test
public void testGetCustomerOnlyHeaders() throws Exception {
HttpGet get = new HttpGet("http://localhost:" + PORT_PATH + "/rest/customerservice/customers/123");
get.addHeader("Accept", "text/xml");
CloseableHttpResponse response = httpclient.execute(get);
assertEquals(200, response.getCode());
Customer entity = (Customer) jaxb.createUnmarshaller().unmarshal(response.getEntity().getContent());
assertEquals(123, entity.getId());
}
@Test
public void testNewCustomerWithQueryParam() throws Exception {
HttpPost post = new HttpPost("http://localhost:" + PORT_PATH + "/rest/customerservice/customers?age=12");
StringWriter sw = new StringWriter();
jaxb.createMarshaller().marshal(new Customer(123, "Raul"), sw);
post.setEntity(new StringEntity(sw.toString()));
post.addHeader("Content-Type", "text/xml");
post.addHeader("Accept", "text/xml");
CloseableHttpResponse response = httpclient.execute(post);
assertEquals(200, response.getCode());
}
}
| CxfRsConsumerSimpleBindingImplTest |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/SortSpecSerdeTest.java | {
"start": 1306,
"end": 2279
} | class ____ {
private static final ObjectMapper OBJECT_MAPPER = JacksonMapperFactory.createObjectMapper();
@Test
void testSortSpec() throws JsonProcessingException {
SortSpec sortSpec =
SortSpec.builder()
.addField(1, true, true)
.addField(2, true, false)
.addField(3, false, true)
.addField(4, false, false)
.build();
assertThat(
OBJECT_MAPPER.readValue(
OBJECT_MAPPER.writeValueAsString(sortSpec), SortSpec.class))
.isEqualTo(sortSpec);
}
@Test
void testAny() throws JsonProcessingException {
assertThat(
OBJECT_MAPPER.readValue(
OBJECT_MAPPER.writeValueAsString(SortSpec.ANY), SortSpec.class))
.isEqualTo(SortSpec.ANY);
}
}
| SortSpecSerdeTest |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/util/InitTest.java | {
"start": 1153,
"end": 1569
} | class ____ {
@Test
void initTest() {
final Timer timer = new Timer("Log4j Initialization");
timer.start();
final Logger logger = LogManager.getLogger();
timer.stop();
final long elapsed = timer.getElapsedNanoTime();
System.out.println(timer);
assertTrue(elapsed < 1000000000, "Initialization time exceeded threshold; elapsed " + elapsed);
}
}
| InitTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/PropertiesUtil.java | {
"start": 993,
"end": 5434
} | class ____ {
/**
* Get integer from properties. This method throws an exception if the integer is not valid.
*
* @param config Properties
* @param key key in Properties
* @param defaultValue default value if value is not set
* @return default or value of key
*/
public static int getInt(Properties config, String key, int defaultValue) {
String val = config.getProperty(key);
if (val == null) {
return defaultValue;
} else {
try {
return Integer.parseInt(val);
} catch (NumberFormatException nfe) {
throw new IllegalArgumentException(
"Value for configuration key='"
+ key
+ "' is not set correctly. "
+ "Entered value='"
+ val
+ "'. Default value='"
+ defaultValue
+ "'");
}
}
}
/**
* Get long from properties. This method throws an exception if the long is not valid.
*
* @param config Properties
* @param key key in Properties
* @param defaultValue default value if value is not set
* @return default or value of key
*/
public static long getLong(Properties config, String key, long defaultValue) {
String val = config.getProperty(key);
if (val == null) {
return defaultValue;
} else {
try {
return Long.parseLong(val);
} catch (NumberFormatException nfe) {
throw new IllegalArgumentException(
"Value for configuration key='"
+ key
+ "' is not set correctly. "
+ "Entered value='"
+ val
+ "'. Default value='"
+ defaultValue
+ "'");
}
}
}
/**
* Get long from properties. This method only logs if the long is not valid.
*
* @param config Properties
* @param key key in Properties
* @param defaultValue default value if value is not set
* @return default or value of key
*/
public static long getLong(Properties config, String key, long defaultValue, Logger logger) {
try {
return getLong(config, key, defaultValue);
} catch (IllegalArgumentException iae) {
logger.warn(iae.getMessage());
return defaultValue;
}
}
/**
* Get boolean from properties. This method returns {@code true} iff the parsed value is "true".
*
* @param config Properties
* @param key key in Properties
* @param defaultValue default value if value is not set
* @return default or value of key
*/
public static boolean getBoolean(Properties config, String key, boolean defaultValue) {
String val = config.getProperty(key);
if (val == null) {
return defaultValue;
} else {
return Boolean.parseBoolean(val);
}
}
/**
* Flatten a recursive {@link Properties} to a first level property map.
*
* <p>In some cases, {@code KafkaProducer#propsToMap} for example, Properties is used purely as
* a HashTable without considering its default properties.
*
* @param config Properties to be flattened
* @return Properties without defaults; all properties are put in the first-level
*/
public static Properties flatten(Properties config) {
final Properties flattenProperties = new Properties();
Collections.list(config.propertyNames()).stream()
.forEach(
name -> {
Preconditions.checkArgument(name instanceof String);
flattenProperties.setProperty(
(String) name, config.getProperty((String) name));
});
return flattenProperties;
}
// ------------------------------------------------------------------------
/** Private default constructor to prevent instantiation. */
private PropertiesUtil() {}
}
| PropertiesUtil |
java | apache__camel | components/camel-mongodb-gridfs/src/main/java/org/apache/camel/component/mongodb/gridfs/QueryStrategy.java | {
"start": 862,
"end": 1015
} | enum ____ {
TimeStamp,
PersistentTimestamp,
FileAttribute,
TimeStampAndFileAttribute,
PersistentTimestampAndFileAttribute
}
| QueryStrategy |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/NativeQueryWithDuplicateColumnTest.java | {
"start": 3935,
"end": 4313
} | class ____ {
@Id
@GeneratedValue
@Column(name = "PUBLISHER_ID")
private Long id;
@Column(name = "DESCRIPTION")
private String description;
public Publisher() {
}
public Publisher(String description) {
this.description = description;
}
public Long getId() {
return id;
}
public String getDescription() {
return description;
}
}
}
| Publisher |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java | {
"start": 1157,
"end": 2268
} | interface ____<T extends Builder<T, U>, U extends InferenceConfigUpdate> {
U build();
T setResultsField(String resultsField);
}
Builder<? extends Builder<?, ?>, ? extends InferenceConfigUpdate> newBuilder();
default String getName() {
return getWriteableName();
}
static void checkFieldUniqueness(String... fieldNames) {
Set<String> duplicatedFieldNames = new HashSet<>();
Set<String> currentFieldNames = new HashSet<>(RESERVED_ML_FIELD_NAMES);
for (String fieldName : fieldNames) {
if (fieldName == null) {
continue;
}
if (currentFieldNames.contains(fieldName)) {
duplicatedFieldNames.add(fieldName);
} else {
currentFieldNames.add(fieldName);
}
}
if (duplicatedFieldNames.isEmpty() == false) {
throw ExceptionsHelper.badRequestException(
"Invalid inference config." + " More than one field is configured as {}",
duplicatedFieldNames
);
}
}
}
| Builder |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/InConverter.java | {
"start": 1708,
"end": 2752
} | class ____ extends CustomizedConverter {
@Override
public RexNode convert(CallExpression call, CallExpressionConvertRule.ConvertContext context) {
checkArgument(call, call.getChildren().size() > 1);
Expression headExpr = call.getChildren().get(1);
if (headExpr instanceof TableReferenceExpression) {
QueryOperation tableOperation =
((TableReferenceExpression) headExpr).getQueryOperation();
RexNode child = context.toRexNode(call.getChildren().get(0));
return RexSubQuery.in(
((FlinkRelBuilder) context.getRelBuilder())
.queryOperation(tableOperation)
.build(),
ImmutableList.of(child));
} else {
List<RexNode> child = toRexNodes(context, call.getChildren());
return context.getRelBuilder()
.getRexBuilder()
.makeIn(child.get(0), child.subList(1, child.size()));
}
}
}
| InConverter |
java | elastic__elasticsearch | test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java | {
"start": 1493,
"end": 2250
} | class ____ implements ThreadFilter {
private static final String OFFENDING_THREAD_NAME = "org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner";
@Override
public boolean reject(Thread t) {
return t.getName().contains(OFFENDING_THREAD_NAME)
|| t.getName().startsWith("LeaseRenewer")
|| t.getName().startsWith("SSL Certificates Store Monitor") // hadoop 3 brings that in
|| t.getName().startsWith("GcTimeMonitor") // hadoop 3
|| t.getName().startsWith("Command processor") // hadoop 3
|| t.getName().startsWith("ForkJoinPool-") // hadoop 3
|| t.getName().startsWith("ForkJoinPool.commonPool-worker-"); // hadoop 3
}
}
| HdfsClientThreadLeakFilter |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/catalog/ConfigurationPropertiesValidationResult.java | {
"start": 1063,
"end": 4805
} | class ____ extends PropertiesValidationResult implements Serializable {
private String fileName;
private String text;
private int lineNumber;
private boolean accepted;
public ConfigurationPropertiesValidationResult() {
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
public int getLineNumber() {
return lineNumber;
}
public void setLineNumber(int lineNumber) {
this.lineNumber = lineNumber;
}
public boolean isAccepted() {
return accepted;
}
public void setAccepted(boolean accepted) {
this.accepted = accepted;
}
/**
* A human readable summary of the validation errors.
*
* @param includeHeader whether to include a header
* @return the summary, or <tt>null</tt> if no validation errors
*/
public String summaryErrorMessage(boolean includeHeader) {
return summaryErrorMessage(includeHeader, true, false);
}
/**
* A human readable summary of the validation errors.
*
* @param includeHeader whether to include a header
* @param ignoreDeprecated whether to ignore deprecated options in use as an error or not
* @param includeWarnings whether to include warnings as an error or not
* @return the summary, or <tt>null</tt> if no validation errors
*/
public String summaryErrorMessage(boolean includeHeader, boolean ignoreDeprecated, boolean includeWarnings) {
boolean ok = isSuccess();
// special check if we should ignore deprecated options being used
if (ok && !ignoreDeprecated) {
ok = deprecated == null;
}
if (includeWarnings) {
if (unknownComponent != null) {
return "\tUnknown component: " + unknownComponent;
}
}
if (ok) {
return null;
}
// for each invalid option build a reason message
Map<String, String> options = new LinkedHashMap<>();
if (unknown != null) {
for (String name : unknown) {
if (unknownSuggestions != null && unknownSuggestions.containsKey(name)) {
String[] suggestions = unknownSuggestions.get(name);
if (suggestions != null && suggestions.length > 0) {
String str = Arrays.asList(suggestions).toString();
options.put(name, "Unknown option. Did you mean: " + str);
} else {
options.put(name, "Unknown option");
}
} else {
options.put(name, "Unknown option");
}
}
}
if (required != null) {
for (String name : required) {
options.put(name, "Missing required option");
}
}
if (deprecated != null) {
for (String name : deprecated) {
options.put(name, "Deprecated option");
}
}
if (invalidEnum != null) {
for (Map.Entry<String, String> entry : invalidEnum.entrySet()) {
String name = entry.getKey();
String[] choices = invalidEnumChoices.get(name);
String defaultValue = defaultValues != null ? defaultValues.get(entry.getKey()) : null;
String str = Arrays.asList(choices).toString();
String msg = "Invalid | ConfigurationPropertiesValidationResult |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/caching/CachingWithBatchAndFetchModeSelectTest.java | {
"start": 1212,
"end": 2910
} | class ____ {
public final static String CATEGORY_A_NAME = "A";
public final static String CATEGORY_B_NAME = "B";
public final static String CATEGORY_C_NAME = "C";
@BeforeAll
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
Category categoryA = new Category( CATEGORY_A_NAME );
entityManager.persist( categoryA );
Category categoryB = new Category( CATEGORY_B_NAME, categoryA );
entityManager.persist( categoryB );
Category categoryC = new Category( CATEGORY_C_NAME );
entityManager.persist( categoryC );
}
);
}
@Test
public void testSelectAll(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
List<Category> categories =
entityManager.createQuery( "Select c from Category c order by c.name", Category.class )
.getResultList();
Category categoryA = categories.get( 0 );
assertThat( categoryA.getName() ).isEqualTo( CATEGORY_A_NAME );
assertThat( categoryA.getParentCategory() ).isNull();
Category categoryB = categories.get( 1 );
Category parentCategory = categoryB.getParentCategory();
assertThat( categoryB.getName() ).isEqualTo( CATEGORY_B_NAME );
assertThat( parentCategory ).isNotNull();
assertThat( parentCategory ).isEqualTo( categoryA );
Category categoryC = categories.get( 2 );
assertThat( categoryC.getName() ).isEqualTo( CATEGORY_C_NAME );
assertThat( categoryC.getParentCategory() ).isNull();
}
);
}
@Entity(name = "Category")
@BatchSize(size = 500)
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public static | CachingWithBatchAndFetchModeSelectTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ops/Hoarder.java | {
"start": 218,
"end": 977
} | class ____ {
private Long id;
private String name;
private Item favoriteItem;
private Set<Item> items = new HashSet<Item>();
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Item getFavoriteItem() {
return favoriteItem;
}
public void setFavoriteItem(Item favoriteItem) {
this.favoriteItem = favoriteItem;
}
public Set<Item> getItems() {
return items;
}
public void setItems(Set<Item> items) {
this.items = items;
}
@Override
public String toString() {
return "Hoarder{" +
"id=" + id +
", name='" + name + '\'' +
", favoriteItem=" + favoriteItem +
'}';
}
}
| Hoarder |
java | spring-projects__spring-boot | module/spring-boot-webclient-test/src/test/java/org/springframework/boot/webclient/test/autoconfigure/ExampleWebClientService.java | {
"start": 1107,
"end": 1786
} | class ____ {
private final WebClient.Builder builder;
private final WebClient webClient;
public ExampleWebClientService(WebClient.Builder builder) {
this.builder = builder;
this.webClient = builder.build();
}
protected WebClient.Builder getWebClientBuilder() {
return this.builder;
}
public @Nullable String test() {
ResponseEntity<String> response = this.webClient.get().uri("/test").retrieve().toEntity(String.class).block();
assertThat(response).isNotNull();
return response.getBody();
}
public void testPostWithBody(String body) {
this.webClient.post().uri("/test").bodyValue(body).retrieve().toBodilessEntity().block();
}
}
| ExampleWebClientService |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/LogAggregationContext.java | {
"start": 9202,
"end": 9436
} | class ____ decide how to parse
* the parameters string.
*
* @param parameters log aggregation policy parameters.
*/
@Public
@Unstable
public abstract void setLogAggregationPolicyParameters(
String parameters);
}
| to |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/body/BrokerReplicasInfo.java | {
"start": 4630,
"end": 6937
} | class ____ extends RemotingSerializable {
private String brokerName;
private Long brokerId;
private String brokerAddress;
private Boolean alive;
public ReplicaIdentity(String brokerName, Long brokerId, String brokerAddress) {
this.brokerName = brokerName;
this.brokerId = brokerId;
this.brokerAddress = brokerAddress;
this.alive = false;
}
public ReplicaIdentity(String brokerName, Long brokerId, String brokerAddress, Boolean alive) {
this.brokerName = brokerName;
this.brokerId = brokerId;
this.brokerAddress = brokerAddress;
this.alive = alive;
}
public String getBrokerName() {
return brokerName;
}
public void setBrokerName(String brokerName) {
this.brokerName = brokerName;
}
public String getBrokerAddress() {
return brokerAddress;
}
public void setBrokerAddress(String brokerAddress) {
this.brokerAddress = brokerAddress;
}
public Long getBrokerId() {
return brokerId;
}
public void setBrokerId(Long brokerId) {
this.brokerId = brokerId;
}
public Boolean getAlive() {
return alive;
}
public void setAlive(Boolean alive) {
this.alive = alive;
}
@Override
public String toString() {
return "ReplicaIdentity{" +
"brokerName='" + brokerName + '\'' +
", brokerId=" + brokerId +
", brokerAddress='" + brokerAddress + '\'' +
", alive=" + alive +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ReplicaIdentity that = (ReplicaIdentity) o;
return brokerName.equals(that.brokerName) && brokerId.equals(that.brokerId) && brokerAddress.equals(that.brokerAddress);
}
@Override
public int hashCode() {
return Objects.hash(brokerName, brokerId, brokerAddress);
}
}
}
| ReplicaIdentity |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java | {
"start": 923,
"end": 2692
} | class ____ {
private final List<FieldAndFormat> fields;
/**
* Create a new FetchDocValuesContext using the provided input list.
* Field patterns containing wildcards are resolved and unmapped fields are filtered out.
*/
public FetchDocValuesContext(SearchExecutionContext searchExecutionContext, List<FieldAndFormat> fieldPatterns) {
// Use Linked HashMap to reserve the fetching order
final Map<String, FieldAndFormat> fieldToFormats = new LinkedHashMap<>();
for (FieldAndFormat field : fieldPatterns) {
Collection<String> fieldNames = searchExecutionContext.getMatchingFieldNames(field.field);
for (String fieldName : fieldNames) {
// the last matching field wins
fieldToFormats.put(fieldName, new FieldAndFormat(fieldName, field.format, field.includeUnmapped));
}
}
this.fields = new ArrayList<>(fieldToFormats.values());
int maxAllowedDocvalueFields = searchExecutionContext.getIndexSettings().getMaxDocvalueFields();
if (fields.size() > maxAllowedDocvalueFields) {
throw new IllegalArgumentException(
"Trying to retrieve too many docvalue_fields. Must be less than or equal to: ["
+ maxAllowedDocvalueFields
+ "] but was ["
+ fields.size()
+ "]. This limit can be set by changing the ["
+ IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.getKey()
+ "] index level setting."
);
}
}
/**
* Returns the required docvalue fields.
*/
public List<FieldAndFormat> fields() {
return this.fields;
}
}
| FetchDocValuesContext |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/MinioComponentBuilderFactory.java | {
"start": 1865,
"end": 25702
} | interface ____ extends ComponentBuilder<MinioComponent> {
/**
* Setting the autocreation of the bucket if bucket name not exist.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param autoCreateBucket the value to set
* @return the dsl builder
*/
default MinioComponentBuilder autoCreateBucket(boolean autoCreateBucket) {
doSetProperty("autoCreateBucket", autoCreateBucket);
return this;
}
/**
* The component configuration.
*
* The option is a:
* <code>org.apache.camel.component.minio.MinioConfiguration</code> type.
*
* Group: common
*
* @param configuration the value to set
* @return the dsl builder
*/
default MinioComponentBuilder configuration(org.apache.camel.component.minio.MinioConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* Endpoint can be an URL, domain name, IPv4 address or IPv6 address.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param endpoint the value to set
* @return the dsl builder
*/
default MinioComponentBuilder endpoint(java.lang.String endpoint) {
doSetProperty("endpoint", endpoint);
return this;
}
/**
* Reference to a Minio Client object in the registry.
*
* The option is a: <code>io.minio.MinioClient</code> type.
*
* Group: common
*
* @param minioClient the value to set
* @return the dsl builder
*/
default MinioComponentBuilder minioClient(io.minio.MinioClient minioClient) {
doSetProperty("minioClient", minioClient);
return this;
}
/**
* Set when creating new bucket.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param objectLock the value to set
* @return the dsl builder
*/
default MinioComponentBuilder objectLock(boolean objectLock) {
doSetProperty("objectLock", objectLock);
return this;
}
/**
* The policy for this queue to set in the method.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param policy the value to set
* @return the dsl builder
*/
default MinioComponentBuilder policy(java.lang.String policy) {
doSetProperty("policy", policy);
return this;
}
/**
* TCP/IP port number. 80 and 443 are used as defaults for HTTP and
* HTTPS.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: common
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default MinioComponentBuilder proxyPort(java.lang.Integer proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* The region in which Minio client needs to work. When using this
* parameter, the configuration will expect the lowercase name of the
* region (for example ap-east-1). You'll need to use the name
* Region.EU_WEST_1.id().
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param region the value to set
* @return the dsl builder
*/
default MinioComponentBuilder region(java.lang.String region) {
doSetProperty("region", region);
return this;
}
/**
* Flag to indicate to use secure connection to minio service or not.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param secure the value to set
* @return the dsl builder
*/
default MinioComponentBuilder secure(boolean secure) {
doSetProperty("secure", secure);
return this;
}
/**
* If this option is true and includeBody is true, then the
* MinioObject.close() method will be called on exchange completion.
* This option is strongly related to includeBody option. In case of
* setting includeBody to true and autocloseBody to false, it will be up
* to the caller to close the MinioObject stream. Setting autocloseBody
* to true, will close the MinioObject stream automatically.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param autoCloseBody the value to set
* @return the dsl builder
*/
default MinioComponentBuilder autoCloseBody(boolean autoCloseBody) {
doSetProperty("autoCloseBody", autoCloseBody);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default MinioComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Set this flag if you want to bypassGovernanceMode when deleting a
* particular object.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bypassGovernanceMode the value to set
* @return the dsl builder
*/
default MinioComponentBuilder bypassGovernanceMode(boolean bypassGovernanceMode) {
doSetProperty("bypassGovernanceMode", bypassGovernanceMode);
return this;
}
/**
* Delete objects from Minio after they have been retrieved. The delete
* is only performed if the Exchange is committed. If a rollback occurs,
* the object is not deleted. If this option is false, then the same
* objects will be retrieve over and over again on the polls. Therefore
* you need to use the Idempotent Consumer EIP in the route to filter
* out duplicates. You can filter using the MinioConstants#BUCKET_NAME
* and MinioConstants#OBJECT_NAME headers, or only the
* MinioConstants#OBJECT_NAME header.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param deleteAfterRead the value to set
* @return the dsl builder
*/
default MinioComponentBuilder deleteAfterRead(boolean deleteAfterRead) {
doSetProperty("deleteAfterRead", deleteAfterRead);
return this;
}
/**
* The delimiter which is used in the ListObjectsRequest to only consume
* objects we are interested in.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param delimiter the value to set
* @return the dsl builder
*/
default MinioComponentBuilder delimiter(java.lang.String delimiter) {
doSetProperty("delimiter", delimiter);
return this;
}
/**
* Destination bucket name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param destinationBucketName the value to set
* @return the dsl builder
*/
default MinioComponentBuilder destinationBucketName(java.lang.String destinationBucketName) {
doSetProperty("destinationBucketName", destinationBucketName);
return this;
}
/**
* Destination object name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param destinationObjectName the value to set
* @return the dsl builder
*/
default MinioComponentBuilder destinationObjectName(java.lang.String destinationObjectName) {
doSetProperty("destinationObjectName", destinationObjectName);
return this;
}
/**
* If it is true, the exchange body will be set to a stream to the
* contents of the file. If false, the headers will be set with the
* Minio object metadata, but the body will be null. This option is
* strongly related to autocloseBody option. In case of setting
* includeBody to true and autocloseBody to false, it will be up to the
* caller to close the MinioObject stream. Setting autocloseBody to
* true, will close the MinioObject stream automatically.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param includeBody the value to set
* @return the dsl builder
*/
default MinioComponentBuilder includeBody(boolean includeBody) {
doSetProperty("includeBody", includeBody);
return this;
}
/**
* The flag which is used in the ListObjectsRequest to set include
* folders.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param includeFolders the value to set
* @return the dsl builder
*/
default MinioComponentBuilder includeFolders(boolean includeFolders) {
doSetProperty("includeFolders", includeFolders);
return this;
}
/**
* The flag which is used in the ListObjectsRequest to get objects with
* user meta data.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param includeUserMetadata the value to set
* @return the dsl builder
*/
default MinioComponentBuilder includeUserMetadata(boolean includeUserMetadata) {
doSetProperty("includeUserMetadata", includeUserMetadata);
return this;
}
/**
* The flag which is used in the ListObjectsRequest to get objects with
* versioning.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param includeVersions the value to set
* @return the dsl builder
*/
default MinioComponentBuilder includeVersions(boolean includeVersions) {
doSetProperty("includeVersions", includeVersions);
return this;
}
/**
* Number of bytes of object data from offset.
*
* The option is a: <code>long</code> type.
*
* Group: consumer
*
* @param length the value to set
* @return the dsl builder
*/
default MinioComponentBuilder length(long length) {
doSetProperty("length", length);
return this;
}
/**
* Set match ETag parameter for get object(s).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param matchETag the value to set
* @return the dsl builder
*/
default MinioComponentBuilder matchETag(java.lang.String matchETag) {
doSetProperty("matchETag", matchETag);
return this;
}
/**
* Set the maxConnections parameter in the minio client configuration.
*
* The option is a: <code>int</code> type.
*
* Default: 60
* Group: consumer
*
* @param maxConnections the value to set
* @return the dsl builder
*/
default MinioComponentBuilder maxConnections(int maxConnections) {
doSetProperty("maxConnections", maxConnections);
return this;
}
/**
* Gets the maximum number of messages as a limit to poll at each
* polling. Gets the maximum number of messages as a limit to poll at
* each polling. The default value is 10. Use 0 or a negative number to
* set it as unlimited.
*
* The option is a: <code>int</code> type.
*
* Default: 10
* Group: consumer
*
* @param maxMessagesPerPoll the value to set
* @return the dsl builder
*/
default MinioComponentBuilder maxMessagesPerPoll(int maxMessagesPerPoll) {
doSetProperty("maxMessagesPerPoll", maxMessagesPerPoll);
return this;
}
/**
* Set modified since parameter for get object(s).
*
* The option is a: <code>java.time.ZonedDateTime</code>
* type.
*
* Group: consumer
*
* @param modifiedSince the value to set
* @return the dsl builder
*/
default MinioComponentBuilder modifiedSince(java.time.ZonedDateTime modifiedSince) {
doSetProperty("modifiedSince", modifiedSince);
return this;
}
/**
* Move objects from bucket to a different bucket after they have been
* retrieved. To accomplish the operation the destinationBucket option
* must be set. The copy bucket operation is only performed if the
* Exchange is committed. If a rollback occurs, the object is not moved.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param moveAfterRead the value to set
* @return the dsl builder
*/
default MinioComponentBuilder moveAfterRead(boolean moveAfterRead) {
doSetProperty("moveAfterRead", moveAfterRead);
return this;
}
/**
* Set not match ETag parameter for get object(s).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param notMatchETag the value to set
* @return the dsl builder
*/
default MinioComponentBuilder notMatchETag(java.lang.String notMatchETag) {
doSetProperty("notMatchETag", notMatchETag);
return this;
}
/**
* To get the object from the bucket with the given object name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param objectName the value to set
* @return the dsl builder
*/
default MinioComponentBuilder objectName(java.lang.String objectName) {
doSetProperty("objectName", objectName);
return this;
}
/**
* Start byte position of object data.
*
* The option is a: <code>long</code> type.
*
* Group: consumer
*
* @param offset the value to set
* @return the dsl builder
*/
default MinioComponentBuilder offset(long offset) {
doSetProperty("offset", offset);
return this;
}
/**
* Object name starts with prefix.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param prefix the value to set
* @return the dsl builder
*/
default MinioComponentBuilder prefix(java.lang.String prefix) {
doSetProperty("prefix", prefix);
return this;
}
/**
* List recursively than directory structure emulation.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param recursive the value to set
* @return the dsl builder
*/
default MinioComponentBuilder recursive(boolean recursive) {
doSetProperty("recursive", recursive);
return this;
}
/**
* list objects in bucket after this object name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param startAfter the value to set
* @return the dsl builder
*/
default MinioComponentBuilder startAfter(java.lang.String startAfter) {
doSetProperty("startAfter", startAfter);
return this;
}
/**
* Set un modified since parameter for get object(s).
*
* The option is a: <code>java.time.ZonedDateTime</code>
* type.
*
* Group: consumer
*
* @param unModifiedSince the value to set
* @return the dsl builder
*/
default MinioComponentBuilder unModifiedSince(java.time.ZonedDateTime unModifiedSince) {
doSetProperty("unModifiedSince", unModifiedSince);
return this;
}
/**
* when true, version 1 of REST API is used.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param useVersion1 the value to set
* @return the dsl builder
*/
default MinioComponentBuilder useVersion1(boolean useVersion1) {
doSetProperty("useVersion1", useVersion1);
return this;
}
/**
* Set specific version_ID of a object when deleting the object.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param versionId the value to set
* @return the dsl builder
*/
default MinioComponentBuilder versionId(java.lang.String versionId) {
doSetProperty("versionId", versionId);
return this;
}
/**
* Delete file object after the Minio file has been uploaded.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param deleteAfterWrite the value to set
* @return the dsl builder
*/
default MinioComponentBuilder deleteAfterWrite(boolean deleteAfterWrite) {
doSetProperty("deleteAfterWrite", deleteAfterWrite);
return this;
}
/**
* Setting the key name for an element in the bucket through endpoint
* parameter.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param keyName the value to set
* @return the dsl builder
*/
default MinioComponentBuilder keyName(java.lang.String keyName) {
doSetProperty("keyName", keyName);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default MinioComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* The operation to do in case the user don't want to do only an upload.
*
* The option is a:
* <code>org.apache.camel.component.minio.MinioOperations</code> type.
*
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default MinioComponentBuilder operation(org.apache.camel.component.minio.MinioOperations operation) {
doSetProperty("operation", operation);
return this;
}
/**
* If we want to use a POJO request as body or not.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param pojoRequest the value to set
* @return the dsl builder
*/
default MinioComponentBuilder pojoRequest(boolean pojoRequest) {
doSetProperty("pojoRequest", pojoRequest);
return this;
}
/**
* The storage | MinioComponentBuilder |
java | apache__camel | components/camel-disruptor/src/main/java/org/apache/camel/component/disruptor/SynchronizedExchange.java | {
"start": 1112,
"end": 1258
} | interface ____ {
Exchange getExchange();
void consumed(Exchange result);
Exchange cancelAndGetOriginalExchange();
}
| SynchronizedExchange |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/metrics/MetricsTrackingStateConfig.java | {
"start": 2378,
"end": 3534
} | class ____<
T extends MetricsTrackingStateConfig, B extends Builder<T, B>>
implements Serializable {
private static final long serialVersionUID = 1L;
protected boolean enabled;
protected int sampleInterval;
protected int historySize;
protected boolean stateNameAsVariable;
protected MetricGroup metricGroup;
public B setEnabled(boolean enabled) {
this.enabled = enabled;
return (B) this;
}
public B setSampleInterval(int sampleInterval) {
this.sampleInterval = sampleInterval;
return (B) this;
}
public B setHistorySize(int historySize) {
this.historySize = historySize;
return (B) this;
}
public B setStateNameAsVariable(boolean stateNameAsVariable) {
this.stateNameAsVariable = stateNameAsVariable;
return (B) this;
}
public B setMetricGroup(MetricGroup metricGroup) {
this.metricGroup = metricGroup;
return (B) this;
}
public abstract T build();
}
}
| Builder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/IntervalThrottler.java | {
"start": 664,
"end": 1145
} | enum ____ {
DOCUMENT_PARSING_FAILURE(60);
static final int MILLISECONDS_IN_SECOND = 1000;
private final Acceptor acceptor;
IntervalThrottler(long intervalSeconds) {
acceptor = new Acceptor(intervalSeconds * MILLISECONDS_IN_SECOND);
}
/**
* @return true if the operation gets accepted, false if throttled.
*/
boolean accept() {
return acceptor.accept();
}
// Defined separately for testing.
static | IntervalThrottler |
java | apache__camel | components/camel-olingo2/camel-olingo2-component/src/generated/java/org/apache/camel/component/olingo2/internal/Olingo2ApiCollection.java | {
"start": 1915,
"end": 2047
} | class ____ {
private static final Olingo2ApiCollection INSTANCE = new Olingo2ApiCollection();
}
}
| Olingo2ApiCollectionHolder |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OpenTelemetryEndpointBuilderFactory.java | {
"start": 1439,
"end": 1584
} | interface ____ {
/**
* Builder for endpoint for the OpenTelemetry Metrics component.
*/
public | OpenTelemetryEndpointBuilderFactory |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/integration/ExporterFactory.java | {
"start": 1043,
"end": 1730
} | class ____ {
private final ConcurrentHashMap<String, ReferenceCountExporter<?>> exporters = new ConcurrentHashMap<>();
protected ReferenceCountExporter<?> createExporter(String providerKey, Callable<Exporter<?>> exporterProducer) {
return ConcurrentHashMapUtils.computeIfAbsent(exporters, providerKey, key -> {
try {
return new ReferenceCountExporter<>(exporterProducer.call(), key, this);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
}
protected void remove(String key, ReferenceCountExporter<?> exporter) {
exporters.remove(key, exporter);
}
}
| ExporterFactory |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/AbstractHttpClientFactory.java | {
"start": 1222,
"end": 3360
} | class ____<T extends HttpClient> implements HttpClientFactory {
protected final MediaTypeCodecRegistry mediaTypeCodecRegistry;
protected final MessageBodyHandlerRegistry messageBodyHandlerRegistry;
protected final ConversionService conversionService;
protected AbstractHttpClientFactory(@Nullable MediaTypeCodecRegistry mediaTypeCodecRegistry,
MessageBodyHandlerRegistry messageBodyHandlerRegistry,
ConversionService conversionService) {
this.mediaTypeCodecRegistry = mediaTypeCodecRegistry;
this.messageBodyHandlerRegistry = messageBodyHandlerRegistry;
this.conversionService = conversionService;
}
/**
* Creates a new {@link HttpClient} instance for a given URI.
* @param uri The URI
* @return The client
*/
@NonNull
protected abstract T createHttpClient(@Nullable URI uri);
/**
* Creates a new {@link HttpClient} instance for a given URI and configuration.
* @param uri The URI
* @param configuration The configuration
* @return The client
*/
@NonNull
protected abstract T createHttpClient(@Nullable URI uri, @NonNull HttpClientConfiguration configuration);
@Override
@NonNull
public HttpClient createClient(URL url) {
return createHttpClient(url);
}
@Override
@NonNull
public HttpClient createClient(URL url, @NonNull HttpClientConfiguration configuration) {
return createHttpClient(url, configuration);
}
private T createHttpClient(URL url) {
try {
return createHttpClient(url != null ? url.toURI() : null);
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
@NonNull
private T createHttpClient(@Nullable URL url, @NonNull HttpClientConfiguration configuration) {
try {
return createHttpClient(url != null ? url.toURI() : null, configuration);
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
}
| AbstractHttpClientFactory |
java | elastic__elasticsearch | modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DeleteDatabaseConfigurationAction.java | {
"start": 913,
"end": 1289
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final DeleteDatabaseConfigurationAction INSTANCE = new DeleteDatabaseConfigurationAction();
public static final String NAME = "cluster:admin/ingest/geoip/database/delete";
protected DeleteDatabaseConfigurationAction() {
super(NAME);
}
public static | DeleteDatabaseConfigurationAction |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/util/subpackage/PersistentEntity.java | {
"start": 833,
"end": 979
} | class ____ {
private long id;
public long getId() {
return this.id;
}
protected void setId(long id) {
this.id = id;
}
}
| PersistentEntity |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/basicType/LongTest2.java | {
"start": 275,
"end": 3689
} | class ____ extends TestCase {
public void test_0() throws Exception {
String json = "{\"v1\":-1883391953414482124,\"v2\":-3019416596934963650,\"v3\":6497525620823745793,\"v4\":2136224289077142499,\"v5\":-2090575024006307745}";
String json2 = "{\"v1\":\"-1883391953414482124\",\"v2\":\"-3019416596934963650\",\"v3\":\"6497525620823745793\",\"v4\":\"2136224289077142499\",\"v5\":\"-2090575024006307745\"}";
Model m1 = JSON.parseObject(json, Model.class);
Model m2 = JSON.parseObject(json2, Model.class);
assertNotNull(m1);
assertNotNull(m2);
assertEquals(-1883391953414482124L, m1.v1);
assertEquals(-3019416596934963650L, m1.v2);
assertEquals(6497525620823745793L, m1.v3);
assertEquals(2136224289077142499L, m1.v4);
assertEquals(-2090575024006307745L, m1.v5);
assertEquals(-1883391953414482124L, m2.v1);
assertEquals(-3019416596934963650L, m2.v2);
assertEquals(6497525620823745793L, m2.v3);
assertEquals(2136224289077142499L, m2.v4);
assertEquals(-2090575024006307745L, m2.v5);
}
public void test_1() throws Exception {
String json = "{\"v1\":-1883391953414482124,\"v2\":-3019416596934963650,\"v3\":6497525620823745793,\"v4\":2136224289077142499,\"v5\":-2090575024006307745}";
String json2 = "{\"v1\":\"-1883391953414482124\",\"v2\":\"-3019416596934963650\",\"v3\":\"6497525620823745793\",\"v4\":\"2136224289077142499\",\"v5\":\"-2090575024006307745\"}";
Model m1 = new JSONReader(new StringReader(json)).readObject(Model.class);
Model m2 = new JSONReader(new StringReader(json2)).readObject(Model.class);
assertNotNull(m1);
assertNotNull(m2);
assertEquals(-1883391953414482124L, m1.v1);
assertEquals(-3019416596934963650L, m1.v2);
assertEquals(6497525620823745793L, m1.v3);
assertEquals(2136224289077142499L, m1.v4);
assertEquals(-2090575024006307745L, m1.v5);
assertEquals(-1883391953414482124L, m2.v1);
assertEquals(-3019416596934963650L, m2.v2);
assertEquals(6497525620823745793L, m2.v3);
assertEquals(2136224289077142499L, m2.v4);
assertEquals(-2090575024006307745L, m2.v5);
}
public void test_2() throws Exception {
String json = "[-1883391953414482124,-3019416596934963650,6497525620823745793,2136224289077142499,-2090575024006307745]";
String json2 = "[\"-1883391953414482124\",\"-3019416596934963650\",\"6497525620823745793\",\"2136224289077142499\",\"-2090575024006307745\"]";
Model m1 = new JSONReader(new StringReader(json), Feature.SupportArrayToBean).readObject(Model.class);
Model m2 = new JSONReader(new StringReader(json2), Feature.SupportArrayToBean).readObject(Model.class);
assertNotNull(m1);
assertNotNull(m2);
assertEquals(-1883391953414482124L, m1.v1);
assertEquals(-3019416596934963650L, m1.v2);
assertEquals(6497525620823745793L, m1.v3);
assertEquals(2136224289077142499L, m1.v4);
assertEquals(-2090575024006307745L, m1.v5);
assertEquals(-1883391953414482124L, m2.v1);
assertEquals(-3019416596934963650L, m2.v2);
assertEquals(6497525620823745793L, m2.v3);
assertEquals(2136224289077142499L, m2.v4);
assertEquals(-2090575024006307745L, m2.v5);
}
public static | LongTest2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.