language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/conversion/java8time/LocalDateToXMLGregorianCalendarConversionTest.java | {
"start": 1004,
"end": 2343
} | class ____ {
@ProcessorTest
public void shouldNullCheckOnBuiltinAndConversion() {
Target target = SourceTargetMapper.INSTANCE.toTarget( new Source() );
assertThat( target ).isNotNull();
assertThat( target.getDate() ).isNull();
Source source = SourceTargetMapper.INSTANCE.toSource( new Target() );
assertThat( source ).isNotNull();
assertThat( source.getDate() ).isNull();
}
@ProcessorTest
public void shouldMapCorrectlyOnBuiltinAndConversion() throws Exception {
XMLGregorianCalendar calendarDate = DatatypeFactory.newInstance().newXMLGregorianCalendarDate(
2007,
11,
14,
DatatypeConstants.FIELD_UNDEFINED );
LocalDate localDate = LocalDate.of( 2007, 11, 14 );
Source s1 = new Source();
s1.setDate( calendarDate );
Target target = SourceTargetMapper.INSTANCE.toTarget( s1 );
assertThat( target ).isNotNull();
assertThat( target.getDate() ).isEqualTo( localDate );
Target t1 = new Target();
t1.setDate( localDate );
Source source = SourceTargetMapper.INSTANCE.toSource( t1 );
assertThat( source ).isNotNull();
assertThat( source.getDate() ).isEqualTo( calendarDate );
}
}
| LocalDateToXMLGregorianCalendarConversionTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/ApplicationAttemptNotFoundException.java | {
"start": 1530,
"end": 1943
} | class ____ extends YarnException {
private static final long serialVersionUID = 8694508L;
public ApplicationAttemptNotFoundException(Throwable cause) {
super(cause);
}
public ApplicationAttemptNotFoundException(String message) {
super(message);
}
public ApplicationAttemptNotFoundException(String message, Throwable cause) {
super(message, cause);
}
}
| ApplicationAttemptNotFoundException |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/struct/TestPOJOAsArray.java | {
"start": 1360,
"end": 1797
} | class ____
{
public int x, y;
public String name;
public boolean complete;
public PojoAsArray() { }
protected PojoAsArray(String name, int x, int y, boolean c) {
this.name = name;
this.x = x;
this.y = y;
this.complete = c;
}
}
@JsonPropertyOrder(alphabetic=true)
@JsonFormat(shape=JsonFormat.Shape.ARRAY)
static | PojoAsArray |
java | playframework__playframework | core/play/src/main/java/play/mvc/StatusHeader.java | {
"start": 22388,
"end": 23597
} | class ____ from.
*
* @param resourceName The path of the resource to load.
* @param fileName The file name rendered in the {@code Content-Disposition} header. The response
* will also automatically include the MIME type in the {@code Content-Type} header deducing
* it from this file name if {@code fileMimeTypes} includes it or fallback to {@code
* application/octet-stream} if unknown.
* @param fileMimeTypes Used for file type mapping.
* @param onClose Useful in order to perform cleanup operations (e.g. deleting a temporary file
* generated for a download).
* @param executor The executor to use for asynchronous execution of {@code onClose}.
* @return a '200 OK' result containing the resource in the body with in-line content disposition.
*/
public Result sendResource(
String resourceName,
Optional<String> fileName,
FileMimeTypes fileMimeTypes,
Runnable onClose,
Executor executor) {
return sendResource(
resourceName, DEFAULT_INLINE_MODE, fileName, fileMimeTypes, onClose, executor);
}
/**
* Send the given resource.
*
* <p>The resource will be loaded from the same classloader that this | comes |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/onexception/DoCatchDirectRecipientListTest.java | {
"start": 985,
"end": 1941
} | class ____ extends ContextTestSupport {
@Test
public void testDoCatchDirectRoute() throws Exception {
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").doTry().to("direct:a").doCatch(Exception.class).to("direct:c").end();
from("direct:a").to("mock:a").recipientList(constant("direct:b"));
from("direct:b").to("mock:b").throwException(new IllegalArgumentException("Forced"));
from("direct:c").to("mock:c");
}
};
}
}
| DoCatchDirectRecipientListTest |
java | quarkusio__quarkus | extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/jackson/ObjectIdSerializer.java | {
"start": 290,
"end": 692
} | class ____ extends StdSerializer<ObjectId> {
public ObjectIdSerializer() {
super(ObjectId.class);
}
@Override
public void serialize(ObjectId objectId, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
throws IOException {
if (objectId != null) {
jsonGenerator.writeString(objectId.toString());
}
}
}
| ObjectIdSerializer |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/Punctuator.java | {
"start": 947,
"end": 1151
} | interface ____ as an argument to
* {@link org.apache.kafka.streams.processor.api.ProcessorContext#schedule(Duration, PunctuationType, Punctuator)}.
*
* @see Cancellable
*/
@FunctionalInterface
public | used |
java | elastic__elasticsearch | libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java | {
"start": 1498,
"end": 2713
} | class ____ {
private static final String SEPARATOR = "\t";
private static String TRANSITIVE = "--transitive";
private static String CHECK_INSTRUMENTATION = "--check-instrumentation";
private static String INCLUDE_INCUBATOR = "--include-incubator";
private static Set<String> OPTIONAL_ARGS = Set.of(TRANSITIVE, CHECK_INSTRUMENTATION, INCLUDE_INCUBATOR);
private static final Set<MethodDescriptor> INSTRUMENTED_METHODS = new HashSet<>();
private static final Set<MethodDescriptor> ACCESSIBLE_JDK_METHODS = new HashSet<>();
record CallChain(EntryPoint entryPoint, CallChain next) {
boolean isPublic() {
return ExternalAccess.isExternallyAccessible(entryPoint.access());
}
CallChain prepend(MethodDescriptor method, EnumSet<ExternalAccess> access, String module, String source, int line) {
return new CallChain(new EntryPoint(module, source, line, method, access), this);
}
static CallChain firstLevel(MethodDescriptor method, EnumSet<ExternalAccess> access, String module, String source, int line) {
return new CallChain(new EntryPoint(module, source, line, method, access), null);
}
}
| Main |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java | {
"start": 4196,
"end": 5148
} | class ____ extends ModuleTestCase {
private ClusterInfoService clusterInfoService = EmptyClusterInfoService.INSTANCE;
private ClusterService clusterService;
private static ThreadPool threadPool;
@BeforeClass
public static void createThreadPool() {
threadPool = new TestThreadPool("test");
}
@AfterClass
public static void terminateThreadPool() {
assertTrue(ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS));
threadPool = null;
}
@Override
public void setUp() throws Exception {
super.setUp();
clusterService = new ClusterService(
Settings.EMPTY,
new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
threadPool,
null
);
}
@Override
public void tearDown() throws Exception {
super.tearDown();
clusterService.close();
}
static | ClusterModuleTests |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_with_iterables_Test.java | {
"start": 14714,
"end": 17021
} | class ____<E> {
public Iterable<E> group;
public WithIterable(Iterable<E> iterable) {
this.group = iterable;
}
@Override
public String toString() {
return "WithIterable(%s)".formatted(group);
}
}
record Item(String name, int quantity) {
}
@Test
void should_honor_representation_in_unmatched_elements_when_comparing_unordered_set() {
// GIVEN
Set<Item> expectedItems = newHashSet(new Item("Shoes", 2), new Item("Pants", 3));
Set<Item> actualItems = newHashSet(new Item("Pants", 3), new Item("Loafers", 1));
registerFormatterForType(Item.class, item -> "Item(%s, %d)".formatted(item.name(), item.quantity()));
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actualItems).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(expectedItems));
// THEN
then(assertionError).hasMessageContaining(format("The following expected elements were not matched in the actual HashSet:%n" +
" [Item(Shoes, 2)]"));
}
@ParameterizedTest
@MethodSource
void should_treat_null_and_empty_iterables_as_equal(Iterable<?> iterable1, Iterable<?> iterable2) {
// GIVEN
WithIterable<?> actual = new WithIterable<>(iterable1);
WithIterable<?> expected = new WithIterable<>(iterable2);
// THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.treatingNullAndEmptyIterablesAsEqual()
.isEqualTo(expected);
}
static Stream<Arguments> should_treat_null_and_empty_iterables_as_equal() {
List<Author> emptyList = emptyList();
Set<Author> emptySet = emptySet();
Set<String> emptyTreeSet = new TreeSet<>();
Set<String> emptyHashSet = new HashSet<>();
return Stream.of(Arguments.of(null, emptyList),
Arguments.of(emptyList, null),
Arguments.of(null, emptySet),
Arguments.of(emptySet, null),
Arguments.of(null, emptyHashSet),
Arguments.of(emptyHashSet, null),
Arguments.of(null, emptyTreeSet),
Arguments.of(emptyTreeSet, null));
}
}
| WithIterable |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlStatsRequest.java | {
"start": 619,
"end": 1042
} | class ____ extends BaseNodesRequest {
private boolean includeStats;
public EsqlStatsRequest() {
super((String[]) null);
}
public boolean includeStats() {
return includeStats;
}
public void includeStats(boolean includeStats) {
this.includeStats = includeStats;
}
@Override
public String toString() {
return "esql_stats";
}
static | EsqlStatsRequest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/SubtaskCheckpointCoordinatorTest.java | {
"start": 5934,
"end": 22318
} | class ____ extends ChannelStateWriterImpl.NoOpChannelStateWriter {
private boolean started;
@Override
public void start(long checkpointId, CheckpointOptions checkpointOptions) {
started = true;
}
}
MockWriter writer = new MockWriter();
SubtaskCheckpointCoordinator coordinator = coordinator(writer);
CheckpointStorageLocationReference locationReference =
CheckpointStorageLocationReference.getDefault();
coordinator.initInputsCheckpoint(
1L,
unalignedCheckpointEnabled
? CheckpointOptions.unaligned(CheckpointType.CHECKPOINT, locationReference)
: CheckpointOptions.alignedNoTimeout(checkpointType, locationReference));
return writer.started;
}
@Test
void testNotifyCheckpointComplete() throws Exception {
TestTaskStateManager stateManager = new TestTaskStateManager();
MockEnvironment mockEnvironment =
MockEnvironment.builder().setTaskStateManager(stateManager).build();
try (SubtaskCheckpointCoordinator subtaskCheckpointCoordinator =
new MockSubtaskCheckpointCoordinatorBuilder()
.setEnvironment(mockEnvironment)
.build()) {
final OperatorChain<?, ?> operatorChain = getOperatorChain(mockEnvironment);
long checkpointId = 42L;
{
subtaskCheckpointCoordinator.notifyCheckpointComplete(
checkpointId, operatorChain, () -> true);
assertThat(stateManager.getNotifiedCompletedCheckpointId()).isEqualTo(checkpointId);
}
long newCheckpointId = checkpointId + 1;
{
subtaskCheckpointCoordinator.notifyCheckpointComplete(
newCheckpointId, operatorChain, () -> false);
// even task is not running, state manager could still receive the notification.
assertThat(stateManager.getNotifiedCompletedCheckpointId())
.isEqualTo(newCheckpointId);
}
}
}
@Test
void testSavepointNotResultingInPriorityEvents() throws Exception {
MockEnvironment mockEnvironment = MockEnvironment.builder().build();
try (SubtaskCheckpointCoordinator coordinator =
new MockSubtaskCheckpointCoordinatorBuilder()
.setUnalignedCheckpointEnabled(true)
.setEnvironment(mockEnvironment)
.build()) {
AtomicReference<Boolean> broadcastedPriorityEvent = new AtomicReference<>(null);
final OperatorChain<?, ?> operatorChain =
new RegularOperatorChain(
new MockStreamTaskBuilder(mockEnvironment).build(),
new NonRecordWriter<>()) {
@Override
public void broadcastEvent(AbstractEvent event, boolean isPriorityEvent)
throws IOException {
super.broadcastEvent(event, isPriorityEvent);
broadcastedPriorityEvent.set(isPriorityEvent);
}
};
coordinator.checkpointState(
new CheckpointMetaData(0, 0),
new CheckpointOptions(
SavepointType.savepoint(SavepointFormatType.CANONICAL),
CheckpointStorageLocationReference.getDefault()),
new CheckpointMetricsBuilder(),
operatorChain,
false,
() -> true);
assertThat(broadcastedPriorityEvent.get()).isFalse();
}
}
@Test
void testForceAlignedCheckpointResultingInPriorityEvents() throws Exception {
final long checkpointId = 42L;
MockEnvironment mockEnvironment = MockEnvironment.builder().build();
mockEnvironment.setCheckpointStorageAccess(
CHECKPOINT_STORAGE.createCheckpointStorage(mockEnvironment.getJobID()));
try (SubtaskCheckpointCoordinator coordinator =
new MockSubtaskCheckpointCoordinatorBuilder()
.setUnalignedCheckpointEnabled(true)
.setEnvironment(mockEnvironment)
.build()) {
AtomicReference<Boolean> broadcastedPriorityEvent = new AtomicReference<>(null);
final OperatorChain<?, ?> operatorChain =
new RegularOperatorChain(
new MockStreamTaskBuilder(mockEnvironment).build(),
new NonRecordWriter<>()) {
@Override
public void broadcastEvent(AbstractEvent event, boolean isPriorityEvent)
throws IOException {
super.broadcastEvent(event, isPriorityEvent);
broadcastedPriorityEvent.set(isPriorityEvent);
// test if we can write output data
coordinator
.getChannelStateWriter()
.addOutputData(
checkpointId,
new ResultSubpartitionInfo(0, 0),
0,
BufferBuilderTestUtils.buildSomeBuffer(500));
}
};
CheckpointOptions forcedAlignedOptions =
CheckpointOptions.unaligned(
CheckpointType.CHECKPOINT,
CheckpointStorageLocationReference.getDefault())
.withUnalignedUnsupported();
coordinator.checkpointState(
new CheckpointMetaData(checkpointId, 0),
forcedAlignedOptions,
new CheckpointMetricsBuilder(),
operatorChain,
false,
() -> true);
assertThat(broadcastedPriorityEvent.get()).isTrue();
}
}
@Test
void testSkipChannelStateForSavepoints() throws Exception {
try (SubtaskCheckpointCoordinator coordinator =
new MockSubtaskCheckpointCoordinatorBuilder()
.setUnalignedCheckpointEnabled(true)
.setPrepareInputSnapshot(
(u1, u2) -> {
fail("should not prepare input snapshot for savepoint");
return null;
})
.build()) {
coordinator.checkpointState(
new CheckpointMetaData(0, 0),
new CheckpointOptions(
SavepointType.savepoint(SavepointFormatType.CANONICAL),
CheckpointStorageLocationReference.getDefault()),
new CheckpointMetricsBuilder(),
new RegularOperatorChain<>(
new NoOpStreamTask<>(new DummyEnvironment()), new NonRecordWriter<>()),
false,
() -> true);
}
}
@Test
void testNotifyCheckpointSubsumed() throws Exception {
TestTaskStateManager stateManager = new TestTaskStateManager();
MockEnvironment mockEnvironment =
MockEnvironment.builder().setTaskStateManager(stateManager).build();
try (SubtaskCheckpointCoordinatorImpl subtaskCheckpointCoordinator =
(SubtaskCheckpointCoordinatorImpl)
new MockSubtaskCheckpointCoordinatorBuilder()
.setEnvironment(mockEnvironment)
.setUnalignedCheckpointEnabled(true)
.build()) {
StreamMap<String, String> streamMap =
new StreamMap<>((MapFunction<String, String>) value -> value);
setProcessingTimeService(streamMap, new TestProcessingTimeService());
final OperatorChain<String, AbstractStreamOperator<String>> operatorChain =
operatorChain(streamMap);
StreamTaskStateInitializerImpl stateInitializer =
new StreamTaskStateInitializerImpl(mockEnvironment, new TestStateBackend());
operatorChain.initializeStateAndOpenOperators(stateInitializer);
long checkpointId = 42L;
subtaskCheckpointCoordinator
.getChannelStateWriter()
.start(checkpointId, CheckpointOptions.forCheckpointWithDefaultLocation());
subtaskCheckpointCoordinator.checkpointState(
new CheckpointMetaData(checkpointId, System.currentTimeMillis()),
CheckpointOptions.forCheckpointWithDefaultLocation(),
new CheckpointMetricsBuilder(),
operatorChain,
false,
() -> false);
long notifySubsumeCheckpointId = checkpointId + 1L;
// notify checkpoint aborted before execution.
subtaskCheckpointCoordinator.notifyCheckpointSubsumed(
notifySubsumeCheckpointId, operatorChain, () -> true);
assertThat(
((TestStateBackend.TestKeyedStateBackend<?>)
streamMap.getKeyedStateBackend())
.getSubsumeCheckpointId())
.isEqualTo(notifySubsumeCheckpointId);
}
}
@Test
void testNotifyCheckpointAbortedManyTimes() throws Exception {
MockEnvironment mockEnvironment = MockEnvironment.builder().build();
int maxRecordAbortedCheckpoints = 256;
try (SubtaskCheckpointCoordinatorImpl subtaskCheckpointCoordinator =
(SubtaskCheckpointCoordinatorImpl)
new MockSubtaskCheckpointCoordinatorBuilder()
.setEnvironment(mockEnvironment)
.setMaxRecordAbortedCheckpoints(maxRecordAbortedCheckpoints)
.build()) {
final OperatorChain<?, ?> operatorChain = getOperatorChain(mockEnvironment);
long notifyAbortedTimes = maxRecordAbortedCheckpoints + 42;
for (int i = 1; i < notifyAbortedTimes; i++) {
subtaskCheckpointCoordinator.notifyCheckpointAborted(i, operatorChain, () -> true);
assertThat(subtaskCheckpointCoordinator.getAbortedCheckpointSize())
.isEqualTo(Math.min(maxRecordAbortedCheckpoints, i));
}
}
}
@Test
void testNotifyCheckpointAbortedBeforeAsyncPhase() throws Exception {
TestTaskStateManager stateManager = new TestTaskStateManager();
MockEnvironment mockEnvironment =
MockEnvironment.builder().setTaskStateManager(stateManager).build();
try (SubtaskCheckpointCoordinatorImpl subtaskCheckpointCoordinator =
(SubtaskCheckpointCoordinatorImpl)
new MockSubtaskCheckpointCoordinatorBuilder()
.setEnvironment(mockEnvironment)
.setUnalignedCheckpointEnabled(true)
.build()) {
CheckpointOperator checkpointOperator =
new CheckpointOperator(new OperatorSnapshotFutures());
final OperatorChain<String, AbstractStreamOperator<String>> operatorChain =
operatorChain(checkpointOperator);
long checkpointId = 42L;
// notify checkpoint aborted before execution.
subtaskCheckpointCoordinator.notifyCheckpointAborted(
checkpointId, operatorChain, () -> true);
assertThat(subtaskCheckpointCoordinator.getAbortedCheckpointSize()).isOne();
subtaskCheckpointCoordinator
.getChannelStateWriter()
.start(checkpointId, CheckpointOptions.forCheckpointWithDefaultLocation());
subtaskCheckpointCoordinator.checkpointState(
new CheckpointMetaData(checkpointId, System.currentTimeMillis()),
CheckpointOptions.forCheckpointWithDefaultLocation(),
new CheckpointMetricsBuilder(),
operatorChain,
false,
() -> false);
assertThat(checkpointOperator.isCheckpointed()).isFalse();
assertThat(stateManager.getReportedCheckpointId()).isEqualTo(-1);
assertThat(subtaskCheckpointCoordinator.getAbortedCheckpointSize()).isZero();
assertThat(subtaskCheckpointCoordinator.getAsyncCheckpointRunnableSize()).isZero();
}
}
@Test
void testBroadcastCancelCheckpointMarkerOnAbortingFromCoordinator() throws Exception {
OneInputStreamTaskTestHarness<String, String> testHarness =
new OneInputStreamTaskTestHarness<>(
OneInputStreamTask::new,
1,
1,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setupOutputForSingletonOperatorChain();
StreamConfig streamConfig = testHarness.getStreamConfig();
streamConfig.setStreamOperator(new MapOperator());
StreamMockEnvironment mockEnvironment =
new StreamMockEnvironment(
testHarness.jobConfig,
testHarness.taskConfig,
testHarness.executionConfig,
testHarness.memorySize,
new MockInputSplitProvider(),
testHarness.bufferSize,
testHarness.taskStateManager);
try (SubtaskCheckpointCoordinator subtaskCheckpointCoordinator =
new MockSubtaskCheckpointCoordinatorBuilder()
.setEnvironment(mockEnvironment)
.build()) {
ArrayList<Object> recordOrEvents = new ArrayList<>();
StreamElementSerializer<String> stringStreamElementSerializer =
new StreamElementSerializer<>(StringSerializer.INSTANCE);
ResultPartitionWriter resultPartitionWriter =
new RecordOrEventCollectingResultPartitionWriter<>(
recordOrEvents, stringStreamElementSerializer);
mockEnvironment.addOutput(resultPartitionWriter);
testHarness.invoke(mockEnvironment);
testHarness.waitForTaskRunning();
OneInputStreamTask<String, String> task = testHarness.getTask();
OperatorChain<String, OneInputStreamOperator<String, String>> operatorChain =
task.operatorChain;
long checkpointId = 42L;
// notify checkpoint aborted before execution.
subtaskCheckpointCoordinator.notifyCheckpointAborted(
checkpointId, operatorChain, () -> true);
subtaskCheckpointCoordinator.checkpointState(
new CheckpointMetaData(checkpointId, System.currentTimeMillis()),
CheckpointOptions.forCheckpointWithDefaultLocation(),
new CheckpointMetricsBuilder(),
operatorChain,
false,
() -> false);
assertThat(recordOrEvents).hasSize(1);
Object recordOrEvent = recordOrEvents.get(0);
// ensure CancelCheckpointMarker is broadcast downstream.
assertThat(recordOrEvent).isInstanceOf(CancelCheckpointMarker.class);
assertThat(((CancelCheckpointMarker) recordOrEvent).getCheckpointId())
.isEqualTo(checkpointId);
testHarness.endInput();
testHarness.waitForTaskCompletion();
}
}
private static | MockWriter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/EqualsGetClassTest.java | {
"start": 7709,
"end": 8261
} | class ____ {
Object foo =
new Object() {
@Override
public boolean equals(Object o) {
if (o == null) {
return false;
}
return o.getClass() == getClass();
}
};
}
""")
.doTest();
}
@Test
public void negative_notOnParameter() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/ReadShareGroupStateResponse.java | {
"start": 1278,
"end": 5292
} | class ____ extends AbstractResponse {
private final ReadShareGroupStateResponseData data;
public ReadShareGroupStateResponse(ReadShareGroupStateResponseData data) {
super(ApiKeys.READ_SHARE_GROUP_STATE);
this.data = data;
}
@Override
public ReadShareGroupStateResponseData data() {
return data;
}
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.results().forEach(
result -> result.partitions().forEach(
partitionResult -> updateErrorCounts(counts, Errors.forCode(partitionResult.errorCode()))
)
);
return counts;
}
@Override
public int throttleTimeMs() {
return DEFAULT_THROTTLE_TIME;
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
// No op
}
public static ReadShareGroupStateResponse parse(Readable readable, short version) {
return new ReadShareGroupStateResponse(
new ReadShareGroupStateResponseData(readable, version)
);
}
public static ReadShareGroupStateResponseData toResponseData(
Uuid topicId,
int partition,
long startOffset,
int stateEpoch,
List<ReadShareGroupStateResponseData.StateBatch> stateBatches
) {
return new ReadShareGroupStateResponseData()
.setResults(List.of(
new ReadShareGroupStateResponseData.ReadStateResult()
.setTopicId(topicId)
.setPartitions(List.of(
new ReadShareGroupStateResponseData.PartitionResult()
.setPartition(partition)
.setStartOffset(startOffset)
.setStateEpoch(stateEpoch)
.setStateBatches(stateBatches)
))
));
}
public static ReadShareGroupStateResponseData toErrorResponseData(Uuid topicId, int partitionId, Errors error, String errorMessage) {
return new ReadShareGroupStateResponseData().setResults(
List.of(new ReadShareGroupStateResponseData.ReadStateResult()
.setTopicId(topicId)
.setPartitions(List.of(new ReadShareGroupStateResponseData.PartitionResult()
.setPartition(partitionId)
.setErrorCode(error.code())
.setErrorMessage(errorMessage)))));
}
public static ReadShareGroupStateResponseData.PartitionResult toErrorResponsePartitionResult(int partitionId, Errors error, String errorMessage) {
return new ReadShareGroupStateResponseData.PartitionResult()
.setPartition(partitionId)
.setErrorCode(error.code())
.setErrorMessage(errorMessage);
}
public static ReadShareGroupStateResponseData.ReadStateResult toResponseReadStateResult(Uuid topicId, List<ReadShareGroupStateResponseData.PartitionResult> partitionResults) {
return new ReadShareGroupStateResponseData.ReadStateResult()
.setTopicId(topicId)
.setPartitions(partitionResults);
}
public static ReadShareGroupStateResponseData toGlobalErrorResponse(ReadShareGroupStateRequestData request, Errors error) {
List<ReadShareGroupStateResponseData.ReadStateResult> readStateResults = new ArrayList<>();
request.topics().forEach(topicData -> {
List<ReadShareGroupStateResponseData.PartitionResult> partitionResults = new ArrayList<>();
topicData.partitions().forEach(partitionData -> partitionResults.add(
toErrorResponsePartitionResult(partitionData.partition(), error, error.message()))
);
readStateResults.add(toResponseReadStateResult(topicData.topicId(), partitionResults));
});
return new ReadShareGroupStateResponseData().setResults(readStateResults);
}
}
| ReadShareGroupStateResponse |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/conversion/AbstractNumberToStringConversion.java | {
"start": 727,
"end": 2369
} | class ____ extends SimpleConversion {
private final boolean sourceTypeNumberSubclass;
public AbstractNumberToStringConversion(boolean sourceTypeNumberSubclass) {
this.sourceTypeNumberSubclass = sourceTypeNumberSubclass;
}
@Override
public Set<Type> getToConversionImportTypes(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
return Collections.singleton( conversionContext.getTypeFactory().getType( DecimalFormat.class ) );
}
else {
return super.getToConversionImportTypes( conversionContext );
}
}
protected boolean requiresDecimalFormat(ConversionContext conversionContext) {
return sourceTypeNumberSubclass && conversionContext.getNumberFormat() != null;
}
@Override
protected Set<Type> getFromConversionImportTypes(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
return Collections.singleton( conversionContext.getTypeFactory().getType( DecimalFormat.class ) );
}
else {
return super.getFromConversionImportTypes( conversionContext );
}
}
@Override
protected List<Type> getFromConversionExceptionTypes(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
return Collections.singletonList( conversionContext.getTypeFactory().getType( ParseException.class ) );
}
else {
return super.getFromConversionExceptionTypes( conversionContext );
}
}
}
| AbstractNumberToStringConversion |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/CollectionIncompatibleTypeTest.java | {
"start": 27538,
"end": 28078
} | class ____ {
void a(BiFunction<Set<Integer>, Set<Number>, Set<Integer>> b) {}
void b() {
a(Sets::difference);
}
}
""")
.doTest();
}
@Test
public void memberReferenceWithCustomFunctionalInterface() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.collect.Sets;
import java.util.function.BiFunction;
import java.util.Set;
public | Test |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/generate/ValueCodeGeneratorTests.java | {
"start": 9509,
"end": 10158
} | class ____ {
@Test
void generateWhenPrimitiveArray() {
int[] array = { 0, 1, 2 };
assertThat(generateCode(array)).hasToString("new int[] {0, 1, 2}");
}
@Test
void generateWhenWrapperArray() {
Integer[] array = { 0, 1, 2 };
assertThat(resolve(generateCode(array))).hasValueCode("new Integer[] {0, 1, 2}");
}
@Test
void generateWhenClassArray() {
Class<?>[] array = new Class<?>[] { InputStream.class, OutputStream.class };
assertThat(resolve(generateCode(array))).hasImport(InputStream.class, OutputStream.class)
.hasValueCode("new Class[] {InputStream.class, OutputStream.class}");
}
}
@Nested
| ArrayTests |
java | apache__dubbo | dubbo-plugin/dubbo-auth/src/main/java/org/apache/dubbo/auth/exception/RpcAuthenticationException.java | {
"start": 851,
"end": 1147
} | class ____ extends Exception {
public RpcAuthenticationException() {}
public RpcAuthenticationException(String message) {
super(message);
}
public RpcAuthenticationException(String message, Throwable cause) {
super(message, cause);
}
}
| RpcAuthenticationException |
java | quarkusio__quarkus | extensions/vertx/deployment/src/test/java/io/quarkus/vertx/DuplicatedContextTest.java | {
"start": 1382,
"end": 4985
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyConsumers.class));
private Vertx vertx;
@BeforeEach
public void init() {
vertx = Vertx.vertx();
vertx.createHttpServer()
.requestHandler(req -> req.response().end("hey!"))
.listen(8082).toCompletionStage().toCompletableFuture().join();
}
@AfterEach
public void cleanup() {
if (vertx != null) {
vertx.close().toCompletionStage().toCompletableFuture().join();
}
}
@Inject
EventBus bus;
@Inject
MyConsumers consumers;
@Test
public void testThatMessageSentToTheEventBusAreProcessedOnUnsharedDuplicatedContext() {
String id1 = UUID.randomUUID().toString();
bus.send("context-send", id1);
await().until(() -> consumers.probes().contains(id1));
consumers.reset();
String id2 = UUID.randomUUID().toString();
bus.send("context-send-blocking", id2);
await().until(() -> consumers.probes().contains(id2));
consumers.reset();
String id3 = UUID.randomUUID().toString();
bus.publish("context-publish", id3);
await().until(() -> consumers.probes().size() == 2);
await().until(() -> consumers.probes().contains(id3));
consumers.reset();
String id4 = UUID.randomUUID().toString();
bus.publish("context-publish-blocking", id4);
await().until(() -> consumers.probes().size() == 2);
await().until(() -> consumers.probes().contains(id4));
}
@Test
public void testThatEventConsumersAreCalledOnDuplicatedContext() {
// Creates a bunch of requests that will be executed concurrently.
// So, we are sure that event loops are reused.
List<Uni<Void>> unis = new ArrayList<>();
for (int i = 0; i < 100; i++) {
String uuid = UUID.randomUUID().toString();
unis.add(
bus.<String> request("context", uuid)
.map(Message::body)
.invoke(resp -> {
Assertions.assertEquals(resp, "OK-" + uuid);
})
.replaceWithVoid());
}
Uni.join().all(unis).andFailFast()
.runSubscriptionOn(Infrastructure.getDefaultExecutor())
.await().atMost(Duration.ofSeconds(30));
}
@Test
@DisabledOnOs(value = OS.WINDOWS, disabledReason = "Failing on Windows very often")
public void testThatBlockingEventConsumersAreCalledOnDuplicatedContext() {
// Creates a bunch of requests that will be executed concurrently.
// So, we are sure that event loops are reused.
List<Uni<Void>> unis = new ArrayList<>();
for (int i = 0; i < 500; i++) {
String uuid = UUID.randomUUID().toString();
unis.add(
bus.<String> request("context-blocking", uuid)
.map(Message::body)
.invoke(resp -> {
Assertions.assertEquals(resp, "OK-" + uuid);
})
.replaceWithVoid());
}
Uni.join().all(unis).andFailFast()
.runSubscriptionOn(Infrastructure.getDefaultExecutor())
.await().atMost(Duration.ofSeconds(60));
}
@ApplicationScoped
public static | DuplicatedContextTest |
java | apache__camel | components/camel-consul/src/test/java/org/apache/camel/component/consul/cluster/ConsulClusterViewIT.java | {
"start": 1362,
"end": 2752
} | class ____ {
@RegisterExtension
public static ConsulService service = ConsulServiceFactory.createService();
@Test
public void getLeaderTest() throws Exception {
//Set up a single node cluster.
ConsulClusterService consulClusterService = new ConsulClusterService();
consulClusterService.setId("node");
consulClusterService.setUrl(service.getConsulUrl());
consulClusterService.setRootPath("root");
//Set up context with single locked route.
DefaultCamelContext context = new DefaultCamelContext();
context.getCamelContextExtension().setName("context");
context.addService(consulClusterService);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("master:key:timer:consul?repeatCount=1")
.routeId("route1")
.stop();
}
});
context.start();
//Get view and leader.
CamelClusterView view = consulClusterService.getView("key");
Optional<CamelClusterMember> leaderOptional = view.getLeader();
Assertions.assertTrue(leaderOptional.isPresent());
Assertions.assertTrue(leaderOptional.get().isLeader());
Assertions.assertTrue(leaderOptional.get().isLocal());
context.stop();
}
}
| ConsulClusterViewIT |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java | {
"start": 1223,
"end": 6002
} | class ____ extends AbstractAggregationTestCase {
public AvgTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
var suppliers = new ArrayList<TestCaseSupplier>();
Stream.of(
MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true),
MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true),
MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true),
MultiRowTestCaseSupplier.aggregateMetricDoubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE),
MultiRowTestCaseSupplier.exponentialHistogramCases(1, 100)
).flatMap(List::stream).map(AvgTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers));
suppliers.add(
// Folding
new TestCaseSupplier(
List.of(DataType.INTEGER),
() -> new TestCaseSupplier.TestCase(
List.of(TestCaseSupplier.TypedData.multiRow(List.of(200), DataType.INTEGER, "field")),
"Avg[field=Attribute[channel=0]]",
DataType.DOUBLE,
equalTo(200.)
)
)
);
return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers, true);
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new Avg(source, args.get(0));
}
private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) {
return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> {
var fieldTypedData = fieldSupplier.get();
var fieldData = fieldTypedData.multiRowData();
Object expected = null;
if (fieldData.size() == 1) {
// For single elements, we directly return them to avoid precision issues
expected = switch (fieldTypedData.type()) {
case AGGREGATE_METRIC_DOUBLE -> {
var aggMetric = (AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral) fieldData.get(0);
yield aggMetric.sum() / (aggMetric.count().doubleValue());
}
case EXPONENTIAL_HISTOGRAM -> {
var expHisto = (ExponentialHistogram) fieldData.get(0);
yield expHisto.sum() / expHisto.valueCount();
}
default -> ((Number) fieldData.get(0)).doubleValue();
};
} else if (fieldData.size() > 1) {
expected = switch (fieldTypedData.type().widenSmallNumeric()) {
case INTEGER -> fieldData.stream()
.map(v -> (Integer) v)
.collect(Collectors.summarizingInt(Integer::intValue))
.getAverage();
case LONG -> fieldData.stream().map(v -> (Long) v).collect(Collectors.summarizingLong(Long::longValue)).getAverage();
case DOUBLE -> fieldData.stream()
.map(v -> (Double) v)
.collect(Collectors.summarizingDouble(Double::doubleValue))
.getAverage();
case AGGREGATE_METRIC_DOUBLE -> {
double sum = fieldData.stream()
.mapToDouble(v -> ((AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral) v).sum())
.sum();
double count = fieldData.stream()
.mapToInt(v -> ((AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral) v).count())
.sum();
yield count == 0 ? null : sum / count;
}
case EXPONENTIAL_HISTOGRAM -> {
double sum = fieldData.stream().mapToDouble(v -> ((ExponentialHistogram) v).sum()).sum();
double count = fieldData.stream().mapToLong(v -> ((ExponentialHistogram) v).valueCount()).sum();
yield count == 0 ? null : sum / count;
}
default -> throw new IllegalStateException("Unexpected value: " + fieldTypedData.type());
};
}
return new TestCaseSupplier.TestCase(
List.of(fieldTypedData),
"Avg[field=Attribute[channel=0]]",
DataType.DOUBLE,
equalTo(expected)
);
});
}
}
| AvgTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/SourceValueFetcherSortedBooleanIndexFieldData.java | {
"start": 1046,
"end": 1185
} | class ____ extends SourceValueFetcherIndexFieldData<SortedNumericLongValues> {
public static | SourceValueFetcherSortedBooleanIndexFieldData |
java | quarkusio__quarkus | integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/HelloClient.java | {
"start": 1146,
"end": 1394
} | class ____ {
private final String message;
@JsonCreator
public Message(String message) {
this.message = message;
}
public String getMessage() {
return message;
}
}
}
| Message |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/DataSourceInitializationAutoConfigurationTests.java | {
"start": 5022,
"end": 5263
} | class ____ {
@Bean
ApplicationScriptDatabaseInitializer customInitializer() {
return mock(ApplicationScriptDatabaseInitializer.class);
}
}
@Configuration(proxyBeanMethods = false)
static | ApplicationDatabaseInitializerConfiguration |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/recording/TestRecordingAnnotationsProvider.java | {
"start": 567,
"end": 868
} | interface ____ {
}
@Override
public Class<? extends Annotation> ignoredProperty() {
return TestIgnoreProperty.class;
}
@Override
public Class<? extends Annotation> recordableConstructor() {
return TestRecordableConstructor.class;
}
}
| TestRecordableConstructor |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/cli/DefaultCommandLine.java | {
"start": 1059,
"end": 5411
} | class ____ implements CommandLine {
private Properties systemProperties = new Properties();
private LinkedHashMap<String, Object> undeclaredOptions = new LinkedHashMap<>();
private LinkedHashMap<Option, Object> declaredOptions = new LinkedHashMap<>();
private List<String> remainingArgs = new ArrayList<>();
private String[] rawArguments = EMPTY_STRING_ARRAY;
@Override
public CommandLine parseNew(String[] args) {
DefaultCommandLine defaultCommandLine = new DefaultCommandLine();
defaultCommandLine.systemProperties.putAll(systemProperties);
defaultCommandLine.undeclaredOptions.putAll(undeclaredOptions);
defaultCommandLine.declaredOptions.putAll(declaredOptions);
CommandLineParser parser = new CommandLineParser();
return parser.parse(defaultCommandLine, args);
}
@Override
public Map<Option, Object> getOptions() {
return declaredOptions;
}
@Override
public List<String> getRemainingArgs() {
return remainingArgs;
}
@Override
public Properties getSystemProperties() {
return systemProperties;
}
@Override
public boolean hasOption(String name) {
return declaredOptions.containsKey(new Option(name, null)) || undeclaredOptions.containsKey(name);
}
@Override
public Object optionValue(String name) {
Option opt = new Option(name, null);
if (declaredOptions.containsKey(opt)) {
return declaredOptions.get(opt);
}
if (undeclaredOptions.containsKey(name)) {
return undeclaredOptions.get(name);
}
return null;
}
@Override
public String getRemainingArgsString() {
return remainingArgsToString(" ", false);
}
@Override
public Map.Entry<String, Object> lastOption() {
final Iterator<Map.Entry<String, Object>> i = undeclaredOptions.entrySet().iterator();
while (i.hasNext()) {
Map.Entry<String, Object> next = i.next();
if (!i.hasNext()) {
return next;
}
}
return null;
}
@Override
public String getRemainingArgsWithOptionsString() {
return remainingArgsToString(" ", true);
}
@Override
public Map<String, Object> getUndeclaredOptions() {
return Collections.unmodifiableMap(undeclaredOptions);
}
@Override
public String[] getRawArguments() {
return rawArguments;
}
/**
* @param option option
*/
void addDeclaredOption(Option option) {
addDeclaredOption(option, Boolean.TRUE);
}
/**
* @param option option
*/
void addUndeclaredOption(String option) {
undeclaredOptions.put(option, Boolean.TRUE);
}
/**
* @param option option
* @param value value
*/
void addUndeclaredOption(String option, Object value) {
undeclaredOptions.put(option, value);
}
/**
* @param option option
* @param value value
*/
void addDeclaredOption(Option option, Object value) {
declaredOptions.put(option, value);
}
/**
* @param arg arg
*/
void addRemainingArg(String arg) {
remainingArgs.add(arg);
}
/**
* @param name name
* @param value value
*/
void addSystemProperty(String name, String value) {
systemProperties.put(name, value);
}
/**
* @param args array of args
*/
void setRawArguments(String[] args) {
this.rawArguments = args;
}
private String remainingArgsToString(String separator, boolean includeOptions) {
StringBuilder sb = new StringBuilder();
String sep = "";
List<String> args = new ArrayList<>(remainingArgs);
if (includeOptions) {
for (Map.Entry<String, Object> entry : undeclaredOptions.entrySet()) {
if (entry.getValue() instanceof Boolean && ((Boolean) entry.getValue())) {
args.add('-' + entry.getKey());
} else {
args.add('-' + entry.getKey() + '=' + entry.getValue());
}
}
}
for (String arg : args) {
sb.append(sep).append(arg);
sep = separator;
}
return sb.toString();
}
}
| DefaultCommandLine |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/resource/OAuth2ResourceServerConfigurerTests.java | {
"start": 85224,
"end": 86037
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.anyRequest().authenticated())
.oauth2ResourceServer((server) -> server
.jwt(Customizer.withDefaults()));
return http.build();
// @formatter:on
}
@Bean
BearerTokenResolver resolverOne() {
DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
resolver.setAllowUriQueryParameter(true);
return resolver;
}
@Bean
BearerTokenResolver resolverTwo() {
DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
resolver.setAllowFormEncodedBodyParameter(true);
return resolver;
}
}
@Configuration
@EnableWebSecurity
static | MultipleBearerTokenResolverBeansConfig |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/filesystem/ReloadableFileManager.java | {
"start": 11093,
"end": 11626
} | class ____ extends ClassLoader {
private final ClassLoader[] delegateClassLoaders;
public JoinClassLoader(ClassLoader parent, ClassLoader... delegateClassLoaders) {
super(parent);
this.delegateClassLoaders = delegateClassLoaders;
}
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
// It would be easier to call the loadClass() methods of the delegateClassLoaders
// here, but we have to load the | JoinClassLoader |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java | {
"start": 1462,
"end": 5184
} | class ____ {
public static final IndicesAccessControl ALLOW_NO_INDICES = new IndicesAccessControl(
true,
Collections.singletonMap(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER, IndexAccessControl.ALLOW_ALL)
);
public static final IndicesAccessControl DENIED = new IndicesAccessControl(false, Collections.emptyMap());
private final boolean granted;
private final CachedSupplier<Map<String, IndexAccessControl>> indexPermissionsSupplier;
public IndicesAccessControl(boolean granted, Map<String, IndexAccessControl> indexPermissions) {
this(granted, () -> Objects.requireNonNull(indexPermissions));
}
public IndicesAccessControl(boolean granted, Supplier<Map<String, IndexAccessControl>> indexPermissionsSupplier) {
this.granted = granted;
this.indexPermissionsSupplier = CachedSupplier.wrap(Objects.requireNonNull(indexPermissionsSupplier));
}
protected IndicesAccessControl(IndicesAccessControl copy) {
this(copy.granted, copy.indexPermissionsSupplier);
}
/**
* @return The document and field permissions for an index if they exist, otherwise <code>null</code> is returned.
* If <code>null</code> is being returned this means that there are no field or document level restrictions.
*/
@Nullable
public IndexAccessControl getIndexPermissions(String index) {
assert false == IndexNameExpressionResolver.hasSelectorSuffix(index)
|| IndexNameExpressionResolver.hasSelector(index, IndexComponentSelector.FAILURES)
: "index name [" + index + "] cannot have explicit selector other than ::failures";
return getAllIndexPermissions().get(index);
}
public boolean hasIndexPermissions(String index) {
return getIndexPermissions(index) != null;
}
/**
* @return Whether any role / permission group is allowed to access all indices.
*/
public boolean isGranted() {
return granted;
}
public DlsFlsUsage getFieldAndDocumentLevelSecurityUsage() {
boolean hasFls = false;
boolean hasDls = false;
for (IndexAccessControl iac : this.getAllIndexPermissions().values()) {
if (iac.fieldPermissions.hasFieldLevelSecurity()) {
hasFls = true;
}
if (iac.documentPermissions.hasDocumentLevelPermissions()) {
hasDls = true;
}
if (hasFls && hasDls) {
return DlsFlsUsage.BOTH;
}
}
if (hasFls) {
return DlsFlsUsage.FLS;
} else if (hasDls) {
return DlsFlsUsage.DLS;
} else {
return DlsFlsUsage.NONE;
}
}
public List<String> getIndicesWithFieldOrDocumentLevelSecurity() {
return getIndexNames(iac -> iac.fieldPermissions.hasFieldLevelSecurity() || iac.documentPermissions.hasDocumentLevelPermissions());
}
public List<String> getIndicesWithFieldLevelSecurity() {
return getIndexNames(iac -> iac.fieldPermissions.hasFieldLevelSecurity());
}
public List<String> getIndicesWithDocumentLevelSecurity() {
return getIndexNames(iac -> iac.documentPermissions.hasDocumentLevelPermissions());
}
private List<String> getIndexNames(Predicate<IndexAccessControl> predicate) {
return this.getAllIndexPermissions()
.entrySet()
.stream()
.filter(entry -> predicate.test(entry.getValue()))
.map(Map.Entry::getKey)
.toList();
}
private Map<String, IndexAccessControl> getAllIndexPermissions() {
return this.indexPermissionsSupplier.get();
}
public | IndicesAccessControl |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java | {
"start": 2833,
"end": 7444
} | class ____ extends SimpleChannelInboundHandler<HttpRequest> {
public static final Logger LOG =
LoggerFactory.getLogger(FSImageHandler.class);
private final FSImageLoader image;
private final ChannelGroup activeChannels;
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
activeChannels.add(ctx.channel());
}
FSImageHandler(FSImageLoader image, ChannelGroup activeChannels) throws IOException {
this.image = image;
this.activeChannels = activeChannels;
}
@Override
public void channelRead0(ChannelHandlerContext ctx, HttpRequest request)
throws Exception {
if (request.method() != HttpMethod.GET) {
DefaultHttpResponse resp = new DefaultHttpResponse(HTTP_1_1,
METHOD_NOT_ALLOWED);
resp.headers().set(CONNECTION, CLOSE);
ctx.write(resp).addListener(ChannelFutureListener.CLOSE);
return;
}
QueryStringDecoder decoder = new QueryStringDecoder(request.uri());
// check path. throw exception if path doesn't start with WEBHDFS_PREFIX
String path = getPath(decoder);
final String op = getOp(decoder);
// check null op
if (op == null) {
throw new IllegalArgumentException("Param op must be specified.");
}
final String content;
switch (op) {
case "GETFILESTATUS":
content = image.getFileStatus(path);
break;
case "LISTSTATUS":
content = image.listStatus(path);
break;
case "GETACLSTATUS":
content = image.getAclStatus(path);
break;
case "GETXATTRS":
List<String> names = getXattrNames(decoder);
String encoder = getEncoder(decoder);
content = image.getXAttrs(path, names, encoder);
break;
case "LISTXATTRS":
content = image.listXAttrs(path);
break;
case "GETCONTENTSUMMARY":
content = image.getContentSummary(path);
break;
default:
throw new IllegalArgumentException("Invalid value for webhdfs parameter"
+ " \"op\"");
}
LOG.info("op=" + op + " target=" + path);
DefaultFullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1,
HttpResponseStatus.OK, Unpooled.wrappedBuffer(content
.getBytes(StandardCharsets.UTF_8)));
resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
resp.headers().set(CONTENT_LENGTH, resp.content().readableBytes());
resp.headers().set(CONNECTION, CLOSE);
ctx.write(resp).addListener(ChannelFutureListener.CLOSE);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
throws Exception {
Exception e = cause instanceof Exception ? (Exception) cause : new
Exception(cause);
final String output = JsonUtil.toJsonString(e);
ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(StandardCharsets.UTF_8));
final DefaultFullHttpResponse resp = new DefaultFullHttpResponse(
HTTP_1_1, INTERNAL_SERVER_ERROR, content);
resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
if (e instanceof IllegalArgumentException) {
resp.setStatus(BAD_REQUEST);
} else if (e instanceof FileNotFoundException) {
resp.setStatus(NOT_FOUND);
} else if (e instanceof IOException) {
resp.setStatus(FORBIDDEN);
}
resp.headers().set(CONTENT_LENGTH, resp.content().readableBytes());
resp.headers().set(CONNECTION, CLOSE);
ctx.write(resp).addListener(ChannelFutureListener.CLOSE);
}
private static String getOp(QueryStringDecoder decoder) {
Map<String, List<String>> parameters = decoder.parameters();
return parameters.containsKey("op")
? StringUtils.toUpperCase(parameters.get("op").get(0)) : null;
}
private static List<String> getXattrNames(QueryStringDecoder decoder) {
Map<String, List<String>> parameters = decoder.parameters();
return parameters.get("xattr.name");
}
private static String getEncoder(QueryStringDecoder decoder) {
Map<String, List<String>> parameters = decoder.parameters();
return parameters.containsKey("encoding") ? parameters.get("encoding").get(
0) : null;
}
private static String getPath(QueryStringDecoder decoder)
throws FileNotFoundException {
String path = decoder.path();
if (path.startsWith(WEBHDFS_PREFIX)) {
return path.substring(WEBHDFS_PREFIX_LENGTH);
} else {
throw new FileNotFoundException("Path: " + path + " should " +
"start with " + WEBHDFS_PREFIX);
}
}
}
| FSImageHandler |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/source/constants/ConstantsTest.java | {
"start": 852,
"end": 4577
} | class ____ {
@RegisterExtension
final GeneratedSource generatedSrc =
new GeneratedSource().addComparisonToFixtureFor( ConstantsMapper.class );
@ProcessorTest
public void testNumericConstants() {
ConstantsTarget target = ConstantsMapper.INSTANCE.mapFromConstants( "dummy" );
assertThat( target ).isNotNull();
assertThat( target.isBooleanValue() ).isEqualTo( true );
assertThat( target.getBooleanBoxed() ).isEqualTo( false );
assertThat( target.getCharValue() ).isEqualTo( 'b' );
assertThat( target.getCharBoxed() ).isEqualTo( 'a' );
assertThat( target.getByteValue() ).isEqualTo( (byte) 20 );
assertThat( target.getByteBoxed() ).isEqualTo( (byte) -128 );
assertThat( target.getShortValue() ).isEqualTo( (short) 1996 );
assertThat( target.getShortBoxed() ).isEqualTo( (short) -1996 );
assertThat( target.getIntValue() ).isEqualTo( -03777777 );
assertThat( target.getIntBoxed() ).isEqualTo( 15 );
assertThat( target.getLongValue() ).isEqualTo( 0x7fffffffffffffffL );
assertThat( target.getLongBoxed() ).isEqualTo( 0xCAFEBABEL );
assertThat( target.getFloatValue() ).isEqualTo( 1.40e-45f );
assertThat( target.getFloatBoxed() ).isEqualTo( 3.4028235e38f );
assertThat( target.getDoubleValue() ).isEqualTo( 1e137 );
assertThat( target.getDoubleBoxed() ).isEqualTo( 0x0.001P-1062d );
assertThat( target.getDoubleBoxedZero() ).isEqualTo( 0.0 );
}
@ProcessorTest
@IssueKey("1458")
@WithClasses({
ConstantsTarget.class,
ErroneousConstantMapper.class
})
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 25,
message =
"Can't map \"zz\" to \"boolean booleanValue\". Reason: only 'true' or 'false' are supported."),
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 26,
message = "Can't map \"'ba'\" to \"char charValue\". Reason: invalid character literal."),
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 27,
message =
"Can't map \"200\" to \"byte byteValue\". Reason: Value out of range. Value:\"200\" Radix:10."),
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 28,
message = "Can't map \"0xFFFF_FFFF_FFFF\" to \"int intValue\". Reason: integer number too large."),
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 29,
message = "Can't map \"1\" to \"long longValue\". Reason: L/l mandatory for long types."),
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 30,
message = "Can't map \"1.40e-_45f\" to \"float floatValue\". Reason: improperly placed underscores."),
@Diagnostic(type = ErroneousConstantMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 31,
message = "Can't map \"1e-137000\" to \"double doubleValue\". Reason: floating point number too small.")
}
)
public void miscellaneousDetailMessages() {
}
}
| ConstantsTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/DisplayNameGenerationTests.java | {
"start": 17902,
"end": 18138
} | class ____ {
@SentenceFragment("is instantiated with its constructor")
@Test
void instantiateViaConstructor() {
new Stack<>();
}
@SentenceFragment("when new")
@Nested
| IndicativeGeneratorWithCustomSentenceFragmentsTestCase |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/MultipleTriggeringPolicyTest.java | {
"start": 1639,
"end": 3790
} | class ____ {
@Test
@LoggerContextSource("LOG4J2-1100/log4j2.xml")
void xml(final Configuration configuration) {
assertBothTriggeringPoliciesConfigured(configuration);
}
@Test
@Tag("json")
@LoggerContextSource("LOG4J2-1100/log4j2.json")
void json(final Configuration configuration) {
assertBothTriggeringPoliciesConfigured(configuration);
}
@Test
@Tag("yaml")
@LoggerContextSource("LOG4J2-1100/log4j2-good.yaml")
void yaml(final Configuration configuration) {
assertBothTriggeringPoliciesConfigured(configuration);
}
@Test
@Tag("yaml")
@Disabled("LOG4J2-1100 demonstration")
@LoggerContextSource("LOG4J2-1100/log4j2-good.yaml")
void unsupportedYamlSyntax(final Configuration configuration) {
assertBothTriggeringPoliciesConfigured(configuration);
}
void assertBothTriggeringPoliciesConfigured(final Configuration configuration) {
final RollingFileAppender appender = configuration.getAppender("File");
assertNotNull(appender);
final CompositeTriggeringPolicy compositeTriggeringPolicy = appender.getTriggeringPolicy();
assertNotNull(compositeTriggeringPolicy);
final TriggeringPolicy[] triggeringPolicies = compositeTriggeringPolicy.getTriggeringPolicies();
assertEquals(2, triggeringPolicies.length);
final SizeBasedTriggeringPolicy sizeBasedTriggeringPolicy;
final TimeBasedTriggeringPolicy timeBasedTriggeringPolicy;
if (triggeringPolicies[0] instanceof SizeBasedTriggeringPolicy) {
sizeBasedTriggeringPolicy = (SizeBasedTriggeringPolicy) triggeringPolicies[0];
timeBasedTriggeringPolicy = (TimeBasedTriggeringPolicy) triggeringPolicies[1];
} else {
sizeBasedTriggeringPolicy = (SizeBasedTriggeringPolicy) triggeringPolicies[1];
timeBasedTriggeringPolicy = (TimeBasedTriggeringPolicy) triggeringPolicies[0];
}
assertEquals(7, timeBasedTriggeringPolicy.getInterval());
assertEquals(100 * 1024 * 1024, sizeBasedTriggeringPolicy.getMaxFileSize());
}
}
| MultipleTriggeringPolicyTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/path/PathAssert_isEmptyFile_Test.java | {
"start": 817,
"end": 1125
} | class ____ extends PathAssertBaseTest {
@Override
protected PathAssert invoke_api_method() {
return assertions.isEmptyFile();
}
@Override
protected void verify_internal_effects() {
verify(paths).assertIsEmptyFile(getInfo(assertions), getActual(assertions));
}
}
| PathAssert_isEmptyFile_Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java | {
"start": 41434,
"end": 44692
} | class ____ extends
BaseRecoveryIterator<Entry<ApplicationAttemptId, MasterKey>> {
NMTokensStateIterator() throws IOException {
super(NM_TOKENS_KEY_PREFIX);
}
@Override
protected Entry<ApplicationAttemptId, MasterKey> getNextItem(
LeveldbIterator it) throws IOException {
return getNextMasterKeyEntry(it);
}
}
private Entry<ApplicationAttemptId, MasterKey> getNextMasterKeyEntry(
LeveldbIterator it) throws IOException {
Entry<ApplicationAttemptId, MasterKey> masterKeyentry = null;
try {
while (it.hasNext()) {
Entry<byte[], byte[]> entry = it.next();
String fullKey = asString(entry.getKey());
if (!fullKey.startsWith(NM_TOKENS_KEY_PREFIX)) {
break;
}
String key = fullKey.substring(NM_TOKENS_KEY_PREFIX.length());
if (key.startsWith(ApplicationAttemptId.appAttemptIdStrPrefix)) {
ApplicationAttemptId attempt;
try {
attempt = ApplicationAttemptId.fromString(key);
} catch (IllegalArgumentException e) {
throw new IOException("Bad application master key state for "
+ fullKey, e);
}
masterKeyentry = new AbstractMap.SimpleEntry<>(attempt,
parseMasterKey(entry.getValue()));
break;
}
}
} catch (DBException e) {
throw new IOException(e);
}
return masterKeyentry;
}
@Override
public RecoveredNMTokensState loadNMTokensState() throws IOException {
RecoveredNMTokensState state = new RecoveredNMTokensState();
state.currentMasterKey = getMasterKey(NM_TOKENS_KEY_PREFIX
+ CURRENT_MASTER_KEY_SUFFIX);
state.previousMasterKey = getMasterKey(NM_TOKENS_KEY_PREFIX
+ PREV_MASTER_KEY_SUFFIX);
state.it = new NMTokensStateIterator();
return state;
}
@Override
public void storeNMTokenCurrentMasterKey(MasterKey key)
throws IOException {
storeMasterKey(NM_TOKENS_CURRENT_MASTER_KEY, key);
}
@Override
public void storeNMTokenPreviousMasterKey(MasterKey key)
throws IOException {
storeMasterKey(NM_TOKENS_PREV_MASTER_KEY, key);
}
@Override
public void storeNMTokenApplicationMasterKey(
ApplicationAttemptId attempt, MasterKey key) throws IOException {
storeMasterKey(NM_TOKENS_KEY_PREFIX + attempt, key);
}
@Override
public void removeNMTokenApplicationMasterKey(
ApplicationAttemptId attempt) throws IOException {
String key = NM_TOKENS_KEY_PREFIX + attempt;
try {
db.delete(bytes(key));
} catch (DBException e) {
markStoreUnHealthy(e);
throw new IOException(e);
}
}
private MasterKey parseMasterKey(byte[] keyData) throws IOException {
return new MasterKeyPBImpl(MasterKeyProto.parseFrom(keyData));
}
private void storeMasterKey(String dbKey, MasterKey key)
throws IOException {
MasterKeyPBImpl pb = (MasterKeyPBImpl) key;
try {
db.put(bytes(dbKey), pb.getProto().toByteArray());
} catch (DBException e) {
markStoreUnHealthy(e);
throw new IOException(e);
}
}
// Recover ContainersToken Iterator.
private | NMTokensStateIterator |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/ml/SqlMLTableFunction.java | {
"start": 2856,
"end": 11612
} | class ____ extends SqlFunction implements SqlTableFunction {
private static final String TABLE_INPUT_ERROR =
"SqlMLTableFunction must have only one table as first operand.";
protected static final String PARAM_INPUT = "INPUT";
protected static final String PARAM_MODEL = "MODEL";
protected static final String PARAM_COLUMN = "ARGS";
protected static final String PARAM_CONFIG = "CONFIG";
public SqlMLTableFunction(String name, SqlOperandMetadata operandMetadata) {
super(
name,
SqlKind.OTHER_FUNCTION,
ReturnTypes.CURSOR,
null,
operandMetadata,
SqlFunctionCategory.SYSTEM);
}
@Override
public void validateCall(
SqlCall call,
SqlValidator validator,
SqlValidatorScope scope,
SqlValidatorScope operandScope) {
assert call.getOperator() == this;
final List<SqlNode> operandList = call.getOperandList();
// ML table function should take only one table as input and use descriptor to reference
// columns in the table. The scope for descriptor validation should be the input table which
// is also an operand of the call. We defer the validation of the descriptor since
// validation here will quality the descriptor columns to be NOT simple name which
// complicates checks in later stages. We validate the descriptor columns appear in table
// column in SqlOperandMetadata.
boolean foundSelect = false;
for (SqlNode operand : operandList) {
if (operand.getKind().equals(SqlKind.DESCRIPTOR)) {
continue;
}
if (operand.getKind().equals(SqlKind.SET_SEMANTICS_TABLE)) {
operand = ((SqlCall) operand).getOperandList().get(0);
if (foundSelect) {
throw new ValidationException(TABLE_INPUT_ERROR);
}
foundSelect = true;
}
if (operand.getKind().equals(SqlKind.SELECT)) {
if (foundSelect) {
throw new ValidationException(TABLE_INPUT_ERROR);
}
foundSelect = true;
}
operand.validate(validator, scope);
}
}
@Override
public SqlReturnTypeInference getRowTypeInference() {
return this::inferRowType;
}
protected abstract RelDataType inferRowType(SqlOperatorBinding opBinding);
protected static Optional<RuntimeException> checkModelSignature(
SqlCallBinding callBinding, int inputDescriptorIndex) {
SqlValidator validator = callBinding.getValidator();
// Check second operand is SqlModelCall
if (!(callBinding.operand(1) instanceof SqlModelCall)) {
return Optional.of(
new ValidationException("Second operand must be a model identifier."));
}
// Get input descriptor columns
SqlCall descriptorCall = (SqlCall) callBinding.operand(inputDescriptorIndex);
List<SqlNode> descriptCols = descriptorCall.getOperandList();
// Get model input size
SqlModelCall modelCall = (SqlModelCall) callBinding.operand(1);
RelDataType modelInputType = modelCall.getInputType(validator);
// Check sizes match
if (descriptCols.size() != modelInputType.getFieldCount()) {
return Optional.of(
new ValidationException(
String.format(
"Number of input descriptor columns (%d) does not match model input size (%d).",
descriptCols.size(), modelInputType.getFieldCount())));
}
// Check input types match
final RelDataType tableType = validator.getValidatedNodeType(callBinding.operand(0));
final SqlNameMatcher matcher = validator.getCatalogReader().nameMatcher();
for (int i = 0; i < descriptCols.size(); i++) {
Tuple3<Boolean, LogicalType, LogicalType> result =
checkModelDescriptorType(
tableType,
modelInputType.getFieldList().get(i).getType(),
descriptCols.get(i),
matcher);
if (!result.f0) {
return Optional.of(
new ValidationException(
String.format(
"Input descriptor column type %s cannot be assigned to model input type %s at position %d.",
result.f1, result.f2, i)));
}
}
return Optional.empty();
}
protected static Tuple3<Boolean, LogicalType, LogicalType> checkModelDescriptorType(
RelDataType tableType,
RelDataType modelType,
SqlNode descriptorNode,
SqlNameMatcher matcher) {
SqlIdentifier columnName = (SqlIdentifier) descriptorNode;
String descriptorColName =
columnName.isSimple() ? columnName.getSimple() : Util.last(columnName.names);
int index = matcher.indexOf(tableType.getFieldNames(), descriptorColName);
RelDataType sourceType = tableType.getFieldList().get(index).getType();
LogicalType sourceLogicalType = toLogicalType(sourceType);
LogicalType targetLogicalType = toLogicalType(modelType);
return Tuple3.of(
LogicalTypeCasts.supportsImplicitCast(sourceLogicalType, targetLogicalType),
sourceLogicalType,
targetLogicalType);
}
protected static Optional<RuntimeException> checkConfig(
SqlCallBinding callBinding, SqlNode configNode) {
if (!configNode.getKind().equals(SqlKind.MAP_VALUE_CONSTRUCTOR)) {
return Optional.of(new ValidationException("Config param should be a MAP."));
}
RelDataType mapType = callBinding.getValidator().getValidatedNodeType(configNode);
assert mapType instanceof MapSqlType;
LogicalType keyType = toLogicalType(mapType.getKeyType());
LogicalType valueType = toLogicalType(mapType.getValueType());
if (!keyType.is(CHARACTER_STRING) || !valueType.is(CHARACTER_STRING)) {
return Optional.of(
new ValidationException(
String.format(
"Config param can only be a MAP of string literals but node's type is %s at position %s.",
mapType, configNode.getParserPosition())));
}
List<SqlNode> operands = ((SqlCall) configNode).getOperandList();
Map<String, String> runtimeConfig = new HashMap<>();
for (int i = 0; i < operands.size(); i += 2) {
Either<String, RuntimeException> key =
reduceLiteralToString(operands.get(i), callBinding.getValidator());
Either<String, RuntimeException> value =
reduceLiteralToString(operands.get(i + 1), callBinding.getValidator());
if (key.isRight()) {
return Optional.of(key.right());
} else if (value.isRight()) {
return Optional.of(value.right());
} else {
runtimeConfig.put(key.left(), value.left());
}
}
return checkConfigValue(runtimeConfig);
}
public static Optional<RuntimeException> checkConfigValue(Map<String, String> runtimeConfig) {
Configuration config = Configuration.fromMap(runtimeConfig);
try {
VectorSearchRuntimeConfigOptions.getSupportedOptions().forEach(config::get);
} catch (Throwable t) {
return Optional.of(new ValidationException("Failed to parse the config.", t));
}
// option value check
// async options are all optional
Boolean async = config.get(ASYNC);
if (Boolean.TRUE.equals(async)) {
Integer maxConcurrentOperations = config.get(ASYNC_MAX_CONCURRENT_OPERATIONS);
if (maxConcurrentOperations != null && maxConcurrentOperations <= 0) {
return Optional.of(
new ValidationException(
String.format(
"Invalid runtime config option '%s'. Its value should be positive integer but was %s.",
ASYNC_MAX_CONCURRENT_OPERATIONS.key(),
maxConcurrentOperations)));
}
}
return Optional.empty();
}
}
| SqlMLTableFunction |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InconsistentHashCodeTest.java | {
"start": 1766,
"end": 2361
} | class ____ {
private int foo;
private int bar;
@Override
public boolean equals(Object o) {
Test that = (Test) o;
return foo == that.foo;
}
@Override
// BUG: Diagnostic contains: bar
public int hashCode() {
return foo + 31 * bar;
}
}
""")
.doTest();
}
@Test
public void positiveViaGetter() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | google__dagger | javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java | {
"start": 30919,
"end": 31568
} | class ____ {",
" @Inject InnerClass() {}",
" }",
"}");
daggerCompiler(file)
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining("Dagger does not support injection into private classes")
.onSource(file)
.onLine(7);
});
}
@Test public void privateInjectClassWarning() {
Source file =
CompilerTests.javaSource(
"test.OuterClass",
"package test;",
"",
"import javax.inject.Inject;",
"",
"final | InnerClass |
java | apache__camel | components/camel-azure/camel-azure-storage-datalake/src/main/java/org/apache/camel/component/azure/storage/datalake/transform/DataLakeCloudEventDataTypeTransformer.java | {
"start": 1673,
"end": 2866
} | class ____ extends Transformer {
@Override
public void transform(Message message, DataType fromType, DataType toType) {
final Map<String, Object> headers = message.getHeaders();
CloudEvent cloudEvent = CloudEvents.v1_0;
headers.putIfAbsent(CloudEvent.CAMEL_CLOUD_EVENT_ID, message.getExchange().getExchangeId());
headers.putIfAbsent(CloudEvent.CAMEL_CLOUD_EVENT_VERSION, cloudEvent.version());
headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "org.apache.camel.event.azure.storage.datalake.consume");
if (message.getHeaders().containsKey(DataLakeConstants.ARCHIVE_STATUS)) {
headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE,
"azure.storage.datalake." + message.getHeader(DataLakeConstants.ARCHIVE_STATUS, String.class));
}
headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, message.getHeader(DataLakeConstants.E_TAG, String.class));
headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, cloudEvent.getEventTime(message.getExchange()));
headers.put(CloudEvent.CAMEL_CLOUD_EVENT_CONTENT_TYPE, CloudEvent.APPLICATION_OCTET_STREAM_MIME_TYPE);
}
}
| DataLakeCloudEventDataTypeTransformer |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/analysis/StableAnalysisPluginsWithSettingsTests.java | {
"start": 9859,
"end": 10230
} | class ____ implements AnalyzerFactory {
private final TestAnalysisSettings settings;
@Inject
public TestAnalyzerFactory(TestAnalysisSettings settings) {
this.settings = settings;
}
@Override
public Analyzer create() {
return new CustomAnalyzer(settings);
}
static | TestAnalyzerFactory |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java | {
"start": 3395,
"end": 8338
} | class ____ {
@Test
void preconditions() {
assertThatIllegalArgumentException()
.isThrownBy(() -> MergedAnnotations.search(null))
.withMessage("SearchStrategy must not be null");
Search search = MergedAnnotations.search(SearchStrategy.SUPERCLASS);
assertThatIllegalArgumentException()
.isThrownBy(() -> search.withEnclosingClasses(null))
.withMessage("Predicate must not be null");
assertThatIllegalStateException()
.isThrownBy(() -> search.withEnclosingClasses(Search.always))
.withMessage("A custom 'searchEnclosingClass' predicate can only be combined with SearchStrategy.TYPE_HIERARCHY");
assertThatIllegalArgumentException()
.isThrownBy(() -> search.withAnnotationFilter(null))
.withMessage("AnnotationFilter must not be null");
assertThatIllegalArgumentException()
.isThrownBy(() -> search.withRepeatableContainers(null))
.withMessage("RepeatableContainers must not be null");
assertThatIllegalArgumentException()
.isThrownBy(() -> search.from(null))
.withMessage("AnnotatedElement must not be null");
}
@Test
void searchFromClassWithDefaultAnnotationFilterAndDefaultRepeatableContainers() {
Stream<Class<?>> classes = MergedAnnotations.search(SearchStrategy.DIRECT)
.from(TransactionalComponent.class)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Transactional.class, Component.class, Indexed.class);
}
@Test
void searchFromClassWithCustomAnnotationFilter() {
Stream<Class<?>> classes = MergedAnnotations.search(SearchStrategy.DIRECT)
.withAnnotationFilter(annotationName -> annotationName.endsWith("Indexed"))
.from(TransactionalComponent.class)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Transactional.class, Component.class);
}
@Test
void searchFromClassWithCustomRepeatableContainers() {
assertThat(MergedAnnotations.from(HierarchyClass.class).stream(TestConfiguration.class)).isEmpty();
RepeatableContainers containers = RepeatableContainers.explicitRepeatable(TestConfiguration.class, Hierarchy.class);
MergedAnnotations annotations = MergedAnnotations.search(SearchStrategy.DIRECT)
.withRepeatableContainers(containers)
.from(HierarchyClass.class);
assertThat(annotations.stream(TestConfiguration.class))
.map(annotation -> annotation.getString("location"))
.containsExactly("A", "B");
assertThat(annotations.stream(TestConfiguration.class))
.map(annotation -> annotation.getString("value"))
.containsExactly("A", "B");
}
/**
* @since 6.0
*/
@Test
void searchFromNonAnnotatedInnerClassWithAnnotatedEnclosingClassWithEnclosingClassPredicates() {
Class<?> testCase = AnnotatedClass.NonAnnotatedInnerClass.class;
Search search = MergedAnnotations.search(SearchStrategy.TYPE_HIERARCHY);
assertThat(search.from(testCase).stream()).isEmpty();
assertThat(search.withEnclosingClasses(Search.never).from(testCase).stream()).isEmpty();
assertThat(search.withEnclosingClasses(ClassUtils::isStaticClass).from(testCase).stream()).isEmpty();
Stream<Class<?>> classes = search.withEnclosingClasses(ClassUtils::isInnerClass)
.from(testCase)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Component.class, Indexed.class);
classes = search.withEnclosingClasses(Search.always)
.from(testCase)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Component.class, Indexed.class);
classes = search.withEnclosingClasses(ClassUtils::isInnerClass)
.withRepeatableContainers(RepeatableContainers.none())
.withAnnotationFilter(annotationName -> annotationName.endsWith("Indexed"))
.from(testCase)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Component.class);
}
/**
* @since 6.0
*/
@Test
void searchFromNonAnnotatedStaticNestedClassWithAnnotatedEnclosingClassWithEnclosingClassPredicates() {
Class<?> testCase = AnnotatedClass.NonAnnotatedStaticNestedClass.class;
Search search = MergedAnnotations.search(SearchStrategy.TYPE_HIERARCHY);
assertThat(search.from(testCase).stream()).isEmpty();
assertThat(search.withEnclosingClasses(Search.never).from(testCase).stream()).isEmpty();
assertThat(search.withEnclosingClasses(ClassUtils::isInnerClass).from(testCase).stream()).isEmpty();
Stream<Class<?>> classes = search.withEnclosingClasses(ClassUtils::isStaticClass)
.from(testCase)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Component.class, Indexed.class);
classes = search.withEnclosingClasses(Search.always)
.from(testCase)
.stream()
.map(MergedAnnotation::getType);
assertThat(classes).containsExactly(Component.class, Indexed.class);
}
}
@Nested
| FluentSearchApiTests |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/trace/hook/SendMessageOpenTracingHookImpl.java | {
"start": 1352,
"end": 3689
} | class ____ implements SendMessageHook {
private Tracer tracer;
public SendMessageOpenTracingHookImpl(Tracer tracer) {
this.tracer = tracer;
}
@Override
public String hookName() {
return "SendMessageOpenTracingHook";
}
@Override
public void sendMessageBefore(SendMessageContext context) {
if (context == null) {
return;
}
Message msg = context.getMessage();
Tracer.SpanBuilder spanBuilder = tracer
.buildSpan(TraceConstants.TO_PREFIX + msg.getTopic())
.withTag(Tags.SPAN_KIND, Tags.SPAN_KIND_PRODUCER);
SpanContext spanContext = tracer.extract(Format.Builtin.TEXT_MAP, new TextMapAdapter(msg.getProperties()));
if (spanContext != null) {
spanBuilder.asChildOf(spanContext);
}
Span span = spanBuilder.start();
tracer.inject(span.context(), Format.Builtin.TEXT_MAP, new TextMapAdapter(msg.getProperties()));
span.setTag(Tags.PEER_SERVICE, TraceConstants.ROCKETMQ_SERVICE);
span.setTag(Tags.MESSAGE_BUS_DESTINATION, msg.getTopic());
span.setTag(TraceConstants.ROCKETMQ_TAGS, msg.getTags());
span.setTag(TraceConstants.ROCKETMQ_KEYS, msg.getKeys());
span.setTag(TraceConstants.ROCKETMQ_STORE_HOST, context.getBrokerAddr());
span.setTag(TraceConstants.ROCKETMQ_MSG_TYPE, context.getMsgType().name());
span.setTag(TraceConstants.ROCKETMQ_BODY_LENGTH, null == msg.getBody() ? 0 : msg.getBody().length);
context.setMqTraceContext(span);
}
@Override
public void sendMessageAfter(SendMessageContext context) {
if (context == null || context.getMqTraceContext() == null) {
return;
}
if (context.getSendResult() == null) {
return;
}
if (context.getSendResult().getRegionId() == null) {
return;
}
Span span = (Span) context.getMqTraceContext();
span.setTag(TraceConstants.ROCKETMQ_SUCCESS, context.getSendResult().getSendStatus().equals(SendStatus.SEND_OK));
span.setTag(TraceConstants.ROCKETMQ_MSG_ID, context.getSendResult().getMsgId());
span.setTag(TraceConstants.ROCKETMQ_REGION_ID, context.getSendResult().getRegionId());
span.finish();
}
}
| SendMessageOpenTracingHookImpl |
java | micronaut-projects__micronaut-core | inject-groovy/src/main/groovy/io/micronaut/ast/groovy/scan/AnnotationClassReader.java | {
"start": 5867,
"end": 6296
} | class ____ be read.
* @throws IOException if an exception occurs during reading.
*/
public AnnotationClassReader(final String name) throws IOException {
this(readClass(
ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
+ ".class"), true));
}
/**
* Constructs a new {@link groovyjarjarasm.asm.ClassReader} object.
*
* @param b the bytecode of the | to |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/CacheFile.java | {
"start": 10324,
"end": 12056
} | class ____ throw exceptions.
*/
public void startEviction() {
if (evicted.compareAndSet(false, true)) {
final Set<EvictionListener> evictionListeners;
synchronized (listeners) {
evictionListeners = new HashSet<>(listeners);
}
decrementRefCount();
evictionListeners.forEach(eachListener -> eachListener.onEviction(this));
}
assert invariant();
}
private boolean invariant() {
synchronized (listeners) {
if (listeners.isEmpty()) {
assert channelRef == null;
} else {
assert channelRef != null;
assert refCounter.refCount() > 0;
assert channelRef.refCount() > 0;
assert Files.exists(file);
}
}
return true;
}
@Override
public String toString() {
synchronized (listeners) {
return "CacheFile{"
+ "key='"
+ cacheKey
+ "', file="
+ file
+ ", length="
+ tracker.getLength()
+ ", channel="
+ (channelRef != null ? "yes" : "no")
+ ", listeners="
+ listeners.size()
+ ", evicted="
+ evicted
+ ", tracker="
+ tracker
+ '}';
}
}
private void ensureOpen() {
if (evicted.get()) {
throwAlreadyEvicted();
}
}
private static void throwAlreadyEvicted() {
throw new AlreadyClosedException("Cache file is evicted");
}
@FunctionalInterface
public | with |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Having.java | {
"start": 690,
"end": 1383
} | class ____ extends Filter {
public Having(Source source, LogicalPlan child, Expression condition) {
super(source, child, condition);
}
@Override
protected NodeInfo<Filter> info() {
return NodeInfo.create(this, Having::new, child(), condition());
}
@Override
public Having replaceChild(LogicalPlan newChild) {
return new Having(source(), newChild, condition());
}
@Override
public Filter with(Expression condition) {
return new Having(source(), child(), condition);
}
@Override
public Filter with(LogicalPlan child, Expression condition) {
return new Having(source(), child, condition);
}
}
| Having |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/observers/MaybeConsumersTest.java | {
"start": 1831,
"end": 8302
} | class ____ implements Consumer<Object>, Action {
final CompositeDisposable composite = new CompositeDisposable();
final MaybeSubject<Integer> processor = MaybeSubject.create();
final List<Object> events = new ArrayList<>();
@Override
public void run() throws Exception {
events.add("OnComplete");
}
@Override
public void accept(Object t) throws Exception {
events.add(t);
}
static <T> Disposable subscribeAutoDispose(Maybe<T> source, CompositeDisposable composite,
Consumer<? super T> onSuccess, Consumer<? super Throwable> onError, Action onComplete) {
return source.subscribe(onSuccess, onError, onComplete, composite);
}
@Test
public void onSuccessNormal() {
Disposable d = subscribeAutoDispose(processor, composite, this, Functions.ON_ERROR_MISSING, () -> { });
assertFalse(d.getClass().toString(), ((LambdaConsumerIntrospection)d).hasCustomOnError());
assertTrue(composite.size() > 0);
assertTrue(events.toString(), events.isEmpty());
processor.onSuccess(1);
assertEquals(0, composite.size());
assertEquals(Arrays.<Object>asList(1), events);
}
@Test
public void onErrorNormal() {
subscribeAutoDispose(processor, composite, this, this, this);
assertTrue(composite.size() > 0);
assertTrue(events.toString(), events.isEmpty());
processor.onSuccess(1);
assertEquals(0, composite.size());
assertEquals(Arrays.<Object>asList(1), events);
}
@Test
public void onErrorError() {
Disposable d = subscribeAutoDispose(processor, composite, this, this, this);
assertTrue(d.getClass().toString(), ((LambdaConsumerIntrospection)d).hasCustomOnError());
assertTrue(composite.size() > 0);
assertTrue(events.toString(), events.isEmpty());
processor.onError(new IOException());
assertTrue(events.toString(), events.get(0) instanceof IOException);
assertEquals(0, composite.size());
}
@Test
public void onCompleteNormal() {
subscribeAutoDispose(processor, composite, this, this, this);
assertTrue(composite.size() > 0);
assertTrue(events.toString(), events.isEmpty());
processor.onComplete();
assertEquals(0, composite.size());
assertEquals(Arrays.<Object>asList("OnComplete"), events);
}
@Test
public void onCompleteError() {
subscribeAutoDispose(processor, composite, this, this, this);
assertTrue(composite.size() > 0);
assertTrue(events.toString(), events.isEmpty());
processor.onError(new IOException());
assertTrue(events.toString(), events.get(0) instanceof IOException);
assertEquals(0, composite.size());
}
@Test
public void onCompleteDispose() {
Disposable d = subscribeAutoDispose(processor, composite, this, this, this);
assertTrue(composite.size() > 0);
assertTrue(events.toString(), events.isEmpty());
assertFalse(d.isDisposed());
d.dispose();
d.dispose();
assertTrue(d.isDisposed());
assertEquals(0, composite.size());
assertFalse(processor.hasObservers());
}
@Test
public void onSuccessCrash() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
subscribeAutoDispose(processor, composite, new Consumer<Object>() {
@Override
public void accept(Object t) throws Exception {
throw new IOException();
}
}, this, this);
processor.onSuccess(1);
assertTrue(events.toString(), events.isEmpty());
TestHelper.assertUndeliverable(errors, 0, IOException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void onErrorCrash() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
subscribeAutoDispose(processor, composite, this, new Consumer<Throwable>() {
@Override
public void accept(Throwable t) throws Exception {
throw new IOException(t);
}
}, this);
processor.onError(new IllegalArgumentException());
assertTrue(events.toString(), events.isEmpty());
TestHelper.assertError(errors, 0, CompositeException.class);
List<Throwable> inners = TestHelper.compositeList(errors.get(0));
TestHelper.assertError(inners, 0, IllegalArgumentException.class);
TestHelper.assertError(inners, 1, IOException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void onCompleteCrash() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
subscribeAutoDispose(processor, composite, this, this, new Action() {
@Override
public void run() throws Exception {
throw new IOException();
}
});
processor.onComplete();
assertTrue(events.toString(), events.isEmpty());
TestHelper.assertUndeliverable(errors, 0, IOException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void badSource() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
subscribeAutoDispose(
new Maybe<Integer>() {
@Override
protected void subscribeActual(
MaybeObserver<? super Integer> observer) {
observer.onSubscribe(Disposable.empty());
observer.onComplete();
observer.onSubscribe(Disposable.empty());
observer.onSuccess(2);
observer.onComplete();
observer.onError(new IOException());
}
}, composite, this, this, this
);
assertEquals(Arrays.<Object>asList("OnComplete"), events);
TestHelper.assertUndeliverable(errors, 0, IOException.class);
} finally {
RxJavaPlugins.reset();
}
}
}
| MaybeConsumersTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java | {
"start": 1853,
"end": 24205
} | class ____ extends AbstractConfigurationFunctionTestCase {
public DateTruncTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
List<TestCaseSupplier> suppliers = new ArrayList<>();
makeTruncDurationTestCases().stream().map(DateTruncTests::ofDuration).forEach(suppliers::addAll);
makeTruncPeriodTestCases().stream().map(DateTruncTests::ofDatePeriod).forEach(suppliers::addAll);
suppliers.add(randomSecond());
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
public record DurationTestCaseData(Duration duration, String inputDate, @Nullable String zoneIdString, String expectedDate) {
public String testCaseNameForMillis() {
var zoneIdName = zoneIdString == null ? "random" : zoneIdString;
return "duration, millis; " + duration + ", " + zoneIdName + ", " + inputDate;
}
public String testCaseNameForNanos() {
var zoneIdName = zoneIdString == null ? "random" : zoneIdString;
return "duration, nanos; " + duration + ", " + zoneIdName + ", " + inputDate;
}
public ZoneId zoneId() {
return zoneIdString == null ? randomZone() : ZoneId.of(zoneIdString);
}
public long inputDateAsMillis() {
return Instant.parse(inputDate).toEpochMilli();
}
public long inputDateAsNanos() {
assert canBeConvertedToNanos();
return DateUtils.toNanoSeconds(inputDateAsMillis());
}
public boolean canBeConvertedToNanos() {
return inputDateAsMillis() >= 0;
}
}
public record PeriodTestCaseData(Period period, String inputDate, @Nullable String zoneIdString, String expectedDate) {
public String testCaseNameForMillis() {
var zoneIdName = zoneIdString == null ? "random" : zoneIdString;
return "period, millis; " + period + ", " + zoneIdName + ", " + inputDate;
}
public String testCaseNameForNanos() {
var zoneIdName = zoneIdString == null ? "random" : zoneIdString;
return "period, nanos; " + period + ", " + zoneIdName + ", " + inputDate;
}
public ZoneId zoneId() {
return zoneIdString == null ? randomZone() : ZoneId.of(zoneIdString);
}
public long inputDateAsMillis() {
return Instant.parse(inputDate).toEpochMilli();
}
public long inputDateAsNanos() {
assert canBeConvertedToNanos();
return DateUtils.toNanoSeconds(inputDateAsMillis());
}
public boolean canBeConvertedToNanos() {
return inputDateAsMillis() >= 0;
}
}
private static final List<String> TEST_TIMEZONES = List.of("Z", "-08:00", "CET", "America/New_York");
public static List<DurationTestCaseData> makeTruncDurationTestCases() {
List<DurationTestCaseData> cases = new ArrayList<>();
// Add generic cases for either UTC, fixed timezones and timezones with minutes.
//
// For every unit, we test 2 cases: 1 unit, and multiple units.
// Then, for every case, we check 2 boundaries (↑Bucket1, ↓Bucket2) to ensure the exact size of the buckets.
List.of(
// Milliseconds
new DurationTestCaseData(Duration.ofMillis(1), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:33.385"),
new DurationTestCaseData(Duration.ofMillis(10), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:33.38"),
new DurationTestCaseData(Duration.ofMillis(100), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:33.3"),
new DurationTestCaseData(Duration.ofMillis(1000), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:33"),
new DurationTestCaseData(Duration.ofMillis(13), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:33.384"),
new DurationTestCaseData(Duration.ofMillis(13), "2023-02-17T10:25:33.399", "", "2023-02-17T10:25:33.397"),
// Seconds
new DurationTestCaseData(Duration.ofSeconds(1), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:33"),
new DurationTestCaseData(Duration.ofSeconds(10), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:30"),
new DurationTestCaseData(Duration.ofSeconds(60), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:00"),
new DurationTestCaseData(Duration.ofSeconds(300), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:00"),
new DurationTestCaseData(Duration.ofSeconds(3600), "2023-02-17T10:25:33.385", "", "2023-02-17T10:00:00"),
// Minutes
new DurationTestCaseData(Duration.ofMinutes(1), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:00"),
new DurationTestCaseData(Duration.ofMinutes(5), "2023-02-17T10:25:33.385", "", "2023-02-17T10:25:00"),
new DurationTestCaseData(Duration.ofMinutes(60), "2023-02-17T10:25:33.385", "", "2023-02-17T10:00:00"),
// Hours
new DurationTestCaseData(Duration.ofHours(1), "2023-02-17T10:25:33.385", "", "2023-02-17T10:00:00"),
new DurationTestCaseData(Duration.ofHours(3), "2023-02-17T10:25:33.385", "", "2023-02-17T09:00:00"),
new DurationTestCaseData(Duration.ofHours(6), "2023-02-17T10:25:33.385", "", "2023-02-17T06:00:00"),
new DurationTestCaseData(Duration.ofHours(5), "2023-02-17T09:59:59.999", "", "2023-02-17T05:00:00"),
new DurationTestCaseData(Duration.ofHours(5), "2023-02-17T10:25:33.385", "", "2023-02-17T10:00:00"),
new DurationTestCaseData(Duration.ofHours(24), "2023-02-17T10:25:33.385", "", "2023-02-17T00:00:00"),
new DurationTestCaseData(Duration.ofHours(48), "2023-02-17T10:25:33.385", "", "2023-02-16T00:00:00")
).forEach(c -> TEST_TIMEZONES.forEach(timezone -> {
// Convert the timezone to the offset in each local time.
// This is required as date strings can't have a zone name as its zone.
var inputOffset = timezoneToOffset(timezone, c.inputDate());
var expectedOffset = timezoneToOffset(timezone, c.expectedDate());
cases.add(new DurationTestCaseData(c.duration(), c.inputDate() + inputOffset, timezone, c.expectedDate() + expectedOffset));
}));
cases.addAll(
List.of(
// Timezone agnostic (<=1m intervals, "null" for randomized timezones)
new DurationTestCaseData(Duration.ofMillis(100), "2023-02-17T10:25:33.38Z", null, "2023-02-17T10:25:33.30Z"),
new DurationTestCaseData(Duration.ofSeconds(1), "2023-02-17T10:25:33.38Z", null, "2023-02-17T10:25:33Z"),
new DurationTestCaseData(Duration.ofMinutes(1), "2023-02-17T10:25:33.38Z", null, "2023-02-17T10:25:00Z"),
new DurationTestCaseData(Duration.ofSeconds(30), "2023-02-17T10:25:33.38Z", null, "2023-02-17T10:25:30Z"),
// Daylight savings boundaries
// - +1 -> +2 at 2025-03-30T02:00:00+01:00
new DurationTestCaseData(Duration.ofHours(3), "2025-03-30T00:00:00+01:00", "Europe/Paris", "2025-03-30T00:00:00+01:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-03-30T01:00:00+01:00", "Europe/Paris", "2025-03-30T00:00:00+01:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-03-30T03:00:00+02:00", "Europe/Paris", "2025-03-30T03:00:00+02:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-03-30T04:00:00+02:00", "Europe/Paris", "2025-03-30T03:00:00+02:00"),
// - +2 -> +1 at 2025-10-26T03:00:00+02:00
new DurationTestCaseData(Duration.ofHours(3), "2025-10-26T01:00:00+02:00", "Europe/Paris", "2025-10-26T00:00:00+02:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-10-26T02:00:00+02:00", "Europe/Paris", "2025-10-26T00:00:00+02:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-10-26T02:00:00+01:00", "Europe/Paris", "2025-10-26T00:00:00+02:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-10-26T03:00:00+01:00", "Europe/Paris", "2025-10-26T03:00:00+01:00"),
new DurationTestCaseData(Duration.ofHours(3), "2025-10-26T04:00:00+01:00", "Europe/Paris", "2025-10-26T03:00:00+01:00"),
// -5 to -4 at 2025-03-09T02:00:00-05, and -4 to -5 at 2025-11-02T02:00:00-04)
new DurationTestCaseData(
Duration.ofHours(24),
"2025-03-09T06:00:00-04:00",
"America/New_York",
"2025-03-09T00:00:00-05:00"
),
new DurationTestCaseData(
Duration.ofHours(24),
"2025-11-02T05:00:00-05:00",
"America/New_York",
"2025-11-02T00:00:00-04:00"
),
// Midnight DST (America/Goose_Bay: -3 to -4 at 2010-11-07T00:01:00-03:00)
new DurationTestCaseData(
Duration.ofMinutes(1),
"2010-11-07T00:00:59-03:00",
"America/Goose_Bay",
"2010-11-07T00:00:00-03:00"
),
new DurationTestCaseData(
Duration.ofMinutes(1),
"2010-11-07T00:01:00-04:00",
"America/Goose_Bay",
"2010-11-07T00:01:00-04:00"
),
new DurationTestCaseData(
Duration.ofMinutes(2),
"2010-11-07T00:01:00-04:00",
"America/Goose_Bay",
"2010-11-07T00:00:00-04:00"
),
new DurationTestCaseData(
Duration.ofMinutes(2),
"2010-11-07T00:02:00-04:00",
"America/Goose_Bay",
"2010-11-07T00:02:00-04:00"
),
new DurationTestCaseData(
Duration.ofMinutes(2),
"2010-11-06T23:01:59-04:00",
"America/Goose_Bay",
"2010-11-07T00:00:00-03:00"
),
new DurationTestCaseData(
Duration.ofMinutes(2),
"2010-11-06T20:03:00-03:00",
"America/Goose_Bay",
"2010-11-06T20:02:00-03:00"
),
new DurationTestCaseData(
Duration.ofHours(24),
"2010-11-07T00:00:59-03:00",
"America/Goose_Bay",
"2010-11-07T00:00:00-03:00"
),
new DurationTestCaseData(
Duration.ofHours(24),
"2010-11-06T23:01:00-04:00",
"America/Goose_Bay",
"2010-11-07T00:00:00-03:00"
),
new DurationTestCaseData(
Duration.ofHours(24),
"2010-11-07T00:03:00-04:00",
"America/Goose_Bay",
"2010-11-07T00:00:00-04:00"
),
// Bigger intervals
new DurationTestCaseData(Duration.ofHours(12), "2025-10-26T02:00:00+02:00", "Europe/Rome", "2025-10-26T00:00:00+02:00"),
new DurationTestCaseData(Duration.ofHours(24), "2025-10-26T02:00:00+02:00", "Europe/Rome", "2025-10-26T00:00:00+02:00"),
new DurationTestCaseData(Duration.ofHours(48), "2025-10-26T02:00:00+02:00", "Europe/Rome", "2025-10-25T00:00:00+02:00")
)
);
return cases;
}
public static List<PeriodTestCaseData> makeTruncPeriodTestCases() {
List<PeriodTestCaseData> cases = new ArrayList<>();
// Add generic cases for either UTC, fixed timezones and timezones with minutes.
//
// For every unit, we test 2 cases: 1 unit, and multiple units.
// Then, for every case, we check 2 boundaries (↑Bucket1, ↓Bucket2) to ensure the exact size of the buckets.
List.of(
// Days
new PeriodTestCaseData(Period.ofDays(1), "2023-02-16T23:59:59.99", "", "2023-02-16T00:00:00"),
new PeriodTestCaseData(Period.ofDays(1), "2023-02-17T00:00:00", "", "2023-02-17T00:00:00"),
new PeriodTestCaseData(Period.ofDays(10), "2023-02-11T23:59:59.99", "", "2023-02-02T00:00:00"),
new PeriodTestCaseData(Period.ofDays(10), "2023-02-12T00:00:00", "", "2023-02-12T00:00:00"),
// Weeks
new PeriodTestCaseData(Period.ofDays(7), "2023-02-05T23:59:59.99", "", "2023-01-30T00:00:00"),
new PeriodTestCaseData(Period.ofDays(7), "2023-02-06T00:00:00", "", "2023-02-06T00:00:00"),
new PeriodTestCaseData(Period.ofDays(21), "2023-01-25T23:59:59.99", "", "2023-01-05T00:00:00"),
new PeriodTestCaseData(Period.ofDays(21), "2023-01-26T00:00:00", "", "2023-01-26T00:00:00"),
// Months
new PeriodTestCaseData(Period.ofMonths(1), "2024-02-29T23:59:59.99", "", "2024-02-01T00:00:00"),
new PeriodTestCaseData(Period.ofMonths(1), "2024-03-01T00:00:00", "", "2024-03-01T00:00:00"),
new PeriodTestCaseData(Period.ofMonths(7), "2022-10-31T23:59:59.99", "", "2022-04-01T00:00:00"),
new PeriodTestCaseData(Period.ofMonths(7), "2022-11-01T00:00:00", "", "2022-11-01T00:00:00"),
// Quarters
new PeriodTestCaseData(Period.ofMonths(3), "2023-12-31T23:59:59.99", "", "2023-10-01T00:00:00"),
new PeriodTestCaseData(Period.ofMonths(3), "2024-01-01T00:00:00", "", "2024-01-01T00:00:00"),
new PeriodTestCaseData(Period.ofMonths(6), "2023-12-31T23:59:59.99", "", "2023-07-01T00:00:00"),
new PeriodTestCaseData(Period.ofMonths(6), "2024-01-01T00:00:00", "", "2024-01-01T00:00:00"),
// Years
new PeriodTestCaseData(Period.ofYears(1), "2022-12-31T23:59:59.99", "", "2022-01-01T00:00:00"),
new PeriodTestCaseData(Period.ofYears(1), "2023-01-01T00:00:00", "", "2023-01-01T00:00:00"),
new PeriodTestCaseData(Period.ofYears(5), "2020-12-31T23:59:59.99", "", "2016-01-01T00:00:00"),
new PeriodTestCaseData(Period.ofYears(5), "2021-01-01T00:00:00", "", "2021-01-01T00:00:00"),
// Negative years
new PeriodTestCaseData(Period.ofYears(4), "-0004-12-31T23:59:59.99", "", "-0007-01-01T00:00:00"),
new PeriodTestCaseData(Period.ofYears(4), "-0003-01-01T00:00:00", "", "-0003-01-01T00:00:00")
).forEach(c -> TEST_TIMEZONES.forEach(timezone -> {
// Convert the timezone to the offset in each local time.
// This is required as date strings can't have a zone name as its zone.
var inputOffset = timezoneToOffset(timezone, c.inputDate());
var expectedOffset = timezoneToOffset(timezone, c.expectedDate());
cases.add(new PeriodTestCaseData(c.period(), c.inputDate() + inputOffset, timezone, c.expectedDate() + expectedOffset));
}));
// Special cases
cases.addAll(
List.of(
// DST boundaries (e.g. New York: -5 to -4 at 2025-03-09T02:00:00-05, and -4 to -5 at 2025-11-02T02:00:00-04)
new PeriodTestCaseData(Period.ofDays(1), "2025-03-09T06:00:00-04:00", "America/New_York", "2025-03-09T00:00:00-05:00"),
new PeriodTestCaseData(Period.ofDays(1), "2025-11-02T05:00:00-05:00", "America/New_York", "2025-11-02T00:00:00-04:00"),
// Midnight DST (America/Goose_Bay: -3 to -4 at 2010-11-07T00:01:00-03:00)
new PeriodTestCaseData(Period.ofDays(1), "2010-11-07T00:00:59-03:00", "America/Goose_Bay", "2010-11-07T00:00:00-03:00"),
new PeriodTestCaseData(Period.ofDays(1), "2010-11-06T23:01:00-04:00", "America/Goose_Bay", "2010-11-06T00:00:00-03:00"),
new PeriodTestCaseData(Period.ofDays(1), "2010-11-07T00:03:00-04:00", "America/Goose_Bay", "2010-11-07T00:00:00-03:00"),
new PeriodTestCaseData(Period.ofDays(1), "2010-11-07T23:59:59-04:00", "America/Goose_Bay", "2010-11-07T00:00:00-03:00"),
new PeriodTestCaseData(Period.ofDays(1), "2010-11-08T00:00:00-04:00", "America/Goose_Bay", "2010-11-08T00:00:00-04:00")
)
);
return cases;
}
private static String timezoneToOffset(String timezone, String date) {
return timezone.startsWith("+") || timezone.startsWith("-")
? timezone
: LocalDateTime.parse(date).atZone(ZoneId.of(timezone)).getOffset().getId();
}
private static List<TestCaseSupplier> ofDuration(DurationTestCaseData data) {
List<TestCaseSupplier> suppliers = new ArrayList<>();
suppliers.add(
new TestCaseSupplier(
data.testCaseNameForMillis(),
List.of(DataType.TIME_DURATION, DataType.DATETIME),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(data.duration(), DataType.TIME_DURATION, "interval").forceLiteral(),
new TestCaseSupplier.TypedData(data.inputDateAsMillis(), DataType.DATETIME, "date")
),
Matchers.startsWith("DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["),
DataType.DATETIME,
matchesDateMillis(data.expectedDate())
).withConfiguration(TEST_SOURCE, configurationForTimezone(data.zoneId()))
)
);
if (data.canBeConvertedToNanos()) {
suppliers.add(
new TestCaseSupplier(
data.testCaseNameForNanos(),
List.of(DataType.TIME_DURATION, DataType.DATE_NANOS),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(data.duration(), DataType.TIME_DURATION, "interval").forceLiteral(),
new TestCaseSupplier.TypedData(data.inputDateAsNanos(), DataType.DATE_NANOS, "date")
),
Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["),
DataType.DATE_NANOS,
matchesDateNanos(data.expectedDate())
).withConfiguration(TEST_SOURCE, configurationForTimezone(data.zoneId()))
)
);
}
return suppliers;
}
private static List<TestCaseSupplier> ofDatePeriod(PeriodTestCaseData data) {
List<TestCaseSupplier> suppliers = new ArrayList<>();
suppliers.add(
new TestCaseSupplier(
data.testCaseNameForMillis(),
List.of(DataType.DATE_PERIOD, DataType.DATETIME),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(data.period(), DataType.DATE_PERIOD, "interval").forceLiteral(),
new TestCaseSupplier.TypedData(data.inputDateAsMillis(), DataType.DATETIME, "date")
),
Matchers.startsWith("DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["),
DataType.DATETIME,
matchesDateMillis(data.expectedDate())
).withConfiguration(TEST_SOURCE, configurationForTimezone(data.zoneId()))
)
);
if (data.canBeConvertedToNanos()) {
suppliers.add(
new TestCaseSupplier(
data.testCaseNameForNanos(),
List.of(DataType.DATE_PERIOD, DataType.DATE_NANOS),
() -> new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(data.period(), DataType.DATE_PERIOD, "interval").forceLiteral(),
new TestCaseSupplier.TypedData(data.inputDateAsNanos(), DataType.DATE_NANOS, "date")
),
Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["),
DataType.DATE_NANOS,
matchesDateNanos(data.expectedDate())
).withConfiguration(TEST_SOURCE, configurationForTimezone(data.zoneId()))
)
);
}
return suppliers;
}
private static TestCaseSupplier randomSecond() {
return new TestCaseSupplier("random second", List.of(DataType.TIME_DURATION, DataType.DATETIME), () -> {
String dateFragment = randomIntBetween(2000, 2050)
+ "-"
+ pad(randomIntBetween(1, 12))
+ "-"
+ pad(randomIntBetween(1, 28))
+ "T"
+ pad(randomIntBetween(0, 23))
+ ":"
+ pad(randomIntBetween(0, 59))
+ ":"
+ pad(randomIntBetween(0, 59));
return new TestCaseSupplier.TestCase(
List.of(
new TestCaseSupplier.TypedData(Duration.ofSeconds(1), DataType.TIME_DURATION, "interval").forceLiteral(),
new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataType.DATETIME, "date")
),
Matchers.startsWith("DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["),
DataType.DATETIME,
equalTo(toMillis(dateFragment + ".00Z"))
);
});
}
private static String pad(int i) {
return i > 9 ? "" + i : "0" + i;
}
private static long toMillis(String timestamp) {
return Instant.parse(timestamp).toEpochMilli();
}
@Override
protected Expression buildWithConfiguration(Source source, List<Expression> args, Configuration configuration) {
return new DateTrunc(source, args.get(0), args.get(1), configuration);
}
}
| DateTruncTests |
java | apache__camel | components/camel-xmlsecurity/src/test/java/org/apache/camel/component/xmlsecurity/SignatureDigestMethodTest.java | {
"start": 3559,
"end": 18375
} | class ____ extends CamelTestSupport {
private static String payload;
private KeyPair keyPair;
static {
boolean includeNewLine = true;
if (!TestSupport.isJavaVendor("Azul")) {
includeNewLine = false;
}
payload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<root xmlns=\"http://test/test\"><test>Test Message</test></root>";
}
public SignatureDigestMethodTest() throws Exception {
// BouncyCastle is required for some algorithms
if (Security.getProvider("BC") == null) {
Constructor<?> cons;
Class<?> c = Class.forName("org.bouncycastle.jce.provider.BouncyCastleProvider");
cons = c.getConstructor(new Class[] {});
Provider provider = (java.security.Provider) cons.newInstance();
Security.insertProviderAt(provider, 2);
}
}
@Override
protected void bindToRegistry(Registry registry) throws Exception {
registry.bind("accessor", getKeyAccessor(keyPair.getPrivate()));
registry.bind("canonicalizationMethod1", getCanonicalizationMethod());
registry.bind("selector", KeySelector.singletonKeySelector(keyPair.getPublic()));
registry.bind("selectorKeyValue", getKeyValueKeySelector());
registry.bind("uriDereferencer", getSameDocumentUriDereferencer());
registry.bind("baseUri", getBaseUri());
registry.bind("cryptoContextProperties", getCrytoContextProperties());
registry.bind("keyAccessorDefault", getDefaultKeyAccessor());
registry.bind("keySelectorDefault", getDefaultKeySelector());
registry.bind("envelopingSignatureChecker", getEnvelopingXmlSignatureChecker());
registry.bind("xmlSignature2MessageWithTimestampProperty", getXmlSignature2MessageWithTimestampdProperty());
registry.bind("validationFailedHandlerIgnoreManifestFailures", getValidationFailedHandlerIgnoreManifestFailures());
registry.bind("signatureProperties", getSignatureProperties());
registry.bind("nodesearchxpath", getNodeSerachXPath());
Map<String, String> namespaceMap = Collections.singletonMap("ns", "http://test");
List<XPathFilterParameterSpec> xpaths = Collections
.singletonList(XmlSignatureHelper.getXpathFilter("/ns:root/a/@ID", namespaceMap));
registry.bind("xpathsToIdAttributes", xpaths);
registry.bind("parentXpathBean", getParentXPathBean());
}
@Override
protected RouteBuilder[] createRouteBuilders() {
return new RouteBuilder[] { new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha1")
.to("xmlsecurity-sign:sha1?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2000/09/xmldsig#sha1")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha224")
.to("xmlsecurity-sign:sha224?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2001/04/xmldsig-more#sha224")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha256")
.to("xmlsecurity-sign:sha256?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2001/04/xmlenc#sha256")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha384")
.to("xmlsecurity-sign:sha384?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2001/04/xmldsig-more#sha384")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha512")
.to("xmlsecurity-sign:sha512?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2001/04/xmlenc#sha512")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:ripemd160")
.to("xmlsecurity-sign:ripemd160?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2001/04/xmlenc#ripemd160")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:whirlpool")
.to("xmlsecurity-sign:whirlpool?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2007/05/xmldsig-more#whirlpool")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha3_224")
.to("xmlsecurity-sign:sha3_224?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2007/05/xmldsig-more#sha3-224")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha3_256")
.to("xmlsecurity-sign:sha3_256?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2007/05/xmldsig-more#sha3-256")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha3_384")
.to("xmlsecurity-sign:sha3_384?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2007/05/xmldsig-more#sha3-384")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
}, new RouteBuilder() {
public void configure() {
// START SNIPPET: signature and digest algorithm
from("direct:sha3_512")
.to("xmlsecurity-sign:sha3_512?keyAccessor=#accessor"
+ "&digestAlgorithm=http://www.w3.org/2007/05/xmldsig-more#sha3-512")
.to("xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector").to("mock:result");
// END SNIPPET: signature and digest algorithm
}
} };
}
@Test
public void testSHA1() throws Exception {
setupMock();
sendBody("direct:sha1", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA224() throws Exception {
setupMock();
sendBody("direct:sha224", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA256() throws Exception {
setupMock();
sendBody("direct:sha256", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA384() throws Exception {
setupMock();
sendBody("direct:sha384", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA512() throws Exception {
setupMock();
sendBody("direct:sha512", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testRIPEMD160() throws Exception {
setupMock();
sendBody("direct:ripemd160", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testWHIRLPOOL() throws Exception {
setupMock();
sendBody("direct:whirlpool", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA3224() throws Exception {
setupMock();
sendBody("direct:sha3_224", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA3256() throws Exception {
setupMock();
sendBody("direct:sha3_256", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA3384() throws Exception {
setupMock();
sendBody("direct:sha3_384", payload);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testSHA3512() throws Exception {
setupMock();
sendBody("direct:sha3_512", payload);
MockEndpoint.assertIsSatisfied(context);
}
private MockEndpoint setupMock() {
return setupMock(payload);
}
private MockEndpoint setupMock(String payload) {
String payload2;
int pos = payload.indexOf('\n');
if (pos != -1) {
payload2 = payload.substring(0, pos) + payload.substring(pos + 1);
} else {
payload2 = payload.replaceFirst("\\?>", "\\?>\n");
}
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.message(0).body(String.class).in(payload, payload2);
return mock;
}
public Exchange doTestSignatureRoute(RouteBuilder builder) throws Exception {
return doSignatureRouteTest(builder, null, Collections.<String, Object> emptyMap());
}
public Exchange doSignatureRouteTest(RouteBuilder builder, Exchange e, Map<String, Object> headers) throws Exception {
CamelContext context = new DefaultCamelContext();
try {
context.addRoutes(builder);
context.start();
MockEndpoint mock = context.getEndpoint("mock:result", MockEndpoint.class);
mock.setExpectedMessageCount(1);
ProducerTemplate template = context.createProducerTemplate();
if (e != null) {
template.send("direct:in", e);
} else {
template.sendBodyAndHeaders("direct:in", payload, headers);
}
MockEndpoint.assertIsSatisfied(SignatureDigestMethodTest.this.context);
return mock.getReceivedExchanges().get(0);
} finally {
context.stop();
}
}
@Override
public void doPreSetup() {
setUpKeys("RSA", 1024);
testConfigurationBuilder.withDisableJMX();
}
public void setUpKeys(String algorithm, int keylength) {
keyPair = getKeyPair(algorithm, keylength);
}
public static KeyPair getKeyPair(String algorithm, int keylength) {
KeyPairGenerator keyGen;
try {
keyGen = KeyPairGenerator.getInstance(algorithm);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeCamelException(e);
}
keyGen.initialize(keylength, new SecureRandom());
return keyGen.generateKeyPair();
}
public static KeyStore loadKeystore() throws Exception {
KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
InputStream in = SignatureDigestMethodTest.class.getResourceAsStream("/bob.keystore");
keystore.load(in, "letmein".toCharArray());
return keystore;
}
public Certificate getCertificateFromKeyStore() throws Exception {
Certificate c = loadKeystore().getCertificate("bob");
return c;
}
public PrivateKey getKeyFromKeystore() throws Exception {
return (PrivateKey) loadKeystore().getKey("bob", "letmein".toCharArray());
}
private AlgorithmMethod getCanonicalizationMethod() {
List<String> inclusivePrefixes = new ArrayList<>(1);
inclusivePrefixes.add("ds");
return XmlSignatureHelper.getCanonicalizationMethod(CanonicalizationMethod.EXCLUSIVE, inclusivePrefixes);
}
static KeyAccessor getKeyAccessor(final PrivateKey privateKey) {
KeyAccessor accessor = new KeyAccessor() {
@Override
public KeySelector getKeySelector(Message message) {
return KeySelector.singletonKeySelector(privateKey);
}
@Override
public KeyInfo getKeyInfo(Message mess, Node messageBody, KeyInfoFactory keyInfoFactory) {
return null;
}
};
return accessor;
}
public static String getBaseUri() {
String uri = "file:/" + System.getProperty("user.dir") + "/src/test/resources/org/apache/camel/component/xmlsecurity/";
return uri.replace('\\', '/');
}
public static KeySelector getKeyValueKeySelector() {
return new KeyValueKeySelector();
}
/**
* KeySelector which retrieves the public key from the KeyValue element and returns it. NOTE: If the key algorithm
* doesn't match signature algorithm, then the public key will be ignored.
*/
static | SignatureDigestMethodTest |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinLeftSide.java | {
"start": 1292,
"end": 2219
} | class ____<K, VLeft, VRight, VOut> extends KStreamKStreamJoin<K, VLeft, VRight, VOut, VLeft, VRight> {
KStreamKStreamJoinLeftSide(final JoinWindowsInternal windows,
final ValueJoinerWithKey<? super K, ? super VLeft, ? super VRight, ? extends VOut> joiner,
final boolean outer,
final TimeTrackerSupplier sharedTimeTrackerSupplier,
final StoreFactory otherWindowStoreFactory,
final Optional<StoreFactory> outerJoinWindowStoreFactory) {
super(windows, joiner, outer, windows.beforeMs, windows.afterMs,
sharedTimeTrackerSupplier, otherWindowStoreFactory, outerJoinWindowStoreFactory);
}
@Override
public Processor<K, VLeft, K, VOut> get() {
return new KStreamKStreamJoinLeftProcessor();
}
private | KStreamKStreamJoinLeftSide |
java | apache__camel | components/camel-google/camel-google-secret-manager/src/main/java/org/apache/camel/component/google/secret/manager/GoogleSecretManagerEndpoint.java | {
"start": 1789,
"end": 4070
} | class ____ extends DefaultEndpoint implements EndpointServiceLocation {
@UriParam
private GoogleSecretManagerConfiguration configuration;
private SecretManagerServiceClient secretManagerServiceClient;
public GoogleSecretManagerEndpoint(String uri, GoogleSecretManagerComponent component,
GoogleSecretManagerConfiguration configuration) {
super(uri, component);
this.configuration = configuration;
}
public Producer createProducer() throws Exception {
return new GoogleSecretManagerProducer(this);
}
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException(
"Cannot consume from the google-secret-manager endpoint: " + getEndpointUri());
}
public GoogleSecretManagerConfiguration getConfiguration() {
return configuration;
}
/**
* Setup configuration
*/
public void setConfiguration(GoogleSecretManagerConfiguration configuration) {
this.configuration = configuration;
}
@Override
protected void doStart() throws Exception {
super.doStart();
if (configuration.getClient() != null) {
secretManagerServiceClient = configuration.getClient();
} else {
secretManagerServiceClient = GoogleSecretManagerClientFactory.create(this.getCamelContext(), configuration);
}
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (configuration.getClient() == null && secretManagerServiceClient != null) {
secretManagerServiceClient.close();
}
}
public SecretManagerServiceClient getClient() {
return secretManagerServiceClient;
}
@Override
public String getServiceUrl() {
if (ObjectHelper.isNotEmpty(
ObjectHelper.isNotEmpty(configuration.getProject()) && ObjectHelper.isNotEmpty(configuration.getClient()))) {
return getServiceProtocol() + ":" + configuration.getProject() + ":" + configuration.getClient();
}
return null;
}
@Override
public String getServiceProtocol() {
return "secrets-manager";
}
}
| GoogleSecretManagerEndpoint |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/handler/GatewayRenderingResponseBuilder.java | {
"start": 1586,
"end": 4544
} | class ____ implements RenderingResponse.Builder {
private final String name;
private HttpStatusCode status = HttpStatus.OK;
private final HttpHeaders headers = new HttpHeaders();
private final MultiValueMap<String, Cookie> cookies = new LinkedMultiValueMap<>();
private final Map<String, Object> model = new LinkedHashMap<>();
GatewayRenderingResponseBuilder(RenderingResponse other) {
Objects.requireNonNull(other, "RenderingResponse must not be null");
this.name = other.name();
this.status = other.statusCode();
this.headers.putAll(other.headers());
this.model.putAll(other.model());
}
GatewayRenderingResponseBuilder(String name) {
Objects.requireNonNull(name, "Name must not be null");
this.name = name;
}
@Override
public RenderingResponse.Builder status(HttpStatusCode status) {
Objects.requireNonNull(status, "HttpStatusCode must not be null");
this.status = status;
return this;
}
@Override
public RenderingResponse.Builder status(int status) {
return status(HttpStatusCode.valueOf(status));
}
@Override
public RenderingResponse.Builder cookie(Cookie cookie) {
Objects.requireNonNull(cookie, "Cookie must not be null");
this.cookies.add(cookie.getName(), cookie);
return this;
}
@Override
public RenderingResponse.Builder cookies(Consumer<MultiValueMap<String, Cookie>> cookiesConsumer) {
cookiesConsumer.accept(this.cookies);
return this;
}
@Override
public RenderingResponse.Builder modelAttribute(Object attribute) {
Objects.requireNonNull(attribute, "Attribute must not be null");
if (attribute instanceof Collection<?> collection && collection.isEmpty()) {
return this;
}
return modelAttribute(Conventions.getVariableName(attribute), attribute);
}
@Override
public RenderingResponse.Builder modelAttribute(String name, @Nullable Object value) {
Objects.requireNonNull(name, "Name must not be null");
this.model.put(name, value);
return this;
}
@Override
public RenderingResponse.Builder modelAttributes(Object... attributes) {
modelAttributes(Arrays.asList(attributes));
return this;
}
@Override
public RenderingResponse.Builder modelAttributes(Collection<?> attributes) {
attributes.forEach(this::modelAttribute);
return this;
}
@Override
public RenderingResponse.Builder modelAttributes(Map<String, ?> attributes) {
this.model.putAll(attributes);
return this;
}
@Override
public RenderingResponse.Builder header(String headerName, String... headerValues) {
for (String headerValue : headerValues) {
this.headers.add(headerName, headerValue);
}
return this;
}
@Override
public RenderingResponse.Builder headers(Consumer<HttpHeaders> headersConsumer) {
headersConsumer.accept(this.headers);
return this;
}
@Override
public RenderingResponse build() {
return new GatewayRenderingResponse(this.status, this.headers, this.cookies, this.name, this.model);
}
private static final | GatewayRenderingResponseBuilder |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/initializers/annotation/PropertySourcesInitializerTests.java | {
"start": 1722,
"end": 2188
} | class ____ {
@Value("${enigma}")
// The following can also be used to directly access the
// environment instead of relying on placeholder replacement.
// @Value("#{ environment['enigma'] }")
private String enigma;
@Bean
public String enigma() {
return enigma;
}
}
@Autowired
private String enigma;
@Test
public void customPropertySourceConfiguredViaContextInitializer() {
assertThat(enigma).isEqualTo("foo");
}
public static | Config |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/DefaultJobLeaderIdService.java | {
"start": 2066,
"end": 6791
} | class ____ implements JobLeaderIdService {
private static final Logger LOG = LoggerFactory.getLogger(DefaultJobLeaderIdService.class);
/** High availability services to use by this service. */
private final HighAvailabilityServices highAvailabilityServices;
private final ScheduledExecutor scheduledExecutor;
private final Duration jobTimeout;
/** Map of currently monitored jobs. */
private final Map<JobID, JobLeaderIdListener> jobLeaderIdListeners;
/** Actions to call when the job leader changes. */
private JobLeaderIdActions jobLeaderIdActions;
public DefaultJobLeaderIdService(
HighAvailabilityServices highAvailabilityServices,
ScheduledExecutor scheduledExecutor,
Duration jobTimeout) {
this.highAvailabilityServices =
Preconditions.checkNotNull(highAvailabilityServices, "highAvailabilityServices");
this.scheduledExecutor = Preconditions.checkNotNull(scheduledExecutor, "scheduledExecutor");
this.jobTimeout = Preconditions.checkNotNull(jobTimeout, "jobTimeout");
jobLeaderIdListeners = CollectionUtil.newHashMapWithExpectedSize(4);
jobLeaderIdActions = null;
}
@Override
public void start(JobLeaderIdActions initialJobLeaderIdActions) throws Exception {
if (isStarted()) {
clear();
}
this.jobLeaderIdActions = Preconditions.checkNotNull(initialJobLeaderIdActions);
}
@Override
public void stop() throws Exception {
clear();
this.jobLeaderIdActions = null;
}
/**
* Checks whether the service has been started.
*
* @return True if the service has been started; otherwise false
*/
public boolean isStarted() {
return jobLeaderIdActions != null;
}
@Override
public void clear() throws Exception {
Exception exception = null;
for (JobLeaderIdListener listener : jobLeaderIdListeners.values()) {
try {
listener.stop();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
}
if (exception != null) {
ExceptionUtils.rethrowException(
exception,
"Could not properly stop the "
+ DefaultJobLeaderIdService.class.getSimpleName()
+ '.');
}
jobLeaderIdListeners.clear();
}
@Override
public void addJob(JobID jobId) throws Exception {
Preconditions.checkNotNull(jobLeaderIdActions);
LOG.debug("Add job {} to job leader id monitoring.", jobId);
if (!jobLeaderIdListeners.containsKey(jobId)) {
LeaderRetrievalService leaderRetrievalService =
highAvailabilityServices.getJobManagerLeaderRetriever(jobId);
JobLeaderIdListener jobIdListener =
new JobLeaderIdListener(jobId, jobLeaderIdActions, leaderRetrievalService);
jobLeaderIdListeners.put(jobId, jobIdListener);
}
}
@Override
public void removeJob(JobID jobId) throws Exception {
LOG.debug("Remove job {} from job leader id monitoring.", jobId);
JobLeaderIdListener listener = jobLeaderIdListeners.remove(jobId);
if (listener != null) {
listener.stop();
}
}
@Override
public boolean containsJob(JobID jobId) {
return jobLeaderIdListeners.containsKey(jobId);
}
@Override
public CompletableFuture<JobMasterId> getLeaderId(JobID jobId) throws Exception {
if (!jobLeaderIdListeners.containsKey(jobId)) {
addJob(jobId);
}
JobLeaderIdListener listener = jobLeaderIdListeners.get(jobId);
return listener.getLeaderIdFuture().thenApply(JobMasterId::fromUuidOrNull);
}
@Override
public boolean isValidTimeout(JobID jobId, UUID timeoutId) {
JobLeaderIdListener jobLeaderIdListener = jobLeaderIdListeners.get(jobId);
if (null != jobLeaderIdListener) {
return Objects.equals(timeoutId, jobLeaderIdListener.getTimeoutId());
} else {
return false;
}
}
// --------------------------------------------------------------------------------
// Static utility classes
// --------------------------------------------------------------------------------
/**
* Listener which stores the current leader id and exposes them as a future value when
* requested. The returned future will always be completed properly except when stopping the
* listener.
*/
private final | DefaultJobLeaderIdService |
java | grpc__grpc-java | netty/src/main/java/io/grpc/netty/InternalNettyChannelBuilder.java | {
"start": 1531,
"end": 4301
} | interface ____ {
InternalProtocolNegotiator.ProtocolNegotiator buildProtocolNegotiator();
}
/**
* Sets the {@link ProtocolNegotiatorFactory} to be used. Overrides any specified negotiation type
* and {@code SslContext}.
*/
public static void setProtocolNegotiatorFactory(
NettyChannelBuilder builder, final ProtocolNegotiatorFactory protocolNegotiator) {
builder.protocolNegotiatorFactory(new ProtocolNegotiator.ClientFactory() {
@Override public ProtocolNegotiator newNegotiator() {
return protocolNegotiator.buildProtocolNegotiator();
}
@Override public int getDefaultPort() {
return GrpcUtil.DEFAULT_PORT_SSL;
}
});
}
/**
* Sets the {@link ProtocolNegotiatorFactory} to be used. Overrides any specified negotiation type
* and {@code SslContext}.
*/
public static void setProtocolNegotiatorFactory(
NettyChannelBuilder builder, InternalProtocolNegotiator.ClientFactory protocolNegotiator) {
builder.protocolNegotiatorFactory(protocolNegotiator);
}
public static void setStatsEnabled(NettyChannelBuilder builder, boolean value) {
builder.setStatsEnabled(value);
}
public static void setTracingEnabled(NettyChannelBuilder builder, boolean value) {
builder.setTracingEnabled(value);
}
public static void setStatsRecordStartedRpcs(NettyChannelBuilder builder, boolean value) {
builder.setStatsRecordStartedRpcs(value);
}
public static void setStatsRecordFinishedRpcs(NettyChannelBuilder builder, boolean value) {
builder.setStatsRecordFinishedRpcs(value);
}
public static void setStatsRecordRealTimeMetrics(NettyChannelBuilder builder, boolean value) {
builder.setStatsRecordRealTimeMetrics(value);
}
public static void setStatsRecordRetryMetrics(NettyChannelBuilder builder, boolean value) {
builder.setStatsRecordRetryMetrics(value);
}
/**
* Sets {@link io.grpc.Channel} and {@link io.netty.channel.EventLoopGroup} to Nio. A major
* benefit over using setters is gRPC will manage the life cycle of {@link
* io.netty.channel.EventLoopGroup}.
*/
public static void useNioTransport(NettyChannelBuilder builder) {
builder.channelType(NioSocketChannel.class, InetSocketAddress.class);
builder
.eventLoopGroupPool(SharedResourcePool.forResource(Utils.NIO_WORKER_EVENT_LOOP_GROUP));
}
public static ClientTransportFactory buildTransportFactory(NettyChannelBuilder builder) {
return builder.buildTransportFactory();
}
@VisibleForTesting
public static void setTransportTracerFactory(
NettyChannelBuilder builder, TransportTracer.Factory factory) {
builder.setTransportTracerFactory(factory);
}
private InternalNettyChannelBuilder() {}
}
| ProtocolNegotiatorFactory |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/support/http/AbstractWebStatImpl.java | {
"start": 6272,
"end": 8941
} | class ____ extends StatFilterContextListenerAdapter {
@Override
public void addUpdateCount(int updateCount) {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.addJdbcUpdateCount(updateCount);
}
}
@Override
public void addFetchRowCount(int fetchRowCount) {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.addJdbcFetchRowCount(fetchRowCount);
}
}
@Override
public void executeBefore(String sql, boolean inTransaction) {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcExecuteCount();
}
}
@Override
public void executeAfter(String sql, long nanos, Throwable error) {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.addJdbcExecuteTimeNano(nanos);
if (error != null) {
reqStat.incrementJdbcExecuteErrorCount();
}
}
}
@Override
public void commit() {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcCommitCount();
}
}
@Override
public void rollback() {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcRollbackCount();
}
}
@Override
public void pool_connect() {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcPoolConnectCount();
}
}
@Override
public void pool_close(long nanos) {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcPoolCloseCount();
}
}
@Override
public void resultSet_open() {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcResultSetOpenCount();
}
}
@Override
public void resultSet_close(long nanos) {
WebRequestStat reqStat = WebRequestStat.current();
if (reqStat != null) {
reqStat.incrementJdbcResultSetCloseCount();
}
}
}
}
| WebStatFilterContextListener |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/processing/Find.java | {
"start": 677,
"end": 3167
} | class ____ {
* @Id String isbn;
* String title;
* ...
* }
* </pre>
* <p>
* Then we might define:
* <pre>
* @Find
* Book getBookForIsbn(String isbn);
*
* @Find
* List<Book> getBooksWithTitle(String title);
* </pre>
* <p>
* Notice that:
* <ul>
* <li>the types and names of the method parameters exactly match the
* types and names of the corresponding fields of the entity, and
* <li>there's no special naming convention for the {@code @Find}
* methods—they may be named arbitrarily, and their names
* encode no semantics.
* </ul>
* <p>
* Alternatively, a method parameter may have the type
* {@link org.hibernate.query.range.Range Range<T>} where
* {@code T} is the type of the corresponding field in the entity.
* <pre>
* @Find
* Book getBookForIsbn(Range<String> isbn);
*
* @Find
* List<Book> getBooksWithTitle(Range<String> title);
* </pre>
* This allows the matching field to be restricted based on a variety
* of criteria expressed via the static factory methods of {@code Range}.
* <p>
* It's even possible to query by a field of an embedded object:
* <pre>
* @Find
* List<Book> publishedBooks(String publisher$name);
* </pre>
* Here, {@code publisher$name} refers to the field {@code name} of
* the {@code Book}'s {@code Publisher}.
* <p>
* The Metamodel Generator automatically creates an "implementation"
* of every finder method in the static metamodel class {@code Books_}.
* The generated method may be called according to the following
* protocol:
* <pre>
* Book book = Books_.findBookByIsbn(session, isbn);
* List<Book> books = Books_.getBooksWithTitle(session, String title);
* </pre>
* <p>
* Notice the extra parameter of type {@code EntityManager} at the
* start of the parameter list.
* <p>
* Alternatively, the type to which the annotated method belongs may
* also declare an abstract method with no parameters which returns
* one of the types {@link jakarta.persistence.EntityManager},
* {@link org.hibernate.StatelessSession},
* or {@link org.hibernate.Session}, for example:
* <pre>
* EntityManager entityManager();
* </pre>
* In this case:
* <ul>
* <li>the generated method is no longer {@code static},
* <li>the generated method will use this method to obtain the
* session object, instead of having a parameter of type
* {@code EntityManager}, and
* <li>the generated static metamodel | Book |
java | grpc__grpc-java | api/src/testFixtures/java/io/grpc/StatusMatcher.java | {
"start": 845,
"end": 2633
} | class ____ implements ArgumentMatcher<Status> {
public static StatusMatcher statusHasCode(ArgumentMatcher<Status.Code> codeMatcher) {
return new StatusMatcher(codeMatcher, null);
}
public static StatusMatcher statusHasCode(Status.Code code) {
return statusHasCode(new EqualsMatcher<>(code));
}
private final ArgumentMatcher<Status.Code> codeMatcher;
private final ArgumentMatcher<String> descriptionMatcher;
private StatusMatcher(
ArgumentMatcher<Status.Code> codeMatcher,
ArgumentMatcher<String> descriptionMatcher) {
this.codeMatcher = checkNotNull(codeMatcher, "codeMatcher");
this.descriptionMatcher = descriptionMatcher;
}
public StatusMatcher andDescription(ArgumentMatcher<String> descriptionMatcher) {
checkState(this.descriptionMatcher == null, "Already has a description matcher");
return new StatusMatcher(codeMatcher, descriptionMatcher);
}
public StatusMatcher andDescription(String description) {
return andDescription(new EqualsMatcher<>(description));
}
public StatusMatcher andDescriptionContains(String substring) {
return andDescription(new StringContainsMatcher(substring));
}
@Override
public boolean matches(Status status) {
return status != null
&& codeMatcher.matches(status.getCode())
&& (descriptionMatcher == null || descriptionMatcher.matches(status.getDescription()));
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{code=");
sb.append(codeMatcher);
if (descriptionMatcher != null) {
sb.append(", description=");
sb.append(descriptionMatcher);
}
sb.append("}");
return sb.toString();
}
// Use instead of lambda for better error message.
static final | StatusMatcher |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/ActionModule.java | {
"start": 32914,
"end": 42431
} | class ____ extends AbstractModule {
private static final Logger logger = LogManager.getLogger(ActionModule.class);
/**
* This RestHandler is used as a placeholder for any routes that are unreachable (i.e. have no ServerlessScope annotation) when
* running in serverless mode. It does nothing, and its handleRequest method is never called. It just provides a way to register the
* routes so that we know they do exist.
*/
private static final RestHandler placeholderRestHandler = (request, channel, client) -> {};
private final Settings settings;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final NamedWriteableRegistry namedWriteableRegistry;
private final IndexScopedSettings indexScopedSettings;
private final ClusterSettings clusterSettings;
private final SettingsFilter settingsFilter;
private final List<ActionPlugin> actionPlugins;
private final Map<String, ActionHandler> actions;
private final ActionFilters actionFilters;
private final IncrementalBulkService bulkService;
private final ProjectIdResolver projectIdResolver;
private final AutoCreateIndex autoCreateIndex;
private final DestructiveOperations destructiveOperations;
private final RestController restController;
/** Rest headers that are copied to internal requests made during a rest request. */
private final Set<RestHeaderDefinition> headersToCopy;
private final RequestValidators<PutMappingRequest> mappingRequestValidators;
private final RequestValidators<IndicesAliasesRequest> indicesAliasesRequestRequestValidators;
private final ReservedClusterStateService reservedClusterStateService;
private final RestExtension restExtension;
private final ClusterService clusterService;
public ActionModule(
Settings settings,
IndexNameExpressionResolver indexNameExpressionResolver,
NamedWriteableRegistry namedWriteableRegistry,
IndexScopedSettings indexScopedSettings,
ClusterSettings clusterSettings,
SettingsFilter settingsFilter,
ThreadPool threadPool,
List<ActionPlugin> actionPlugins,
NodeClient nodeClient,
CircuitBreakerService circuitBreakerService,
UsageService usageService,
SystemIndices systemIndices,
TelemetryProvider telemetryProvider,
ClusterService clusterService,
RerouteService rerouteService,
List<ReservedClusterStateHandler<?>> reservedClusterStateHandlers,
List<ReservedProjectStateHandler<?>> reservedProjectStateHandlers,
RestExtension restExtension,
IncrementalBulkService bulkService,
ProjectIdResolver projectIdResolver
) {
this.settings = settings;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.namedWriteableRegistry = namedWriteableRegistry;
this.indexScopedSettings = indexScopedSettings;
this.clusterSettings = clusterSettings;
this.settingsFilter = settingsFilter;
this.actionPlugins = actionPlugins;
actions = setupActions(actionPlugins);
actionFilters = setupActionFilters(actionPlugins);
this.bulkService = bulkService;
this.projectIdResolver = projectIdResolver;
autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver, systemIndices);
destructiveOperations = new DestructiveOperations(settings, clusterSettings);
Set<RestHeaderDefinition> headers = Stream.concat(
actionPlugins.stream().flatMap(p -> p.getRestHeaders().stream()),
Stream.of(
new RestHeaderDefinition(Task.X_OPAQUE_ID_HTTP_HEADER, false),
new RestHeaderDefinition(Task.TRACE_STATE, false),
new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false),
new RestHeaderDefinition(Task.X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER, false)
)
).collect(Collectors.toSet());
final RestInterceptor restInterceptor = getRestServerComponent(
"REST interceptor",
actionPlugins,
restPlugin -> restPlugin.getRestHandlerInterceptor(threadPool.getThreadContext())
);
mappingRequestValidators = new RequestValidators<>(
actionPlugins.stream().flatMap(p -> p.mappingRequestValidators().stream()).toList()
);
indicesAliasesRequestRequestValidators = new RequestValidators<>(
actionPlugins.stream().flatMap(p -> p.indicesAliasesRequestValidators().stream()).toList()
);
headersToCopy = headers;
var customController = getRestServerComponent(
"REST controller",
actionPlugins,
restPlugin -> restPlugin.getRestController(restInterceptor, nodeClient, circuitBreakerService, usageService, telemetryProvider)
);
if (customController != null) {
restController = customController;
} else {
restController = new RestController(restInterceptor, nodeClient, circuitBreakerService, usageService, telemetryProvider);
}
reservedClusterStateService = new ReservedClusterStateService(
clusterService,
rerouteService,
reservedClusterStateHandlers,
reservedProjectStateHandlers
);
this.restExtension = restExtension;
this.clusterService = clusterService;
}
private static <T> T getRestServerComponent(
String type,
List<ActionPlugin> actionPlugins,
Function<RestServerActionPlugin, T> function
) {
T result = null;
for (ActionPlugin plugin : actionPlugins) {
if (plugin instanceof RestServerActionPlugin restPlugin) {
var newInstance = function.apply(restPlugin);
if (newInstance != null) {
logger.debug("Using custom {} from plugin {}", type, plugin.getClass().getName());
if (isInternalPlugin(plugin) == false) {
throw new IllegalArgumentException(
"The "
+ plugin.getClass().getName()
+ " plugin tried to install a custom "
+ type
+ ". This functionality is not available to external plugins."
);
}
if (result != null) {
throw new IllegalArgumentException("Cannot have more than one plugin implementing a " + type);
}
result = newInstance;
}
}
}
return result;
}
private static boolean isInternalPlugin(ActionPlugin plugin) {
final String canonicalName = plugin.getClass().getCanonicalName();
if (canonicalName == null) {
return false;
}
return canonicalName.startsWith("org.elasticsearch.xpack.") || canonicalName.startsWith("co.elastic.elasticsearch.");
}
/**
* Certain request header values need to be copied in the thread context under which request handlers are to be dispatched.
* Careful that this method modifies the thread context. The thread context must be reinstated after the request handler
* finishes and returns.
*/
public void copyRequestHeadersToThreadContext(HttpPreRequest request, ThreadContext threadContext) {
// the request's thread-context must always be populated (by calling this method) before undergoing any request related processing
// we use this opportunity to first record the request processing start time
threadContext.putTransient(Task.TRACE_START_TIME, Instant.ofEpochMilli(System.currentTimeMillis()));
for (final RestHeaderDefinition restHeader : headersToCopy) {
final String name = restHeader.getName();
final List<String> headerValues = request.getHeaders().get(name);
if (headerValues != null && headerValues.isEmpty() == false) {
final List<String> distinctHeaderValues = headerValues.stream().distinct().toList();
if (restHeader.isMultiValueAllowed() == false && distinctHeaderValues.size() > 1) {
throw new IllegalArgumentException("multiple values for single-valued header [" + name + "].");
} else if (name.equals(Task.TRACE_PARENT_HTTP_HEADER)) {
String traceparent = distinctHeaderValues.get(0);
Optional<String> traceId = RestUtils.extractTraceId(traceparent);
if (traceId.isPresent()) {
threadContext.putHeader(Task.TRACE_ID, traceId.get());
threadContext.putTransient(Task.PARENT_TRACE_PARENT_HEADER, traceparent);
}
} else if (name.equals(Task.TRACE_STATE)) {
threadContext.putTransient(Task.PARENT_TRACE_STATE, distinctHeaderValues.get(0));
} else {
threadContext.putHeader(name, String.join(",", distinctHeaderValues));
}
}
}
}
public Map<String, ActionHandler> getActions() {
return actions;
}
static Map<String, ActionHandler> setupActions(List<ActionPlugin> actionPlugins) {
// Subclass NamedRegistry for easy registration
| ActionModule |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/WireTapLogTest.java | {
"start": 973,
"end": 1524
} | class ____ extends ContextTestSupport {
@Test
public void testWireTapLog() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").wireTap("log:com.foo.MyApp?level=WARN").to("mock:result");
}
};
}
}
| WireTapLogTest |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/masterreplica/MasterReplicaIntegrationTests.java | {
"start": 1117,
"end": 6463
} | class ____ extends AbstractRedisClientTest {
private RedisURI masterURI = RedisURI.Builder.redis(host, TestSettings.port(3)).withPassword(passwd)
.withClientName("my-client").withDatabase(5).build();
private StatefulRedisMasterReplicaConnection<String, String> connection;
private RedisURI upstream;
private RedisURI replica;
private RedisCommands<String, String> connection1;
private RedisCommands<String, String> connection2;
@BeforeEach
void before() {
RedisURI node1 = RedisURI.Builder.redis(host, TestSettings.port(3)).withDatabase(2).build();
RedisURI node2 = RedisURI.Builder.redis(host, TestSettings.port(4)).withDatabase(2).build();
connection1 = client.connect(node1).sync();
connection2 = client.connect(node2).sync();
RedisInstance node1Instance = RoleParser.parse(this.connection1.role());
RedisInstance node2Instance = RoleParser.parse(this.connection2.role());
if (node1Instance.getRole().isUpstream() && node2Instance.getRole().isReplica()) {
upstream = node1;
replica = node2;
} else if (node2Instance.getRole().isUpstream() && node1Instance.getRole().isReplica()) {
upstream = node2;
replica = node1;
} else {
assumeTrue(false,
String.format("Cannot run the test because I don't have a distinct master and replica but %s and %s",
node1Instance, node2Instance));
}
WithPassword.enableAuthentication(this.connection1);
this.connection1.auth(passwd);
this.connection1.configSet("masterauth", passwd.toString());
WithPassword.enableAuthentication(this.connection2);
this.connection2.auth(passwd);
this.connection2.configSet("masterauth", passwd.toString());
connection = MasterReplica.connect(client, StringCodec.UTF8, masterURI);
connection.setReadFrom(ReadFrom.REPLICA);
}
@AfterEach
void after() {
if (connection1 != null) {
WithPassword.disableAuthentication(connection1);
connection1.configRewrite();
connection1.getStatefulConnection().close();
}
if (connection2 != null) {
WithPassword.disableAuthentication(connection2);
connection2.configRewrite();
connection2.getStatefulConnection().close();
}
if (connection != null) {
connection.close();
}
}
@Test
void testMasterReplicaReadFromMaster() {
connection.setReadFrom(ReadFrom.UPSTREAM);
String server = connection.sync().info("server");
Pattern pattern = Pattern.compile("tcp_port:(\\d+)");
Matcher matcher = pattern.matcher(server);
assertThat(matcher.find()).isTrue();
assertThat(matcher.group(1)).isEqualTo("" + upstream.getPort());
}
@Test
void testMasterReplicaReadFromReplica() {
String server = connection.sync().info("server");
Pattern pattern = Pattern.compile("tcp_port:(\\d+)");
Matcher matcher = pattern.matcher(server);
assertThat(matcher.find()).isTrue();
assertThat(matcher.group(1)).isEqualTo("" + replica.getPort());
assertThat(connection.getReadFrom()).isEqualTo(ReadFrom.REPLICA);
}
@Test
void testMasterReplicaReadWrite() {
RedisCommands<String, String> redisCommands = connection.sync();
redisCommands.set(key, value);
redisCommands.waitForReplication(1, 100);
assertThat(redisCommands.get(key)).isEqualTo(value);
}
@Test
void testConnectToReplica() {
connection.close();
RedisURI replicaUri = RedisURI.Builder.redis(host, TestSettings.port(4)).withPassword(passwd).build();
connection = MasterReplica.connect(client, StringCodec.UTF8, replicaUri);
RedisCommands<String, String> sync = connection.sync();
sync.set(key, value);
}
@Test
void noReplicaForRead() {
connection.setReadFrom(new ReadFrom() {
@Override
public List<RedisNodeDescription> select(Nodes nodes) {
return Collections.emptyList();
}
});
assertThatThrownBy(() -> replicaCall(connection)).isInstanceOf(RedisException.class);
}
@Test
void masterReplicaConnectionShouldSetClientName() {
assertThat(connection.sync().clientGetname()).isEqualTo(masterURI.getClientName());
connection.sync().quit();
assertThat(connection.sync().clientGetname()).isEqualTo(masterURI.getClientName());
connection.close();
}
@Test
@EnabledOnCommand("ACL")
void testConnectToReplicaWithAcl() {
connection.close();
RedisURI replicaUri = RedisURI.Builder.redis(host, TestSettings.port(900 + 6)).withAuthentication("default", passwd)
.build();
connection = MasterReplica.connect(client, StringCodec.UTF8, replicaUri);
RedisCommands<String, String> sync = connection.sync();
assertThat(sync.ping()).isEqualTo("PONG");
}
static String replicaCall(StatefulRedisMasterReplicaConnection<String, String> connection) {
return connection.sync().info("replication");
}
}
| MasterReplicaIntegrationTests |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/condition/Join_constructor_with_array_Test.java | {
"start": 1052,
"end": 2084
} | class ____ {
@Test
void should_throw_error_if_Condition_array_is_null() {
// GIVEN
Condition<Object>[] conditions = null;
// THEN
assertThatNullPointerException().isThrownBy(() -> new ConcreteJoin(conditions))
.withMessage("The given conditions should not be null");
}
@Test
void should_throw_error_if_Condition_array_contains_nulls() {
// GIVEN
Condition<Object>[] conditions = array(new TestCondition<>(), null);
// THEN
assertThatNullPointerException().isThrownBy(() -> new ConcreteJoin(conditions))
.withMessage("The given conditions should not have null entries");
}
@Test
void should_create_new_Join_with_passed_Conditions() {
// GIVEN
Condition<Object>[] conditions = array(new TestCondition<>(), new TestCondition<>());
// WHEN
Join<Object> join = new ConcreteJoin(conditions);
// THEN
assertThat(join.conditions).containsExactly(conditions);
}
}
| Join_constructor_with_array_Test |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/Functions.java | {
"start": 5367,
"end": 5750
} | interface ____ {@link java.util.concurrent.Callable} that declares a {@link Throwable}.
*
* <p>TODO for 4.0: Move to org.apache.commons.lang3.function.</p>
*
* @param <R> Return type.
* @param <T> Thrown exception.
* @deprecated Use {@link org.apache.commons.lang3.function.FailableCallable}.
*/
@Deprecated
@FunctionalInterface
public | like |
java | spring-projects__spring-boot | module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/SecurityRequestMatchersManagementContextConfiguration.java | {
"start": 2052,
"end": 2267
} | class ____ {
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(DispatcherServlet.class)
@ConditionalOnBean(DispatcherServletPath.class)
public static | SecurityRequestMatchersManagementContextConfiguration |
java | apache__camel | components/camel-opentelemetry2/src/test/java/org/apache/camel/opentelemetry2/OpenTelemetryTracerTest.java | {
"start": 1619,
"end": 5544
} | class ____ extends OpenTelemetryTracerTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
OpenTelemetryTracer tst = new OpenTelemetryTracer();
tst.setTracer(otelExtension.getOpenTelemetry().getTracer("traceTest"));
tst.setContextPropagators(otelExtension.getOpenTelemetry().getPropagators());
CamelContext context = super.createCamelContext();
CamelContextAware.trySetCamelContext(tst, context);
tst.init(context);
return context;
}
@Test
void testRouteSingleRequest() throws IOException {
Exchange result = template.request("direct:start", null);
// Make sure the trace is propagated downstream
assertNotNull(result.getIn().getHeader("traceparent"));
Map<String, OtelTrace> traces = otelExtension.getTraces();
assertEquals(1, traces.size());
checkTrace(traces.values().iterator().next(), null);
}
@Test
void testRouteMultipleRequests() throws IOException {
for (int i = 1; i <= 10; i++) {
context.createProducerTemplate().sendBody("direct:start", "Hello!");
}
Map<String, OtelTrace> traces = otelExtension.getTraces();
// Each trace should have a unique trace id. It is enough to assert that
// the number of elements in the map is the same of the requests to prove
// all traces have been generated uniquely.
assertEquals(10, traces.size());
// Each trace should have the same structure
for (OtelTrace trace : traces.values()) {
checkTrace(trace, "Hello!");
}
}
private void checkTrace(OtelTrace trace, String expectedBody) {
List<SpanData> spans = trace.getSpans();
assertEquals(3, spans.size());
SpanData testProducer = spans.get(0);
SpanData direct = spans.get(1);
SpanData log = spans.get(2);
// Validate span completion
assertTrue(testProducer.hasEnded());
assertTrue(direct.hasEnded());
assertTrue(log.hasEnded());
// Validate same trace
assertEquals(testProducer.getSpanContext().getTraceId(), direct.getSpanContext().getTraceId());
assertEquals(direct.getSpanContext().getTraceId(), log.getSpanContext().getTraceId());
// Validate hierarchy
assertFalse(testProducer.getParentSpanContext().isValid());
assertEquals(testProducer.getSpanContext().getSpanId(), direct.getParentSpanContext().getSpanId());
assertEquals(direct.getSpanContext().getSpanId(), log.getParentSpanContext().getSpanId());
// Validate operations
assertEquals(Op.EVENT_SENT.toString(), testProducer.getAttributes().get(AttributeKey.stringKey("op")));
assertEquals(Op.EVENT_RECEIVED.toString(), direct.getAttributes().get(AttributeKey.stringKey("op")));
// Validate message logging
assertEquals("A message", direct.getEvents().get(0).getAttributes().get(AttributeKey.stringKey("message")));
if (expectedBody == null) {
assertEquals(
"Exchange[ExchangePattern: InOut, BodyType: null, Body: [Body is null]]",
log.getEvents().get(0).getAttributes().get(AttributeKey.stringKey("message")));
} else {
assertEquals(
"Exchange[ExchangePattern: InOnly, BodyType: String, Body: " + expectedBody + "]",
log.getEvents().get(0).getAttributes().get(AttributeKey.stringKey("message")));
}
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.routeId("start")
.log("A message")
.to("log:info");
}
};
}
}
| OpenTelemetryTracerTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/naming/EmbeddedColumnNamingImplicitTests.java | {
"start": 1663,
"end": 1897
} | class ____ {
@Id
private Integer id;
private String name;
@Embedded
@EmbeddedColumnNaming
private Address homeAddress;
@Embedded
@EmbeddedColumnNaming
private Address workAddress;
}
@Embeddable
public static | Person |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/io/compression/GzipInflaterInputStreamFactory.java | {
"start": 1150,
"end": 1716
} | class ____ implements InflaterInputStreamFactory<GZIPInputStream> {
private static final GzipInflaterInputStreamFactory INSTANCE =
new GzipInflaterInputStreamFactory();
public static GzipInflaterInputStreamFactory getInstance() {
return INSTANCE;
}
@Override
public GZIPInputStream create(InputStream in) throws IOException {
return new GZIPInputStream(in);
}
@Override
public Collection<String> getCommonFileExtensions() {
return Arrays.asList("gz", "gzip");
}
}
| GzipInflaterInputStreamFactory |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/ServiceTest.java | {
"start": 1237,
"end": 1328
} | class ____ extends TestCase {
/** Assert on the comparison ordering of the State | ServiceTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/POJOPropertiesCollectorTest.java | {
"start": 1643,
"end": 1855
} | class ____ {
@JsonIgnore public int a;
@JsonIgnore public void setB(int b) { }
public int c;
}
// Should find just one setter for "y", due to partial ignore
static | ImplicitIgnores |
java | apache__flink | flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/socket/SocketWindowWordCount.java | {
"start": 1762,
"end": 4492
} | class ____ {
public static void main(String[] args) throws Exception {
// the host and the port to connect to
final String hostname;
final int port;
final boolean asyncState;
try {
final ParameterTool params = ParameterTool.fromArgs(args);
hostname = params.has("hostname") ? params.get("hostname") : "localhost";
port = params.getInt("port");
asyncState = params.has("async-state");
} catch (Exception e) {
System.err.println(
"No port specified. Please run 'SocketWindowWordCount "
+ "--hostname <hostname> --port <port> [--asyncState]', where hostname (localhost by default) "
+ "and port is the address of the text server");
System.err.println(
"To start a simple text server, run 'netcat -l <port>' and "
+ "type the input text into the command line");
return;
}
// get the execution environment
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// get input data by connecting to the socket
DataStream<String> text = env.socketTextStream(hostname, port, "\n");
// parse the data, group it, window it, and aggregate the counts
KeyedStream<WordWithCount, String> keyedStream =
text.flatMap(
(FlatMapFunction<String, WordWithCount>)
(value, out) -> {
for (String word : value.split("\\s")) {
out.collect(new WordWithCount(word, 1L));
}
},
Types.POJO(WordWithCount.class))
.keyBy(value -> value.word);
if (asyncState) {
keyedStream = keyedStream.enableAsyncState();
}
DataStream<WordWithCount> windowCounts =
keyedStream
.window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
.reduce((a, b) -> new WordWithCount(a.word, a.count + b.count))
.returns(WordWithCount.class);
// print the results with a single thread, rather than in parallel
windowCounts.print().setParallelism(1);
env.execute("Socket Window WordCount");
}
// ------------------------------------------------------------------------
/** Data type for words with count. */
public static | SocketWindowWordCount |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/procedures/ProcedureParameter.java | {
"start": 2381,
"end": 2434
} | enum ____ procedure parameter modes.
*/
| representing |
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/JpaObjectRetrievalFailureException.java | {
"start": 1043,
"end": 1236
} | class ____ extends ObjectRetrievalFailureException {
public JpaObjectRetrievalFailureException(EntityNotFoundException ex) {
super(ex.getMessage(), ex);
}
}
| JpaObjectRetrievalFailureException |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/processor/MapperCreationProcessor.java | {
"start": 13347,
"end": 17848
} | class
____<Annotation> decoratorAnnotations = additionalAnnotationsBuilder.getProcessedAnnotations( decoratorElement );
Decorator decorator = new Decorator.Builder()
.elementUtils( elementUtils )
.typeFactory( typeFactory )
.mapperElement( element )
.decoratedWith( decoratedWith )
.methods( mappingMethods )
.hasDelegateConstructor( hasDelegateConstructor )
.options( options )
.versionInformation( versionInformation )
.implName( mapperOptions.implementationName() )
.implPackage( mapperOptions.implementationPackage() )
.extraImports( getExtraImports( element, mapperOptions ) )
.suppressGeneratorTimestamp( mapperOptions.suppressTimestampInGenerated() )
.additionalAnnotations( decoratorAnnotations )
.build();
return decorator;
}
private SortedSet<Type> getExtraImports(TypeElement element, MapperOptions mapperOptions) {
SortedSet<Type> extraImports = new TreeSet<>();
for ( TypeMirror extraImport : mapperOptions.imports() ) {
Type type = typeFactory.getAlwaysImportedType( extraImport );
extraImports.add( type );
}
// Add original package if a dest package has been set
if ( !"default".equals( mapperOptions.implementationPackage() ) ) {
extraImports.add( typeFactory.getType( element ) );
}
return extraImports;
}
private List<MappingMethod> getMappingMethods(MapperOptions mapperAnnotation, List<SourceMethod> methods) {
List<MappingMethod> mappingMethods = new ArrayList<>();
for ( SourceMethod method : methods ) {
if ( !method.overridesMethod() ) {
continue;
}
mergeInheritedOptions( method, mapperAnnotation, methods, new ArrayList<>(), null );
MappingMethodOptions mappingOptions = method.getOptions();
boolean hasFactoryMethod = false;
if ( method.isIterableMapping() ) {
this.messager.note( 1, Message.ITERABLEMAPPING_CREATE_NOTE, method );
IterableMappingMethod iterableMappingMethod = createWithElementMappingMethod(
method,
mappingOptions,
new IterableMappingMethod.Builder()
);
hasFactoryMethod = iterableMappingMethod.getFactoryMethod() != null;
mappingMethods.add( iterableMappingMethod );
}
else if ( method.isMapMapping() ) {
MapMappingMethod.Builder builder = new MapMappingMethod.Builder();
SelectionParameters keySelectionParameters = null;
FormattingParameters keyFormattingParameters = null;
SelectionParameters valueSelectionParameters = null;
FormattingParameters valueFormattingParameters = null;
NullValueMappingStrategyGem nullValueMappingStrategy = null;
if ( mappingOptions.getMapMapping() != null ) {
keySelectionParameters = mappingOptions.getMapMapping().getKeySelectionParameters();
keyFormattingParameters = mappingOptions.getMapMapping().getKeyFormattingParameters();
valueSelectionParameters = mappingOptions.getMapMapping().getValueSelectionParameters();
valueFormattingParameters = mappingOptions.getMapMapping().getValueFormattingParameters();
nullValueMappingStrategy = mappingOptions.getMapMapping().getNullValueMappingStrategy();
}
this.messager.note( 1, Message.MAPMAPPING_CREATE_NOTE, method );
MapMappingMethod mapMappingMethod = builder
.mappingContext( mappingContext )
.method( method )
.keyFormattingParameters( keyFormattingParameters )
.keySelectionParameters( keySelectionParameters )
.valueFormattingParameters( valueFormattingParameters )
.valueSelectionParameters( valueSelectionParameters )
.build();
hasFactoryMethod = mapMappingMethod.getFactoryMethod() != null;
mappingMethods.add( mapMappingMethod );
}
else if ( method.isValueMapping() ) {
// prefer value mappings over | Set |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java | {
"start": 2372,
"end": 5931
} | class ____ {
final Map<String, String> tags = Maps.newHashMap();
final Map<String, AbstractMetric> metrics = Maps.newHashMap();
/**
* Lookup a tag value
* @param key name of the tag
* @return the tag value
*/
public String getTag(String key) {
return tags.get(key);
}
/**
* Lookup a metric value
* @param key name of the metric
* @return the metric value
*/
public Number getMetric(String key) {
AbstractMetric metric = metrics.get(key);
return metric != null ? metric.value() : null;
}
/**
* Lookup a metric instance
* @param key name of the metric
* @return the metric instance
*/
public AbstractMetric getMetricInstance(String key) {
return metrics.get(key);
}
/**
* @return the entry set of the tags of the record
*/
public Set<Map.Entry<String, String>> tags() {
return tags.entrySet();
}
/**
* @deprecated use metricsEntrySet() instead
* @return entry set of metrics
*/
@Deprecated
public Set<Map.Entry<String, Number>> metrics() {
Map<String, Number> map = new LinkedHashMap<String, Number>(
metrics.size());
for (Map.Entry<String, AbstractMetric> mapEntry : metrics.entrySet()) {
map.put(mapEntry.getKey(), mapEntry.getValue().value());
}
return map.entrySet();
}
/**
* @return entry set of metrics
*/
public Set<Map.Entry<String, AbstractMetric>> metricsEntrySet() {
return metrics.entrySet();
}
@Override public String toString() {
return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
.add("tags=" + tags)
.add("metrics=" + metrics)
.toString();
}
}
public MetricsCache() {
this(MAX_RECS_PER_NAME_DEFAULT);
}
/**
* Construct a metrics cache
* @param maxRecsPerName limit of the number records per record name
*/
public MetricsCache(int maxRecsPerName) {
this.maxRecsPerName = maxRecsPerName;
}
/**
* Update the cache and return the current cached record
* @param mr the update record
* @param includingTags cache tag values (for later lookup by name) if true
* @return the updated cache record
*/
public Record update(MetricsRecord mr, boolean includingTags) {
String name = mr.name();
RecordCache recordCache = map.get(name);
if (recordCache == null) {
recordCache = new RecordCache();
map.put(name, recordCache);
}
Collection<MetricsTag> tags = mr.tags();
Record record = recordCache.get(tags);
if (record == null) {
record = new Record();
recordCache.put(tags, record);
}
for (AbstractMetric m : mr.metrics()) {
record.metrics.put(m.name(), m);
}
if (includingTags) {
// mostly for some sinks that include tags as part of a dense schema
for (MetricsTag t : mr.tags()) {
record.tags.put(t.name(), t.value());
}
}
return record;
}
/**
* Update the cache and return the current cache record
* @param mr the update record
* @return the updated cache record
*/
public Record update(MetricsRecord mr) {
return update(mr, false);
}
/**
* Get the cached record
* @param name of the record
* @param tags of the record
* @return the cached record or null
*/
public Record get(String name, Collection<MetricsTag> tags) {
RecordCache rc = map.get(name);
if (rc == null) return null;
return rc.get(tags);
}
}
| Record |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/function/DefaultServerResponseBuilder.java | {
"start": 1662,
"end": 6650
} | class ____ implements ServerResponse.BodyBuilder {
private final HttpStatusCode statusCode;
private final HttpHeaders headers = new HttpHeaders();
private final MultiValueMap<String, Cookie> cookies = new LinkedMultiValueMap<>();
public DefaultServerResponseBuilder(ServerResponse other) {
Assert.notNull(other, "ServerResponse must not be null");
this.statusCode = other.statusCode();
this.headers.addAll(other.headers());
this.cookies.addAll(other.cookies());
}
public DefaultServerResponseBuilder(HttpStatusCode status) {
Assert.notNull(status, "HttpStatusCode must not be null");
this.statusCode = status;
}
@Override
public ServerResponse.BodyBuilder header(String headerName, @Nullable String... headerValues) {
Assert.notNull(headerName, "HeaderName must not be null");
for (String headerValue : headerValues) {
this.headers.add(headerName, headerValue);
}
return this;
}
@Override
public ServerResponse.BodyBuilder headers(Consumer<HttpHeaders> headersConsumer) {
Assert.notNull(headersConsumer, "HeaderConsumer must not be null");
headersConsumer.accept(this.headers);
return this;
}
@Override
public ServerResponse.BodyBuilder cookie(Cookie cookie) {
Assert.notNull(cookie, "Cookie must not be null");
this.cookies.add(cookie.getName(), cookie);
return this;
}
@Override
public ServerResponse.BodyBuilder cookies(Consumer<MultiValueMap<String, Cookie>> cookiesConsumer) {
Assert.notNull(cookiesConsumer, "CookiesConsumer must not be null");
cookiesConsumer.accept(this.cookies);
return this;
}
@Override
public ServerResponse.BodyBuilder allow(HttpMethod... allowedMethods) {
Assert.notNull(allowedMethods, "Http AllowedMethods must not be null");
this.headers.setAllow(new LinkedHashSet<>(Arrays.asList(allowedMethods)));
return this;
}
@Override
public ServerResponse.BodyBuilder allow(Set<HttpMethod> allowedMethods) {
Assert.notNull(allowedMethods, "Http AllowedMethods must not be null");
this.headers.setAllow(allowedMethods);
return this;
}
@Override
public ServerResponse.BodyBuilder contentLength(long contentLength) {
this.headers.setContentLength(contentLength);
return this;
}
@Override
public ServerResponse.BodyBuilder contentType(MediaType contentType) {
Assert.notNull(contentType, "ContentType must not be null");
this.headers.setContentType(contentType);
return this;
}
@Override
public ServerResponse.BodyBuilder eTag(String tag) {
this.headers.setETag(tag);
return this;
}
@Override
public ServerResponse.BodyBuilder lastModified(ZonedDateTime lastModified) {
this.headers.setLastModified(lastModified);
return this;
}
@Override
public ServerResponse.BodyBuilder lastModified(Instant lastModified) {
this.headers.setLastModified(lastModified);
return this;
}
@Override
public ServerResponse.BodyBuilder location(URI location) {
this.headers.setLocation(location);
return this;
}
@Override
public ServerResponse.BodyBuilder cacheControl(CacheControl cacheControl) {
this.headers.setCacheControl(cacheControl);
return this;
}
@Override
public ServerResponse.BodyBuilder varyBy(String... requestHeaders) {
this.headers.setVary(Arrays.asList(requestHeaders));
return this;
}
@Override
public ServerResponse build() {
return build((request, response) -> null);
}
@Override
public ServerResponse build(WriteFunction writeFunction) {
return new WriteFunctionResponse(this.statusCode, this.headers, this.cookies, writeFunction);
}
@Override
public ServerResponse body(Object body) {
return DefaultEntityResponseBuilder.fromObject(body)
.status(this.statusCode)
.headers(headers -> headers.putAll(this.headers))
.cookies(cookies -> cookies.addAll(this.cookies))
.build();
}
@Override
public <T> ServerResponse body(T body, ParameterizedTypeReference<T> bodyType) {
return DefaultEntityResponseBuilder.fromObject(body, bodyType)
.status(this.statusCode)
.headers(headers -> headers.putAll(this.headers))
.cookies(cookies -> cookies.addAll(this.cookies))
.build();
}
@Override
public ServerResponse render(String name, Object... modelAttributes) {
return new DefaultRenderingResponseBuilder(name)
.status(this.statusCode)
.headers(headers -> headers.putAll(this.headers))
.cookies(cookies -> cookies.addAll(this.cookies))
.modelAttributes(modelAttributes)
.build();
}
@Override
public ServerResponse render(String name, Map<String, ?> model) {
return new DefaultRenderingResponseBuilder(name)
.status(this.statusCode)
.headers(headers -> headers.putAll(this.headers))
.cookies(cookies -> cookies.addAll(this.cookies))
.modelAttributes(model)
.build();
}
@Override
public ServerResponse stream(Consumer<ServerResponse.StreamBuilder> streamConsumer) {
return StreamingServerResponse.create(this.statusCode, this.headers, this.cookies, streamConsumer, null);
}
private static | DefaultServerResponseBuilder |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/config/remote/request/ConfigBatchListenRequest.java | {
"start": 896,
"end": 2719
} | class ____ extends AbstractConfigRequest {
/**
* listen or remove listen.
*/
private boolean listen = true;
private List<ConfigListenContext> configListenContexts = new ArrayList<>();
/**
* add listen config.
*
* @param group group.
* @param dataId dataId.
* @param tenant tenant.
* @param md5 md5.
*/
public void addConfigListenContext(String group, String dataId, String tenant, String md5) {
ConfigListenContext configListenContext = new ConfigListenContext();
configListenContext.dataId = dataId;
configListenContext.group = group;
configListenContext.md5 = md5;
configListenContext.tenant = tenant;
configListenContexts.add(configListenContext);
}
/**
* Getter method for property <tt>configListenContexts</tt>.
*
* @return property value of configListenContexts
*/
public List<ConfigListenContext> getConfigListenContexts() {
return configListenContexts;
}
/**
* Setter method for property <tt>configListenContexts</tt>.
*
* @param configListenContexts value to be assigned to property configListenContexts
*/
public void setConfigListenContexts(List<ConfigListenContext> configListenContexts) {
this.configListenContexts = configListenContexts;
}
/**
* Getter method for property <tt>listen</tt>.
*
* @return property value of listen
*/
public boolean isListen() {
return listen;
}
/**
* Setter method for property <tt>listen</tt>.
*
* @param listen value to be assigned to property listen
*/
public void setListen(boolean listen) {
this.listen = listen;
}
public static | ConfigBatchListenRequest |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ClassUtils.java | {
"start": 35193,
"end": 35580
} | interface ____ a common Java language interface:
* {@link Serializable}, {@link Externalizable}, {@link Closeable}, {@link AutoCloseable},
* {@link Cloneable}, {@link Comparable} - all of which can be ignored when looking
* for 'primary' user-level interfaces. Common characteristics: no service-level
* operations, no bean property methods, no default methods.
* @param ifc the | is |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/jackson2/PreAuthenticatedAuthenticationTokenDeserializer.java | {
"start": 2068,
"end": 3841
} | class ____ extends JsonDeserializer<PreAuthenticatedAuthenticationToken> {
private static final TypeReference<List<GrantedAuthority>> GRANTED_AUTHORITY_LIST = new TypeReference<>() {
};
/**
* This method construct {@link PreAuthenticatedAuthenticationToken} object from
* serialized json.
* @param jp the JsonParser
* @param ctxt the DeserializationContext
* @return the user
* @throws IOException if a exception during IO occurs
* @throws JsonProcessingException if an error during JSON processing occurs
*/
@Override
public PreAuthenticatedAuthenticationToken deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException, JsonProcessingException {
ObjectMapper mapper = (ObjectMapper) jp.getCodec();
JsonNode jsonNode = mapper.readTree(jp);
Boolean authenticated = readJsonNode(jsonNode, "authenticated").asBoolean();
JsonNode principalNode = readJsonNode(jsonNode, "principal");
Object principal = (!principalNode.isObject()) ? principalNode.asText()
: mapper.readValue(principalNode.traverse(mapper), Object.class);
Object credentials = readJsonNode(jsonNode, "credentials").asText();
List<GrantedAuthority> authorities = mapper.readValue(readJsonNode(jsonNode, "authorities").traverse(mapper),
GRANTED_AUTHORITY_LIST);
PreAuthenticatedAuthenticationToken token = (!authenticated)
? new PreAuthenticatedAuthenticationToken(principal, credentials)
: new PreAuthenticatedAuthenticationToken(principal, credentials, authorities);
token.setDetails(readJsonNode(jsonNode, "details"));
return token;
}
private JsonNode readJsonNode(JsonNode jsonNode, String field) {
return jsonNode.has(field) ? jsonNode.get(field) : MissingNode.getInstance();
}
}
| PreAuthenticatedAuthenticationTokenDeserializer |
java | google__guava | android/guava/src/com/google/common/io/CharSource.java | {
"start": 24769,
"end": 25065
} | class ____ extends StringCharSource {
private static final EmptyCharSource INSTANCE = new EmptyCharSource();
private EmptyCharSource() {
super("");
}
@Override
public String toString() {
return "CharSource.empty()";
}
}
private static final | EmptyCharSource |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/InnerClassDeserTest.java | {
"start": 261,
"end": 541
} | class ____
{
public String name;
public Brain brain;
public Dog() { }
protected Dog(String n, boolean thinking) {
name = n;
brain = new Brain();
brain.isThinking = thinking;
}
// note: non-static
public | Dog |
java | quarkusio__quarkus | integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/MyClientLogger.java | {
"start": 361,
"end": 917
} | class ____ implements ClientLogger {
private final AtomicBoolean used = new AtomicBoolean(false);
@Override
public void setBodySize(int bodySize) {
}
@Override
public void logResponse(HttpClientResponse response, boolean redirect) {
used.set(true);
}
@Override
public void logRequest(HttpClientRequest request, Buffer body, boolean omitBody) {
used.set(true);
}
public void reset() {
used.set(false);
}
public boolean wasUsed() {
return used.get();
}
}
| MyClientLogger |
java | apache__maven | impl/maven-executor/src/main/java/org/apache/maven/cling/executor/ExecutorHelper.java | {
"start": 1234,
"end": 2911
} | enum ____ {
/**
* Automatically decide. For example, presence of {@link ExecutorRequest#environmentVariables()} or
* {@link ExecutorRequest#jvmArguments()} will result in choosing {@link #FORKED} executor. Otherwise,
* {@link #EMBEDDED} executor is preferred.
*/
AUTO,
/**
* Forces embedded execution. May fail if {@link ExecutorRequest} contains input unsupported by executor.
*/
EMBEDDED,
/**
* Forces forked execution. Always carried out, most isolated and "most correct", but is slow as it uses child process.
*/
FORKED
}
/**
* Returns the preferred mode of this helper.
*/
@Nonnull
Mode getDefaultMode();
/**
* Creates pre-populated builder for {@link ExecutorRequest}. Users of helper must use this method to create
* properly initialized request builder.
*/
@Nonnull
ExecutorRequest.Builder executorRequest();
/**
* Executes the request with preferred mode executor.
*/
default int execute(ExecutorRequest executorRequest) throws ExecutorException {
return execute(getDefaultMode(), executorRequest);
}
/**
* Executes the request with passed in mode executor.
*/
int execute(Mode mode, ExecutorRequest executorRequest) throws ExecutorException;
/**
* High level operation, returns the version of the Maven covered by this helper. This method call caches
* underlying operation, and is safe to invoke as many times needed.
*
* @see Executor#mavenVersion(ExecutorRequest)
*/
@Nonnull
String mavenVersion();
}
| Mode |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/StringHelper.java | {
"start": 890,
"end": 11730
} | class ____ {
private static final int ALIAS_TRUNCATE_LENGTH = 10;
public static final String WHITESPACE = " \n\r\f\t";
public static final String[] EMPTY_STRINGS = EMPTY_STRING_ARRAY;
private static final Pattern COMMA_SEPARATED_PATTERN = Pattern.compile( "\\s*,\\s*" );
private StringHelper() { /* static methods only - hide constructor */
}
public static int lastIndexOfLetter(String string) {
for ( int i = 0; i < string.length(); i++ ) {
final char character = string.charAt( i );
// Include "_". See HHH-8073
if ( !isLetter( character ) && !( '_' == character ) ) {
return i - 1;
}
}
return string.length() - 1;
}
public static String join(String separator, String[] strings) {
final int length = strings.length;
if ( length == 0 ) {
return "";
}
else {
// Allocate space for length * firstStringLength;
// If strings[0] is null, then its length is defined as 4, since that's the
// length of "null".
final int firstStringLength = strings[0] != null ? strings[0].length() : 4;
final StringBuilder buf =
new StringBuilder( length * firstStringLength )
.append( strings[0] );
for ( int i = 1; i < length; i++ ) {
buf.append( separator ).append( strings[i] );
}
return buf.toString();
}
}
public static String join(String separator, Object[] values) {
final int length = values.length;
if ( length == 0 ) {
return "";
}
else {
// Allocate space for length * firstStringLength;
// If strings[0] is null, then its length is defined as 4, since that's the
// length of "null".
final int firstStringLength = values[0] != null ? values[0].toString().length() : 4;
final StringBuilder buf =
new StringBuilder( length * firstStringLength )
.append( values[0] );
for ( int i = 1; i < length; i++ ) {
buf.append( separator ).append( values[i] );
}
return buf.toString();
}
}
public static String join(String separator, Iterable<?> objects) {
return join( separator, objects.iterator() );
}
public static String join(String separator, Iterator<?> objects) {
final StringBuilder buf = new StringBuilder();
if ( objects.hasNext() ) {
buf.append( objects.next() );
}
while ( objects.hasNext() ) {
buf.append( separator ).append( objects.next() );
}
return buf.toString();
}
public static String joinWithQualifierAndSuffix(
String[] values,
String qualifier,
String suffix,
String deliminator) {
final int length = values.length;
if ( length == 0 ) {
return "";
}
final StringBuilder buf =
new StringBuilder( length * ( values[0].length() + suffix.length() ) )
.append( qualify( qualifier, values[0] ) ).append( suffix );
for ( int i = 1; i < length; i++ ) {
buf.append( deliminator ).append( qualify( qualifier, values[i] ) ).append( suffix );
}
return buf.toString();
}
public static String[] add(String[] x, String sep, String[] y) {
final String[] result = new String[x.length];
for ( int i = 0; i < x.length; i++ ) {
result[i] = x[i] + sep + y[i];
}
return result;
}
public static String repeat(String string, int times) {
return string.repeat( Math.max( 0, times ) );
}
public static String repeat(String string, int times, String deliminator) {
final StringBuilder buf =
new StringBuilder( string.length() * times + deliminator.length() * ( times - 1 ) )
.append( string );
for ( int i = 1; i < times; i++ ) {
buf.append( deliminator ).append( string );
}
return buf.toString();
}
public static String repeat(char character, int times) {
final char[] buffer = new char[times];
Arrays.fill( buffer, character );
return new String( buffer );
}
public static void repeat(String string, int times, String separator, StringBuilder buffer) {
buffer.append( string );
for ( int i = 1; i < times; i++ ) {
buffer.append( separator ).append( string );
}
}
public static String replace(String template, String placeholder, String replacement) {
return replace( template, placeholder, replacement, false );
}
public static String[] replace(String[] templates, String placeholder, String replacement) {
final String[] result = new String[templates.length];
for ( int i = 0; i < templates.length; i++ ) {
result[i] = replace( templates[i], placeholder, replacement );
}
return result;
}
public static String replace(String template, String placeholder, String replacement, boolean wholeWords) {
return replace( template, placeholder, replacement, wholeWords, false );
}
public static String replace(
String template,
String placeholder,
String replacement,
boolean wholeWords,
boolean encloseInParensIfNecessary) {
if ( template == null ) {
return null;
}
final int loc = indexOfPlaceHolder( template, placeholder, wholeWords );
if ( loc < 0 ) {
return template;
}
else {
final String beforePlaceholder = template.substring( 0, loc );
final String afterPlaceholder = template.substring( loc + placeholder.length() );
return replace(
beforePlaceholder,
afterPlaceholder,
placeholder,
replacement,
wholeWords,
encloseInParensIfNecessary
);
}
}
public static String replace(
String beforePlaceholder,
String afterPlaceholder,
String placeholder,
String replacement,
boolean wholeWords,
boolean encloseInParensIfNecessary) {
final boolean actuallyReplace =
!wholeWords
|| afterPlaceholder.isEmpty()
|| !isJavaIdentifierPart( afterPlaceholder.charAt( 0 ) );
// We only need to check the left param to determine if the placeholder is already
// enclosed in parentheses (HHH-10383)
// Examples:
// 1) "... IN (?1", we assume that "?1" does not need to be enclosed because there
// is already a right-parenthesis; we assume there will be a matching right-parenthesis.
// 2) "... IN ?1", we assume that "?1" needs to be enclosed in parentheses, because there
// is no left-parenthesis.
// We need to check the placeholder is not used in `Order By FIELD(...)` (HHH-10502)
// Examples:
// " ... Order By FIELD(id,?1)", after expand parameters, the sql is "... Order By FIELD(id,?,?,?)"
final boolean encloseInParens =
actuallyReplace
&& encloseInParensIfNecessary
&& !( getLastNonWhitespaceCharacter( beforePlaceholder ) == '(' )
&& !( getLastNonWhitespaceCharacter( beforePlaceholder ) == ','
&& getFirstNonWhitespaceCharacter( afterPlaceholder ) == ')' );
final StringBuilder buf = new StringBuilder( beforePlaceholder );
if ( encloseInParens ) {
buf.append( '(' );
}
buf.append( actuallyReplace ? replacement : placeholder );
if ( encloseInParens ) {
buf.append( ')' );
}
buf.append(
replace(
afterPlaceholder,
placeholder,
replacement,
wholeWords,
encloseInParensIfNecessary
)
);
return buf.toString();
}
private static int indexOfPlaceHolder(String template, String placeholder, boolean wholeWords) {
if ( wholeWords ) {
int placeholderIndex = -1;
boolean isPartialPlaceholderMatch;
do {
placeholderIndex = template.indexOf( placeholder, placeholderIndex + 1 );
isPartialPlaceholderMatch = placeholderIndex != -1 &&
template.length() > placeholderIndex + placeholder.length() &&
isJavaIdentifierPart( template.charAt( placeholderIndex + placeholder.length() ) );
} while ( placeholderIndex != -1 && isPartialPlaceholderMatch );
return placeholderIndex;
}
else {
return template.indexOf( placeholder );
}
}
/**
* Used to find the ordinal parameters (e.g. '?1') in a string.
*/
public static int indexOfIdentifierWord(String str, String word) {
if ( str != null && !str.isEmpty() && word != null && !word.isEmpty() ) {
int position = str.indexOf( word );
while ( position >= 0 && position < str.length() ) {
if (
( position == 0 || !isJavaIdentifierPart( str.charAt( position - 1 ) ) ) &&
( position + word.length() == str.length()
|| !isJavaIdentifierPart( str.charAt( position + word.length() ) ) )
) {
return position;
}
position = str.indexOf( word, position + 1 );
}
}
return -1;
}
public static char getLastNonWhitespaceCharacter(String str) {
if ( str != null && !str.isEmpty() ) {
for ( int i = str.length() - 1; i >= 0; i-- ) {
final char ch = str.charAt( i );
if ( !isWhitespace( ch ) ) {
return ch;
}
}
}
return '\0';
}
public static char getFirstNonWhitespaceCharacter(String str) {
if ( str != null && !str.isEmpty() ) {
for ( int i = 0; i < str.length(); i++ ) {
final char ch = str.charAt( i );
if ( !isWhitespace( ch ) ) {
return ch;
}
}
}
return '\0';
}
public static String replaceOnce(String template, String placeholder, String replacement) {
if ( template == null ) {
return null;
}
else {
final int loc = template.indexOf( placeholder );
return loc < 0 ? template
: template.substring( 0, loc )
+ replacement
+ template.substring( loc + placeholder.length() );
}
}
public static String[] split(String separators, String list) {
return split( separators, list, false );
}
public static String[] split(String separators, String list, boolean include) {
final StringTokenizer tokens = new StringTokenizer( list, separators, include );
final String[] result = new String[tokens.countTokens()];
int i = 0;
while ( tokens.hasMoreTokens() ) {
result[i++] = tokens.nextToken();
}
return result;
}
public static String[] splitTrimmingTokens(String separators, String list, boolean include) {
final StringTokenizer tokens = new StringTokenizer( list, separators, include );
final String[] result = new String[tokens.countTokens()];
int i = 0;
while ( tokens.hasMoreTokens() ) {
result[i++] = tokens.nextToken().trim();
}
return result;
}
public static String[] splitFull(String separators, String list) {
final List<String> parts = new ArrayList<>();
int prevIndex = 0;
int index;
while ( ( index = list.indexOf( separators, prevIndex ) ) != -1 ) {
parts.add( list.substring( prevIndex, index ) );
prevIndex = index + separators.length();
}
parts.add( list.substring( prevIndex ) );
return parts.toArray( EMPTY_STRING_ARRAY );
}
public static String unqualify(String qualifiedName) {
final int loc = qualifiedName.lastIndexOf( '.' );
return loc < 0 ? qualifiedName : qualifiedName.substring( loc + 1 );
}
public static String qualifier(String qualifiedName) {
final int loc = qualifiedName.lastIndexOf( '.' );
return loc < 0 ? "" : qualifiedName.substring( 0, loc );
}
/**
* Collapses a name. Mainly intended for use with classnames, where an example might serve best to explain.
* Imagine you have a | StringHelper |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/UnnecessaryCheckNotNullTest.java | {
"start": 2094,
"end": 4924
} | class ____ {
void positive_checkNotNull() {
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Preconditions.checkNotNull(new String(""));
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Preconditions.checkNotNull(new String(""), new Object());
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Preconditions.checkNotNull(new String(""), "Message %s", "template");
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String pa = Preconditions.checkNotNull(new String(""));
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String pb = Preconditions.checkNotNull(new String(""), new Object());
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String pc = Preconditions.checkNotNull(new String(""), "Message %s", "template");
}
void positive_verifyNotNull() {
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Verify.verifyNotNull(new String(""));
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Verify.verifyNotNull(new String(""), "Message");
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Verify.verifyNotNull(new String(""), "Message %s", "template");
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String va = Verify.verifyNotNull(new String(""));
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String vb = Verify.verifyNotNull(new String(""), "Message");
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String vc = Verify.verifyNotNull(new String(""), "Message %s", "template");
}
void positive_requireNonNull() {
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Objects.requireNonNull(new String(""));
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
Objects.requireNonNull(new String(""), "Message");
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String va = Objects.requireNonNull(new String(""));
// BUG: Diagnostic contains: UnnecessaryCheckNotNull
String vb = Objects.requireNonNull(new String(""), "Message");
}
}
""")
.doTest();
}
@Test
public void positive_newArray() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import java.util.Objects;
| Test |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedTestIntegrationTests.java | {
"start": 47764,
"end": 52046
} | class ____ {
@Test
void failsWithArgumentsSourceProvidingUnusedArguments() {
var results = execute(ArgumentCountValidationMode.STRICT, UnusedArgumentsTestCase.class,
"testWithTwoUnusedStringArgumentsProvider", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1, event(finishedWithFailure(message(
"Configuration error: @ParameterizedTest consumes 1 parameter but there were 2 arguments provided.%nNote: the provided arguments were [foo, unused1]".formatted()))));
}
@Test
void failsWithMethodSourceProvidingUnusedArguments() {
var results = execute(ArgumentCountValidationMode.STRICT, UnusedArgumentsTestCase.class,
"testWithMethodSourceProvidingUnusedArguments", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1, event(finishedWithFailure(message(
"Configuration error: @ParameterizedTest consumes 1 parameter but there were 2 arguments provided.%nNote: the provided arguments were [foo, unused1]".formatted()))));
}
@Test
void failsWithCsvSourceUnusedArgumentsAndStrictArgumentCountValidationAnnotationAttribute() {
var results = execute(ArgumentCountValidationMode.NONE, UnusedArgumentsTestCase.class,
"testWithStrictArgumentCountValidation", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1, event(finishedWithFailure(message(
"Configuration error: @ParameterizedTest consumes 1 parameter but there were 2 arguments provided.%nNote: the provided arguments were [foo, unused1]".formatted()))));
}
@Test
void failsWithCsvSourceUnusedArgumentsButExecutesRemainingArgumentsWhereThereIsNoUnusedArgument() {
var results = execute(ArgumentCountValidationMode.STRICT, UnusedArgumentsTestCase.class,
"testWithCsvSourceContainingDifferentNumbersOfArguments", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1, event(finishedWithFailure(message(
"Configuration error: @ParameterizedTest consumes 1 parameter but there were 2 arguments provided.%nNote: the provided arguments were [foo, unused1]".formatted())))) //
.haveExactly(1,
event(test(), displayName("[2] argument = bar"), finishedWithFailure(message("bar"))));
}
@Test
void executesWithCsvSourceUnusedArgumentsAndArgumentCountValidationAnnotationAttribute() {
var results = execute(ArgumentCountValidationMode.NONE, UnusedArgumentsTestCase.class,
"testWithNoneArgumentCountValidation", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1,
event(test(), displayName("[1] argument = foo"), finishedWithFailure(message("foo"))));
}
@Test
void executesWithMethodSourceProvidingUnusedArguments() {
var results = execute(ArgumentCountValidationMode.STRICT, RepeatableSourcesTestCase.class,
"testWithRepeatableCsvSource", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1, event(test(), displayName("[1] argument = a"), finishedWithFailure(message("a")))) //
.haveExactly(1, event(test(), displayName("[2] argument = b"), finishedWithFailure(message("b"))));
}
@Test
void evaluatesArgumentsAtMostOnce() {
var results = execute(ArgumentCountValidationMode.STRICT, UnusedArgumentsTestCase.class,
"testWithEvaluationReportingArgumentsProvider", String.class);
results.allEvents().assertThatEvents() //
.haveExactly(1, event(finishedWithFailure(message(
"Configuration error: @ParameterizedTest consumes 1 parameter but there were 2 arguments provided.%nNote: the provided arguments were [foo, unused]".formatted()))));
results.allEvents().reportingEntryPublished().assertThatEvents() //
.haveExactly(1, event(EventConditions.reportEntry(Map.of("evaluated", "true"))));
}
private EngineExecutionResults execute(ArgumentCountValidationMode configurationValue, Class<?> javaClass,
String methodName, Class<?>... methodParameterTypes) {
return EngineTestKit.engine(new JupiterTestEngine()) //
.selectors(selectMethod(javaClass, methodName, methodParameterTypes)) //
.configurationParameter(ArgumentCountValidator.ARGUMENT_COUNT_VALIDATION_KEY,
configurationValue.name().toLowerCase()) //
.execute();
}
}
@Nested
| UnusedArgumentsWithStrictArgumentsCountIntegrationTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/DefaultExecutionGraphCache.java | {
"start": 1362,
"end": 5438
} | class ____ implements ExecutionGraphCache {
private final Duration timeout;
private final Duration timeToLive;
private final ConcurrentHashMap<JobID, ExecutionGraphEntry> cachedExecutionGraphs;
private volatile boolean running = true;
public DefaultExecutionGraphCache(Duration timeout, Duration timeToLive) {
this.timeout = checkNotNull(timeout);
this.timeToLive = checkNotNull(timeToLive);
cachedExecutionGraphs = new ConcurrentHashMap<>(4);
}
@Override
public void close() {
running = false;
// clear all cached AccessExecutionGraphs
cachedExecutionGraphs.clear();
}
@Override
public int size() {
return cachedExecutionGraphs.size();
}
@Override
public CompletableFuture<ExecutionGraphInfo> getExecutionGraphInfo(
JobID jobId, RestfulGateway restfulGateway) {
return getExecutionGraphInternal(jobId, restfulGateway).thenApply(Function.identity());
}
    /**
     * Looks up (or creates) the cached {@link ExecutionGraphInfo} future for the given job.
     *
     * <p>Lock-free CAS loop: read the current entry; if it is still within its TTL and has
     * not failed, reuse it. Otherwise try to atomically install a fresh entry and issue a
     * new gateway request. A lost race (another thread installed an entry first) retries.
     *
     * @param jobId job to look up
     * @param restfulGateway gateway used to fetch the graph on a cache miss
     * @return future completed with the execution graph info, or exceptionally on failure
     */
    private CompletableFuture<ExecutionGraphInfo> getExecutionGraphInternal(
            JobID jobId, RestfulGateway restfulGateway) {
        Preconditions.checkState(running, "ExecutionGraphCache is no longer running");
        while (true) {
            final ExecutionGraphEntry oldEntry = cachedExecutionGraphs.get(jobId);
            final long currentTime = System.currentTimeMillis();
            // Cache hit: entry still valid and not failed -> reuse it.
            if (oldEntry != null && currentTime < oldEntry.getTTL()) {
                final CompletableFuture<ExecutionGraphInfo> executionGraphInfoFuture =
                        oldEntry.getExecutionGraphInfoFuture();
                if (!executionGraphInfoFuture.isCompletedExceptionally()) {
                    return executionGraphInfoFuture;
                }
                // otherwise it must be completed exceptionally
            }
            // Cache miss (absent, expired, or failed): prepare a replacement entry.
            final ExecutionGraphEntry newEntry =
                    new ExecutionGraphEntry(currentTime + timeToLive.toMillis());
            final boolean successfulUpdate;
            if (oldEntry == null) {
                successfulUpdate = cachedExecutionGraphs.putIfAbsent(jobId, newEntry) == null;
            } else {
                successfulUpdate = cachedExecutionGraphs.replace(jobId, oldEntry, newEntry);
                // cancel potentially outstanding futures
                oldEntry.getExecutionGraphInfoFuture().cancel(false);
            }
            if (successfulUpdate) {
                // We won the race: request from the gateway and funnel the result into
                // the newly installed entry.
                final CompletableFuture<ExecutionGraphInfo> executionGraphInfoFuture =
                        restfulGateway.requestExecutionGraphInfo(jobId, timeout);
                executionGraphInfoFuture.whenComplete(
                        (ExecutionGraphInfo executionGraph, Throwable throwable) -> {
                            if (throwable != null) {
                                newEntry.getExecutionGraphInfoFuture()
                                        .completeExceptionally(throwable);
                                // remove exceptionally completed entry because it doesn't help
                                cachedExecutionGraphs.remove(jobId, newEntry);
                            } else {
                                newEntry.getExecutionGraphInfoFuture().complete(executionGraph);
                            }
                        });
                if (!running) {
                    // delete newly added entry in case of a concurrent stopping operation
                    cachedExecutionGraphs.remove(jobId, newEntry);
                }
                return newEntry.getExecutionGraphInfoFuture();
            }
            // Lost the CAS race against a concurrent update -> retry the loop.
        }
    }
    /** Evicts all entries whose time-to-live has expired. Safe to call concurrently. */
    @Override
    public void cleanup() {
        long currentTime = System.currentTimeMillis();
        // remove entries which have exceeded their time to live
        cachedExecutionGraphs
                .values()
                .removeIf((ExecutionGraphEntry entry) -> currentTime >= entry.getTTL());
    }
/** Wrapper containing the current execution graph and it's time to live (TTL). */
private static final | DefaultExecutionGraphCache |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/context/properties/ConfigurationPropertiesReportEndpointFilteringTests.java | {
"start": 1825,
"end": 5950
} | class ____ {
    @Test
    void filterByPrefixSingleMatch() {
        // Only the "only.bar" bean matches the prefix used by assertProperties, so its
        // "name" property must be reported with the bound value.
        ApplicationContextRunner contextRunner = new ApplicationContextRunner().withUserConfiguration(Config.class)
            .withPropertyValues("foo.primary.name:foo1", "foo.secondary.name:foo2", "only.bar.name:solo1");
        assertProperties(contextRunner, "solo1");
    }
    @Test
    void filterByPrefixMultipleMatches() {
        // Both foo.* beans share the "foo." prefix, so filtering by it must report
        // exactly those two beans and nothing else.
        ApplicationContextRunner contextRunner = new ApplicationContextRunner().withUserConfiguration(Config.class)
            .withPropertyValues("foo.primary.name:foo1", "foo.secondary.name:foo2", "only.bar.name:solo1");
        contextRunner.run((context) -> {
            ConfigurationPropertiesReportEndpoint endpoint = context
                .getBean(ConfigurationPropertiesReportEndpoint.class);
            ConfigurationPropertiesDescriptor applicationProperties = endpoint
                .configurationPropertiesWithPrefix("foo.");
            assertThat(applicationProperties.getContexts()).containsOnlyKeys(context.getId());
            ContextConfigurationPropertiesDescriptor contextProperties = applicationProperties.getContexts()
                .get(context.getId());
            assertThat(contextProperties).isNotNull();
            assertThat(contextProperties.getBeans()).containsOnlyKeys("primaryFoo", "secondaryFoo");
        });
    }
    @Test
    void filterByPrefixNoMatches() {
        // No configuration-properties bean uses the "foo.third" prefix, so the filtered
        // report must contain the context but an empty bean map.
        ApplicationContextRunner contextRunner = new ApplicationContextRunner().withUserConfiguration(Config.class)
            .withPropertyValues("foo.primary.name:foo1", "foo.secondary.name:foo2", "only.bar.name:solo1");
        contextRunner.run((context) -> {
            ConfigurationPropertiesReportEndpoint endpoint = context
                .getBean(ConfigurationPropertiesReportEndpoint.class);
            ConfigurationPropertiesDescriptor applicationProperties = endpoint
                .configurationPropertiesWithPrefix("foo.third");
            assertThat(applicationProperties.getContexts()).containsOnlyKeys(context.getId());
            ContextConfigurationPropertiesDescriptor contextProperties = applicationProperties.getContexts()
                .get(context.getId());
            assertThat(contextProperties).isNotNull();
            assertThat(contextProperties.getBeans()).isEmpty();
        });
    }
    @Test
    void noSanitizationWhenShowAlways() {
        // With show-values ALWAYS configured, the raw property value must be reported.
        ApplicationContextRunner contextRunner = new ApplicationContextRunner()
            .withUserConfiguration(ConfigWithAlways.class)
            .withPropertyValues("foo.primary.name:foo1", "foo.secondary.name:foo2", "only.bar.name:solo1");
        assertProperties(contextRunner, "solo1");
    }
    @Test
    void sanitizationWhenShowNever() {
        // With show-values NEVER configured, the value must be masked as "******".
        ApplicationContextRunner contextRunner = new ApplicationContextRunner()
            .withUserConfiguration(ConfigWithNever.class)
            .withPropertyValues("foo.primary.name:foo1", "foo.secondary.name:foo2", "only.bar.name:solo1");
        assertProperties(contextRunner, "******");
    }
private void assertProperties(ApplicationContextRunner contextRunner, String value) {
contextRunner.run((context) -> {
ConfigurationPropertiesReportEndpoint endpoint = context
.getBean(ConfigurationPropertiesReportEndpoint.class);
ConfigurationPropertiesDescriptor applicationProperties = endpoint
.configurationPropertiesWithPrefix("only.bar");
assertThat(applicationProperties.getContexts()).containsOnlyKeys(context.getId());
ContextConfigurationPropertiesDescriptor contextProperties = applicationProperties.getContexts()
.get(context.getId());
assertThat(contextProperties).isNotNull();
Optional<String> key = contextProperties.getBeans()
.keySet()
.stream()
.filter((id) -> findIdFromPrefix("only.bar", id))
.findAny();
ConfigurationPropertiesBeanDescriptor descriptor = contextProperties.getBeans().get(key.get());
assertThat(descriptor).isNotNull();
assertThat(descriptor.getPrefix()).isEqualTo("only.bar");
assertThat(descriptor.getProperties()).containsEntry("name", value);
});
}
private boolean findIdFromPrefix(String prefix, String id) {
int separator = id.indexOf("-");
String candidate = (separator != -1) ? id.substring(0, separator) : id;
return prefix.equals(candidate);
}
@Configuration(proxyBeanMethods = false)
@Import(BaseConfiguration.class)
@EnableConfigurationProperties(Bar.class)
static | ConfigurationPropertiesReportEndpointFilteringTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java | {
"start": 1055,
"end": 2840
} | class ____ extends ESTestCase {
    /** Verifies static process info: refresh interval, pid, and mlockall flag. */
    public void testProcessInfo() {
        long refreshInterval = randomNonNegativeLong();
        ProcessInfo info = ProcessProbe.processInfo(refreshInterval);
        assertNotNull(info);
        assertEquals(refreshInterval, info.getRefreshInterval());
        assertEquals(jvmInfo().pid(), info.getId());
        assertEquals(BootstrapInfo.isMemoryLocked(), info.isMlockall());
    }
    /**
     * Verifies live process stats. Assertions are deliberately loose because values are
     * platform-dependent (Windows lacks file-descriptor counts; CPU percent may be -1
     * when recent usage is unavailable).
     */
    public void testProcessStats() {
        ProcessStats stats = ProcessProbe.processStats();
        assertNotNull(stats);
        assertThat(stats.getTimestamp(), greaterThan(0L));
        if (Constants.WINDOWS) {
            // Open/Max files descriptors are not supported on Windows platforms
            assertThat(stats.getOpenFileDescriptors(), equalTo(-1L));
            assertThat(stats.getMaxFileDescriptors(), equalTo(-1L));
        } else {
            assertThat(stats.getOpenFileDescriptors(), greaterThan(0L));
            assertThat(stats.getMaxFileDescriptors(), greaterThan(0L));
        }
        ProcessStats.Cpu cpu = stats.getCpu();
        assertNotNull(cpu);
        // CPU percent can be negative if the system recent cpu usage is not available
        assertThat(cpu.getPercent(), anyOf(lessThan((short) 0), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))));
        // CPU time can return -1 if the platform does not support this operation, let's see which platforms fail
        assertThat(cpu.getTotal().millis(), greaterThan(0L));
        ProcessStats.Mem mem = stats.getMem();
        assertNotNull(mem);
        // Committed total virtual memory can return -1 if not supported, let's see which platforms fail
        assertThat(mem.getTotalVirtual().getBytes(), greaterThan(0L));
    }
}
| ProcessProbeTests |
java | quarkusio__quarkus | extensions/kubernetes/spi/src/main/java/io/quarkus/kubernetes/spi/ConfiguratorBuildItem.java | {
"start": 389,
"end": 802
} | class ____ extends MultiBuildItem {
    /**
     * The configurator
     */
    private final Object configurator;
    /**
     * Creates a build item carrying the given configurator instance.
     *
     * @param configurator the configurator to wrap; kept as {@code Object} because
     *        configurators come from different, unrelated frameworks
     */
    public ConfiguratorBuildItem(Object configurator) {
        this.configurator = configurator;
    }
    /** Returns the wrapped configurator. */
    public Object getConfigurator() {
        return this.configurator;
    }
    /**
     * Returns whether the wrapped configurator is an instance of the given type.
     *
     * @param type the type to test against; the wildcard replaces the previous raw
     *        {@code Class} usage and is source- and binary-compatible for callers
     */
    public boolean matches(Class<?> type) {
        return type.isInstance(configurator);
    }
}
| ConfiguratorBuildItem |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/Processors.java | {
"start": 9818,
"end": 10094
} | class ____ with the {@code .} (dots) used for inner classes replaced with
* {@code _}.
*/
public static ClassName getEnclosedClassName(XTypeElement typeElement) {
return getEnclosedClassName(typeElement.getClassName());
}
/**
* Returns the fully qualified | name |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/alter/OracleAlterTableTestRename.java | {
"start": 977,
"end": 2455
} | class ____ extends OracleTest {
    /**
     * Verifies that an Oracle {@code ALTER TABLE ... RENAME TO ...} statement parses into a
     * single statement and that the visitor collects only the original table name — no
     * columns, conditions, relationships or order-by columns.
     */
    public void test_0() throws Exception {
        String sql = //
        "alter table PRODUCT_NEW_CLUSTER_YZS_0210 rename to PRODUCT_NEW_CLUSTER_YZS_0210_1";
        OracleStatementParser parser = new OracleStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        SQLStatement statemen = statementList.get(0);
        print(statementList);
        assertEquals(1, statementList.size());
        OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
        statemen.accept(visitor);
        System.out.println("Tables : " + visitor.getTables());
        System.out.println("fields : " + visitor.getColumns());
        // fixed label typo: was "coditions"
        System.out.println("conditions : " + visitor.getConditions());
        System.out.println("relationships : " + visitor.getRelationships());
        System.out.println("orderBy : " + visitor.getOrderByColumns());
        assertEquals(1, visitor.getTables().size());
        assertTrue(visitor.getTables().containsKey(new TableStat.Name("PRODUCT_NEW_CLUSTER_YZS_0210")));
        assertEquals(0, visitor.getColumns().size());
        // assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "*")));
        // assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "YEAR")));
        // assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "order_mode")));
    }
}
| OracleAlterTableTestRename |
java | spring-projects__spring-security | rsocket/src/main/java/org/springframework/security/rsocket/authentication/AuthenticationPayloadInterceptor.java | {
"start": 1477,
"end": 3117
} | class ____ implements PayloadInterceptor, Ordered {
    // Performs the actual authentication of the converted credentials.
    private final ReactiveAuthenticationManager authenticationManager;
    // Position of this interceptor within the payload interceptor chain.
    private int order;
    // Extracts an Authentication attempt from the exchange; defaults to basic authentication.
    private PayloadExchangeAuthenticationConverter authenticationConverter = new BasicAuthenticationPayloadExchangeConverter();
    /**
     * Creates a new instance
     * @param authenticationManager the manager to use. Cannot be null
     */
    public AuthenticationPayloadInterceptor(ReactiveAuthenticationManager authenticationManager) {
        Assert.notNull(authenticationManager, "authenticationManager cannot be null");
        this.authenticationManager = authenticationManager;
    }
    @Override
    public int getOrder() {
        return this.order;
    }
    /** Sets this interceptor's order within the chain. */
    public void setOrder(int order) {
        this.order = order;
    }
    /**
     * Sets the converter to be used
     * @param authenticationConverter the converter extracting credentials. Cannot be null
     */
    public void setAuthenticationConverter(PayloadExchangeAuthenticationConverter authenticationConverter) {
        Assert.notNull(authenticationConverter, "authenticationConverter cannot be null");
        this.authenticationConverter = authenticationConverter;
    }
    @Override
    public Mono<Void> intercept(PayloadExchange exchange, PayloadInterceptorChain chain) {
        // When no credentials can be converted from the exchange, continue the chain
        // unauthenticated (switchIfEmpty); otherwise authenticate first, then continue
        // downstream with the authenticated principal in the Reactor context.
        return this.authenticationConverter.convert(exchange)
            .switchIfEmpty(chain.next(exchange).then(Mono.empty()))
            .flatMap((a) -> this.authenticationManager.authenticate(a))
            .flatMap((a) -> onAuthenticationSuccess(chain.next(exchange), a));
    }
    // Continues the chain with the authentication written into the subscriber context.
    private Mono<Void> onAuthenticationSuccess(Mono<Void> payload, Authentication authentication) {
        return payload.contextWrite(ReactiveSecurityContextHolder.withAuthentication(authentication));
    }
}
| AuthenticationPayloadInterceptor |
java | apache__camel | components/camel-aws/camel-aws-bedrock/src/main/java/org/apache/camel/component/aws2/bedrock/runtime/stream/LlamaStreamParser.java | {
"start": 1455,
"end": 2964
} | class ____ implements StreamResponseParser {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /**
     * Parses a streaming chunk into a JSON tree, or returns {@code null} when the chunk is
     * null or blank. Centralizes the guard that every extractor previously duplicated;
     * {@code isBlank()} replaces {@code trim().isEmpty()} (Unicode-aware, no copy).
     */
    private static JsonNode parseChunk(String chunk) throws JsonProcessingException {
        if (chunk == null || chunk.isBlank()) {
            return null;
        }
        return MAPPER.readTree(chunk);
    }

    /**
     * Returns the generated text of the chunk, or the empty string when the chunk is
     * blank or carries no {@code generation} field.
     */
    @Override
    public String extractText(String chunk) throws JsonProcessingException {
        JsonNode node = parseChunk(chunk);
        if (node == null) {
            return "";
        }
        JsonNode generation = node.get("generation");
        return generation != null && !generation.isNull() ? generation.asText() : "";
    }

    /**
     * Returns the {@code stop_reason} of the chunk, or {@code null} when the chunk is
     * blank or the field is absent.
     */
    @Override
    public String extractCompletionReason(String chunk) throws JsonProcessingException {
        JsonNode node = parseChunk(chunk);
        if (node == null) {
            return null;
        }
        JsonNode stopReason = node.get("stop_reason");
        return stopReason != null && !stopReason.isNull() ? stopReason.asText() : null;
    }

    /**
     * Returns the {@code generation_token_count} of the chunk, or {@code null} when the
     * chunk is blank or the field is absent.
     */
    @Override
    public Integer extractTokenCount(String chunk) throws JsonProcessingException {
        JsonNode node = parseChunk(chunk);
        if (node == null) {
            return null;
        }
        JsonNode tokenCount = node.get("generation_token_count");
        return tokenCount != null && !tokenCount.isNull() ? tokenCount.asInt() : null;
    }

    /** A chunk is final exactly when it carries a non-empty {@code stop_reason}. */
    @Override
    public boolean isFinalChunk(String chunk) throws JsonProcessingException {
        String stopReason = extractCompletionReason(chunk);
        return stopReason != null && !stopReason.isEmpty();
    }
}
| LlamaStreamParser |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java | {
"start": 19713,
"end": 20158
} | class ____ dev eth0 classid 42:7"
return String.format(FORMAT_DELETE_CLASS, device, ROOT_QDISC_HANDLE,
classId);
}
    // Builds the tc command string for reading qdisc state on this device.
    private String getStringForReadState() {
        return String.format(FORMAT_READ_STATE, device);
    }
    // Builds the tc command string for listing traffic classes on this device.
    private String getStringForReadClasses() {
        return String.format(FORMAT_READ_CLASSES, device);
    }
    // Builds the tc command string for wiping all traffic-control state on this device.
    private String getStringForWipeState() {
        return String.format(FORMAT_WIPE_STATE, device);
    }
public | del |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveClassBuildItem.java | {
"start": 7936,
"end": 8760
} | class ____ {
    // Classes to register for reflection.
    private String[] className;
    // Registration flags; declared constructors default to true, everything else opt-in.
    private boolean constructors = true;
    private boolean publicConstructors = false;
    private boolean queryConstructors;
    private boolean methods;
    private boolean queryMethods;
    private boolean fields;
    private boolean classes;
    private boolean weak;
    private boolean serialization;
    private boolean unsafeAllocated;
    // Human-readable reason recorded for the registration (diagnostics).
    private String reason;
    // Instances are obtained through the enclosing type's factory method.
    private Builder() {
    }
    /**
     * Sets the class names to register.
     * NOTE(review): the array is stored without a defensive copy, so later mutation by the
     * caller would be visible here — presumably acceptable for build-time items; confirm.
     */
    public Builder className(String[] className) {
        this.className = className;
        return this;
    }
/**
* Configures whether constructors should be registered for reflection (true by default).
* Setting this enables getting all declared constructors for the | Builder |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_isNotEmpty_Test.java | {
"start": 897,
"end": 1215
} | class ____ extends ByteArrayAssertBaseTest {
    // Invokes the API method under test so the base test can verify its effects.
    @Override
    protected ByteArrayAssert invoke_api_method() {
        return assertions.isNotEmpty();
    }
    // Verifies the assertion delegated to the internal Arrays#assertNotEmpty check.
    @Override
    protected void verify_internal_effects() {
        verify(arrays).assertNotEmpty(getInfo(assertions), getActual(assertions));
    }
}
| ByteArrayAssert_isNotEmpty_Test |
java | apache__kafka | jmh-benchmarks/src/main/java/org/apache/kafka/jmh/coordinator/MockLog.java | {
"start": 1649,
"end": 2781
} | class ____ extends UnifiedLog {
    /**
     * Benchmark stand-in for {@code UnifiedLog}: all collaborators are mocked so only the
     * abstract offset/read accessors (supplied by subclasses) drive behavior.
     */
    public MockLog(TopicPartition tp) throws IOException {
        super(
            0,
            createMockLocalLog(tp),
            mock(BrokerTopicStats.class),
            Integer.MAX_VALUE,
            mock(LeaderEpochFileCache.class),
            mock(ProducerStateManager.class),
            Optional.empty(),
            false,
            LogOffsetsListener.NO_OP_OFFSETS_LISTENER
        );
    }
    @Override
    public abstract long logStartOffset();
    @Override
    public abstract long logEndOffset();
    // High watermark is pinned to the log end offset for benchmark purposes.
    @Override
    public long highWatermark() {
        return logEndOffset();
    }
    @Override
    public abstract FetchDataInfo read(long startOffset, int maxLength, FetchIsolation isolation, boolean minOneMessage);
    // Builds a minimally-stubbed LocalLog: scheduler, segments and partition only.
    private static LocalLog createMockLocalLog(TopicPartition tp) {
        LocalLog localLog = mock(LocalLog.class);
        when(localLog.scheduler()).thenReturn(mock(Scheduler.class));
        when(localLog.segments()).thenReturn(mock(LogSegments.class));
        when(localLog.topicPartition()).thenReturn(tp);
        return localLog;
    }
}
| MockLog |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/diagnostics/analyzer/NoSuchBeanDefinitionFailureAnalyzerTests.java | {
"start": 13434,
"end": 13504
} | class ____ {
    // Test fixture: the Number parameter is intentionally unused — only the constructor
    // signature matters for the failure-analyzer scenario under test.
    NumberHandler(Number foo) {
    }
}
static | NumberHandler |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/filter/ReadOrWriteOnlyTest.java | {
"start": 2942,
"end": 3388
} | class ____ {
    String works;
    // READ_ONLY: serialized as "t" but never populated from input (databind#2779).
    @JsonProperty(value = "t", access = JsonProperty.Access.READ_ONLY)
    public String getDoesntWork() {
        return "pleaseFixThisBug";
    }
    public String getWorks() {
        return works;
    }
    public void setWorks(String works) {
        this.works = works;
    }
}
// for [databind#2951], add feature to inverse access logic
static | Bean2779 |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java | {
"start": 1673,
"end": 1875
} | class ____ extends AbstractScriptFieldType<LongFieldScript.LeafFactory> {
public static final RuntimeField.Parser PARSER = new RuntimeField.Parser(Builder::new);
private static | LongScriptFieldType |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/saml2/Saml2MetadataConfigurerTests.java | {
"start": 8041,
"end": 8327
} | class ____ {
    // Shared mock so tests can stub and verify interactions with the published bean.
    Saml2MetadataResponseResolver metadataResponseResolver = mock(Saml2MetadataResponseResolver.class);
    @Bean
    Saml2MetadataResponseResolver metadataResponseResolver() {
        return this.metadataResponseResolver;
    }
}
@Configuration
static | MetadataResponseResolverConfig |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.