language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/logging/structured/StructuredLoggingJsonProperties.java | {
"start": 3769,
"end": 6794
} | class ____ of
* a {@link StackTracePrinter} implementation. A {@code null} value will be treated as
* {@code "standard"} when any other property is set, otherwise it will be treated as
* {@code "logging-system"}. {@link StackTracePrinter} implementations may optionally
* inject a {@link StandardStackTracePrinter} instance into their constructor which
* will be configured from the properties.
* @param root the root ordering (root first or root last)
* @param maxLength the maximum length to print
* @param maxThrowableDepth the maximum throwable depth to print
* @param includeCommonFrames whether common frames should be included
* @param includeHashes whether stack trace hashes should be included
*/
record StackTrace(@Nullable String printer, @Nullable Root root, @Nullable Integer maxLength,
@Nullable Integer maxThrowableDepth, @Nullable Boolean includeCommonFrames,
@Nullable Boolean includeHashes) {
@Nullable StackTracePrinter createPrinter() {
String name = sanitizePrinter();
if ("loggingsystem".equals(name) || (name.isEmpty() && !hasAnyOtherProperty())) {
return null;
}
StandardStackTracePrinter standardPrinter = createStandardPrinter();
if ("standard".equals(name) || name.isEmpty()) {
return standardPrinter;
}
Assert.state(printer() != null, "'printer' must not be null");
return (StackTracePrinter) new Instantiator<>(StackTracePrinter.class,
(parameters) -> parameters.add(StandardStackTracePrinter.class, standardPrinter))
.instantiate(printer());
}
boolean hasCustomPrinter() {
String name = sanitizePrinter();
if (name.isEmpty()) {
return false;
}
return !("loggingsystem".equals(name) || "standard".equals(name));
}
private String sanitizePrinter() {
return Objects.toString(printer(), "").toLowerCase(Locale.ROOT).replace("-", "");
}
private boolean hasAnyOtherProperty() {
return Stream.of(root(), maxLength(), maxThrowableDepth(), includeCommonFrames(), includeHashes())
.anyMatch(Objects::nonNull);
}
private StandardStackTracePrinter createStandardPrinter() {
StandardStackTracePrinter printer = (root() == Root.FIRST) ? StandardStackTracePrinter.rootFirst()
: StandardStackTracePrinter.rootLast();
PropertyMapper map = PropertyMapper.get();
printer = map.from(this::maxLength).to(printer, StandardStackTracePrinter::withMaximumLength);
printer = map.from(this::maxThrowableDepth)
.to(printer, StandardStackTracePrinter::withMaximumThrowableDepth);
printer = map.from(this::includeCommonFrames)
.to(printer, apply(StandardStackTracePrinter::withCommonFrames));
printer = map.from(this::includeHashes).to(printer, apply(StandardStackTracePrinter::withHashes));
return printer;
}
private BiFunction<StandardStackTracePrinter, Boolean, StandardStackTracePrinter> apply(
UnaryOperator<StandardStackTracePrinter> action) {
return (printer, value) -> (!value) ? printer : action.apply(printer);
}
/**
* Root ordering.
*/
| name |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/FastDateFormat.java | {
"start": 1959,
"end": 3254
} | class ____ especially useful in
* multi-threaded server environments. {@link SimpleDateFormat} is not thread-safe in any JDK version, nor will it be as Sun have closed the bug/RFE.
* </p>
*
* <p>
* All patterns are compatible with SimpleDateFormat (except time zones and some year patterns - see below).
* </p>
*
* <p>
* Since 3.2, FastDateFormat supports parsing as well as printing.
* </p>
*
* <p>
* Java 1.4 introduced a new pattern letter, {@code 'Z'}, to represent time zones in RFC822 format (for example, {@code +0800} or {@code -1100}). This pattern letter can
* be used here (on all JDK versions).
* </p>
*
* <p>
* In addition, the pattern {@code 'ZZ'} has been made to represent ISO 8601 extended format time zones (for example, {@code +08:00} or {@code -11:00}). This introduces
* a minor incompatibility with Java 1.4, but at a gain of useful functionality.
* </p>
*
* <p>
* Javadoc cites for the year pattern: <i>For formatting, if the number of pattern letters is 2, the year is truncated to 2 digits; otherwise it is interpreted
* as a number.</i> Starting with Java 1.7 a pattern of 'Y' or 'YYY' will be formatted as '2003', while it was '03' in former Java versions. FastDateFormat
* implements the behavior of Java 7.
* </p>
*
* @since 2.0
*/
public | is |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedTestIntegrationTests.java | {
"start": 68697,
"end": 69947
} | class ____ {
@ParameterizedTest
@NullAndEmptySource
void testWithNullAndEmptySourceForString(String argument) {
assertTrue(argument == null || argument.isEmpty());
}
@ParameterizedTest
@NullAndEmptySource
void testWithNullAndEmptySourceForStringAndTestInfo(String argument, TestInfo testInfo) {
assertTrue(argument == null || argument.isEmpty());
assertThat(testInfo).isNotNull();
}
@ParameterizedTest
@NullAndEmptySource
void testWithNullAndEmptySourceForList(List<?> argument) {
assertTrue(argument == null || argument.isEmpty());
}
@ParameterizedTest
@NullAndEmptySource
void testWithNullAndEmptySourceForArrayList(ArrayList<?> argument) {
assertTrue(argument == null || argument.isEmpty());
}
@ParameterizedTest
@NullAndEmptySource
void testWithNullAndEmptySourceForOneDimensionalPrimitiveArray(int[] argument) {
assertTrue(argument == null || argument.length == 0);
}
@ParameterizedTest
@NullAndEmptySource
void testWithNullAndEmptySourceForTwoDimensionalStringArray(String[][] argument) {
assertTrue(argument == null || argument.length == 0);
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@TestMethodOrder(OrderAnnotation.class)
static | NullAndEmptySourceTestCase |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/shard/ShardId.java | {
"start": 1015,
"end": 4246
} | class ____ implements Comparable<ShardId>, ToXContentFragment, Writeable {
private final Index index;
private final int shardId;
private final int hashCode;
public ShardId(Index index, int shardId) {
this.index = Objects.requireNonNull(index);
this.shardId = shardId;
this.hashCode = computeHashCode();
}
public ShardId(String index, String indexUUID, int shardId) {
this(new Index(index, indexUUID), shardId);
}
public ShardId(StreamInput in) throws IOException {
index = new Index(in);
shardId = in.readVInt();
hashCode = computeHashCode();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
index.writeTo(out);
out.writeVInt(shardId);
}
public Index getIndex() {
return index;
}
public String getIndexName() {
return index.getName();
}
public int id() {
return this.shardId;
}
public int getId() {
return id();
}
@Override
public String toString() {
return "[" + index.getName() + "][" + shardId + "]";
}
/**
* Parse the string representation of this shardId back to an object.
* We lose index uuid information here, but since we use toString in
* rest responses, this is the best we can do to reconstruct the object
* on the client side.
*/
public static ShardId fromString(String shardIdString) {
int splitPosition = shardIdString.indexOf("][");
if (splitPosition <= 0 || shardIdString.charAt(0) != '[' || shardIdString.charAt(shardIdString.length() - 1) != ']') {
throw new IllegalArgumentException("Unexpected shardId string format, expected [indexName][shardId] but got " + shardIdString);
}
String indexName = shardIdString.substring(1, splitPosition);
int shardId = Integer.parseInt(shardIdString.substring(splitPosition + 2, shardIdString.length() - 1));
return new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ShardId shardId1 = (ShardId) o;
return shardId == shardId1.shardId && index.equals(shardId1.index);
}
@Override
public int hashCode() {
return hashCode;
}
private int computeHashCode() {
int result = index != null ? index.hashCode() : 0;
result = 31 * result + shardId;
return result;
}
@Override
public int compareTo(ShardId o) {
final int res = Integer.compare(shardId, o.shardId);
if (res != 0) {
return res;
}
final Index index = this.index;
final Index otherIndex = o.index;
int compare = index.getName().compareTo(otherIndex.getName());
if (compare != 0) {
return compare;
}
return index.getUUID().compareTo(otherIndex.getUUID());
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.value(toString());
}
}
| ShardId |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java | {
"start": 21130,
"end": 22728
} | class ____ extends RstatementContext {
public TerminalNode FOR() {
return getToken(PainlessParser.FOR, 0);
}
public TerminalNode LP() {
return getToken(PainlessParser.LP, 0);
}
public List<TerminalNode> SEMICOLON() {
return getTokens(PainlessParser.SEMICOLON);
}
public TerminalNode SEMICOLON(int i) {
return getToken(PainlessParser.SEMICOLON, i);
}
public TerminalNode RP() {
return getToken(PainlessParser.RP, 0);
}
public TrailerContext trailer() {
return getRuleContext(TrailerContext.class, 0);
}
public EmptyContext empty() {
return getRuleContext(EmptyContext.class, 0);
}
public InitializerContext initializer() {
return getRuleContext(InitializerContext.class, 0);
}
public ExpressionContext expression() {
return getRuleContext(ExpressionContext.class, 0);
}
public AfterthoughtContext afterthought() {
return getRuleContext(AfterthoughtContext.class, 0);
}
public ForContext(RstatementContext ctx) {
copyFrom(ctx);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor<? extends T>) visitor).visitFor(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static | ForContext |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java | {
"start": 9460,
"end": 9847
} | class ____ {
private final float actual = 1.0f;
@Test
void should_run_test_when_assumption_passes() {
thenCode(() -> given(actual).isOne()).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_fails() {
expectAssumptionNotMetException(() -> given(actual).isZero());
}
}
@Nested
| BDDAssumptions_given_float_primitive_Test |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSingleTest.java | {
"start": 1211,
"end": 26421
} | class ____ extends RxJavaTest {
@Test
public void singleFlowable() {
Flowable<Integer> flowable = Flowable.just(1).singleElement().toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onNext(1);
inOrder.verify(subscriber, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithTooManyElementsFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2).singleElement().toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithEmptyFlowable() {
Flowable<Integer> flowable = Flowable.<Integer> empty().singleElement().toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber).onComplete();
inOrder.verify(subscriber, never()).onError(any(Throwable.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleDoesNotRequestMoreThanItNeedsIf1Then2RequestedFlowable() {
final List<Long> requests = new ArrayList<>();
Flowable.just(1)
//
.doOnRequest(new LongConsumer() {
@Override
public void accept(long n) {
requests.add(n);
}
})
//
.singleElement()
//
.toFlowable()
.subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(1);
}
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Integer t) {
request(2);
}
});
// FIXME single now triggers fast-path
assertEquals(Arrays.asList(Long.MAX_VALUE), requests);
}
@Test
public void singleDoesNotRequestMoreThanItNeedsIf3RequestedFlowable() {
final List<Long> requests = new ArrayList<>();
Flowable.just(1)
//
.doOnRequest(new LongConsumer() {
@Override
public void accept(long n) {
requests.add(n);
}
})
//
.singleElement()
//
.toFlowable()
.subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(3);
}
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Integer t) {
}
});
// FIXME single now triggers fast-path
assertEquals(Arrays.asList(Long.MAX_VALUE), requests);
}
@Test
public void singleRequestsExactlyWhatItNeedsIf1RequestedFlowable() {
final List<Long> requests = new ArrayList<>();
Flowable.just(1)
//
.doOnRequest(new LongConsumer() {
@Override
public void accept(long n) {
requests.add(n);
}
})
//
.singleElement()
//
.toFlowable()
.subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(1);
}
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Integer t) {
}
});
// FIXME single now triggers fast-path
assertEquals(Arrays.asList(Long.MAX_VALUE), requests);
}
@Test
public void singleWithPredicateFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2)
.filter(
new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.singleElement().toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onNext(2);
inOrder.verify(subscriber, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithPredicateAndTooManyElementsFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2, 3, 4)
.filter(
new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.singleElement().toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithPredicateAndEmptyFlowable() {
Flowable<Integer> flowable = Flowable.just(1)
.filter(
new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.singleElement().toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber).onComplete();
inOrder.verify(subscriber, never()).onError(any(Throwable.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultFlowable() {
Flowable<Integer> flowable = Flowable.just(1).single(2).toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onNext(1);
inOrder.verify(subscriber, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithTooManyElementsFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2).single(3).toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithEmptyFlowable() {
Flowable<Integer> flowable = Flowable.<Integer> empty()
.single(1).toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onNext(1);
inOrder.verify(subscriber, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithPredicateFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.single(4).toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onNext(2);
inOrder.verify(subscriber, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithPredicateAndTooManyElementsFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2, 3, 4)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.single(6).toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithPredicateAndEmptyFlowable() {
Flowable<Integer> flowable = Flowable.just(1)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.single(2).toFlowable();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onNext(2);
inOrder.verify(subscriber, times(1)).onComplete();
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithBackpressureFlowable() {
Flowable<Integer> flowable = Flowable.just(1, 2).singleElement().toFlowable();
Subscriber<Integer> subscriber = spy(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(1);
}
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Integer integer) {
request(1);
}
});
flowable.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
inOrder.verify(subscriber, times(1)).onError(isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void single() {
Maybe<Integer> maybe = Flowable.just(1).singleElement();
MaybeObserver<Integer> observer = TestHelper.mockMaybeObserver();
maybe.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onSuccess(1);
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithTooManyElements() {
Maybe<Integer> maybe = Flowable.just(1, 2).singleElement();
MaybeObserver<Integer> observer = TestHelper.mockMaybeObserver();
maybe.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithEmpty() {
Maybe<Integer> maybe = Flowable.<Integer> empty().singleElement();
MaybeObserver<Integer> observer = TestHelper.mockMaybeObserver();
maybe.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer).onComplete();
inOrder.verify(observer, never()).onError(any(Throwable.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleDoesNotRequestMoreThanItNeedsToEmitItem() {
final AtomicLong request = new AtomicLong();
Flowable.just(1).doOnRequest(new LongConsumer() {
@Override
public void accept(long n) {
request.addAndGet(n);
}
}).blockingSingle();
// FIXME single now triggers fast-path
assertEquals(Long.MAX_VALUE, request.get());
}
@Test
public void singleDoesNotRequestMoreThanItNeedsToEmitErrorFromEmpty() {
final AtomicLong request = new AtomicLong();
try {
Flowable.empty().doOnRequest(new LongConsumer() {
@Override
public void accept(long n) {
request.addAndGet(n);
}
}).blockingSingle();
} catch (NoSuchElementException e) {
// FIXME single now triggers fast-path
assertEquals(Long.MAX_VALUE, request.get());
}
}
@Test
public void singleDoesNotRequestMoreThanItNeedsToEmitErrorFromMoreThanOne() {
final AtomicLong request = new AtomicLong();
try {
Flowable.just(1, 2).doOnRequest(new LongConsumer() {
@Override
public void accept(long n) {
request.addAndGet(n);
}
}).blockingSingle();
} catch (IllegalArgumentException e) {
// FIXME single now triggers fast-path
assertEquals(Long.MAX_VALUE, request.get());
}
}
@Test
public void singleWithPredicate() {
Maybe<Integer> maybe = Flowable.just(1, 2)
.filter(
new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.singleElement();
MaybeObserver<Integer> observer = TestHelper.mockMaybeObserver();
maybe.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onSuccess(2);
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithPredicateAndTooManyElements() {
Maybe<Integer> maybe = Flowable.just(1, 2, 3, 4)
.filter(
new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.singleElement();
MaybeObserver<Integer> observer = TestHelper.mockMaybeObserver();
maybe.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleWithPredicateAndEmpty() {
Maybe<Integer> maybe = Flowable.just(1)
.filter(
new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.singleElement();
MaybeObserver<Integer> observer = TestHelper.mockMaybeObserver();
maybe.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer).onComplete();
inOrder.verify(observer, never()).onError(any(Throwable.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefault() {
Single<Integer> single = Flowable.just(1).single(2);
SingleObserver<Integer> observer = TestHelper.mockSingleObserver();
single.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onSuccess(1);
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithTooManyElements() {
Single<Integer> single = Flowable.just(1, 2).single(3);
SingleObserver<Integer> observer = TestHelper.mockSingleObserver();
single.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithEmpty() {
Single<Integer> single = Flowable.<Integer> empty()
.single(1);
SingleObserver<Integer> observer = TestHelper.mockSingleObserver();
single.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onSuccess(1);
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithPredicate() {
Single<Integer> single = Flowable.just(1, 2)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.single(4);
SingleObserver<Integer> observer = TestHelper.mockSingleObserver();
single.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onSuccess(2);
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithPredicateAndTooManyElements() {
Single<Integer> single = Flowable.just(1, 2, 3, 4)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.single(6);
SingleObserver<Integer> observer = TestHelper.mockSingleObserver();
single.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onError(
isA(IllegalArgumentException.class));
inOrder.verifyNoMoreInteractions();
}
@Test
public void singleOrDefaultWithPredicateAndEmpty() {
Single<Integer> single = Flowable.just(1)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer t1) {
return t1 % 2 == 0;
}
})
.single(2);
SingleObserver<Integer> observer = TestHelper.mockSingleObserver();
single.subscribe(observer);
InOrder inOrder = inOrder(observer);
inOrder.verify(observer, times(1)).onSuccess(2);
inOrder.verifyNoMoreInteractions();
}
@Test
public void issue1527() throws InterruptedException {
//https://github.com/ReactiveX/RxJava/pull/1527
Flowable<Integer> source = Flowable.just(1, 2, 3, 4, 5, 6);
Maybe<Integer> reduced = source.reduce(new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer i1, Integer i2) {
return i1 + i2;
}
});
Integer r = reduced.blockingGet();
assertEquals(21, r.intValue());
}
@Test
public void singleOrErrorNoElement() {
Flowable.empty()
.singleOrError()
.test()
.assertNoValues()
.assertError(NoSuchElementException.class);
}
@Test
public void singleOrErrorOneElement() {
Flowable.just(1)
.singleOrError()
.test()
.assertNoErrors()
.assertValue(1);
}
@Test
public void singleOrErrorMultipleElements() {
Flowable.just(1, 2, 3)
.singleOrError()
.test()
.assertNoValues()
.assertError(IllegalArgumentException.class);
}
@Test
public void singleOrErrorError() {
Flowable.error(new RuntimeException("error"))
.singleOrError()
.to(TestHelper.testConsumer())
.assertNoValues()
.assertErrorMessage("error")
.assertError(RuntimeException.class);
}
@Test
public void issue1527Flowable() throws InterruptedException {
//https://github.com/ReactiveX/RxJava/pull/1527
Flowable<Integer> source = Flowable.just(1, 2, 3, 4, 5, 6);
Flowable<Integer> reduced = source.reduce(new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer i1, Integer i2) {
return i1 + i2;
}
}).toFlowable();
Integer r = reduced.blockingFirst();
assertEquals(21, r.intValue());
}
@Test
public void singleElementOperatorDoNotSwallowExceptionWhenDone() {
final Throwable exception = new RuntimeException("some error");
final AtomicReference<Throwable> error = new AtomicReference<>();
try {
RxJavaPlugins.setErrorHandler(new Consumer<Throwable>() {
@Override public void accept(final Throwable throwable) throws Exception {
error.set(throwable);
}
});
Flowable.unsafeCreate(new Publisher<Integer>() {
@Override public void subscribe(final Subscriber<? super Integer> subscriber) {
subscriber.onComplete();
subscriber.onError(exception);
}
}).singleElement().test().assertComplete();
assertSame(exception, error.get().getCause());
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void badSource() {
TestHelper.checkBadSourceFlowable(new Function<Flowable<Object>, Object>() {
@Override
public Object apply(Flowable<Object> f) throws Exception {
return f.singleOrError();
}
}, false, 1, 1, 1);
TestHelper.checkBadSourceFlowable(new Function<Flowable<Object>, Object>() {
@Override
public Object apply(Flowable<Object> f) throws Exception {
return f.singleElement();
}
}, false, 1, 1, 1);
TestHelper.checkBadSourceFlowable(new Function<Flowable<Object>, Object>() {
@Override
public Object apply(Flowable<Object> f) throws Exception {
return f.singleOrError().toFlowable();
}
}, false, 1, 1, 1);
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeFlowableToSingle(new Function<Flowable<Object>, SingleSource<Object>>() {
@Override
public SingleSource<Object> apply(Flowable<Object> f) throws Exception {
return f.singleOrError();
}
});
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Object> f) throws Exception {
return f.singleOrError().toFlowable();
}
});
TestHelper.checkDoubleOnSubscribeFlowableToMaybe(new Function<Flowable<Object>, MaybeSource<Object>>() {
@Override
public MaybeSource<Object> apply(Flowable<Object> f) throws Exception {
return f.singleElement();
}
});
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Object> f) throws Exception {
return f.singleElement().toFlowable();
}
});
}
@Test
public void cancelAsFlowable() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.singleOrError().toFlowable().test();
assertTrue(pp.hasSubscribers());
ts.assertEmpty();
ts.cancel();
assertFalse(pp.hasSubscribers());
}
@Test
public void singleOrError() {
Flowable.empty()
.singleOrError()
.toFlowable()
.test()
.assertFailure(NoSuchElementException.class);
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishProcessor.create().single(1));
}
}
| FlowableSingleTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cfg/SessionEventSettings.java | {
"start": 879,
"end": 1770
} | class ____ implements {@code Interceptor}.
* </ul>
* <p>
* This setting identifies an {@code Interceptor} which is effectively a singleton
* across all the sessions opened from the {@code SessionFactory} to which it is
* applied; the same instance will be passed to each {@code Session}. If there
* should be a separate instance of {@code Interceptor} for each {@code Session},
* use {@link #SESSION_SCOPED_INTERCEPTOR} instead.
*
* @see org.hibernate.boot.SessionFactoryBuilder#applyInterceptor(Interceptor)
*
* @since 5.0
*/
String INTERCEPTOR = "hibernate.session_factory.interceptor";
/**
* Specifies an {@link org.hibernate.Interceptor} implementation associated with
* the {@link org.hibernate.SessionFactory} and propagated to each {@code Session}
* created from the {@code SessionFactory}. Either:
* <ul>
* <li>a {@link Class} representing a | that |
java | quarkusio__quarkus | devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/worker/BuildWorker.java | {
"start": 1003,
"end": 5492
} | class ____ extends QuarkusWorker<BuildWorkerParams> {
private static final Logger LOGGER = LoggerFactory.getLogger(BuildWorker.class);
@Override
public void execute() {
BuildWorkerParams params = getParameters();
Properties props = buildSystemProperties();
ResolvedDependency appArtifact = params.getAppModel().get().getAppArtifact();
String gav = appArtifact.getGroupId() + ":" + appArtifact.getArtifactId() + ":" + appArtifact.getVersion();
LOGGER.info("Building Quarkus application {}", gav);
LOGGER.info(" base name: {}", params.getBaseName().get());
LOGGER.info(" target directory: {}", params.getTargetDirectory().getAsFile().get());
LOGGER.info(" configured JAR type: {}", props.getProperty("quarkus.package.jar.type"));
LOGGER.info(" configured output directory: {}", props.getProperty("quarkus.package.output-directory"));
LOGGER.info(" configured output name: {}", props.getProperty("quarkus.package.output-name"));
LOGGER.info(" Gradle version: {}", params.getGradleVersion().get());
try (CuratedApplication appCreationContext = createAppCreationContext();
AnalyticsService analyticsService = new AnalyticsService(
FileLocationsImpl.INSTANCE,
new Slf4JMessageWriter(LOGGER))) {
// Processes launched from within the build task of Gradle (daemon) lose content
// generated on STDOUT/STDERR by the process (see https://github.com/gradle/gradle/issues/13522).
// We overcome this by letting build steps know that the STDOUT/STDERR should be explicitly
// streamed, if they need to make available that generated data.
// The io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled$Factory
// does the necessary work to generate such a build item which the build step(s) can rely on
AugmentAction augmentor = appCreationContext
.createAugmentor("io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled$Factory",
Collections.emptyMap());
AugmentResult result = augmentor.createProductionApplication();
if (result == null) {
System.err.println("createProductionApplication() returned 'null' AugmentResult");
} else {
Map<String, Object> buildInfo = new HashMap<>(result.getGraalVMInfo());
buildInfo.put(GRADLE_VERSION, params.getGradleVersion().get());
analyticsService.sendAnalytics(
TrackEventType.BUILD,
appCreationContext.getApplicationModel(),
buildInfo,
params.getTargetDirectory().getAsFile().get());
Path nativeResult = result.getNativeResult();
LOGGER.info("AugmentResult.nativeResult = {}", nativeResult);
List<ArtifactResult> results = result.getResults();
if (results == null) {
LOGGER.warn("AugmentResult.results = null");
} else {
LOGGER.info("AugmentResult.results = {}", results.stream().map(ArtifactResult::getPath)
.map(r -> r == null ? "null" : r.toString()).collect(Collectors.joining("\n ", "\n ", "")));
}
JarResult jar = result.getJar();
LOGGER.info("AugmentResult:");
if (jar == null) {
LOGGER.info(" .jar = null");
} else {
LOGGER.info(" .jar.path = {}", jar.getPath());
LOGGER.info(" .jar.libraryDir = {}", jar.getLibraryDir());
LOGGER.info(" .jar.originalArtifact = {}", jar.getOriginalArtifact());
LOGGER.info(" .jar.uberJar = {}", jar.isUberJar());
}
}
LOGGER.info("Quarkus application build was successful");
} catch (BootstrapException e) {
// Gradle "abbreviates" the stacktrace to something human-readable, but here the underlying cause might
// get lost in the error output, so add 'e' to the message.
throw new GradleException("Failed to build Quarkus application for " + gav + " due to " + e, e);
}
}
private static | BuildWorker |
java | resilience4j__resilience4j | resilience4j-spring/src/main/java/io/github/resilience4j/ratelimiter/configure/RateLimiterConfiguration.java | {
"start": 2561,
"end": 10032
} | class ____ {
@Bean
@Qualifier("compositeRateLimiterCustomizer")
public CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer(
@Nullable List<RateLimiterConfigCustomizer> configCustomizers) {
return new CompositeCustomizer<>(configCustomizers);
}
@Bean
public RateLimiterRegistry rateLimiterRegistry(
RateLimiterConfigurationProperties rateLimiterProperties,
EventConsumerRegistry<RateLimiterEvent> rateLimiterEventsConsumerRegistry,
RegistryEventConsumer<RateLimiter> rateLimiterRegistryEventConsumer,
@Qualifier("compositeRateLimiterCustomizer") CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer) {
RateLimiterRegistry rateLimiterRegistry = createRateLimiterRegistry(rateLimiterProperties,
rateLimiterRegistryEventConsumer, compositeRateLimiterCustomizer);
registerEventConsumer(rateLimiterRegistry, rateLimiterEventsConsumerRegistry,
rateLimiterProperties);
initRateLimiterRegistry(rateLimiterProperties, compositeRateLimiterCustomizer, rateLimiterRegistry);
return rateLimiterRegistry;
}
/**
* Initializes the RateLimiter registry with resilience4j instances.
*
* @param rateLimiterRegistry The rate limiter registry.
* @param compositeRateLimiterCustomizer customizers for instances and configs
*/
private void initRateLimiterRegistry(RateLimiterConfigurationProperties rateLimiterProperties,
CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer,
RateLimiterRegistry rateLimiterRegistry) {
rateLimiterProperties.getInstances().forEach((name, properties) ->
rateLimiterRegistry.rateLimiter(name, rateLimiterProperties
.createRateLimiterConfig(properties, compositeRateLimiterCustomizer, name))
);
compositeRateLimiterCustomizer.instanceNames()
.stream()
.filter(name -> rateLimiterRegistry.getConfiguration(name).isEmpty())
.forEach(name -> rateLimiterRegistry.rateLimiter(name, rateLimiterProperties
.createRateLimiterConfig(null, compositeRateLimiterCustomizer, name)));
}
@Bean
@Primary
public RegistryEventConsumer<RateLimiter> rateLimiterRegistryEventConsumer(
Optional<List<RegistryEventConsumer<RateLimiter>>> optionalRegistryEventConsumers) {
return new CompositeRegistryEventConsumer<>(
optionalRegistryEventConsumers.orElseGet(ArrayList::new));
}
/**
* Initializes a rate limiter registry.
*
* @param rateLimiterConfigurationProperties The rate limiter configuration properties.
* @param compositeRateLimiterCustomizer the composite rate limiter customizer delegate
* @return a RateLimiterRegistry
*/
private RateLimiterRegistry createRateLimiterRegistry(
RateLimiterConfigurationProperties rateLimiterConfigurationProperties,
RegistryEventConsumer<RateLimiter> rateLimiterRegistryEventConsumer,
CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer) {
Map<String, RateLimiterConfig> configs = rateLimiterConfigurationProperties.getConfigs()
.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey,
entry -> rateLimiterConfigurationProperties
.createRateLimiterConfig(entry.getValue(), compositeRateLimiterCustomizer,
entry.getKey())));
return RateLimiterRegistry.of(configs, rateLimiterRegistryEventConsumer,
Map.copyOf(rateLimiterConfigurationProperties.getTags()));
}
/**
* Registers the post creation consumer function that registers the consumer events to the rate
* limiters.
*
* @param rateLimiterRegistry The rate limiter registry.
* @param eventConsumerRegistry The event consumer registry.
*/
private void registerEventConsumer(RateLimiterRegistry rateLimiterRegistry,
EventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry,
RateLimiterConfigurationProperties properties) {
rateLimiterRegistry.getEventPublisher()
.onEntryAdded(event -> registerEventConsumer(eventConsumerRegistry, event.getAddedEntry(), properties))
.onEntryReplaced(event -> registerEventConsumer(eventConsumerRegistry, event.getNewEntry(), properties))
.onEntryRemoved(event -> unregisterEventConsumer(eventConsumerRegistry, event.getRemovedEntry()));
}
private void unregisterEventConsumer(EventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry, RateLimiter rateLimiter) {
eventConsumerRegistry.removeEventConsumer(rateLimiter.getName());
}
private void registerEventConsumer(
EventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry, RateLimiter rateLimiter,
RateLimiterConfigurationProperties rateLimiterConfigurationProperties) {
InstanceProperties limiterProperties = rateLimiterConfigurationProperties.getInstances()
.get(rateLimiter.getName());
if (limiterProperties != null && limiterProperties.getSubscribeForEvents() != null
&& limiterProperties.getSubscribeForEvents()) {
rateLimiter.getEventPublisher().onEvent(
eventConsumerRegistry.createEventConsumer(rateLimiter.getName(),
limiterProperties.getEventConsumerBufferSize() != null
&& limiterProperties.getEventConsumerBufferSize() != 0 ? limiterProperties
.getEventConsumerBufferSize() : 100));
}
}
@Bean
@Conditional(value = {AspectJOnClasspathCondition.class})
public RateLimiterAspect rateLimiterAspect(
RateLimiterConfigurationProperties rateLimiterProperties,
RateLimiterRegistry rateLimiterRegistry,
@Autowired(required = false) List<RateLimiterAspectExt> rateLimiterAspectExtList,
FallbackExecutor fallbackExecutor,
SpelResolver spelResolver
) {
return new RateLimiterAspect(rateLimiterRegistry, rateLimiterProperties,
rateLimiterAspectExtList, fallbackExecutor, spelResolver);
}
@Bean
@Conditional(value = {RxJava2OnClasspathCondition.class, AspectJOnClasspathCondition.class})
public RxJava2RateLimiterAspectExt rxJava2RateLimiterAspectExt() {
return new RxJava2RateLimiterAspectExt();
}
@Bean
@Conditional(value = {RxJava3OnClasspathCondition.class, AspectJOnClasspathCondition.class})
public RxJava3RateLimiterAspectExt rxJava3RateLimiterAspectExt() {
return new RxJava3RateLimiterAspectExt();
}
@Bean
@Conditional(value = {ReactorOnClasspathCondition.class, AspectJOnClasspathCondition.class})
public ReactorRateLimiterAspectExt reactorRateLimiterAspectExt() {
return new ReactorRateLimiterAspectExt();
}
/**
* The EventConsumerRegistry is used to manage EventConsumer instances. The
* EventConsumerRegistry is used by the RateLimiterHealthIndicator to show the latest
* RateLimiterEvents events for each RateLimiter instance.
*
* @return The EventConsumerRegistry of RateLimiterEvent bean.
*/
@Bean
public EventConsumerRegistry<RateLimiterEvent> rateLimiterEventsConsumerRegistry() {
return new DefaultEventConsumerRegistry<>();
}
}
| RateLimiterConfiguration |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/visitor/JavaVisitorContext.java | {
"start": 3455,
"end": 23927
} | class ____ implements VisitorContext, BeanElementVisitorContext {
private final Messager messager;
private final Elements elements;
private final Types types;
private final ModelUtils modelUtils;
private final AnnotationProcessingOutputVisitor outputVisitor;
private final MutableConvertibleValues<Object> visitorAttributes;
private final ProcessingEnvironment processingEnv;
private final List<String> generatedResources = new ArrayList<>();
private final List<AbstractBeanDefinitionBuilder> beanDefinitionBuilders = new ArrayList<>();
private final JavaElementFactory elementFactory;
private final TypeElementVisitor.VisitorKind visitorKind;
private final DefaultExpressionCompilationContextFactory expressionCompilationContextFactory;
@Nullable
private JavaFileManager standardFileManager;
private final JavaAnnotationMetadataBuilder annotationMetadataBuilder;
private final JavaElementAnnotationMetadataFactory elementAnnotationMetadataFactory;
private final JavaNativeElementsHelper nativeElementsHelper;
private final Filer filer;
private final Set<String> postponedTypes;
private boolean visitUnresolvedInterfaces;
/**
* The default constructor.
*
* @param processingEnv The processing environment
* @param messager The messager
* @param elements The elements
* @param annotationUtils The annotation utils
* @param types Type types
* @param modelUtils The model utils
* @param genericUtils The generic type utils
* @param filer The filer
* @param visitorAttributes The attributes
* @param visitorKind The visitor kind
* @deprecated No longer needed
*/
@Deprecated(forRemoval = true, since = "4.3.0")
public JavaVisitorContext(
ProcessingEnvironment processingEnv,
Messager messager,
Elements elements,
AnnotationUtils annotationUtils,
Types types,
ModelUtils modelUtils,
GenericUtils genericUtils,
Filer filer,
MutableConvertibleValues<Object> visitorAttributes,
TypeElementVisitor.VisitorKind visitorKind) {
this(processingEnv, messager, elements, types, modelUtils, filer, visitorAttributes, visitorKind, new HashSet<>());
}
/**
* The default constructor.
*
* @param processingEnv The processing environment
* @param messager The messager
* @param elements The elements
* @param types Type types
* @param modelUtils The model utils
* @param filer The filer
* @param visitorAttributes The attributes
* @param visitorKind The visitor kind
* @deprecated No longer needed
*/
@Deprecated(forRemoval = true, since = "4.7.0")
public JavaVisitorContext(
ProcessingEnvironment processingEnv,
Messager messager,
Elements elements,
Types types,
ModelUtils modelUtils,
Filer filer,
MutableConvertibleValues<Object> visitorAttributes,
TypeElementVisitor.VisitorKind visitorKind) {
this(processingEnv, messager, elements, types, modelUtils, filer, visitorAttributes, visitorKind, Set.of());
}
/**
* The default constructor.
*
* @param processingEnv The processing environment
* @param messager The messager
* @param elements The elements
* @param types Type types
* @param modelUtils The model utils
* @param filer The filer
* @param visitorAttributes The attributes
* @param visitorKind The visitor kind
* @param postponedTypes The postponed types
*/
public JavaVisitorContext(
ProcessingEnvironment processingEnv,
Messager messager,
Elements elements,
Types types,
ModelUtils modelUtils,
Filer filer,
MutableConvertibleValues<Object> visitorAttributes,
TypeElementVisitor.VisitorKind visitorKind,
Set<String> postponedTypes) {
this.messager = messager;
this.elements = elements;
this.types = types;
this.modelUtils = modelUtils;
this.outputVisitor = new AnnotationProcessingOutputVisitor(filer);
this.visitorAttributes = visitorAttributes;
this.processingEnv = processingEnv;
this.elementFactory = new JavaElementFactory(this);
this.visitorKind = visitorKind;
this.nativeElementsHelper = new JavaNativeElementsHelper(elements, types);
this.annotationMetadataBuilder = new JavaAnnotationMetadataBuilder(elements, messager, modelUtils, nativeElementsHelper, this);
this.elementAnnotationMetadataFactory = new JavaElementAnnotationMetadataFactory(false, this.annotationMetadataBuilder);
this.expressionCompilationContextFactory = new DefaultExpressionCompilationContextFactory(this);
this.filer = filer;
this.postponedTypes = postponedTypes;
}
@Override
public Language getLanguage() {
return Language.JAVA;
}
/**
* @return The visitor kind
*/
public TypeElementVisitor.VisitorKind getVisitorKind() {
return visitorKind;
}
/**
* @return The processing environment
*/
public ProcessingEnvironment getProcessingEnv() {
return processingEnv;
}
/**
* @return True if the unresolved interfaces should be visited
* @since 4.9
*/
public boolean isVisitUnresolvedInterfaces() {
return visitUnresolvedInterfaces;
}
/**
* @param visitUnresolvedInterfaces True to visit unresolved interfaces
* @since 4.9
*/
public void setVisitUnresolvedInterfaces(boolean visitUnresolvedInterfaces) {
this.visitUnresolvedInterfaces = visitUnresolvedInterfaces;
}
@NonNull
@Override
public Iterable<URL> getClasspathResources(@NonNull String path) {
// reflective hack required because no way to get the JavaFileManager
// from public processor API
info("EXPERIMENTAL: Compile time resource scanning is experimental", null);
JavaFileManager standardFileManager = getStandardFileManager(processingEnv).orElse(null);
if (standardFileManager != null) {
try {
final ClassLoader classLoader = standardFileManager
.getClassLoader(StandardLocation.CLASS_PATH);
if (classLoader != null) {
final Enumeration<URL> resources = classLoader.getResources(path);
return CollectionUtils.enumerationToIterable(resources);
}
} catch (IOException e) {
// ignore
}
}
return Collections.emptyList();
}
@Override
public Optional<ClassElement> getClassElement(String name) {
return getClassElement(name, elementAnnotationMetadataFactory);
}
@Override
public Optional<ClassElement> getClassElement(String name, ElementAnnotationMetadataFactory annotationMetadataFactory) {
try {
TypeElement typeElement = elements.getTypeElement(name);
if (typeElement == null) {
// maybe inner class?
typeElement = elements.getTypeElement(name.replace('$', '.'));
}
return Optional.ofNullable(typeElement)
.map(typeElement1 -> elementFactory.newClassElement(typeElement1, annotationMetadataFactory));
} catch (RuntimeException e) {
// can throw exception on Eclipse JDT which is brain dead
return Optional.empty();
}
}
@Override
public @NonNull ClassElement[] getClassElements(@NonNull String aPackage, @NonNull String... stereotypes) {
ArgumentUtils.requireNonNull("aPackage", aPackage);
ArgumentUtils.requireNonNull("stereotypes", stereotypes);
final PackageElement packageElement = elements.getPackageElement(aPackage);
if (packageElement != null) {
var classElements = new ArrayList<ClassElement>();
populateClassElements(stereotypes, packageElement, classElements);
return classElements.toArray(ClassElement.ZERO_CLASS_ELEMENTS);
}
return ClassElement.ZERO_CLASS_ELEMENTS;
}
@NonNull
@Override
public JavaElementFactory getElementFactory() {
return elementFactory;
}
@NonNull
@Override
public JavaElementAnnotationMetadataFactory getElementAnnotationMetadataFactory() {
return elementAnnotationMetadataFactory;
}
@NonNull
@Override
public ExpressionCompilationContextFactory getExpressionCompilationContextFactory() {
return expressionCompilationContextFactory;
}
@NonNull
@Override
public JavaAnnotationMetadataBuilder getAnnotationMetadataBuilder() {
return annotationMetadataBuilder;
}
@Override
public void info(String message, io.micronaut.inject.ast.@Nullable Element element) {
printMessage(message, Diagnostic.Kind.NOTE, element);
}
@Override
public void info(String message) {
if (StringUtils.isNotEmpty(message)) {
messager.printMessage(Diagnostic.Kind.NOTE, message);
}
}
@Override
public void fail(String message, io.micronaut.inject.ast.@Nullable Element element) {
printMessage(message, Diagnostic.Kind.ERROR, element);
}
@Override
public void warn(String message, io.micronaut.inject.ast.@Nullable Element element) {
printMessage(message, Diagnostic.Kind.WARNING, element);
}
/**
* Print warning message.
*
* @param message The message
* @param element The element
* @since 4.0.0
*/
public void warn(String message, @Nullable Element element) {
if (element == null) {
messager.printMessage(Diagnostic.Kind.WARNING, message);
} else {
messager.printMessage(Diagnostic.Kind.WARNING, message, element);
}
}
private void printMessage(String message, Diagnostic.Kind kind, io.micronaut.inject.ast.@Nullable Element element) {
if (StringUtils.isNotEmpty(message)) {
if (element instanceof BeanElement beanElement) {
element = beanElement.getDeclaringClass();
}
if (element instanceof AbstractJavaElement abstractJavaElement) {
Element el = abstractJavaElement.getNativeType().element();
messager.printMessage(kind, message, el);
} else {
messager.printMessage(kind, message);
}
}
}
private void checkForPostponedOriginalElement(io.micronaut.inject.ast.Element originatingElement) {
if (originatingElement != null && postponedTypes.contains(originatingElement.getName())) {
throw new ElementPostponedToNextRoundException(originatingElement);
}
}
private void checkForPostponedOriginalElements(io.micronaut.inject.ast.Element[] originatingElements) {
if (originatingElements != null) {
for (io.micronaut.inject.ast.Element originatingElement : originatingElements) {
checkForPostponedOriginalElement(originatingElement);
}
}
}
@Override
public OutputStream visitClass(String classname, io.micronaut.inject.ast.@Nullable Element originatingElement) throws IOException {
checkForPostponedOriginalElement(originatingElement);
return outputVisitor.visitClass(classname, new io.micronaut.inject.ast.Element[] {originatingElement});
}
@Override
public OutputStream visitClass(String classname, io.micronaut.inject.ast.Element... originatingElements) throws IOException {
checkForPostponedOriginalElements(originatingElements);
return outputVisitor.visitClass(classname, originatingElements);
}
@Override
public void visitServiceDescriptor(String type, String classname) {
outputVisitor.visitServiceDescriptor(type, classname);
}
@Override
public void visitServiceDescriptor(String type, String classname, io.micronaut.inject.ast.Element originatingElement) {
checkForPostponedOriginalElement(originatingElement);
outputVisitor.visitServiceDescriptor(type, classname, originatingElement);
}
@Override
public Optional<GeneratedFile> visitMetaInfFile(String path, io.micronaut.inject.ast.Element... originatingElements) {
checkForPostponedOriginalElements(originatingElements);
return outputVisitor.visitMetaInfFile(path, originatingElements);
}
@Override
public Optional<GeneratedFile> visitGeneratedFile(String path) {
return outputVisitor.visitGeneratedFile(path);
}
@Override
public Optional<GeneratedFile> visitGeneratedFile(String path, io.micronaut.inject.ast.Element... originatingElements) {
checkForPostponedOriginalElements(originatingElements);
return outputVisitor.visitGeneratedFile(path, originatingElements);
}
@Override
public Optional<GeneratedFile> visitGeneratedSourceFile(String packageName, String fileNameWithoutExtension, io.micronaut.inject.ast.Element... originatingElements) {
checkForPostponedOriginalElements(originatingElements);
return outputVisitor.visitGeneratedSourceFile(packageName, fileNameWithoutExtension, originatingElements);
}
@Override
public void finish() {
outputVisitor.finish();
}
/**
* The messager.
*
* @return The messager
*/
public Messager getMessager() {
return messager;
}
/**
* The model utils.
*
* @return The model utils
*/
public ModelUtils getModelUtils() {
return modelUtils;
}
/**
* The elements.
*
* @return The elements
*/
public Elements getElements() {
return elements;
}
/**
* The annotation utils.
*
* @return The annotation utils
* @deprecated No longer used
*/
@Deprecated(forRemoval = true, since = "4.3.0")
public AnnotationUtils getAnnotationUtils() {
return new AnnotationUtils(processingEnv, elements, messager, types, modelUtils, getGenericUtils(), filer);
}
/**
* The types.
*
* @return The types
*/
public Types getTypes() {
return types;
}
/**
* The generic utils object.
*
* @return The generic utils
* @deprecated No longer used
*/
@Deprecated(forRemoval = true, since = "4.3.0")
public GenericUtils getGenericUtils() {
return new GenericUtils(elements, types, modelUtils);
}
/**
* @return The elements helper
*/
public JavaNativeElementsHelper getNativeElementsHelper() {
return nativeElementsHelper;
}
/**
* Java visitor context options from <code>javac</code> arguments and {@link System#getProperties()}
* <p><b>System properties has priority over arguments.</b></p>
*
* @return Java visitor context options for all visitors
* @see io.micronaut.inject.visitor.TypeElementVisitor
* @see <a href="https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html">javac arguments</a>
*/
@Override
public Map<String, String> getOptions() {
Map<String, String> processorOptions = VisitorContextUtils.getProcessorOptions(processingEnv);
Map<String, String> systemPropsOptions = VisitorContextUtils.getSystemOptions();
// Merge both options, with system props overriding on duplications
return Stream.of(processorOptions, systemPropsOptions)
.flatMap(map -> map.entrySet().stream())
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(v1, v2) -> StringUtils.isNotEmpty(v2) ? v2 : v1));
}
@Override
public MutableConvertibleValues<Object> put(CharSequence key, @Nullable Object value) {
visitorAttributes.put(key, value);
return this;
}
@Override
public MutableConvertibleValues<Object> remove(CharSequence key) {
visitorAttributes.remove(key);
return this;
}
@Override
public MutableConvertibleValues<Object> clear() {
visitorAttributes.clear();
return this;
}
@Override
public Set<String> names() {
return visitorAttributes.names();
}
@Override
public Collection<Object> values() {
return visitorAttributes.values();
}
@Override
public <T> Optional<T> get(CharSequence name, ArgumentConversionContext<T> conversionContext) {
return visitorAttributes.get(name, conversionContext);
}
private void populateClassElements(@NonNull String[] stereotypes, PackageElement packageElement, List<ClassElement> classElements) {
final List<? extends Element> enclosedElements = packageElement.getEnclosedElements();
boolean includeAll = Arrays.equals(stereotypes, new String[] {"*"});
for (Element enclosedElement : enclosedElements) {
populateClassElements(stereotypes, includeAll, enclosedElement, classElements);
}
}
private void populateClassElements(@NonNull String[] stereotypes, boolean includeAll, Element enclosedElement, List<ClassElement> classElements) {
if (enclosedElement instanceof TypeElement element) {
JavaClassElement classElement = elementFactory.newClassElement(element, elementAnnotationMetadataFactory);
if ((includeAll || Arrays.stream(stereotypes).anyMatch(classElement::hasStereotype)) && !classElement.isAbstract()) {
classElements.add(classElement);
}
List<? extends Element> nestedElements = enclosedElement.getEnclosedElements();
for (Element nestedElement : nestedElements) {
populateClassElements(stereotypes, includeAll, nestedElement, classElements);
}
} else if (enclosedElement instanceof PackageElement element) {
populateClassElements(stereotypes, element, classElements);
}
}
private Optional<JavaFileManager> getStandardFileManager(ProcessingEnvironment processingEnv) {
if (this.standardFileManager == null) {
final Optional<Method> contextMethod = ReflectionUtils.getMethod(processingEnv.getClass(), "getContext");
if (contextMethod.isPresent()) {
final Object context = ReflectionUtils.invokeMethod(processingEnv, contextMethod.get());
try {
if (context != null) {
final Optional<Method> getMethod = ReflectionUtils.getMethod(context.getClass(), "get", Class.class);
this.standardFileManager = (JavaFileManager)
getMethod.map(method -> ReflectionUtils.invokeMethod(context, method, JavaFileManager.class)).orElse(null);
}
} catch (Exception e) {
// ignore
}
}
}
return Optional.ofNullable(this.standardFileManager);
}
@Override
public Collection<String> getGeneratedResources() {
return Collections.unmodifiableCollection(generatedResources);
}
@Override
public void addGeneratedResource(@NonNull String resource) {
generatedResources.add(resource);
}
/**
* @return Gets the produced bean definition builders.
*/
@Internal
public List<AbstractBeanDefinitionBuilder> getBeanElementBuilders() {
final var current = new ArrayList<>(beanDefinitionBuilders);
beanDefinitionBuilders.clear();
return current;
}
/**
* Adds a java bean definition builder.
*
* @param javaBeanDefinitionBuilder The bean builder
*/
@Internal
void addBeanDefinitionBuilder(JavaBeanDefinitionBuilder javaBeanDefinitionBuilder) {
this.beanDefinitionBuilders.add(javaBeanDefinitionBuilder);
}
@Override
public BeanElementBuilder addAssociatedBean(io.micronaut.inject.ast.Element originatingElement, ClassElement type) {
return new JavaBeanDefinitionBuilder(
originatingElement,
type,
type instanceof AbstractAnnotationElement aae ? aae.getElementAnnotationMetadataFactory() : elementAnnotationMetadataFactory,
this
);
}
}
| JavaVisitorContext |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/ProtoStringFieldReferenceEquality.java | {
"start": 1584,
"end": 2946
} | class ____ extends BugChecker implements BinaryTreeMatcher {
private static final String PROTO_SUPER_CLASS = "com.google.protobuf.GeneratedMessage";
private static final String LITE_PROTO_SUPER_CLASS = "com.google.protobuf.GeneratedMessageLite";
private static final Matcher<ExpressionTree> PROTO_STRING_METHOD =
allOf(
instanceMethod().onDescendantOfAny(PROTO_SUPER_CLASS, LITE_PROTO_SUPER_CLASS),
isSameType(Suppliers.STRING_TYPE));
@Override
public Description matchBinary(BinaryTree tree, VisitorState state) {
switch (tree.getKind()) {
case EQUAL_TO, NOT_EQUAL_TO -> {}
default -> {
return NO_MATCH;
}
}
ExpressionTree lhs = tree.getLeftOperand();
ExpressionTree rhs = tree.getRightOperand();
if (match(lhs, rhs, state) || match(rhs, lhs, state)) {
String result =
String.format("%s.equals(%s)", state.getSourceForNode(lhs), state.getSourceForNode(rhs));
if (tree.getKind() == Kind.NOT_EQUAL_TO) {
result = "!" + result;
}
return describeMatch(tree, SuggestedFix.replace(tree, result));
}
return NO_MATCH;
}
private static boolean match(ExpressionTree a, ExpressionTree b, VisitorState state) {
return PROTO_STRING_METHOD.matches(a, state) && b.getKind() != Kind.NULL_LITERAL;
}
}
| ProtoStringFieldReferenceEquality |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java | {
"start": 563,
"end": 1944
} | class ____ extends AbstractExpressionSerializationTests<Rate> {
@Override
protected Rate createTestInstance() {
Source source = randomSource();
Expression field = randomChild();
Expression filter = randomChild();
Expression window = randomChild();
Expression timestamp = randomChild();
return new Rate(source, field, filter, window, timestamp);
}
@Override
protected Rate mutateInstance(Rate instance) throws IOException {
Source source = randomSource();
Expression field = instance.field();
Expression timestamp = instance.timestamp();
Expression filter = instance.filter();
Expression window = instance.window();
switch (between(0, 3)) {
case 0 -> field = randomValueOtherThan(field, AbstractExpressionSerializationTests::randomChild);
case 1 -> timestamp = randomValueOtherThan(timestamp, AbstractExpressionSerializationTests::randomChild);
case 2 -> filter = randomValueOtherThan(filter, AbstractExpressionSerializationTests::randomChild);
case 3 -> window = randomValueOtherThan(window, AbstractExpressionSerializationTests::randomChild);
default -> throw new AssertionError("unexpected value");
}
return new Rate(source, field, filter, window, timestamp);
}
}
| RateSerializationTests |
java | alibaba__nacos | core/src/test/java/com/alibaba/nacos/core/namespace/filter/NamespaceValidationRequestFilterTest.java | {
"start": 2269,
"end": 13558
} | class ____ {
private NamespaceValidationRequestFilter namespaceValidationFilter;
@Mock
private NamespaceOperationService namespaceOperationService;
@Mock
private Request request;
@Mock
private RequestMeta requestMeta;
@Mock
private AbstractRpcParamExtractor paramExtractor;
@BeforeEach
void setUp() {
namespaceValidationFilter = new NamespaceValidationRequestFilter(namespaceOperationService);
}
@Test
void testFilterWithGlobalConfigDisabled() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(false);
try (MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class)) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When global config is disabled, filter should return null (skip validation)
assertNull(response);
}
}
@Test
void testFilterWithoutNamespaceValidationAnnotation() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
try (MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class)) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithoutNamespaceValidationAnnotation.class);
// When no @NamespaceValidation annotation is found, should return null
assertNull(response);
}
}
@Test
void testFilterWithNamespaceValidationDisabled() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
try (MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class)) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithDisabledValidation.class);
// When @NamespaceValidation(enable=false), should return null
assertNull(response);
}
}
@Test
void testFilterWithoutExtractorAnnotation() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
try (MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class)) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidationButNoExtractor.class);
// When no extractor annotation is found, should return null
assertNull(response);
}
}
@Test
void testFilterWithExtractorReturningEmptyList() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
try (
MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class);
MockedStatic<ExtractorManager> extractorManagerMock = mockStatic(ExtractorManager.class)
) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
extractorManagerMock.when(() -> ExtractorManager.getRpcExtractor(any())).thenReturn(paramExtractor);
when(paramExtractor.extractParam(request)).thenReturn(Collections.emptyList());
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When extractor returns empty list, should return null
assertNull(response);
}
}
@Test
void testFilterWithNullNamespaceParam() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
ParamInfo paramInfo = new ParamInfo();
paramInfo.setNamespaceId(null);
List<ParamInfo> paramInfoList = Arrays.asList(paramInfo);
try (
MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class);
MockedStatic<ExtractorManager> extractorManagerMock = mockStatic(ExtractorManager.class)
) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
extractorManagerMock.when(() -> ExtractorManager.getRpcExtractor(any())).thenReturn(paramExtractor);
when(paramExtractor.extractParam(request)).thenReturn(paramInfoList);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When namespace is null, should skip validation and return null
assertNull(response);
}
}
@Test
void testFilterWithExistingNamespace() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
ParamInfo paramInfo = new ParamInfo();
paramInfo.setNamespaceId("existing-namespace");
List<ParamInfo> paramInfoList = Collections.singletonList(paramInfo);
when(namespaceOperationService.namespaceExists("existing-namespace")).thenReturn(true);
try (
MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class);
MockedStatic<ExtractorManager> extractorManagerMock = mockStatic(ExtractorManager.class)
) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
extractorManagerMock.when(() -> ExtractorManager.getRpcExtractor(any())).thenReturn(paramExtractor);
when(paramExtractor.extractParam(request)).thenReturn(paramInfoList);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When namespace exists, should return null (pass validation)
assertNull(response);
}
}
@Test
void testFilterWithNonExistingNamespace() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
ParamInfo paramInfo = new ParamInfo();
paramInfo.setNamespaceId("non-existing-namespace");
List<ParamInfo> paramInfoList = Arrays.asList(paramInfo);
when(namespaceOperationService.namespaceExists("non-existing-namespace")).thenReturn(false);
try (
MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class);
MockedStatic<ExtractorManager> extractorManagerMock = mockStatic(ExtractorManager.class)
) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
extractorManagerMock.when(() -> ExtractorManager.getRpcExtractor(any())).thenReturn(paramExtractor);
when(paramExtractor.extractParam(request)).thenReturn(paramInfoList);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When namespace doesn't exist, should return error response
assertNotNull(response);
assertEquals(ErrorCode.NAMESPACE_NOT_EXIST.getCode(), response.getErrorCode());
assertEquals("Namespace 'non-existing-namespace' does not exist. Please create the namespace first.",
response.getMessage());
}
}
@Test
void testFilterWithMultipleParamInfos() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
ParamInfo paramInfo1 = new ParamInfo();
paramInfo1.setNamespaceId("existing-namespace");
ParamInfo paramInfo2 = new ParamInfo();
paramInfo2.setNamespaceId("non-existing-namespace");
List<ParamInfo> paramInfoList = Arrays.asList(paramInfo1, paramInfo2);
when(namespaceOperationService.namespaceExists("existing-namespace")).thenReturn(true);
when(namespaceOperationService.namespaceExists("non-existing-namespace")).thenReturn(false);
try (
MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class);
MockedStatic<ExtractorManager> extractorManagerMock = mockStatic(ExtractorManager.class)
) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
extractorManagerMock.when(() -> ExtractorManager.getRpcExtractor(any())).thenReturn(paramExtractor);
when(paramExtractor.extractParam(request)).thenReturn(paramInfoList);
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When one namespace doesn't exist, should return error response
assertNotNull(response);
assertEquals(ErrorCode.NAMESPACE_NOT_EXIST.getCode(), response.getErrorCode());
assertEquals("Namespace 'non-existing-namespace' does not exist. Please create the namespace first.",
response.getMessage());
}
}
@Test
void testFilterWithExceptionInMainFlow() throws NacosException {
NamespaceValidationConfig mockConfig = Mockito.mock(NamespaceValidationConfig.class);
when(mockConfig.isNamespaceValidationEnabled()).thenReturn(true);
try (
MockedStatic<NamespaceValidationConfig> mockedStatic = mockStatic(NamespaceValidationConfig.class);
MockedStatic<ExtractorManager> extractorManagerMock = mockStatic(ExtractorManager.class)
) {
mockedStatic.when(NamespaceValidationConfig::getInstance).thenReturn(mockConfig);
extractorManagerMock.when(() -> ExtractorManager.getRpcExtractor(any())).thenThrow(new RuntimeException("Extractor error"));
Response response = namespaceValidationFilter.filter(request, requestMeta, MockWithEnabledValidation.class);
// When exception occurs in main flow, should return null (caught by try-catch)
assertNull(response);
}
}
static | NamespaceValidationRequestFilterTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxCombineLatestTest.java | {
"start": 1305,
"end": 10519
} | class ____ extends FluxOperatorTest<String, String> {
@Override
protected Scenario<String, String> defaultScenarioOptions(Scenario<String, String> defaultOptions) {
return defaultOptions.fusionMode(Fuseable.ASYNC)
.prefetch(Queues.XS_BUFFER_SIZE);
}
@Override
protected List<Scenario<String, String>> scenarios_operatorError() {
return Arrays.asList(scenario(f -> Flux.combineLatest(o -> null,
f,
Flux.just(1))),
scenario(f -> Flux.combineLatest(o -> {
throw exception();
}, f, Flux.just(1))),
scenario(f -> Flux.combineLatest(() -> {
throw exception();
}, o -> (String) o[0])).fusionMode(Fuseable.NONE),
scenario(f -> Flux.combineLatest(() -> null,
o -> (String) o[0])).fusionMode(Fuseable.NONE),
scenario(f -> Flux.combineLatest(() -> new Iterator<Publisher<?>>() {
@Override
public boolean hasNext() {
throw exception();
}
@Override
public Publisher<?> next() {
return null;
}
}, o -> (String) o[0])).fusionMode(Fuseable.NONE),
scenario(f -> Flux.combineLatest(() -> new Iterator<Publisher<?>>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public Publisher<?> next() {
throw exception();
}
}, o -> (String) o[0])).fusionMode(Fuseable.NONE),
scenario(f -> Flux.combineLatest(() -> new Iterator<Publisher<?>>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public Publisher<?> next() {
return null;
}
}, o -> (String) o[0])).fusionMode(Fuseable.NONE));
}
@Override
protected List<Scenario<String, String>> scenarios_errorFromUpstreamFailure() {
return Arrays.asList(scenario(f -> Flux.combineLatest(o -> (String) o[0],
f)).prefetch(-1),
scenario(f -> Flux.combineLatest(o -> (String) o[0],
f,
Flux.never())).shouldHitDropNextHookAfterTerminate(false));
}
//FIXME these tests are weird, no way to ensure which source produces the data
@Override
protected List<Scenario<String, String>> scenarios_operatorSuccess() {
return Arrays.asList(scenario(f -> Flux.combineLatest(o -> (String) o[0],
f)).prefetch(-1),
scenario(f -> Flux.combineLatest(o -> (String) o[1],
f,
Flux.just(item(0), item(1), item(2)),
Flux.just(item(0), item(1), item(2))))
.receiveValues(item(2), item(2), item(2))
.shouldAssertPostTerminateState(false),
scenario(f -> Flux.combineLatest(o -> (String) o[2],
1,
f,
Flux.just(item(0), item(0), item(0)),
Flux.just(item(0), item(0), item(0))))
.prefetch(1)
.receiveValues(item(0), item(0), item(0))
.shouldAssertPostTerminateState(false)
);
}
@Test
public void singleSourceIsMapped() {
AssertSubscriber<String> ts = AssertSubscriber.create();
Flux.combineLatest(a -> a[0].toString(), Flux.just(1))
.subscribe(ts);
ts.assertValues("1")
.assertNoError()
.assertComplete();
}
@Test
public void iterableSingleSourceIsMapped() {
AssertSubscriber<String> ts = AssertSubscriber.create();
Flux.combineLatest(Collections.singleton(Flux.just(1)), a -> a[0].toString())
.subscribe(ts);
ts.assertValues("1")
.assertNoError()
.assertComplete();
}
@Test
public void fused() {
Sinks.Many<Integer> dp1 = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> dp2 = Sinks.unsafe().many().multicast().directBestEffort();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
ts.requestedFusionMode(Fuseable.ANY);
Flux.combineLatest(dp1.asFlux(), dp2.asFlux(), (a, b) -> a + b)
.subscribe(ts);
dp1.emitNext(1, FAIL_FAST);
dp1.emitNext(2, FAIL_FAST);
dp2.emitNext(10, FAIL_FAST);
dp2.emitNext(20, FAIL_FAST);
dp2.emitNext(30, FAIL_FAST);
dp1.emitNext(3, FAIL_FAST);
dp1.emitComplete(FAIL_FAST);
dp2.emitComplete(FAIL_FAST);
ts.assertFuseableSource()
.assertFusionMode(Fuseable.ASYNC)
.assertValues(12, 22, 32, 33);
}
@Test
public void combineLatest() {
StepVerifier.create(Flux.combineLatest(obj -> (int) obj[0], Flux.just(1)))
.expectNext(1)
.verifyComplete();
}
@Test
public void combineLatestEmpty() {
StepVerifier.create(Flux.combineLatest(obj -> (int) obj[0]))
.verifyComplete();
}
@Test
public void combineLatestHide() {
StepVerifier.create(Flux.combineLatest(obj -> (int) obj[0],
Flux.just(1)
.hide()))
.expectNext(1)
.verifyComplete();
}
@Test
public void combineLatest2() {
StepVerifier.create(Flux.combineLatest(Flux.just(1), Flux.just(2), (a, b) -> a))
.expectNext(1)
.verifyComplete();
}
@Test
public void combineLatest3() {
StepVerifier.create(Flux.combineLatest(Flux.just(1),
Flux.just(2),
Flux.just(3),
obj -> (int) obj[0]))
.expectNext(1)
.verifyComplete();
}
@Test
public void combineLatest4() {
StepVerifier.create(Flux.combineLatest(Flux.just(1),
Flux.just(2),
Flux.just(3),
Flux.just(4),
obj -> (int) obj[0]))
.expectNext(1)
.verifyComplete();
}
@Test
public void combineLatest5() {
StepVerifier.create(Flux.combineLatest(Flux.just(1),
Flux.just(2),
Flux.just(3),
Flux.just(4),
Flux.just(5),
obj -> (int) obj[0]))
.expectNext(1)
.verifyComplete();
}
@Test
public void combineLatest6() {
StepVerifier.create(Flux.combineLatest(Flux.just(1),
Flux.just(2),
Flux.just(3),
Flux.just(4),
Flux.just(5),
Flux.just(6),
obj -> (int) obj[0]))
.expectNext(1)
.verifyComplete();
}
@Test
public void scanOperator() {
FluxCombineLatest s = new FluxCombineLatest<>(Collections.emptyList(), v -> v, Queues.small(), 123);
assertThat(s.scan(Scannable.Attr.PREFETCH)).isEqualTo(123);
assertThat(s.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanMain() {
CoreSubscriber<Integer> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FluxCombineLatest.CombineLatestCoordinator<String, Integer> test = new FluxCombineLatest.CombineLatestCoordinator<>(
actual, arr -> { throw new IllegalStateException("boomArray");}, 123, Queues.<FluxCombineLatest.SourceAndArray>one().get(), 456);
test.request(2L);
test.error = new IllegalStateException("boom"); //most straightforward way to set it as otherwise it is drained
assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(2L);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
assertThat(test.scan(Scannable.Attr.ERROR)).isSameAs(test.error);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.innerComplete(1);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
}
@Test
public void scanInner() {
CoreSubscriber<Integer> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FluxCombineLatest.CombineLatestCoordinator<String, Integer> main = new FluxCombineLatest.CombineLatestCoordinator<>(
actual, arr -> arr.length, 123, Queues.<FluxCombineLatest.SourceAndArray>one().get(), 456);
FluxCombineLatest.CombineLatestInner<String> test = new FluxCombineLatest.CombineLatestInner<>(main, 1, 789);
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(789);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(main);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
public void singleSourceNormalWithFuseableDownstream() {
StepVerifier.create(
Flux.combineLatest(Collections.singletonList(Flux.just(1, 2, 3).hide()), (arr) -> arr[0].toString())
//the map is Fuseable and sees the combine as fuseable too
.map(x -> x + "!")
.collectList())
.assertNext(l -> assertThat(l).containsExactly("1!", "2!", "3!"))
.verifyComplete();
}
@Test
public void singleSourceNormalWithoutFuseableDownstream() {
StepVerifier.create(
Flux.combineLatest(
Collections.singletonList(Flux.just(1, 2, 3).hide()),
(arr) -> arr[0].toString())
//the collectList is NOT Fuseable
.collectList()
)
.assertNext(l -> assertThat(l).containsExactly("1", "2", "3"))
.verifyComplete();
}
@Test
public void singleSourceFusedWithFuseableDownstream() {
StepVerifier.create(
Flux.combineLatest(
Collections.singletonList(Flux.just(1, 2, 3)),
(arr) -> arr[0].toString())
//the map is Fuseable and sees the combine as fuseable too
.map(x -> x + "!")
.collectList())
.assertNext(l -> assertThat(l).containsExactly("1!", "2!", "3!"))
.verifyComplete();
}
}
| FluxCombineLatestTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java | {
"start": 13752,
"end": 14057
} | class ____<K extends WritableComparable<?>,
V extends Writable>
extends SampleMapBase<K,V> {
public void map(K key, V val, Context context)
throws IOException, InterruptedException {
emit(key, val, context);
}
}
public static | SampleMapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cdi/converters/standard/CdiHostedConverterTest.java | {
"start": 1463,
"end": 3930
} | class ____ {
@Test
@ExtendWith(MonitorBean.Resetter.class )
@CdiContainer(beanClasses = {MonitorBean.class, ConverterBean.class})
@ServiceRegistry(resolvableSettings = @ServiceRegistry.ResolvableSetting(
settingName = CDI_BEAN_MANAGER,
resolver = CdiContainerLinker.StandardResolver.class
))
@DomainModel(annotatedClasses = TheEntity.class)
@SessionFactory
public void testAnnotations(CdiContainerScope containerScope, SessionFactoryScope factoryScope) {
factoryScope.getSessionFactory();
// The CDI bean should have been built immediately...
assertTrue( MonitorBean.wasInstantiated() );
assertEquals( 0, MonitorBean.currentFromDbCount() );
assertEquals( 0, MonitorBean.currentToDbCount() );
factoryScope.inTransaction( (session) -> {
session.persist( new TheEntity( 1, "me", 5 ) );
} );
assertEquals( 0, MonitorBean.currentFromDbCount() );
assertEquals( 1, MonitorBean.currentToDbCount() );
factoryScope.inTransaction( (session) -> {
TheEntity it = session.find( TheEntity.class, 1 );
assertNotNull( it );
} );
assertEquals( 1, MonitorBean.currentFromDbCount() );
assertEquals( 1, MonitorBean.currentToDbCount() );
}
@Test
@JiraKey("HHH-14881")
@ExtendWith(MonitorBean.Resetter.class )
@CdiContainer(beanClasses = {MonitorBean.class, OrmXmlConverterBean.class})
@ServiceRegistry(resolvableSettings = @ServiceRegistry.ResolvableSetting(
settingName = CDI_BEAN_MANAGER,
resolver = CdiContainerLinker.StandardResolver.class
))
@DomainModel(annotatedClasses = TheEntity.class, xmlMappings = "org/hibernate/test/cdi/converters/orm.xml")
@SessionFactory
public void testOrmXml(CdiContainerScope cdiScope, SessionFactoryScope factoryScope) {
factoryScope.getSessionFactory();
// The CDI bean should have been built immediately...
assertTrue( MonitorBean.wasInstantiated() );
assertEquals( 0, MonitorBean.currentFromDbCount() );
assertEquals( 0, MonitorBean.currentToDbCount() );
factoryScope.inTransaction( (session) -> {
session.persist( new TheOrmXmlEntity( 1, "me", new MyData( "foo" ) ) );
} );
assertEquals( 0, MonitorBean.currentFromDbCount() );
assertEquals( 1, MonitorBean.currentToDbCount() );
factoryScope.inTransaction( (session) -> {
TheOrmXmlEntity it = session.find( TheOrmXmlEntity.class, 1 );
assertNotNull( it );
} );
assertEquals( 1, MonitorBean.currentFromDbCount() );
assertEquals( 1, MonitorBean.currentToDbCount() );
}
}
| CdiHostedConverterTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/pivot/DateHistogramGroupSourceTests.java | {
"start": 1100,
"end": 13861
} | class ____ extends AbstractXContentSerializingTestCase<DateHistogramGroupSource> {
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
return randomDateHistogramGroupSource(TransformConfigVersion.CURRENT);
}
public static DateHistogramGroupSource randomDateHistogramGroupSourceNoScript() {
return randomDateHistogramGroupSource(TransformConfigVersion.CURRENT, false);
}
public static DateHistogramGroupSource randomDateHistogramGroupSourceNoScript(String fieldPrefix) {
return randomDateHistogramGroupSource(TransformConfigVersion.CURRENT, false, fieldPrefix);
}
public static DateHistogramGroupSource randomDateHistogramGroupSource(TransformConfigVersion version) {
return randomDateHistogramGroupSource(version, randomBoolean());
}
public static DateHistogramGroupSource randomDateHistogramGroupSource(TransformConfigVersion version, boolean withScript) {
return randomDateHistogramGroupSource(version, withScript, "");
}
public static DateHistogramGroupSource randomDateHistogramGroupSource(
TransformConfigVersion version,
boolean withScript,
String fieldPrefix
) {
ScriptConfig scriptConfig = null;
String field;
// either a field or a script must be specified, it's possible to have both, but disallowed to have none
if (version.onOrAfter(TransformConfigVersion.V_7_7_0) && withScript) {
scriptConfig = ScriptConfigTests.randomScriptConfig();
field = randomBoolean() ? null : fieldPrefix + randomAlphaOfLengthBetween(1, 20);
} else {
field = fieldPrefix + randomAlphaOfLengthBetween(1, 20);
}
boolean missingBucket = version.onOrAfter(TransformConfigVersion.V_7_10_0) ? randomBoolean() : false;
Long offset = version.onOrAfter(TransformConfigVersion.V_8_7_0) ? randomOffset() : null;
DateHistogramGroupSource dateHistogramGroupSource;
if (randomBoolean()) {
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
missingBucket,
new DateHistogramGroupSource.FixedInterval(
new DateHistogramInterval(between(1, 100) + randomFrom("d", "h", "ms", "s", "m"))
),
randomBoolean() ? randomZone() : null,
randomBoolean() ? offset : null
);
} else {
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
missingBucket,
new DateHistogramGroupSource.CalendarInterval(
new DateHistogramInterval("1" + randomFrom("m", "h", "d", "w", "M", "q", "y"))
),
randomBoolean() ? randomZone() : null,
randomBoolean() ? offset : null
);
}
return dateHistogramGroupSource;
}
@Override
protected DateHistogramGroupSource doParseInstance(XContentParser parser) throws IOException {
return DateHistogramGroupSource.fromXContent(parser, false);
}
@Override
protected DateHistogramGroupSource createTestInstance() {
return randomDateHistogramGroupSource();
}
@Override
protected DateHistogramGroupSource mutateInstance(DateHistogramGroupSource instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<DateHistogramGroupSource> instanceReader() {
return DateHistogramGroupSource::new;
}
public void testOffset() {
{
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
null,
null,
false,
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1d")),
null,
null
);
assertThat(dateHistogramGroupSource.getOffset(), equalTo(0L));
}
{
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
null,
null,
false,
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1d")),
null,
0L
);
assertThat(dateHistogramGroupSource.getOffset(), equalTo(0L));
}
{
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
null,
null,
false,
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1d")),
null,
DateHistogramAggregationBuilder.parseStringOffset("-1h")
);
assertThat(dateHistogramGroupSource.getOffset(), equalTo(-3_600_000L));
}
{
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
null,
null,
false,
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1d")),
null,
DateHistogramAggregationBuilder.parseStringOffset("+1h")
);
assertThat(dateHistogramGroupSource.getOffset(), equalTo(3_600_000L));
}
}
public void testRoundingDateHistogramFixedInterval() {
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20),
null,
randomBoolean(),
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval("1d")),
null,
null
);
Rounding.Prepared rounding = dateHistogramGroupSource.getRounding();
assertThat(rounding, notNullValue());
// not meant to be complete rounding tests, see {@link RoundingTests} for more
assertThat(rounding.round(time("2020-03-25T23:59:59.000Z")), equalTo(time("2020-03-25T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-26T00:00:00.000Z")), equalTo(time("2020-03-26T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-26T00:00:01.000Z")), equalTo(time("2020-03-26T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-26T23:59:59.000Z")), equalTo(time("2020-03-26T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-27T00:00:00.000Z")), equalTo(time("2020-03-27T00:00:00.000Z")));
}
public void testRoundingDateHistogramCalendarInterval() {
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20),
null,
randomBoolean(),
new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval("1w")),
null,
null
);
Rounding.Prepared rounding = dateHistogramGroupSource.getRounding();
assertThat(rounding, notNullValue());
// not meant to be complete rounding tests, see {@link RoundingTests} for more
assertThat(rounding.round(time("2020-03-21T23:59:59.000Z")), equalTo(time("2020-03-16T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-22T00:00:00.000Z")), equalTo(time("2020-03-16T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-22T23:59:59.000Z")), equalTo(time("2020-03-16T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-23T00:00:00.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-23T00:00:01.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-24T00:00:00.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-26T23:59:59.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-28T23:59:59.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-29T00:00:00.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-29T23:59:59.000Z")), equalTo(time("2020-03-23T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-30T00:00:00.000Z")), equalTo(time("2020-03-30T00:00:00.000Z")));
}
public void testRoundingDateHistogramCalendarIntervalWithNegativeOffset() {
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20),
null,
randomBoolean(),
new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval("1w")),
null,
DateHistogramAggregationBuilder.parseStringOffset("-1d")
);
Rounding.Prepared rounding = dateHistogramGroupSource.getRounding();
assertThat(rounding, notNullValue());
// not meant to be complete rounding tests, see {@link RoundingTests} for more
assertThat(rounding.round(time("2020-03-21T23:59:59.000Z")), equalTo(time("2020-03-15T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-22T00:00:00.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-22T23:59:59.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-23T00:00:00.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-23T00:00:01.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-24T00:00:00.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-26T23:59:59.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-28T23:59:59.000Z")), equalTo(time("2020-03-22T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-29T00:00:00.000Z")), equalTo(time("2020-03-29T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-29T23:59:59.000Z")), equalTo(time("2020-03-29T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-30T00:00:00.000Z")), equalTo(time("2020-03-29T00:00:00.000Z")));
}
public void testRoundingDateHistogramCalendarIntervalWithPositiveOffset() {
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20),
null,
randomBoolean(),
new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval("1w")),
null,
DateHistogramAggregationBuilder.parseStringOffset("+1d")
);
Rounding.Prepared rounding = dateHistogramGroupSource.getRounding();
assertThat(rounding, notNullValue());
// not meant to be complete rounding tests, see {@link RoundingTests} for more
assertThat(rounding.round(time("2020-03-21T23:59:59.000Z")), equalTo(time("2020-03-17T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-22T00:00:00.000Z")), equalTo(time("2020-03-17T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-22T23:59:59.000Z")), equalTo(time("2020-03-17T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-23T00:00:00.000Z")), equalTo(time("2020-03-17T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-23T00:00:01.000Z")), equalTo(time("2020-03-17T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-24T00:00:00.000Z")), equalTo(time("2020-03-24T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-26T23:59:59.000Z")), equalTo(time("2020-03-24T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-28T23:59:59.000Z")), equalTo(time("2020-03-24T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-29T00:00:00.000Z")), equalTo(time("2020-03-24T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-29T23:59:59.000Z")), equalTo(time("2020-03-24T00:00:00.000Z")));
assertThat(rounding.round(time("2020-03-30T00:00:00.000Z")), equalTo(time("2020-03-24T00:00:00.000Z")));
}
private static long time(String time) {
TemporalAccessor accessor = DateFormatter.forPattern("date_optional_time").withZone(ZoneOffset.UTC).parse(time);
return DateFormatters.from(accessor).toInstant().toEpochMilli();
}
private static long randomOffset() {
return randomLongBetween(-1_000_000, 1_000_000);
}
}
| DateHistogramGroupSourceTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/objectid/TestObjectIdWithPolymorphic.java | {
"start": 1885,
"end": 2176
} | class ____ extends Base811 {
protected Activity parent;
public Activity(Process owner, Activity parent) {
super(owner);
this.parent = parent;
}
protected Activity() {
super();
}
}
public static | Activity |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestQueueConfigurationAutoRefreshPolicy.java | {
"start": 1897,
"end": 11031
} | class ____ {
private Configuration configuration;
private MockRM rm = null;
private FileSystem fs;
private Path workingPath;
private Path workingPathRecover;
private Path fileSystemWorkingPath;
private Path tmpDir;
private QueueConfigurationAutoRefreshPolicy policy;
static {
YarnConfiguration.addDefaultResource(
YarnConfiguration.CS_CONFIGURATION_FILE);
YarnConfiguration.addDefaultResource(
YarnConfiguration.DR_CONFIGURATION_FILE);
}
@BeforeEach
public void setup() throws IOException {
QueueMetrics.clearQueueMetrics();
DefaultMetricsSystem.setMiniClusterMode(true);
configuration = new YarnConfiguration();
configuration.set(YarnConfiguration.RM_SCHEDULER,
CapacityScheduler.class.getCanonicalName());
fs = FileSystem.get(configuration);
workingPath = new Path(QueueConfigurationAutoRefreshPolicy.
class.getClassLoader().
getResource(".").toString());
workingPathRecover = new Path(QueueConfigurationAutoRefreshPolicy.
class.getClassLoader().
getResource(".").toString() + "/" + "Recover");
fileSystemWorkingPath =
new Path(new File("target", this.getClass().getSimpleName()
+ "-remoteDir").getAbsolutePath());
tmpDir = new Path(new File("target", this.getClass().getSimpleName()
+ "-tmpDir").getAbsolutePath());
fs.delete(fileSystemWorkingPath, true);
fs.mkdirs(fileSystemWorkingPath);
fs.delete(tmpDir, true);
fs.mkdirs(tmpDir);
policy =
new QueueConfigurationAutoRefreshPolicy();
}
private String writeConfigurationXML(Configuration conf, String confXMLName)
throws IOException {
DataOutputStream output = null;
try {
final File confFile = new File(tmpDir.toString(), confXMLName);
if (confFile.exists()) {
confFile.delete();
}
if (!confFile.createNewFile()) {
fail("Can not create " + confXMLName);
}
output = new DataOutputStream(
new FileOutputStream(confFile));
conf.writeXml(output);
return confFile.getAbsolutePath();
} finally {
if (output != null) {
output.close();
}
}
}
private void uploadConfiguration(Boolean isFileSystemBased,
Configuration conf, String confFileName)
throws IOException {
String csConfFile = writeConfigurationXML(conf, confFileName);
if (isFileSystemBased) {
// upload the file into Remote File System
uploadToRemoteFileSystem(new Path(csConfFile),
fileSystemWorkingPath);
} else {
// upload the file into Work Path for Local File
uploadToRemoteFileSystem(new Path(csConfFile),
workingPath);
}
}
private void uploadToRemoteFileSystem(Path filePath, Path remotePath)
throws IOException {
fs.copyFromLocalFile(filePath, remotePath);
}
private void uploadDefaultConfiguration(Boolean
isFileSystemBased) throws IOException {
Configuration conf = new Configuration();
uploadConfiguration(isFileSystemBased,
conf, "core-site.xml");
YarnConfiguration yarnConf = new YarnConfiguration();
uploadConfiguration(isFileSystemBased,
yarnConf, "yarn-site.xml");
CapacitySchedulerConfiguration csConf =
new CapacitySchedulerConfiguration();
uploadConfiguration(isFileSystemBased,
csConf, "capacity-scheduler.xml");
Configuration hadoopPolicyConf = new Configuration(false);
hadoopPolicyConf
.addResource(YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
uploadConfiguration(isFileSystemBased,
hadoopPolicyConf, "hadoop-policy.xml");
}
@Test
public void testFileSystemBasedEditSchedule() throws Exception {
// Test FileSystemBasedConfigurationProvider scheduled
testCommon(true);
}
@Test
public void testLocalFileBasedEditSchedule() throws Exception {
// Prepare for recover for local file default.
fs.mkdirs(workingPath);
fs.copyFromLocalFile(new Path(workingPath.toString()
+ "/" + YarnConfiguration.CORE_SITE_CONFIGURATION_FILE),
new Path(workingPathRecover.toString()
+ "/" + YarnConfiguration.CORE_SITE_CONFIGURATION_FILE));
fs.copyFromLocalFile(new Path(workingPath.toString()
+ "/" + YarnConfiguration.YARN_SITE_CONFIGURATION_FILE),
new Path(workingPathRecover.toString()
+ "/" + YarnConfiguration.YARN_SITE_CONFIGURATION_FILE));
fs.copyFromLocalFile(new Path(workingPath.toString()
+ "/" + YarnConfiguration.CS_CONFIGURATION_FILE),
new Path(workingPathRecover.toString()
+ "/" + YarnConfiguration.CS_CONFIGURATION_FILE));
// Test LocalConfigurationProvider scheduled
testCommon(false);
// Recover for recover for local file default.
fs.copyFromLocalFile(new Path(workingPathRecover.toString()
+ "/" + YarnConfiguration.CORE_SITE_CONFIGURATION_FILE),
new Path(workingPath.toString()
+ "/" + YarnConfiguration.CORE_SITE_CONFIGURATION_FILE));
fs.copyFromLocalFile(new Path(workingPathRecover.toString()
+ "/" + YarnConfiguration.YARN_SITE_CONFIGURATION_FILE),
new Path(workingPath.toString()
+ "/" + YarnConfiguration.YARN_SITE_CONFIGURATION_FILE));
fs.copyFromLocalFile(new Path(workingPathRecover.toString()
+ "/" + YarnConfiguration.CS_CONFIGURATION_FILE),
new Path(workingPath.toString()
+ "/" + YarnConfiguration.CS_CONFIGURATION_FILE));
fs.delete(workingPathRecover, true);
}
public void testCommon(Boolean isFileSystemBased) throws Exception {
// Set auto refresh interval to 1s
configuration.setLong(CapacitySchedulerConfiguration.
QUEUE_AUTO_REFRESH_MONITORING_INTERVAL,
1000L);
if (isFileSystemBased) {
configuration.set(YarnConfiguration.FS_BASED_RM_CONF_STORE,
fileSystemWorkingPath.toString());
}
//upload default configurations
uploadDefaultConfiguration(isFileSystemBased);
if (isFileSystemBased) {
configuration.set(YarnConfiguration.RM_CONFIGURATION_PROVIDER_CLASS,
FileSystemBasedConfigurationProvider.class.getCanonicalName());
} else {
configuration.set(YarnConfiguration.RM_CONFIGURATION_PROVIDER_CLASS,
LocalConfigurationProvider.class.getCanonicalName());
}
// upload the auto refresh related configurations
uploadConfiguration(isFileSystemBased,
configuration, "yarn-site.xml");
uploadConfiguration(isFileSystemBased,
configuration, "capacity-scheduler.xml");
rm = new MockRM(configuration);
rm.init(configuration);
policy.init(configuration,
rm.getRMContext(),
rm.getResourceScheduler());
rm.start();
CapacityScheduler cs =
(CapacityScheduler) rm.getRMContext().getScheduler();
int maxAppsBefore = cs.getConfiguration().getMaximumSystemApplications();
CapacitySchedulerConfiguration csConf =
new CapacitySchedulerConfiguration();
csConf.setInt(CapacitySchedulerConfiguration.MAXIMUM_SYSTEM_APPLICATIONS,
5000);
uploadConfiguration(isFileSystemBased,
csConf, "capacity-scheduler.xml");
// Refreshed first time.
policy.editSchedule();
// Make sure refresh successfully.
assertFalse(policy.getLastReloadAttemptFailed());
long oldModified = policy.getLastModified();
long oldSuccess = policy.getLastReloadAttempt();
assertTrue(oldSuccess > oldModified);
int maxAppsAfter = cs.getConfiguration().getMaximumSystemApplications();
assertEquals(maxAppsAfter, 5000);
assertTrue(maxAppsAfter != maxAppsBefore);
// Trigger interval for refresh.
GenericTestUtils.waitFor(() -> (policy.getClock().getTime() -
policy.getLastReloadAttempt()) / 1000 > 1,
500, 3000);
// Upload for modified.
csConf.setInt(CapacitySchedulerConfiguration.MAXIMUM_SYSTEM_APPLICATIONS,
3000);
uploadConfiguration(isFileSystemBased,
csConf, "capacity-scheduler.xml");
policy.editSchedule();
// Wait for triggered refresh.
GenericTestUtils.waitFor(() -> policy.getLastReloadAttempt() >
policy.getLastModified(),
500, 3000);
// Make sure refresh successfully.
assertFalse(policy.getLastReloadAttemptFailed());
oldModified = policy.getLastModified();
oldSuccess = policy.getLastReloadAttempt();
assertTrue(oldSuccess > oldModified);
assertEquals(cs.getConfiguration().
getMaximumSystemApplications(), 3000);
// Trigger interval for refresh.
GenericTestUtils.waitFor(() -> (policy.getClock().getTime() -
policy.getLastReloadAttempt()) / 1000 > 1,
500, 3000);
// Without modified
policy.editSchedule();
assertEquals(oldModified,
policy.getLastModified());
assertEquals(oldSuccess,
policy.getLastReloadAttempt());
}
@AfterEach
public void tearDown() throws IOException {
if (rm != null) {
rm.stop();
}
fs.delete(fileSystemWorkingPath, true);
fs.delete(tmpDir, true);
}
}
| TestQueueConfigurationAutoRefreshPolicy |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/EmptyArrayAsNullTest.java | {
"start": 410,
"end": 477
} | class ____ {
public Value value;
}
public static | Model |
java | apache__camel | components/camel-mllp/src/main/java/org/apache/camel/component/mllp/internal/TcpServerBindThread.java | {
"start": 1667,
"end": 6502
} | class ____ extends Thread {
private final Logger log = LoggerFactory.getLogger(this.getClass());
private final MllpTcpServerConsumer consumer;
private final SSLContextParameters sslContextParameters;
public TcpServerBindThread(MllpTcpServerConsumer consumer, final SSLContextParameters sslParams) {
this.consumer = consumer;
this.sslContextParameters = sslParams;
// Get the URI without options
String fullEndpointKey = consumer.getEndpoint().getEndpointKey();
String endpointKey = StringHelper.before(fullEndpointKey, "?", fullEndpointKey);
this.setName(String.format("%s - %s", this.getClass().getSimpleName(), endpointKey));
}
/**
* Bind the TCP ServerSocket within the specified timeout.
*/
@Override
public void run() {
MDC.put(UnitOfWork.MDC_CAMEL_CONTEXT_ID, consumer.getEndpoint().getCamelContext().getName());
Route route = consumer.getRoute();
if (route != null) {
String routeId = route.getId();
if (routeId != null) {
MDC.put(UnitOfWork.MDC_ROUTE_ID, route.getId());
}
}
try {
// Note: this socket is going to be closed in the TcpServerAcceptThread instance
// launched by the consumer
ServerSocket serverSocket;
if (sslContextParameters != null) {
log.debug("Initializing SSLContextParameters");
SSLContext sslContext = sslContextParameters.createSSLContext(consumer.getEndpoint().getCamelContext());
SSLServerSocketFactory sslServerSocketFactory = sslContext.getServerSocketFactory();
serverSocket = sslServerSocketFactory.createServerSocket();
} else {
serverSocket = new ServerSocket();
}
InetSocketAddress socketAddress = setupSocket(serverSocket);
log.debug("Attempting to bind to {}", socketAddress);
doAccept(serverSocket, socketAddress);
} catch (IOException ioEx) {
log.error("Unexpected exception encountered initializing ServerSocket before attempting to bind", ioEx);
} catch (GeneralSecurityException e) {
log.error("Error creating SSLContext for secure server socket", e);
throw new RuntimeException("SSLContext initialization failed", e);
}
}
private void doAccept(ServerSocket serverSocket, InetSocketAddress socketAddress) {
BlockingTask task = Tasks.foregroundTask()
.withBudget(Budgets.iterationTimeBudget()
.withMaxDuration(Duration.ofMillis(consumer.getConfiguration().getBindTimeout()))
.withInterval(Duration.ofMillis(consumer.getConfiguration().getBindRetryInterval()))
.build())
.withName("mllp-tcp-server-accept")
.build();
if (task.run(consumer.getEndpoint().getCamelContext(), () -> doBind(serverSocket, socketAddress))) {
consumer.startAcceptThread(serverSocket);
} else {
log.error("Failed to bind to address {} within timeout {}", socketAddress,
consumer.getConfiguration().getBindTimeout());
}
}
private boolean doBind(ServerSocket serverSocket, InetSocketAddress socketAddress) {
try {
if (consumer.getConfiguration().hasBacklog()) {
serverSocket.bind(socketAddress, consumer.getConfiguration().getBacklog());
} else {
serverSocket.bind(socketAddress);
}
return true;
} catch (IOException e) {
log.warn("Failed to bind to address {} - retrying in {} milliseconds", socketAddress,
consumer.getConfiguration().getBindRetryInterval());
return false;
}
}
private InetSocketAddress setupSocket(ServerSocket serverSocket) throws SocketException {
if (consumer.getConfiguration().hasReceiveBufferSize()) {
serverSocket.setReceiveBufferSize(consumer.getConfiguration().getReceiveBufferSize());
}
if (consumer.getConfiguration().hasReuseAddress()) {
serverSocket.setReuseAddress(consumer.getConfiguration().getReuseAddress());
}
// Accept Timeout
serverSocket.setSoTimeout(consumer.getConfiguration().getAcceptTimeout());
InetSocketAddress socketAddress;
if (null == consumer.getEndpoint().getHostname()) {
socketAddress = new InetSocketAddress(consumer.getEndpoint().getPort());
} else {
socketAddress = new InetSocketAddress(consumer.getEndpoint().getHostname(), consumer.getEndpoint().getPort());
}
return socketAddress;
}
}
| TcpServerBindThread |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/query/criteria/internal/AbstractAtomicExpression.java | {
"start": 500,
"end": 668
} | class ____ all expression types which are atomic (i.e. expressions
* which are not composed of one or more other expressions). For those expression
* types which base | for |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java | {
"start": 18786,
"end": 19072
} | class ____ {
protected String s1;
protected static String s2;
public Super(String s1) {
this.s1 = s1;
}
}
""")
.addSourceLines(
"Sub.java",
"""
| Super |
java | netty__netty | buffer/src/test/java/io/netty/buffer/BigEndianHeapByteBufTest.java | {
"start": 927,
"end": 1798
} | class ____ extends AbstractByteBufTest {
@Override
protected ByteBuf newBuffer(int length, int maxCapacity) {
ByteBuf buffer = Unpooled.buffer(length, maxCapacity);
assertEquals(0, buffer.writerIndex());
return buffer;
}
@Test
public void shouldNotAllowNullInConstructor1() {
assertThrows(NullPointerException.class, new Executable() {
@Override
public void execute() {
new UnpooledHeapByteBuf(null, new byte[1], 0);
}
});
}
@Test
public void shouldNotAllowNullInConstructor2() {
assertThrows(NullPointerException.class, new Executable() {
@Override
public void execute() {
new UnpooledHeapByteBuf(UnpooledByteBufAllocator.DEFAULT, null, 0);
}
});
}
}
| BigEndianHeapByteBufTest |
java | google__guava | android/guava/src/com/google/common/reflect/TypeToken.java | {
"start": 26123,
"end": 27785
} | class ____ extends ForwardingSet<TypeToken<? super T>> implements Serializable {
private transient @Nullable ImmutableSet<TypeToken<? super T>> types;
TypeSet() {}
/** Returns the types that are interfaces implemented by this type. */
public TypeSet interfaces() {
return new InterfaceSet(this);
}
/** Returns the types that are classes. */
public TypeSet classes() {
return new ClassSet();
}
@Override
protected Set<TypeToken<? super T>> delegate() {
ImmutableSet<TypeToken<? super T>> filteredTypes = types;
if (filteredTypes == null) {
// Java has no way to express ? super T when we parameterize TypeToken vs. Class.
@SuppressWarnings({"unchecked", "rawtypes"})
ImmutableList<TypeToken<? super T>> collectedTypes =
(ImmutableList) TypeCollector.FOR_GENERIC_TYPE.collectTypes(TypeToken.this);
return (types =
FluentIterable.from(collectedTypes)
.filter(TypeFilter.IGNORE_TYPE_VARIABLE_OR_WILDCARD)
.toSet());
} else {
return filteredTypes;
}
}
/** Returns the raw types of the types in this set, in the same order. */
public Set<Class<? super T>> rawTypes() {
// Java has no way to express ? super T when we parameterize TypeToken vs. Class.
@SuppressWarnings({"unchecked", "rawtypes"})
ImmutableList<Class<? super T>> collectedTypes =
(ImmutableList) TypeCollector.FOR_RAW_TYPE.collectTypes(getRawTypes());
return ImmutableSet.copyOf(collectedTypes);
}
private static final long serialVersionUID = 0;
}
private final | TypeSet |
java | mockito__mockito | mockito-integration-tests/extensions-tests/src/test/java/org/mockitousage/plugins/resolver/MockResolverTest.java | {
"start": 570,
"end": 927
} | class ____ {
@Test
void mock_resolver_can_unwrap_mocked_instance() {
Foo mock = mock(Foo.class), wrapper = new MockWrapper(mock);
when(wrapper.doIt()).thenReturn(123);
assertThat(mock.doIt()).isEqualTo(123);
assertThat(wrapper.doIt()).isEqualTo(123);
verify(wrapper, times(2)).doIt();
}
| MockResolverTest |
java | spring-projects__spring-boot | module/spring-boot-data-commons/src/test/java/org/springframework/boot/data/autoconfigure/metrics/MetricsRepositoryMethodInvocationListenerBeanPostProcessorTests.java | {
"start": 1419,
"end": 2786
} | class ____ {
private final MetricsRepositoryMethodInvocationListener listener = mock(
MetricsRepositoryMethodInvocationListener.class);
private final MetricsRepositoryMethodInvocationListenerBeanPostProcessor postProcessor = new MetricsRepositoryMethodInvocationListenerBeanPostProcessor(
SingletonSupplier.of(this.listener));
@Test
@SuppressWarnings("rawtypes")
void postProcessBeforeInitializationWhenRepositoryFactoryBeanSupportAddsListener() {
RepositoryFactoryBeanSupport bean = mock(RepositoryFactoryBeanSupport.class);
Object result = this.postProcessor.postProcessBeforeInitialization(bean, "name");
assertThat(result).isSameAs(bean);
ArgumentCaptor<RepositoryFactoryCustomizer> customizer = ArgumentCaptor
.forClass(RepositoryFactoryCustomizer.class);
then(bean).should().addRepositoryFactoryCustomizer(customizer.capture());
RepositoryFactorySupport repositoryFactory = mock(RepositoryFactorySupport.class);
customizer.getValue().customize(repositoryFactory);
then(repositoryFactory).should().addInvocationListener(this.listener);
}
@Test
void postProcessBeforeInitializationWhenOtherBeanDoesNothing() {
Object bean = new Object();
Object result = this.postProcessor.postProcessBeforeInitialization(bean, "name");
assertThat(result).isSameAs(bean);
}
}
| MetricsRepositoryMethodInvocationListenerBeanPostProcessorTests |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java | {
"start": 10970,
"end": 13368
} | class ____ extends ParserRuleContext {
public QueryContext query() {
return getRuleContext(QueryContext.class, 0);
}
public List<PipeContext> pipe() {
return getRuleContexts(PipeContext.class);
}
public PipeContext pipe(int i) {
return getRuleContext(PipeContext.class, i);
}
public StatementContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override
public int getRuleIndex() {
return RULE_statement;
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).enterStatement(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).exitStatement(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof EqlBaseVisitor) return ((EqlBaseVisitor<? extends T>) visitor).visitStatement(this);
else return visitor.visitChildren(this);
}
}
public final StatementContext statement() throws RecognitionException {
StatementContext _localctx = new StatementContext(_ctx, getState());
enterRule(_localctx, 4, RULE_statement);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(70);
query();
setState(74);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la == PIPE) {
{
{
setState(71);
pipe();
}
}
setState(76);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | StatementContext |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/spi-deployment/src/main/java/io/quarkus/resteasy/reactive/server/spi/NonBlockingReturnTypeBuildItem.java | {
"start": 374,
"end": 621
} | class ____ extends MultiBuildItem {
private final DotName type;
public NonBlockingReturnTypeBuildItem(DotName type) {
this.type = type;
}
public DotName getType() {
return type;
}
}
| NonBlockingReturnTypeBuildItem |
java | apache__camel | components/camel-disruptor/src/test/java/org/apache/camel/component/disruptor/DisruptorReconfigureWithBlockingProducerTest.java | {
"start": 3087,
"end": 4286
} | class ____ extends Thread {
private final CountDownLatch startedLatch = new CountDownLatch(1);
private final CountDownLatch resultLatch = new CountDownLatch(1);
private Exception exception;
@Override
public void run() {
for (int i = 0; i < 8; i++) {
template.sendBody("disruptor:foo", "Message");
}
startedLatch.countDown();
try {
for (int i = 0; i < 12; i++) {
template.sendBody("disruptor:foo", "Message");
}
} catch (Exception e) {
exception = e;
}
resultLatch.countDown();
}
public boolean awaitFullBufferProduced() throws InterruptedException {
return startedLatch.await(5, TimeUnit.SECONDS);
}
public boolean checkResult() throws Exception {
if (exception != null) {
throw exception;
}
boolean result = resultLatch.await(5, TimeUnit.SECONDS);
if (exception != null) {
throw exception;
}
return result;
}
}
}
| ProducerThread |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/mixins/MixinJSONTypeTest.java | {
"start": 1971,
"end": 2576
} | class ____ {
private String userName;
private String sex;
private int age;
public User(String userName, String sex, int age) {
this.userName = userName;
this.sex = sex;
this.age = age;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getSex() {
return sex;
}
public void setSex(String sex) {
this.sex = sex;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
}
@JSONType(orders = { "age", "userName", "sex" })
| User |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializerCompatibilityTest.java | {
"start": 6341,
"end": 10370
} | class ____ {
public final List<Integer> array;
FakeClass(List<Integer> array) {
this.array = array;
}
}
{
SerializerConfigImpl serializerConfigImpl = new SerializerConfigImpl();
KryoSerializer<FakeClass> kryoSerializer =
new KryoSerializer<>(FakeClass.class, serializerConfigImpl);
try (FileInputStream f =
new FileInputStream(
"src/test/resources/type-without-avro-serialized-using-kryo");
DataInputViewStreamWrapper inputView = new DataInputViewStreamWrapper(f)) {
FakeClass myTestClass = kryoSerializer.deserialize(inputView);
assertThat(myTestClass.array).hasSize(3).containsExactly(10, 20, 30);
}
}
}
/**
* Tests that after reconfiguration, registration ids are reconfigured to remain the same as the
* preceding KryoSerializer.
*/
@Test
void testMigrationStrategyForDifferentRegistrationOrder() throws Exception {
SerializerConfigImpl serializerConfigImpl = new SerializerConfigImpl();
serializerConfigImpl.registerKryoType(TestClassA.class);
serializerConfigImpl.registerKryoType(TestClassB.class);
KryoSerializer<TestClass> kryoSerializer =
new KryoSerializer<>(TestClass.class, serializerConfigImpl);
// get original registration ids
int testClassId = kryoSerializer.getKryo().getRegistration(TestClass.class).getId();
int testClassAId = kryoSerializer.getKryo().getRegistration(TestClassA.class).getId();
int testClassBId = kryoSerializer.getKryo().getRegistration(TestClassB.class).getId();
// snapshot configuration and serialize to bytes
TypeSerializerSnapshot kryoSerializerConfigSnapshot =
kryoSerializer.snapshotConfiguration();
byte[] serializedConfig;
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
TypeSerializerSnapshotSerializationUtil.writeSerializerSnapshot(
new DataOutputViewStreamWrapper(out), kryoSerializerConfigSnapshot);
serializedConfig = out.toByteArray();
}
// use new config and instantiate new KryoSerializer
serializerConfigImpl = new SerializerConfigImpl();
serializerConfigImpl.registerKryoType(TestClassB.class); // test with B registered before A
serializerConfigImpl.registerKryoType(TestClassA.class);
kryoSerializer = new KryoSerializer<>(TestClass.class, serializerConfigImpl);
// read configuration from bytes
try (ByteArrayInputStream in = new ByteArrayInputStream(serializedConfig)) {
kryoSerializerConfigSnapshot =
TypeSerializerSnapshotSerializationUtil.readSerializerSnapshot(
new DataInputViewStreamWrapper(in),
Thread.currentThread().getContextClassLoader());
}
// reconfigure - check reconfiguration result and that registration id remains the same
@SuppressWarnings("unchecked")
TypeSerializerSchemaCompatibility<TestClass> compatResult =
kryoSerializer
.snapshotConfiguration()
.resolveSchemaCompatibility(kryoSerializerConfigSnapshot);
assertThat(compatResult.isCompatibleWithReconfiguredSerializer()).isTrue();
kryoSerializer = (KryoSerializer<TestClass>) compatResult.getReconfiguredSerializer();
assertThat(kryoSerializer.getKryo().getRegistration(TestClass.class).getId())
.isEqualTo(testClassId);
assertThat(kryoSerializer.getKryo().getRegistration(TestClassA.class).getId())
.isEqualTo(testClassAId);
assertThat(kryoSerializer.getKryo().getRegistration(TestClassB.class).getId())
.isEqualTo(testClassBId);
}
private static | FakeClass |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/ReturnFromVoidTest.java | {
"start": 1558,
"end": 1881
} | interface ____ {
/** */
void foo();
}
""")
.doTest(TestMode.TEXT_MATCH);
}
@Test
public void negative() {
CompilationTestHelper.newInstance(ReturnFromVoid.class, getClass())
.addSourceLines(
"Test.java",
"""
| Test |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/service/notify/AsyncNotifyService.java | {
"start": 8548,
"end": 8897
} | class ____ implements Runnable {
private Queue<NotifySingleRpcTask> queue;
public AsyncRpcTask(Queue<NotifySingleRpcTask> queue) {
this.queue = queue;
}
@Override
public void run() {
executeAsyncRpcTask(queue);
}
}
public static | AsyncRpcTask |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/utils/DynamicPartitionPruningUtils.java | {
"start": 5542,
"end": 11189
} | class ____ {
private final RelNode relNode;
private boolean hasFilter;
private boolean hasPartitionedScan;
private final Map<ObjectIdentifier, ContextResolvedTable> tables = new HashMap<>();
public DppDimSideChecker(RelNode relNode) {
this.relNode = relNode;
}
public boolean isDppDimSide() {
visitDimSide(this.relNode);
return hasFilter && !hasPartitionedScan && tables.size() == 1;
}
/**
* Visit dim side to judge whether dim side has filter condition and whether dim side's
* source table scan is non partitioned scan.
*/
private void visitDimSide(RelNode rel) {
// TODO Let visitDimSide more efficient and more accurate. Like a filter on dim table or
// a filter for the partition field on fact table.
if (rel instanceof TableScan) {
TableScan scan = (TableScan) rel;
TableSourceTable table = scan.getTable().unwrap(TableSourceTable.class);
if (table == null) {
return;
}
if (!hasFilter
&& table.abilitySpecs() != null
&& table.abilitySpecs().length != 0) {
for (SourceAbilitySpec spec : table.abilitySpecs()) {
if (spec instanceof FilterPushDownSpec) {
List<RexNode> predicates = ((FilterPushDownSpec) spec).getPredicates();
for (RexNode predicate : predicates) {
if (isSuitableFilter(predicate)) {
hasFilter = true;
}
}
}
}
}
CatalogTable catalogTable = table.contextResolvedTable().getResolvedTable();
if (catalogTable.isPartitioned()) {
hasPartitionedScan = true;
return;
}
// To ensure there is only one source on the dim side.
setTables(table.contextResolvedTable());
} else if (rel instanceof HepRelVertex) {
visitDimSide(((HepRelVertex) rel).getCurrentRel());
} else if (rel instanceof Exchange || rel instanceof Project) {
visitDimSide(rel.getInput(0));
} else if (rel instanceof Calc) {
RexProgram origProgram = ((Calc) rel).getProgram();
if (origProgram.getCondition() != null
&& isSuitableFilter(
origProgram.expandLocalRef(origProgram.getCondition()))) {
hasFilter = true;
}
visitDimSide(rel.getInput(0));
} else if (rel instanceof Filter) {
if (isSuitableFilter(((Filter) rel).getCondition())) {
hasFilter = true;
}
visitDimSide(rel.getInput(0));
} else if (rel instanceof Join) {
Join join = (Join) rel;
visitDimSide(join.getLeft());
visitDimSide(join.getRight());
} else if (rel instanceof BatchPhysicalGroupAggregateBase) {
visitDimSide(((BatchPhysicalGroupAggregateBase) rel).getInput());
} else if (rel instanceof Union) {
Union union = (Union) rel;
for (RelNode input : union.getInputs()) {
visitDimSide(input);
}
}
}
/**
* Not all filter condition suitable for using to filter partitions by dynamic partition
* pruning rules. For example, NOT NULL can only filter one default partition which have a
* small impact on filtering data.
*/
private static boolean isSuitableFilter(RexNode filterCondition) {
switch (filterCondition.getKind()) {
case AND:
List<RexNode> conjunctions = RelOptUtil.conjunctions(filterCondition);
return isSuitableFilter(conjunctions.get(0))
|| isSuitableFilter(conjunctions.get(1));
case OR:
List<RexNode> disjunctions = RelOptUtil.disjunctions(filterCondition);
return isSuitableFilter(disjunctions.get(0))
&& isSuitableFilter(disjunctions.get(1));
case NOT:
return isSuitableFilter(((RexCall) filterCondition).operands.get(0));
case EQUALS:
case GREATER_THAN:
case GREATER_THAN_OR_EQUAL:
case LESS_THAN:
case LESS_THAN_OR_EQUAL:
case NOT_EQUALS:
case IN:
case LIKE:
case CONTAINS:
case SEARCH:
case IS_FALSE:
case IS_NOT_FALSE:
case IS_NOT_TRUE:
case IS_TRUE:
// TODO adding more suitable filters which can filter enough partitions after
// using this filter in dynamic partition pruning.
return true;
default:
return false;
}
}
private void setTables(ContextResolvedTable catalogTable) {
ObjectIdentifier identifier = catalogTable.getIdentifier();
tables.putIfAbsent(identifier, catalogTable);
}
}
/** This | DppDimSideChecker |
java | quarkusio__quarkus | independent-projects/qute/generator/src/test/java/io/quarkus/qute/generator/hierarchy/FirstLevel.java | {
"start": 53,
"end": 135
} | interface ____ {
default int firstLevel() {
return 1;
}
}
| FirstLevel |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/resource/bitmap/DefaultImageHeaderParserTest.java | {
"start": 899,
"end": 30996
} | class ____ {
private static final byte[] PNG_HEADER_WITH_IHDR_CHUNK =
new byte[] {
(byte) 0x89,
0x50,
0x4e,
0x47,
0xd,
0xa,
0x1a,
0xa,
0x0,
0x0,
0x0,
0xd,
0x49,
0x48,
0x44,
0x52,
0x0,
0x0,
0x1,
(byte) 0x90,
0x0,
0x0,
0x1,
0x2c,
0x8,
0x6
};
private ArrayPool byteArrayPool;
@Before
public void setUp() {
byteArrayPool = new LruArrayPool();
}
@Test
public void testCanParsePngType() throws IOException {
// PNG magic number from: http://en.wikipedia.org/wiki/Portable_Network_Graphics.
byte[] data = new byte[] {(byte) 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a};
runTest(
data,
new ParserTestCase() {
@Override
public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.PNG, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.PNG, parser.getType(byteBuffer));
}
});
}
@Test
public void testCanParsePngWithAlpha() throws IOException {
for (int i = 3; i <= 6; i++) {
byte[] pngHeaderWithIhdrChunk = generatePngHeaderWithIhdr(i);
runTest(
pngHeaderWithIhdrChunk,
new ParserTestCase() {
@Override
public void run(
DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.PNG_A, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.PNG_A, parser.getType(byteBuffer));
}
});
}
}
@Test
public void testCanParsePngWithoutAlpha() throws IOException {
for (int i = 0; i < 3; i++) {
byte[] pngHeaderWithIhdrChunk = generatePngHeaderWithIhdr(i);
runTest(
pngHeaderWithIhdrChunk,
new ParserTestCase() {
@Override
public void run(
DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.PNG, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.PNG, parser.getType(byteBuffer));
}
});
}
}
@Test
public void testCanParseJpegType() throws IOException {
byte[] data = new byte[] {(byte) 0xFF, (byte) 0xD8};
runTest(
data,
new ParserTestCase() {
@Override
public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.JPEG, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.JPEG, parser.getType(byteBuffer));
}
});
}
@Test
public void testCanParseGifType() throws IOException {
byte[] data = new byte[] {'G', 'I', 'F'};
runTest(
data,
new ParserTestCase() {
@Override
public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.GIF, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.GIF, parser.getType(byteBuffer));
}
});
}
@Test
public void testCanParseLosslessWebpWithAlpha() throws IOException {
byte[] data =
new byte[] {
0x52,
0x49,
0x46,
0x46,
0x3c,
0x50,
0x00,
0x00,
0x57,
0x45,
0x42,
0x50,
0x56,
0x50,
0x38,
0x4c, // Lossless
0x30,
0x50,
0x00,
0x00,
0x2f, // Flags
(byte) 0xef,
(byte) 0x80,
0x15,
0x10,
(byte) 0x8d,
0x30,
0x68,
0x1b,
(byte) 0xc9,
(byte) 0x91,
(byte) 0xb2
};
runTest(
data,
new ParserTestCase() {
@Override
public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.WEBP_A, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.WEBP_A, parser.getType(byteBuffer));
}
});
}
@Test
public void testCanParseLosslessWebpWithoutAlpha() throws IOException {
byte[] data =
new byte[] {
0x52,
0x49,
0x46,
0x46,
0x3c,
0x50,
0x00,
0x00,
0x57,
0x45,
0x42,
0x50,
0x56,
0x50,
0x38,
0x4c, // Lossless
0x30,
0x50,
0x00,
0x00,
0x00, // Flags
(byte) 0xef,
(byte) 0x80,
0x15,
0x10,
(byte) 0x8d,
0x30,
0x68,
0x1b,
(byte) 0xc9,
(byte) 0x91,
(byte) 0xb2
};
runTest(
data,
new ParserTestCase() {
@Override
public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.WEBP, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.WEBP, parser.getType(byteBuffer));
}
});
}
@Test
public void testCanParseExtendedWebpWithAlpha() throws IOException {
byte[] data =
new byte[] {
0x52,
0x49,
0x46,
0x46,
0x3c,
0x50,
0x00,
0x00,
0x57,
0x45,
0x42,
0x50,
0x56,
0x50,
0x38,
0x58, // Extended
0x30,
0x50,
0x00,
0x00,
0x10, // flags
(byte) 0xef,
(byte) 0x80,
0x15,
0x10,
(byte) 0x8d,
0x30,
0x68,
0x1b,
(byte) 0xc9,
(byte) 0x91,
(byte) 0xb2
};
runTest(
data,
new ParserTestCase() {
@Override
public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.WEBP_A, parser.getType(is));
}
@Override
public void run(
DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException {
assertEquals(ImageType.WEBP_A, parser.getType(byteBuffer));
}
});
}
// A "VP8X" (extended) WebP with all flag bits cleared must be reported as plain
// WEBP (no alpha, no animation).
@Test
public void testCanParseExtendedWebpWithoutAlpha() throws IOException {
  byte[] data =
      new byte[] {
        // "RIFF" fourcc plus little-endian riff chunk size.
        0x52, 0x49, 0x46, 0x46, 0x3c, 0x50, 0x00, 0x00,
        // "WEBP" fourcc.
        0x57, 0x45, 0x42, 0x50,
        // "VP8X" fourcc - the extended format chunk.
        0x56, 0x50, 0x38, 0x58,
        // Chunk payload; the fifth payload byte carries the flags, all cleared.
        0x30, 0x50, 0x00, 0x00, 0x00,
        (byte) 0xef, (byte) 0x80, 0x15, 0x10, (byte) 0x8d, 0x30, 0x68, 0x1b,
        (byte) 0xc9, (byte) 0x91, (byte) 0xb2,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.WEBP, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.WEBP, parser.getType(byteBuffer));
        }
      });
}
// A "VP8X" (extended) WebP with the animation bit (0x02) set and alpha cleared
// must be reported as ANIMATED_WEBP.
@Test
public void testCanParseExtendedWebpWithoutAlphaAndWithAnimation() throws IOException {
  byte[] data =
      new byte[] {
        // "RIFF" fourcc plus little-endian riff chunk size.
        0x52, 0x49, 0x46, 0x46, 0x3c, 0x50, 0x00, 0x00,
        // "WEBP" fourcc.
        0x57, 0x45, 0x42, 0x50,
        // "VP8X" fourcc - the extended format chunk.
        0x56, 0x50, 0x38, 0x58,
        // Chunk payload; the fifth payload byte carries the flags, animation only.
        0x30, 0x50, 0x00, 0x00, 0x02,
        (byte) 0xef, (byte) 0x80, 0x15, 0x10, (byte) 0x8d, 0x30, 0x68, 0x1b,
        (byte) 0xc9, (byte) 0x91, (byte) 0xb2,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_WEBP, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_WEBP, parser.getType(byteBuffer));
        }
      });
}
// A "VP8X" (extended) WebP with both the alpha (0x10) and animation (0x02) bits
// set must be reported as ANIMATED_WEBP (animation wins over alpha).
@Test
public void testCanParseExtendedWebpWithAlphaAndAnimation() throws IOException {
  byte[] data =
      new byte[] {
        // "RIFF" fourcc plus little-endian riff chunk size.
        0x52, 0x49, 0x46, 0x46, 0x3c, 0x50, 0x00, 0x00,
        // "WEBP" fourcc.
        0x57, 0x45, 0x42, 0x50,
        // "VP8X" fourcc - the extended format chunk.
        0x56, 0x50, 0x38, 0x58,
        // Chunk payload; the fifth payload byte carries the flags, alpha + animation.
        0x30, 0x50, 0x00, 0x00, 0x12,
        (byte) 0xef, (byte) 0x80, 0x15, 0x10, (byte) 0x8d, 0x30, 0x68, 0x1b,
        (byte) 0xc9, (byte) 0x91, (byte) 0xb2,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_WEBP, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_WEBP, parser.getType(byteBuffer));
        }
      });
}
// Sanity check against a real animated WebP resource rather than a hand-built header.
@Test
public void testCanParseRealAnimatedWebpFile() throws IOException {
  runTest(
      ByteStreams.toByteArray(TestResourceUtil.openResource(getClass(), "animated_webp.webp")),
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertThat(parser.getType(is)).isEqualTo(ImageType.ANIMATED_WEBP);
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertThat(parser.getType(byteBuffer)).isEqualTo(ImageType.ANIMATED_WEBP);
        }
      });
}
// An ISO-BMFF 'ftyp' box with major brand 'avif' parses as AVIF; rewriting the
// major brand to 'avis' switches the result to ANIMATED_AVIF.
@Test
public void testCanParseAvifMajorBrand() throws IOException {
  byte[] data =
      new byte[] {
        // Box size (0x1C) followed by 'ftyp'.
        0x00, 0x00, 0x00, 0x1C, 0x66, 0x74, 0x79, 0x70,
        // Major brand 'avif' and minor version 0.
        0x61, 0x76, 0x69, 0x66, 0x00, 0x00, 0x00, 0x00,
        // Compatible brands: 'mif1', 'miaf', 'MA1B'.
        0x6d, 0x69, 0x66, 0x31, 0x6d, 0x69, 0x61, 0x66, 0x4d, 0x41, 0x31, 0x42,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.AVIF, parser.getType(byteBuffer));
        }
      });
  // Flip the major brand's last character: 'avif' -> 'avis' ('s' == 0x73).
  data[11] = 0x73;
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(byteBuffer));
        }
      });
}
// An 'ftyp' box with a non-avif major brand ('mif1') but 'avif' among the
// compatible brands parses as AVIF; rewriting the last compatible brand to
// 'avis' switches the result to ANIMATED_AVIF.
@Test
public void testCanParseAvifMinorBrand() throws IOException {
  byte[] data =
      new byte[] {
        // Box size (0x1C) followed by 'ftyp'.
        0x00, 0x00, 0x00, 0x1C, 0x66, 0x74, 0x79, 0x70,
        // Major brand 'mif1' and minor version 0.
        0x6d, 0x69, 0x66, 0x31, 0x00, 0x00, 0x00, 0x00,
        // Compatible brands: 'miaf', 'avif', 'MA1B'.
        0x6d, 0x69, 0x61, 0x66, 0x61, 0x76, 0x69, 0x66, 0x4d, 0x41, 0x31, 0x42,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.AVIF, parser.getType(byteBuffer));
        }
      });
  // Overwrite the last compatible brand: 'MA1B' -> 'avis'.
  data[24] = 0x61;
  data[25] = 0x76;
  data[26] = 0x69;
  data[27] = 0x73;
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(byteBuffer));
        }
      });
}
// 'avis' as either the major brand or a compatible brand (alongside 'avif')
// must be reported as ANIMATED_AVIF in both arrangements.
@Test
public void testCanParseAvifAndAvisBrandsAsAnimatedAvif() throws IOException {
  byte[] data =
      new byte[] {
        // Box size (0x1C) followed by 'ftyp'.
        0x00, 0x00, 0x00, 0x1C, 0x66, 0x74, 0x79, 0x70,
        // Major brand 'avis' and minor version 0.
        0x61, 0x76, 0x69, 0x73, 0x00, 0x00, 0x00, 0x00,
        // Compatible brands: 'miaf', 'avif', 'MA1B'.
        0x6d, 0x69, 0x61, 0x66, 0x61, 0x76, 0x69, 0x66, 0x4d, 0x41, 0x31, 0x42,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(byteBuffer));
        }
      });
  // Swap the roles: major brand becomes 'avif', compatible brand 'avif' becomes 'avis'.
  data[11] = 0x66;
  data[23] = 0x73;
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.ANIMATED_AVIF, parser.getType(byteBuffer));
        }
      });
}
// When 'avif' only appears as the sixth compatible brand, the parser does not
// report AVIF - per the test name it inspects at most five compatible brands.
@Test
public void testCannotParseAvifMoreThanFiveMinorBrands() throws IOException {
  byte[] data =
      new byte[] {
        // Box size (0x28) followed by 'ftyp'.
        0x00, 0x00, 0x00, 0x28, 0x66, 0x74, 0x79, 0x70,
        // Major brand 'mif1' and minor version 0.
        0x6d, 0x69, 0x66, 0x31, 0x00, 0x00, 0x00, 0x00,
        // Six compatible brands; 'avif' is only the sixth:
        // 'mif1', 'miaf', 'MA1B', 'mif1', 'miaf', 'avif'.
        0x6d, 0x69, 0x66, 0x31,
        0x6d, 0x69, 0x61, 0x66,
        0x4d, 0x41, 0x31, 0x42,
        0x6d, 0x69, 0x66, 0x31,
        0x6d, 0x69, 0x61, 0x66,
        0x61, 0x76, 0x69, 0x66,
      };
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertNotEquals(ImageType.AVIF, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertNotEquals(ImageType.AVIF, parser.getType(byteBuffer));
        }
      });
}
// Sanity check against a real animated AVIF resource rather than a hand-built box.
@Test
public void testCanParseRealAnimatedAvifFile() throws IOException {
  runTest(
      ByteStreams.toByteArray(TestResourceUtil.openResource(getClass(), "animated_avif.avif")),
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertThat(parser.getType(is)).isEqualTo(ImageType.ANIMATED_AVIF);
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertThat(parser.getType(byteBuffer)).isEqualTo(ImageType.ANIMATED_AVIF);
        }
      });
}
// Data that matches no known signature must yield ImageType.UNKNOWN, not throw.
@Test
public void testReturnsUnknownTypeForUnknownImageHeaders() throws IOException {
  byte[] allZeros = new byte[8];
  runTest(
      allZeros,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.UNKNOWN, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.UNKNOWN, parser.getType(byteBuffer));
        }
      });
}
// Test for #286: a JPEG whose EXIF segment is unusually short must produce an
// "unknown" orientation (-1) rather than failing.
@Test
public void testHandlesParsingOrientationWithMinimalExifSegment() throws IOException {
  byte[] data =
      ByteStreams.toByteArray(TestResourceUtil.openResource(getClass(), "short_exif_sample.jpg"));
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(-1, parser.getOrientation(is, byteArrayPool));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(-1, parser.getOrientation(byteBuffer, byteArrayPool));
        }
      });
}
// A zero-length input must yield ImageType.UNKNOWN rather than throwing.
@Test
public void testReturnsUnknownForEmptyData() throws IOException {
  byte[] empty = new byte[0];
  runTest(
      empty,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.UNKNOWN, parser.getType(is));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(ImageType.UNKNOWN, parser.getType(byteBuffer));
        }
      });
}
// Test for #387: a stream that returns fewer bytes than requested per read must
// not break orientation parsing.
@Test
public void testHandlesPartialReads() throws IOException {
  InputStream resource = TestResourceUtil.openResource(getClass(), "issue387_rotated_jpeg.jpg");
  DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
  assertThat(parser.getOrientation(new PartialReadInputStream(resource), byteArrayPool))
      .isEqualTo(6);
}
// Test for #387: a stream that skips fewer bytes than requested per skip must
// not break orientation parsing.
@Test
public void testHandlesPartialSkips() throws IOException {
  InputStream resource = TestResourceUtil.openResource(getClass(), "issue387_rotated_jpeg.jpg");
  DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
  assertThat(parser.getOrientation(new PartialSkipInputStream(resource), byteArrayPool))
      .isEqualTo(6);
}
// A stream whose skip() sometimes reports zero progress must still allow the
// PNG signature to be recognized.
@Test
public void testHandlesSometimesZeroSkips() throws IOException {
  // PNG signature bytes: \x89 'P' 'N' 'G' \r \n \x1a \n.
  byte[] pngHeader = new byte[] {(byte) 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a};
  InputStream wrapped =
      new SometimesZeroSkipInputStream(new ByteArrayInputStream(pngHeader));
  assertEquals(ImageType.PNG, new DefaultImageHeaderParser().getType(wrapped));
}
// An EXIF APP1 segment declaring a length (0xFFFF) far beyond the available
// bytes must produce UNKNOWN_ORIENTATION rather than over-reading.
@Test
public void getOrientation_withExifSegmentLessThanLength_returnsUnknown() throws IOException {
  ByteBuffer exifMagic = getExifMagicNumber();
  byte[] data =
      new byte[] {
        exifMagic.get(0),
        exifMagic.get(1),
        (byte) DefaultImageHeaderParser.SEGMENT_START_ID,
        (byte) DefaultImageHeaderParser.EXIF_SEGMENT_TYPE,
        // SEGMENT_LENGTH: 0xFFFF, intentionally larger than the remaining data.
        (byte) 0xFF,
        (byte) 0xFF,
      };
  DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
  assertEquals(
      ImageHeaderParser.UNKNOWN_ORIENTATION,
      parser.getOrientation(ByteBuffer.wrap(data), byteArrayPool));
}
// Same truncated-length scenario as above, but with a non-EXIF segment type
// (0xE5); the parser must still report UNKNOWN_ORIENTATION.
@Test
public void getOrientation_withNonExifSegmentLessThanLength_returnsUnknown() throws IOException {
  ByteBuffer exifMagic = getExifMagicNumber();
  byte[] data =
      new byte[] {
        exifMagic.get(0),
        exifMagic.get(1),
        (byte) DefaultImageHeaderParser.SEGMENT_START_ID,
        // A segment type that is deliberately NOT EXIF_SEGMENT_TYPE.
        (byte) 0xE5,
        // SEGMENT_LENGTH: 0xFFFF, intentionally larger than the remaining data.
        (byte) 0xFF,
        (byte) 0xFF,
      };
  DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
  assertEquals(
      ImageHeaderParser.UNKNOWN_ORIENTATION,
      parser.getOrientation(ByteBuffer.wrap(data), byteArrayPool));
}
// The EXIF segment carries a valid preamble but its declared length claims one
// more byte than is actually present; parsing must fall back to
// UNKNOWN_ORIENTATION instead of reading past the end.
@Test
public void getOrientation_withExifSegmentAndPreambleButLessThanLength_returnsUnknown()
    throws IOException {
  ByteBuffer exifMagic = getExifMagicNumber();
  byte[] preamble = DefaultImageHeaderParser.JPEG_EXIF_SEGMENT_PREAMBLE_BYTES;
  // Magic (2) + segment start (1) + segment type (1) + length short (2) + preamble.
  ByteBuffer data = ByteBuffer.allocate(2 + 1 + 1 + 2 + preamble.length);
  data.put(exifMagic)
      .put((byte) DefaultImageHeaderParser.SEGMENT_START_ID)
      .put((byte) DefaultImageHeaderParser.EXIF_SEGMENT_TYPE)
      // Declared length includes the two length bytes plus one extra byte that is
      // deliberately absent from the buffer.
      .putShort((short) (preamble.length + 2 + 1))
      .put(preamble);
  data.position(0);
  DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
  assertEquals(ImageHeaderParser.UNKNOWN_ORIENTATION, parser.getOrientation(data, byteArrayPool));
}
// Variant of the preceding test: the buffer is allocated with extra (zeroed)
// capacity beyond the preamble, so the data available sits between the declared
// segment length and what a full EXIF block would need; still UNKNOWN_ORIENTATION.
@Test
public void getOrientation_withExifSegmentAndPreambleBetweenLengthAndExpected_returnsUnknown()
    throws IOException {
  ByteBuffer exifMagic = getExifMagicNumber();
  byte[] preamble = DefaultImageHeaderParser.JPEG_EXIF_SEGMENT_PREAMBLE_BYTES;
  // Magic (2) + segment start (1) + segment type (1) + length short (2) + preamble,
  // plus three trailing zero bytes of slack.
  ByteBuffer data = ByteBuffer.allocate(2 + 1 + 1 + 2 + preamble.length + 2 + 1);
  data.put(exifMagic)
      .put((byte) DefaultImageHeaderParser.SEGMENT_START_ID)
      .put((byte) DefaultImageHeaderParser.EXIF_SEGMENT_TYPE)
      // Declared length includes the two length bytes plus one extra byte beyond the
      // preamble.
      .putShort((short) (preamble.length + 2 + 1))
      .put(preamble);
  data.position(0);
  DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
  assertEquals(ImageHeaderParser.UNKNOWN_ORIENTATION, parser.getOrientation(data, byteArrayPool));
}
// A JPEG with a gainmap (multi-picture format) resource must report that it
// contains an MPF segment.
@Test
public void hasJpegMpf_withGainmapFile_returnsTrue() throws IOException {
  byte[] data =
      ByteStreams.toByteArray(
          TestResourceUtil.openResource(getClass(), "small_gainmap_image.jpg"));
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(true, parser.hasJpegMpf(is, byteArrayPool));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(true, parser.hasJpegMpf(byteBuffer, byteArrayPool));
        }
      });
}
// A JPEG without a multi-picture segment must report no MPF data.
@Test
public void hasJpegMpf_withNonGainmapFile_returnsFalse() throws IOException {
  byte[] data =
      ByteStreams.toByteArray(TestResourceUtil.openResource(getClass(), "short_exif_sample.jpg"));
  runTest(
      data,
      new ParserTestCase() {
        @Override
        public void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(false, parser.hasJpegMpf(is, byteArrayPool));
        }

        @Override
        public void run(
            DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
            throws IOException {
          assertEquals(false, parser.hasJpegMpf(byteBuffer, byteArrayPool));
        }
      });
}
/** Returns a rewound two-byte buffer holding the JPEG/EXIF magic number. */
private static ByteBuffer getExifMagicNumber() {
  ByteBuffer buffer = ByteBuffer.allocate(2);
  buffer.putShort((short) DefaultImageHeaderParser.EXIF_MAGIC_NUMBER);
  // Reset so callers can read the two bytes from the start.
  buffer.position(0);
  return buffer;
}
private | DefaultImageHeaderParserTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_smoothrat2.java | {
"start": 715,
"end": 951
} | class ____ {
private java.sql.Time value;
public java.sql.Time getValue() {
return value;
}
public void setValue(java.sql.Time value) {
this.value = value;
}
}
}
| Entity |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterUnusedInFormalsTest.java | {
"start": 5272,
"end": 5606
} | class ____ {
// BUG: Diagnostic contains:
<T> T badMethod(String s) {
return (T) s;
}
}
""")
.doTest();
}
@Test
public void issue343() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_169_values.java | {
"start": 315,
"end": 1607
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT * FROM (VALUES (89), (35), (77)) EXCEPT SELECT * FROM (VALUES (33), (35), (60))";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT *\n" + "FROM (\n" + "\tVALUES (89),\n" + "\t(35),\n" + "\t(77)\n" + ")\n"
+ "EXCEPT\n" + "SELECT *\n" + "FROM (\n" + "\tVALUES (33),\n" + "\t(35),\n" + "\t(60)\n"
+ ")", stmt.toString());
}
public void test_2() throws Exception {
String sql = "SELECT * FROM (VALUES 89, 35, 77) EXCEPT SELECT * FROM (VALUES 33, 35, 60)";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT *\n" + "FROM (\n" + "\tVALUES (89),\n" + "\t(35),\n" + "\t(77)\n" + ")\n" + "EXCEPT\n"
+ "SELECT *\n" + "FROM (\n" + "\tVALUES (33),\n" + "\t(35),\n" + "\t(60)\n" + ")", stmt.toString());
}
}
| MySqlSelectTest_169_values |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsAction.java | {
"start": 2327,
"end": 2475
} | class ____'t have any members at the moment so return the same hash code
return Objects.hash(NAME);
}
}
public static | doesn |
java | alibaba__nacos | consistency/src/test/java/com/alibaba/nacos/consistency/serialize/HessianSerializerTest.java | {
"start": 1178,
"end": 2691
} | class ____ {
private HessianSerializer hessianSerializer;
@BeforeEach
void setUp() {
hessianSerializer = new HessianSerializer();
}
@Test
void testSerializerAndDeserialize() {
String data = "xxx";
byte[] bytes = hessianSerializer.serialize(data);
try {
hessianSerializer.deserialize(bytes);
} catch (Exception e) {
assertTrue(e instanceof RuntimeException);
}
String res1 = hessianSerializer.deserialize(bytes, String.class);
assertEquals(data, res1);
String res2 = hessianSerializer.deserialize(bytes, "java.lang.String");
assertEquals(data, res2);
}
@Test
void testSerializerAndDeserializeForNotAllowClass() {
Serializable data = new HttpException();
byte[] bytes = hessianSerializer.serialize(data);
try {
HttpException res = hessianSerializer.deserialize(bytes);
fail("deserialize success which is not expected");
} catch (Exception e) {
assertTrue(e instanceof ClassCastException);
}
try {
HttpException res1 = hessianSerializer.deserialize(bytes, HttpException.class);
} catch (Exception e) {
assertTrue(e instanceof NacosDeserializationException);
}
}
@Test
void testName() {
assertEquals("Hessian", hessianSerializer.name());
}
}
| HessianSerializerTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java | {
"start": 1805,
"end": 2114
} | class ____ {
void test(Object o) {
if (o instanceof Test) {
Test test = ((((Test) o)));
test(test);
}
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/JsonGenerator.java | {
"start": 889,
"end": 99750
} | class ____
implements Closeable, Flushable, Versioned
{
/*
/**********************************************************************
/* Construction, initialization
/**********************************************************************
*/
protected JsonGenerator() { }
/*
/**********************************************************************
/* Versioned
/**********************************************************************
*/
/**
* Accessor for finding out version of the bundle that provided this generator instance.
*/
@Override
public abstract Version version();
/*
/**********************************************************************
/* Constraints violation checking
/**********************************************************************
*/
/**
* Get the constraints to apply when performing streaming writes.
*
* @return Constraints used for this generator
*/
public StreamWriteConstraints streamWriteConstraints() {
return StreamWriteConstraints.defaults();
}
/*
/**********************************************************************
/* Public API, output configuration, state access
/**********************************************************************
*/
/**
* Accessor for context object that provides information about low-level
* logical position withing output token stream.
*<p>
* NOTE: method was called {@code getOutputContext()} in Jackson 2.x
*
* @return Stream output context ({@link TokenStreamContext}) associated with this generator
*/
public abstract TokenStreamContext streamWriteContext();
/**
* Accessor for context object provided by higher-level databinding
* functionality (or, in some cases, simple placeholder of the same)
* that allows some level of interaction including ability to trigger
* serialization of Object values through generator instance.
*
* @return Object write context ({@link ObjectWriteContext}) associated with this generator
*
* @since 3.0
*/
public abstract ObjectWriteContext objectWriteContext();
/**
* Method that can be used to get access to object that is used
* as target for generated output; this is usually either
* {@link java.io.OutputStream} or {@link java.io.Writer}, depending on what
* generator was constructed with.
* Note that returned value may be null in some cases; including
* case where implementation does not want to exposed raw
* source to caller.
* In cases where output has been decorated, object returned here
* is the decorated version; this allows some level of interaction
* between users of generator and decorator object.
*<p>
* In general use of this accessor should be considered as
* "last effort", i.e. only used if no other mechanism is applicable.
*<p>
* NOTE: was named {@code getOutputTarget()} in Jackson 2.x.
*
* @return Output target this generator was configured with
*/
public abstract Object streamWriteOutputTarget();
/**
* Method for verifying amount of content that is buffered by generator
* but not yet flushed to the underlying target (stream, writer),
* in units (byte, char) that the generator implementation uses for buffering;
* or -1 if this information is not available.
* Unit used is often the same as the unit of underlying target (that is,
* {@code byte} for {@link java.io.OutputStream},
* {@code char} for {@link java.io.Writer}),
* but may differ if buffering is done before encoding.
* Default JSON-backed implementations do use matching units.
*<p>
* NOTE: was named {@code getOutputBuffered()} in Jackson 2.x.
*
* @return Amount of content buffered in internal units, if amount known and
* accessible; -1 if not accessible.
*/
public abstract int streamWriteOutputBuffered();
/**
* Helper method, usually equivalent to:
*<code>
* getOutputContext().currentValue();
*</code>
*<p>
* Note that "current value" is NOT populated (or used) by Streaming generator;
* it is only used by higher-level data-binding functionality.
* The reason it is included here is that it can be stored and accessed hierarchically,
* and gets passed through data-binding.
*
* @return "Current value" for the current context this generator has
*/
public abstract Object currentValue();
/**
* Helper method, usually equivalent to:
*<code>
* getOutputContext().assignCurrentValue(v);
*</code>
* used to assign "current value" for the current context of this generator.
* It is usually assigned and used by higher level data-binding functionality
* (instead of streaming parsers/generators) but is stored at streaming level.
*
* @param v "Current value" to assign to the current output context of this generator
*/
public abstract void assignCurrentValue(Object v);
/*
/**********************************************************************
/* Public API, Feature configuration
/**********************************************************************
*/
// 25-Jan-2021, tatu: Still called by `ClassUtil` of jackson-databind, to
// prevent secondary issues when closing generator. Should probably figure
// out alternate means of safe closing...
/**
* Method for enabling or disabling specified feature:
* check {@link StreamWriteFeature} for list of available features.
*<p>
* NOTE: mostly left in 3.0 just to support disabling of
* {@link StreamWriteFeature#AUTO_CLOSE_CONTENT} by {@code jackson-databind}
*
* @param f Feature to enable or disable
* @param state Whether to enable the feature ({@code true}) or disable ({@code false})
*
* @return This generator, to allow call chaining
*/
public abstract JsonGenerator configure(StreamWriteFeature f, boolean state);
/**
* Method for checking whether given feature is enabled.
* Check {@link StreamWriteFeature} for list of available features.
*
* @param f Feature to check
*
* @return {@code True} if feature is enabled; {@code false} if not
*/
public abstract boolean isEnabled(StreamWriteFeature f);
/**
* Bulk access method for getting state of all standard (format-agnostic)
* {@link StreamWriteFeature}s.
*
* @return Bit mask that defines current states of all standard {@link StreamWriteFeature}s.
*
* @since 3.0
*/
public abstract int streamWriteFeatures();
/*
/**********************************************************************
/* Public API, other configuration
/**********************************************************************
*/
/**
* Method for accessing Schema that this generator uses, if any.
* Default implementation returns null.
*
* @return {@link FormatSchema} this generator is configured to use, if any; {@code null} if none
*/
public FormatSchema getSchema() { return null; }
/**
* Accessor method for testing what is the highest unescaped character
* configured for this generator. This may be either positive value
* (when escaping configuration has been set and is in effect), or
* 0 to indicate that no additional escaping is in effect.
* Some generators may not support additional escaping: for example,
* generators for binary formats that do not use escaping should
* simply return 0.
*
* @return Currently active limitation for highest non-escaped character,
* if defined; or 0 to indicate no additional escaping is performed.
*/
public int getHighestNonEscapedChar() { return 0; }
/**
* Method for accessing custom escapes generator uses for {@link JsonGenerator}s
* it creates.
*
* @return {@link CharacterEscapes} this generator is configured to use, if any; {@code null} if none
*/
public CharacterEscapes getCharacterEscapes() { return null; }
// 04-Oct-2017, tatu: Would like to remove this method, but alas JSONP-support
// does require it...
/**
* Method for defining custom escapes factory uses for {@link JsonGenerator}s
* it creates.
*<p>
* Default implementation does nothing and simply returns this instance.
*
* @param esc {@link CharacterEscapes} to configure this generator to use, if any; {@code null} if none
*
* @return This generator, to allow call chaining
*/
public JsonGenerator setCharacterEscapes(CharacterEscapes esc) { return this; }
/**
* Accessor for object that handles pretty-printing (usually additional white space to make
* results more human-readable) during output. If {@code null}, no pretty-printing is
* done.
* <p>
* NOTE: this may be {@link PrettyPrinter} that {@link TokenStreamFactory} was
* configured with (if stateless), OR an instance created via
* {@link tools.jackson.core.util.Instantiatable#createInstance()} (if
* stateful).
*<p>
* Default implementation returns null so pretty-printing capable generators
* need to override it.
*
* @return Pretty printer used by this generator, if any; {@code null} if none
*/
public PrettyPrinter getPrettyPrinter() { return null; }
/*
/**********************************************************************
/* Public API, capability introspection methods
/**********************************************************************
*/
/**
* Introspection method that may be called to see if the underlying
* data format supports some kind of Object Ids natively (many do not;
* for example, JSON doesn't).
* This method <b>must</b> be called prior to calling
* {@link #writeObjectId} or {@link #writeObjectRef}.
*<p>
* Default implementation returns false; overridden by data formats
* that do support native Object Ids. Caller is expected to either
* use a non-native notation (explicit property or such), or fail,
* in case it cannot use native object ids.
*
* @return {@code True} if this generator is capable of writing "native" Object Ids
* (which is typically determined by capabilities of the underlying format),
* {@code false} if not
*/
public boolean canWriteObjectId() { return false; }
/**
* Introspection method that may be called to see if the underlying
* data format supports some kind of Type Ids natively (many do not;
* for example, JSON doesn't).
* This method <b>must</b> be called prior to calling
* {@link #writeTypeId}.
*<p>
* Default implementation returns false; overridden by data formats
* that do support native Type Ids. Caller is expected to either
* use a non-native notation (explicit property or such), or fail,
* in case it cannot use native type ids.
*
* @return {@code True} if this generator is capable of writing "native" Type Ids
* (which is typically determined by capabilities of the underlying format),
* {@code false} if not
*/
public boolean canWriteTypeId() { return false; }
/**
* Introspection method to call to check whether it is ok to omit
* writing of Object properties or not. Most formats do allow omission,
* but certain positional formats (such as CSV) require output of
* place holders, even if no real values are to be emitted.
*<p>
* NOTE: in Jackson 2.x method was {@code canOmitFields()}.
*
* @return {@code True} if this generator is allowed to only write values
* of some Object properties and omit the rest; {@code false} if not
*/
public boolean canOmitProperties() { return true; }
/**
* Accessor for checking whether this generator has specified capability.
* Short-hand for:
* {@code return getWriteCapabilities().isEnabled(capability); }
*
* @param capability Capability to check
*
* @return True if this generator has specified capability; false if not
*/
public abstract boolean has(StreamWriteCapability capability);
/**
* Accessor for getting metadata on capabilities of this generator, based on
* underlying data format being read (directly or indirectly).
*
* @return Set of read capabilities for content to generate via this generator
*/
public abstract JacksonFeatureSet<StreamWriteCapability> streamWriteCapabilities();
/*
/**********************************************************************
/* Public API, write methods, structural
/**********************************************************************
*/
    /**
     * Method for writing starting marker of an Array value
     * (for JSON this is character '['; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Array values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a property name is expected.
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeStartArray() throws JacksonException;
    /**
     * Method for writing start marker of an Array value, similar
     * to {@link #writeStartArray()}, but also specifying what is the
     * Java object that the Array Object being written represents (if any);
     * {@code null} may be passed if not known or not applicable.
     * This value is accessible from context as "current value"
     *
     * @param currentValue Java Object that Array being written represents, if any
     *   (or {@code null} if not known or not applicable)
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeStartArray(Object currentValue) throws JacksonException;
    /**
     * Method for writing start marker of an Array value, similar
     * to {@link #writeStartArray()}, but also specifying what is the
     * Java object that the Array Object being written represents (if any)
     * and how many elements will be written for the array before calling
     * {@link #writeEndArray()}.
     *
     * @param currentValue Java Object that Array being written represents, if any
     *   (or {@code null} if not known or not applicable)
     * @param size Number of elements this Array will have: actual
     *   number of values written before the matching call to
     *   {@link #writeEndArray()} MUST match; generator MAY verify
     *   this is the case (and SHOULD if format itself encodes length)
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeStartArray(Object currentValue, int size) throws JacksonException;
    /**
     * Method for writing closing marker of a JSON Array value
     * (character ']'; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Marker can be written if the innermost structured type is Array.
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeEndArray() throws JacksonException;
    /**
     * Method for writing starting marker of an Object value
     * (character '{'; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Object values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a property name is expected.
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeStartObject() throws JacksonException;
    /**
     * Method for writing starting marker of an Object value
     * to represent the given Java Object value.
     * Argument is offered as metadata, but more
     * importantly it should be assigned as the "current value"
     * for the Object content that gets constructed and initialized.
     *<p>
     * Object values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a property name is expected.
     *
     * @param currentValue Java Object that Object being written represents, if any
     *   (or {@code null} if not known or not applicable)
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeStartObject(Object currentValue) throws JacksonException;
    /**
     * Method for writing starting marker of an Object value
     * to represent the given Java Object value.
     * Argument is offered as metadata, but more
     * importantly it should be assigned as the "current value"
     * for the Object content that gets constructed and initialized.
     * In addition, caller knows number of key/value pairs ("properties")
     * that will get written for the Object value: this is relevant for
     * some format backends (but not, as an example, for JSON).
     *<p>
     * Object values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a property name is expected.
     *
     * @param forValue Object value to be written (assigned as "current value" for
     *   the Object context that gets created)
     * @param size Number of key/value pairs this Object will have: actual
     *   number of entries written before the matching call to
     *   {@link #writeEndObject()} MUST match; generator MAY verify
     *   this is the case (and SHOULD if format itself encodes length)
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeStartObject(Object forValue, int size) throws JacksonException;
    /**
     * Method for writing closing marker of an Object value
     * (character '}'; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Marker can be written if the innermost structured type
     * is Object, and the last written event was either a
     * complete value, or START-OBJECT marker (see JSON specification
     * for more details).
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeEndObject() throws JacksonException;
    /**
     * Method for writing an Object Property name (JSON String surrounded by
     * double quotes: syntactically identical to a JSON String value),
     * possibly decorated by white space if pretty-printing is enabled.
     *<p>
     * Property names can only be written in Object context (check out
     * JSON specification for details), when Object Property name is expected
     * (property names alternate with values).
     *
     * @param name Name of the Object Property to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeName(String name) throws JacksonException;
    /**
     * Method similar to {@link #writeName(String)}, main difference
     * being that it may perform better as some of processing (such as
     * quoting of certain characters, or encoding into external encoding
     * if supported by generator) can be done just once and reused for
     * later calls.
     *<p>
     * Implementations may simply use the unprocessed name contained in the
     * serialized String, but are strongly encouraged to make
     * use of the more efficient accessors the argument object has.
     *
     * @param name Pre-encoded name of the Object Property to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeName(SerializableString name) throws JacksonException;
    /**
     * Alternative to {@link #writeName(String)} that may be used
     * in cases where Object Property key is of numeric type; usually where
     * underlying format supports such notion (some binary formats do,
     * unlike JSON).
     * Implementations without native support may simply convert the id into
     * {@code String} and handle it like {@link #writeName(String)} would.
     *
     * @param id Property key id to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writePropertyId(long id) throws JacksonException;
/*
/**********************************************************************
/* Public API, write methods, scalar arrays
/**********************************************************************
*/
/**
* Value write method that can be called to write a single
* array (sequence of {@link JsonToken#START_ARRAY}, zero or
* more {@link JsonToken#VALUE_NUMBER_INT}, {@link JsonToken#END_ARRAY})
*
* @param array Array that contains values to write
* @param offset Offset of the first element to write, within array
* @param length Number of elements in array to write, from `offset` to `offset + len - 1`
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeArray(int[] array, int offset, int length) throws JacksonException
{
Objects.requireNonNull(array, "null 'array' argument");
_verifyOffsets(array.length, offset, length);
writeStartArray(array, length);
for (int i = offset, end = offset+length; i < end; ++i) {
writeNumber(array[i]);
}
writeEndArray();
return this;
}
/**
* Value write method that can be called to write a single
* array (sequence of {@link JsonToken#START_ARRAY}, zero or
* more {@link JsonToken#VALUE_NUMBER_INT}, {@link JsonToken#END_ARRAY})
*
* @param array Array that contains values to write
* @param offset Offset of the first element to write, within array
* @param length Number of elements in array to write, from `offset` to `offset + len - 1`
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeArray(long[] array, int offset, int length) throws JacksonException
{
Objects.requireNonNull(array, "null 'array' argument");
_verifyOffsets(array.length, offset, length);
writeStartArray(array, length);
for (int i = offset, end = offset+length; i < end; ++i) {
writeNumber(array[i]);
}
writeEndArray();
return this;
}
/**
* Value write method that can be called to write a single
* array (sequence of {@link JsonToken#START_ARRAY}, zero or
* more {@link JsonToken#VALUE_NUMBER_FLOAT}, {@link JsonToken#END_ARRAY})
*
* @param array Array that contains values to write
* @param offset Offset of the first element to write, within array
* @param length Number of elements in array to write, from `offset` to `offset + len - 1`
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeArray(double[] array, int offset, int length) throws JacksonException
{
Objects.requireNonNull(array, "null 'array' argument");
_verifyOffsets(array.length, offset, length);
writeStartArray(array, length);
for (int i = offset, end = offset+length; i < end; ++i) {
writeNumber(array[i]);
}
writeEndArray();
return this;
}
/**
* Value write method that can be called to write a single
* array (sequence of {@link JsonToken#START_ARRAY}, zero or
* more {@link JsonToken#VALUE_STRING}, {@link JsonToken#END_ARRAY})
*
* @param array Array that contains values to write
* @param offset Offset of the first element to write, within array
* @param length Number of elements in array to write, from `offset` to `offset + len - 1`
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeArray(String[] array, int offset, int length) throws JacksonException
{
Objects.requireNonNull(array, "null 'array' argument");
_verifyOffsets(array.length, offset, length);
writeStartArray(array, length);
for (int i = offset, end = offset+length; i < end; ++i) {
writeString(array[i]);
}
writeEndArray();
return this;
}
/*
/**********************************************************************
/* Public API, write methods, text/String values
/**********************************************************************
*/
    /**
     * Method for outputting a String value. Depending on context
     * this means either array element, (object) property value or
     * a stand-alone (root-level value) String; but in all cases, String will be
     * surrounded in double quotes, and contents will be properly
     * escaped as required by JSON specification.
     *
     * @param value String value to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeString(String value) throws JacksonException;
    /**
     * Method for outputting a String value. Depending on context
     * this means either array element, (object) property value or
     * a stand alone String; but in all cases, String will be
     * surrounded in double quotes, and contents will be properly
     * escaped as required by JSON specification.
     * If {@code len} is negative, then write all contents of the reader.
     * Otherwise, write only len characters.
     *<p>
     * Note: actual length of content available may exceed {@code len} but
     * cannot be less than it: if not enough content available, a
     * {@link StreamWriteException} will be thrown.
     *
     * @param reader Reader to use for reading Text value to write
     * @param len Maximum Length of Text value to read (in {@code char}s, non-negative)
     *   if known; {@code -1} to indicate "read and write it all"
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     *   (including the case where {@code reader} does not provide enough content)
     */
    public abstract JsonGenerator writeString(Reader reader, int len) throws JacksonException;
    /**
     * Method for outputting a String value. Depending on context
     * this means either array element, (object) property value or
     * a stand alone String; but in all cases, String will be
     * surrounded in double quotes, and contents will be properly
     * escaped as required by JSON specification.
     *
     * @param buffer Buffer that contains String value to write
     * @param offset Offset in {@code buffer} of the first character of String value to write
     * @param len Length of the String value (in characters) to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeString(char[] buffer, int offset, int len) throws JacksonException;
    /**
     * Method similar to {@link #writeString(String)}, but that takes
     * {@link SerializableString} which can make this potentially
     * more efficient to call as generator may be able to reuse
     * quoted and/or encoded representation.
     *<p>
     * Implementations may simply delegate to {@link #writeString(String)},
     * but are encouraged to use a more efficient approach
     * if possible.
     *
     * @param value Pre-encoded String value to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeString(SerializableString value) throws JacksonException;
    /**
     * Method similar to {@link #writeString(String)} but that takes as
     * its input a UTF-8 encoded String that is to be output as-is, without additional
     * escaping (type of which depends on data format; backslashes for JSON).
     * However, quoting that data format requires (like double-quotes for JSON) will be added
     * around the value if and as necessary.
     *<p>
     * Note that some backends may choose not to support this method: for
     * example, if underlying destination is a {@link java.io.Writer}
     * using this method would require UTF-8 decoding.
     * If so, implementation may instead choose to throw a
     * {@link UnsupportedOperationException} due to ineffectiveness
     * of having to decode input.
     *
     * @param buffer Buffer that contains String value to write
     * @param offset Offset in {@code buffer} of the first byte of String value to write
     * @param len Length of the String value (in bytes, not characters) to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRawUTF8String(byte[] buffer, int offset, int len)
        throws JacksonException;
    /**
     * Method similar to {@link #writeString(String)} but that takes as its input
     * a UTF-8 encoded String which has <b>not</b> been escaped using whatever
     * escaping scheme data format requires (for JSON that is backslash-escaping
     * for control characters and double-quotes; for other formats something else).
     * This means that textual JSON backends need to check if value needs
     * JSON escaping, but otherwise can just be copied as is to output.
     * Also, quoting that data format requires (like double-quotes for JSON) will be added
     * around the value if and as necessary.
     *<p>
     * Note that some backends may choose not to support this method: for
     * example, if underlying destination is a {@link java.io.Writer}
     * using this method would require UTF-8 decoding.
     * In this case
     * generator implementation may instead choose to throw a
     * {@link UnsupportedOperationException} due to ineffectiveness
     * of having to decode input.
     *
     * @param buffer Buffer that contains String value to write
     * @param offset Offset in {@code buffer} of the first byte of String value to write
     * @param len Length of the String value (in bytes, not characters) to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeUTF8String(byte[] buffer, int offset, int len)
        throws JacksonException;
/*
/**********************************************************************
/* Public API, write methods, raw content
/**********************************************************************
*/
    /**
     * Method that will force generator to copy
     * input text verbatim with <b>no</b> modifications (including
     * that no escaping is done and no separators are added even
     * if context [array, object] would otherwise require such).
     * If such separators are desired, use
     * {@link #writeRawValue(String)} instead.
     *<p>
     * Note that not all generator implementations necessarily support
     * such by-pass methods: those that do not will throw
     * {@link UnsupportedOperationException}.
     *
     * @param text Textual contents to include as-is in output.
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRaw(String text) throws JacksonException;
    /**
     * Method that will force generator to copy
     * input text verbatim with <b>no</b> modifications (including
     * that no escaping is done and no separators are added even
     * if context [array, object] would otherwise require such).
     * If such separators are desired, use
     * {@link #writeRawValue(String)} instead.
     *<p>
     * Note that not all generator implementations necessarily support
     * such by-pass methods: those that do not will throw
     * {@link UnsupportedOperationException}.
     *
     * @param text String that has contents to include as-is in output
     * @param offset Offset within {@code text} of the first character to output
     * @param len Length of content (from {@code text}, starting at offset {@code offset}) to output
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRaw(String text, int offset, int len) throws JacksonException;
    /**
     * Method that will force generator to copy
     * input text verbatim with <b>no</b> modifications (including
     * that no escaping is done and no separators are added even
     * if context [array, object] would otherwise require such).
     * If such separators are desired, use
     * {@link #writeRawValue(String)} instead.
     *<p>
     * Note that not all generator implementations necessarily support
     * such by-pass methods: those that do not will throw
     * {@link UnsupportedOperationException}.
     *
     * @param buffer Buffer that has contents to include as-is in output
     * @param offset Offset within {@code buffer} of the first character to output
     * @param len Length of content (from {@code buffer}, starting at offset {@code offset}) to output
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRaw(char[] buffer, int offset, int len) throws JacksonException;
    /**
     * Method that will force generator to copy
     * input text verbatim with <b>no</b> modifications (including
     * that no escaping is done and no separators are added even
     * if context [array, object] would otherwise require such).
     * If such separators are desired, use
     * {@link #writeRawValue(String)} instead.
     *<p>
     * Note that not all generator implementations necessarily support
     * such by-pass methods: those that do not will throw
     * {@link UnsupportedOperationException}.
     *
     * @param c Character to include in output
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRaw(char c) throws JacksonException;
/**
* Method that will force generator to copy
* input text verbatim with <b>no</b> modifications (including
* that no escaping is done and no separators are added even
* if context [array, object] would otherwise require such).
* If such separators are desired, use
* {@link #writeRawValue(String)} instead.
*<p>
* Note that not all generator implementations necessarily support
* such by-pass methods: those that do not will throw
* {@link UnsupportedOperationException}.
*<p>
* The default implementation delegates to {@link #writeRaw(String)};
* other backends that support raw inclusion of text are encouraged
* to implement it in more efficient manner (especially if they
* use UTF-8 encoding).
*
* @param raw Pre-encoded textual contents to included in output
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeRaw(SerializableString raw) throws JacksonException {
return writeRaw(raw.getValue());
}
    /**
     * Method that will force generator to copy
     * input text verbatim without any modifications, but assuming
     * it must constitute a single legal JSON value (number, string,
     * boolean, null, Array or List). Assuming this, proper separators
     * are added if and as needed (comma or colon), and generator
     * state updated to reflect this.
     *
     * @param text Textual contents to include in output
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRawValue(String text) throws JacksonException;
    /**
     * Method similar to {@link #writeRawValue(String)}, but outputting only
     * a sub-sequence of the given String.
     *
     * @param text String that contains the raw value to include in output
     * @param offset Offset within {@code text} of the first character to output
     * @param len Length of content (from {@code text}, starting at {@code offset}) to output
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRawValue(String text, int offset, int len) throws JacksonException;
    /**
     * Method similar to {@link #writeRawValue(String)}, but taking the raw
     * value from (a slice of) a {@code char[]} buffer.
     *
     * @param text Buffer that contains the raw value to include in output
     * @param offset Offset within {@code text} of the first character to output
     * @param len Length of content (from {@code text}, starting at {@code offset}) to output
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeRawValue(char[] text, int offset, int len) throws JacksonException;
/**
* Method similar to {@link #writeRawValue(String)}, but potentially more
* efficient as it may be able to use pre-encoded content (similar to
* {@link #writeRaw(SerializableString)}.
*
* @param raw Pre-encoded textual contents to included in output
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeRawValue(SerializableString raw) throws JacksonException {
return writeRawValue(raw.getValue());
}
/*
/**********************************************************************
/* Public API, write methods, Binary values
/**********************************************************************
*/
    /**
     * Method that will output given chunk of binary data as base64
     * encoded, as a complete String value (surrounded by double quotes).
     *<p>
     * Note: because JSON Strings cannot contain unescaped linefeeds,
     * if linefeeds are included (as per last argument), they must be
     * escaped. This adds overhead for decoding without improving
     * readability.
     * Alternatively if linefeeds are not included,
     * resulting String value may violate the requirement of base64
     * RFC which mandates line-length of 76 characters and use of
     * linefeeds. However, all {@link JsonParser} implementations
     * are required to accept such "long line base64"; as do
     * typical production-level base64 decoders.
     *
     * @param bv Base64 variant to use: defines details such as
     *   whether padding is used (and if so, using which character);
     *   what is the maximum line length before adding linefeed,
     *   and also the underlying alphabet to use.
     * @param data Buffer that contains binary data to write
     * @param offset Offset in {@code data} of the first byte of data to write
     * @param len Length of data to write
     *
     * @return This generator, to allow call chaining
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract JsonGenerator writeBinary(Base64Variant bv,
            byte[] data, int offset, int len) throws JacksonException;
/**
* Similar to {@link #writeBinary(Base64Variant,byte[],int,int)},
* but default to using the Jackson default Base64 variant
* (which is {@link Base64Variants#MIME_NO_LINEFEEDS}).
*
* @param data Buffer that contains binary data to write
* @param offset Offset in {@code data} of the first byte of data to write
* @param len Length of data to write
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeBinary(byte[] data, int offset, int len) throws JacksonException {
return writeBinary(Base64Variants.getDefaultVariant(), data, offset, len);
}
/**
* Similar to {@link #writeBinary(Base64Variant,byte[],int,int)},
* but assumes default to using the Jackson default Base64 variant
* (which is {@link Base64Variants#MIME_NO_LINEFEEDS}). Also
* assumes that whole byte array is to be output.
*
* @param data Buffer that contains binary data to write
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public JsonGenerator writeBinary(byte[] data) throws JacksonException {
return writeBinary(Base64Variants.getDefaultVariant(), data, 0, data.length);
}
/**
* Similar to {@link #writeBinary(Base64Variant,InputStream,int)},
* but assumes default to using the Jackson default Base64 variant
* (which is {@link Base64Variants#MIME_NO_LINEFEEDS}).
*
* @param data InputStream to use for reading binary data to write.
* Will not be closed after successful write operation
* @param dataLength (optional) number of bytes that will be available;
* or -1 to be indicate it is not known. Note that implementations
* need not support cases where length is not known in advance; this
* depends on underlying data format: JSON output does NOT require length,
* other formats may
*
* @return Number of bytes actually written
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public int writeBinary(InputStream data, int dataLength) throws JacksonException {
return writeBinary(Base64Variants.getDefaultVariant(), data, dataLength);
}
    /**
     * Method similar to {@link #writeBinary(Base64Variant,byte[],int,int)},
     * but where input is provided through a stream, allowing for incremental
     * writes without holding the whole input in memory.
     *
     * @param bv Base64 variant to use
     * @param data InputStream to use for reading binary data to write.
     *   Will not be closed after successful write operation
     * @param dataLength (optional) number of bytes that will be available;
     *   or -1 to indicate it is not known.
     *   If a positive length is given, <code>data</code> MUST provide at least
     *   that many bytes: if not, an exception will be thrown.
     *   Note that implementations
     *   need not support cases where length is not known in advance; this
     *   depends on underlying data format: JSON output does NOT require length,
     *   other formats may.
     *
     * @return Number of bytes read from <code>data</code> and written as binary payload
     *
     * @throws JacksonIOException if there is an underlying I/O problem
     * @throws StreamWriteException for problems in encoding token stream
     */
    public abstract int writeBinary(Base64Variant bv,
            InputStream data, int dataLength) throws JacksonException;
/*
/**********************************************************************
/* Public API, write methods, numeric
/**********************************************************************
*/
/**
* Method for outputting given value as JSON number.
* Can be called in any context where a value is expected
* (Array value, Object property value, root-level value).
* Additional white space may be added around the value
* if pretty-printing is enabled.
*
* @param v Number value to write
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public abstract JsonGenerator writeNumber(short v) throws JacksonException;
/**
* Method for outputting given value as JSON number.
* Can be called in any context where a value is expected
* (Array value, Object property value, root-level value).
* Additional white space may be added around the value
* if pretty-printing is enabled.
*
* @param v Number value to write
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public abstract JsonGenerator writeNumber(int v) throws JacksonException;
/**
* Method for outputting given value as JSON number.
* Can be called in any context where a value is expected
* (Array value, Object property value, root-level value).
* Additional white space may be added around the value
* if pretty-printing is enabled.
*
* @param v Number value to write
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public abstract JsonGenerator writeNumber(long v) throws JacksonException;
/**
* Method for outputting given value as JSON number.
* Can be called in any context where a value is expected
* (Array value, Object property value, root-level value).
* Additional white space may be added around the value
* if pretty-printing is enabled.
*
* @param v Number value to write
*
* @return This generator, to allow call chaining
*
* @throws JacksonIOException if there is an underlying I/O problem
* @throws StreamWriteException for problems in encoding token stream
*/
public abstract JsonGenerator writeNumber(BigInteger v) throws JacksonException;
/**
 * Method for outputting given floating-point value as JSON number.
 * Can be called in any context where a value is expected
 * (Array value, Object property value, root-level value).
 * Additional white space may be added around the value
 * if pretty-printing is enabled.
 *
 * @param v Number value to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeNumber(double v) throws JacksonException;
/**
 * Method for outputting given floating-point value as JSON number.
 * Can be called in any context where a value is expected
 * (Array value, Object property value, root-level value).
 * Additional white space may be added around the value
 * if pretty-printing is enabled.
 *
 * @param v Number value to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeNumber(float v) throws JacksonException;
/**
 * Method for outputting given arbitrary-precision decimal value as JSON number.
 * Can be called in any context where a value is expected
 * (Array value, Object property value, root-level value).
 * Additional white space may be added around the value
 * if pretty-printing is enabled.
 *
 * @param v Number value to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeNumber(BigDecimal v) throws JacksonException;
/**
 * Write method that can be used for custom numeric types that can
 * not be (easily?) converted to "standard" Java number types.
 * Because numbers are not surrounded by double quotes, regular
 * {@link #writeString} method cannot be used; nor
 * {@link #writeRaw} because that does not properly handle
 * value separators needed in Array or Object contexts.
 *<p>
 * Note: because of lack of type safety, some generator
 * implementations may not be able to implement this
 * method. For example, if a binary JSON format is used,
 * it may require type information for encoding; similarly
 * for generator-wrappers around Java objects or JSON nodes.
 * If implementation does not implement this method,
 * it needs to throw {@link UnsupportedOperationException}.
 *
 * @param encodedValue Textual (possibly formatted) number representation to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws UnsupportedOperationException If underlying data format does not
 *    support numbers serialized textually AND if generator is not allowed
 *    to just output a String instead (Schema-based formats may require actual
 *    number, for example)
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeNumber(String encodedValue) throws JacksonException;
/**
 * Variant of {@link #writeNumber(String)} that takes the textual number
 * representation from a character buffer; semantics are the same, but
 * implementations may be able to operate more efficiently.
 *
 * @param encodedValueBuffer Buffer that contains the textual number representation to write
 * @param offset Offset of the first character of value to write
 * @param len Length of the value (in characters) to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public JsonGenerator writeNumber(char[] encodedValueBuffer, int offset, int len) throws JacksonException {
    // Default implementation simply materializes a String and delegates
    return writeNumber(String.valueOf(encodedValueBuffer, offset, len));
}
/*
/**********************************************************************
/* Public API, write methods, other value types
/**********************************************************************
*/
/**
 * Method for outputting literal JSON boolean value (one of
 * literals {@code true} and {@code false}).
 * Can be called in any context where a value is expected
 * (Array value, Object property value, root-level value).
 * Additional white space may be added around the value
 * if pretty-printing is enabled.
 *
 * @param state Boolean value to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeBoolean(boolean state) throws JacksonException;
/**
 * Method for outputting literal JSON {@code null} value.
 * Can be called in any context where a value is expected
 * (Array value, Object property value, root-level value).
 * Additional white space may be added around the value
 * if pretty-printing is enabled.
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeNull() throws JacksonException;
/**
 * Method usable with backends that can embed opaque native values in
 * their output; not used by the JSON backend, more common with binary
 * formats.
 *<p>
 * NOTE: this is NOT the method to call for serializing regular POJOs,
 * see {@link #writePOJO} instead.
 *
 * @param object Native format-specific value to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public JsonGenerator writeEmbeddedObject(Object object) throws JacksonException {
    // 01-Sep-2016, tatu: As per [core#318], handle small number of cases
    if (object == null) {
        writeNull();
    } else if (object instanceof byte[]) {
        writeBinary((byte[]) object);
    } else {
        throw _constructWriteException("No native support for writing embedded objects of type %s",
                object.getClass().getName());
    }
    return this;
}
/*
/**********************************************************************
/* Public API, write methods, Native Ids (type, object)
/**********************************************************************
*/
/**
 * Method that can be called to output so-called native Object Id.
 * Note that it may only be called after ensuring this is legal
 * (with {@link #canWriteObjectId()}), as not all data formats
 * have native type id support; and some may only allow them in
 * certain positions or locations.
 *
 * @param id Native Object Id to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream;
 *   typically if Object ID output is not allowed
 *   (either at all, or specifically in this position in output)
 */
public JsonGenerator writeObjectId(Object id) throws JacksonException {
    // Default implementation: no native Object Id support; overridden by backends that have it
    throw _constructWriteException("No native support for writing Object Ids");
}
/**
 * Method that can be called to output references to native Object Ids.
 * Note that it may only be called after ensuring this is legal
 * (with {@link #canWriteObjectId()}), as not all data formats
 * have native type id support; and some may only allow them in
 * certain positions or locations.
 * If output is not allowed by the data format in this position,
 * a {@link StreamWriteException} will be thrown.
 *
 * @param referenced Referenced value, for which Object Id is expected to be written
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream;
 *   typically if Object ID output is not allowed
 *   (either at all, or specifically in this position in output)
 */
public JsonGenerator writeObjectRef(Object referenced) throws JacksonException {
    // Default implementation: no native Object Id support; overridden by backends that have it
    throw _constructWriteException("No native support for writing Object Ids");
}
/**
 * Method that can be called to output so-called native Type Id.
 * Note that it may only be called after ensuring this is legal
 * (with {@link #canWriteTypeId()}), as not all data formats
 * have native type id support; and some may only allow them in
 * certain positions or locations.
 * If output is not allowed by the data format in this position,
 * a {@link StreamWriteException} will be thrown.
 *
 * @param id Native Type Id to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public JsonGenerator writeTypeId(Object id) throws JacksonException {
    // Default implementation: no native Type Id support; overridden by backends that have it
    throw _constructWriteException("No native support for writing Type Ids");
}
/**
 * Replacement method for {@link #writeTypeId(Object)} which is called
 * regardless of whether format has native type ids. If it does have native
 * type ids, those are to be used (if configuration allows this), if not,
 * structural type id inclusion is to be used. For JSON, for example, no
 * native type ids exist and structural inclusion is always used.
 *<p>
 * NOTE: databind may choose to skip calling this method for some special cases
 * (and instead included type id via regular write methods and/or {@link #writeTypeId}
 * -- this is discouraged, but not illegal, and may be necessary as a work-around
 * in some cases.
 *
 * @param typeIdDef Full Type Id definition
 *
 * @return {@link WritableTypeId} for caller to retain and pass to matching
 *   {@link #writeTypeSuffix} call
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public WritableTypeId writeTypePrefix(WritableTypeId typeIdDef)
    throws JacksonException
{
    // Type id itself first: natively if format supports it, otherwise via structural wrapper
    final boolean wasStartObjectWritten = canWriteTypeId()
            ? _writeTypePrefixUsingNative(typeIdDef)
            : _writeTypePrefixUsingWrapper(typeIdDef);
    // And then possible start marker for value itself:
    switch (typeIdDef.valueShape) {
    case START_OBJECT:
        // wrapper handling may have had to write START_OBJECT already; avoid duplicating
        if (!wasStartObjectWritten) {
            writeStartObject(typeIdDef.forValue);
        }
        break;
    case START_ARRAY:
        writeStartArray(typeIdDef.forValue);
        break;
    default: // otherwise: no start marker
    }
    return typeIdDef;
}
/**
 * Writes a native type id (when supported by format).
 *
 * @param typeIdDef Full Type Id definition whose {@code id} is to be written natively
 *
 * @return True if start of an object has been written, False otherwise
 *    (this implementation never writes one, so always False).
 *
 * @throws JacksonException if there is a problem writing the native type id
 */
protected boolean _writeTypePrefixUsingNative(WritableTypeId typeIdDef) throws JacksonException {
    // Native type ids need no structural wrapper
    typeIdDef.wrapperWritten = false;
    writeTypeId(typeIdDef.id);
    return false;
}
/**
 * Writes a wrapper for the type id if necessary.
 *
 * @param typeIdDef Full Type Id definition; its {@code include} and
 *    {@code wrapperWritten} fields may be updated as side effect
 *
 * @return True if start of an object has been written, false otherwise.
 *
 * @throws JacksonException if there is a problem writing the wrapper or type id
 */
protected boolean _writeTypePrefixUsingWrapper(WritableTypeId typeIdDef) throws JacksonException {
    // Normally we only support String type ids (non-String reserved for native type ids)
    final String id = Objects.toString(typeIdDef.id, null);
    // If we don't have Type ID we don't write a wrapper.
    if (id == null) {
        return false;
    }
    Inclusion incl = typeIdDef.include;
    // first: cannot output "as property" if value not Object; if so, must do "as array"
    if ((typeIdDef.valueShape != JsonToken.START_OBJECT) && incl.requiresObjectContext()) {
        typeIdDef.include = incl = WritableTypeId.Inclusion.WRAPPER_ARRAY;
    }
    typeIdDef.wrapperWritten = true;
    switch (incl) {
    case PARENT_PROPERTY:
        // nothing to do here, as it has to be written in suffix...
        break;
    case PAYLOAD_PROPERTY:
        // only output as native type id; otherwise caller must handle using some
        // other mechanism, so...
        break;
    case METADATA_PROPERTY:
        // must have Object context by now, so simply write as field name
        // Note, too, that it's bit tricky, since we must print START_OBJECT that is part
        // of value first -- and then NOT output it later on: hence return "early"
        writeStartObject(typeIdDef.forValue);
        writeStringProperty(typeIdDef.asProperty, id);
        return true;
    case WRAPPER_OBJECT:
        // NOTE: this is wrapper, not directly related to value to output, so
        // do NOT pass "typeIdDef.forValue"
        writeStartObject();
        writeName(id);
        break;
    case WRAPPER_ARRAY:
    default: // should never occur but translate as "as-array"
        writeStartArray(); // wrapper, not actual array object to write
        writeString(id);
    }
    return false;
}
/**
 * Method to call to complete type id handling started with matching
 * {@link #writeTypePrefix} call: closes the value itself (if Object or
 * Array), closes possible wrapper written for the type id, and -- for
 * {@code PARENT_PROPERTY} inclusion -- writes the type id itself, which
 * in that mode must come after the value.
 *
 * @param typeIdDef Full Type Id definition as returned by the earlier
 *    {@link #writeTypePrefix} call
 *
 * @return {@link WritableTypeId} passed in
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public WritableTypeId writeTypeSuffix(WritableTypeId typeIdDef) throws JacksonException
{
    final JsonToken valueShape = typeIdDef.valueShape;
    // First: does value need closing?
    if (valueShape == JsonToken.START_OBJECT) {
        writeEndObject();
    } else if (valueShape == JsonToken.START_ARRAY) {
        writeEndArray();
    }
    if (typeIdDef.wrapperWritten) {
        switch (typeIdDef.include) {
        case WRAPPER_ARRAY:
            writeEndArray();
            break;
        case PARENT_PROPERTY:
            // unusually, need to output AFTER value. And no real wrapper...
            {
                Object id = typeIdDef.id;
                String idStr = (id instanceof String) ? (String) id : String.valueOf(id);
                writeStringProperty(typeIdDef.asProperty, idStr);
            }
            break;
        case METADATA_PROPERTY:
        case PAYLOAD_PROPERTY:
            // no actual wrapper; included within Object itself
            break;
        case WRAPPER_OBJECT:
        default: // should never occur but...
            writeEndObject();
            break;
        }
    }
    return typeIdDef;
}
/*
/**********************************************************************
/* Public API, write methods, serializing Java objects
/**********************************************************************
*/
/**
 * Method for writing given Java object (POJO) as tokens into
 * the stream this generator manages; serialization must be a valid JSON Value
 * (Object, Array, null, Number, String or Boolean).
 * This is done by delegating call to
 * {@link ObjectWriteContext#writeValue(JsonGenerator, Object)}.
 *<p>
 * NOTE: this method does not trigger flushing of the generator (none of
 * {@link JsonGenerator} {@code writeXxx} methods do) in Jackson 3.x:
 * this is different from behavior in Jackson 2.x where a side effect
 * of delegation to {@code ObjectMapper} caused flushing to occur.
 *
 * @param pojo Java Object (POJO) value to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writePOJO(Object pojo) throws JacksonException;
/**
 * Method for writing given JSON tree (expressed as a tree
 * where the given {@code TreeNode} is the root) using this generator.
 * This is done by delegating call to
 * {@link ObjectWriteContext#writeTree}.
 *<p>
 * NOTE: this method does not trigger flushing of the generator (none of
 * {@link JsonGenerator} {@code writeXxx} methods do) in Jackson 3.x:
 * this is different from behavior in Jackson 2.x where a side effect
 * of delegation to {@code ObjectMapper} caused flushing to occur.
 *
 * @param rootNode {@link TreeNode} to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public abstract JsonGenerator writeTree(TreeNode rootNode) throws JacksonException;
/*
/**********************************************************************
/* Public API, convenience property write methods
/**********************************************************************
*/
// 25-May-2020, tatu: NOTE! Made `final` on purpose in 3.x to prevent issues
// rising from complexity of overriding only some of methods (writeName()
// and matching writeXxx() for value)
// 25-May-2020, tatu: NOTE! Made `final` on purpose in 3.x to prevent issues
//   rising from complexity of overriding only some of methods (writeName()
//   and matching writeXxx() for value)
/**
 * Convenience method that writes an Object property whose value is
 * given binary data in base64-encoded form; shorthand for calling
 * {@link #writeName(String)} followed by {@link #writeBinary(byte[])}.
 *
 * @param propertyName Name of Object Property to write
 * @param data Binary value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeBinaryProperty(String propertyName, byte[] data) throws JacksonException {
    return writeName(propertyName).writeBinary(data);
}
/**
 * Convenience method that writes an Object property with a boolean value;
 * shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeBoolean(boolean)}.
 *
 * @param propertyName Name of Object Property to write
 * @param value Boolean value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeBooleanProperty(String propertyName, boolean value) throws JacksonException {
    return writeName(propertyName).writeBoolean(value);
}
/**
 * Convenience method that writes an Object property with JSON literal
 * {@code null} as its value; shorthand for calling
 * {@link #writeName(String)} followed by {@link #writeNull()}.
 *
 * @param propertyName Name of the null-valued property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNullProperty(String propertyName) throws JacksonException {
    return writeName(propertyName).writeNull();
}
/**
 * Convenience method that writes an Object property with a String value;
 * shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeString(String)}.
 *
 * @param propertyName Name of the property to write
 * @param value String value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeStringProperty(String propertyName, String value) throws JacksonException {
    return writeName(propertyName).writeString(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@code writeNumber(value)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, short value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeNumber(int)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, int value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeNumber(long)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, long value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeNumber(BigInteger)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, BigInteger value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeNumber(float)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, float value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeNumber(double)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, double value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property with given numeric
 * value; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeNumber(BigDecimal)}.
 *
 * @param propertyName Name of the property to write
 * @param value Numeric value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeNumberProperty(String propertyName, BigDecimal value) throws JacksonException {
    return writeName(propertyName).writeNumber(value);
}
/**
 * Convenience method that writes an Object property name (for a property
 * that will contain a JSON Array value) followed by the START_ARRAY
 * marker; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeStartArray()}.
 *<p>
 * Note: caller still has to take care to close the array
 * (by calling {@link #writeEndArray}) after writing all values
 * of the value Array.
 *
 * @param propertyName Name of the Array property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeArrayPropertyStart(String propertyName) throws JacksonException {
    return writeName(propertyName).writeStartArray();
}
/**
 * Convenience method that writes an Object property name (for a property
 * that will contain an Object value) followed by the START_OBJECT
 * marker; shorthand for calling {@link #writeName(String)} followed by
 * {@link #writeStartObject()}.
 *<p>
 * Note: caller still has to take care to close the Object
 * (by calling {@link #writeEndObject}) after writing all
 * entries of the value Object.
 *
 * @param propertyName Name of the Object property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writeObjectPropertyStart(String propertyName) throws JacksonException {
    return writeName(propertyName).writeStartObject();
}
/**
 * Convenience method that writes an Object property whose value is the
 * serialization of given Java object (POJO); shorthand for calling
 * {@link #writeName(String)} followed by {@link #writePOJO(Object)}.
 *<p>
 * NOTE: see {@link #writePOJO(Object)} for details on how POJO value actually
 * gets written (uses delegation).
 *
 * @param propertyName Name of the property to write
 * @param pojo POJO value of the property to write
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public final JsonGenerator writePOJOProperty(String propertyName, Object pojo) throws JacksonException {
    return writeName(propertyName).writePOJO(pojo);
}
// // // But this method does need to be delegate so...
/**
 * Method called to indicate that a property in this position was
 * skipped. It is usually only called for generators that return
 * <code>false</code> from {@link #canOmitProperties()}.
 *<p>
 * Default implementation does nothing; method is overridden by some format
 * backends.
 *
 * @param propertyName Name of the property that is being omitted
 *
 * @return This generator, to allow call chaining
 *
 * @throws JacksonIOException if there is an underlying I/O problem
 * @throws StreamWriteException for problems in encoding token stream
 */
public JsonGenerator writeOmittedProperty(String propertyName) throws JacksonException {
    return this;
}
/*
/**********************************************************************
/* Public API, copy-through methods
/*
/* NOTE: need to remain here for `JsonGeneratorDelegate` to call
/* (or refactor to have "JsonGeneratorMinimalBase" or such)
/**********************************************************************
*/
/**
 * Method for copying contents of the current event that
 * the given parser instance points to.
 * Note that the method <b>will not</b> copy any other events,
 * such as events contained within JSON Array or Object structures.
 *<p>
 * Calling this method will not advance the given
 * parser, although it may cause parser to internally process
 * more data (if it lazy loads contents of value events, for example)
 *<p>
 * NOTE: floating-point values are copied using possibly-lossy
 * representation; see {@link #copyCurrentEventExact} for a variant that
 * tries to preserve exact representation.
 *
 * @param p Parser that points to the event to copy
 *
 * @throws JacksonIOException if there is an underlying I/O problem (reading or writing)
 * @throws StreamReadException for problems with decoding of token stream
 * @throws StreamWriteException for problems in encoding token stream
 *
 * @see #copyCurrentEventExact
 */
public void copyCurrentEvent(JsonParser p) throws JacksonException
{
    JsonToken t = p.currentToken();
    final int token = (t == null) ? ID_NOT_AVAILABLE : t.id();
    switch (token) {
    case ID_NOT_AVAILABLE:
        _reportError("No current event to copy");
        break; // never gets here
    case ID_START_OBJECT:
        writeStartObject();
        break;
    case ID_END_OBJECT:
        writeEndObject();
        break;
    case ID_START_ARRAY:
        writeStartArray();
        break;
    case ID_END_ARRAY:
        writeEndArray();
        break;
    case ID_PROPERTY_NAME:
        writeName(p.currentName());
        break;
    case ID_STRING:
        _copyCurrentStringValue(p);
        break;
    case ID_NUMBER_INT:
        _copyCurrentIntValue(p);
        break;
    case ID_NUMBER_FLOAT:
        // Different from "copyCurrentEventExact"!
        _copyCurrentFloatValue(p);
        break;
    case ID_TRUE:
        writeBoolean(true);
        break;
    case ID_FALSE:
        writeBoolean(false);
        break;
    case ID_NULL:
        writeNull();
        break;
    case ID_EMBEDDED_OBJECT:
        writePOJO(p.getEmbeddedObject());
        break;
    default:
        throw new IllegalStateException("Internal error: unknown current token, "+t);
    }
}
/**
 * Same as {@link #copyCurrentEvent} with the exception that copying of numeric
 * values tries to avoid any conversion losses; in particular for floating-point
 * numbers. This usually matters when transcoding from textual format like JSON
 * to a binary format.
 * See {@link #_copyCurrentFloatValueExact} for details.
 *
 * @param p Parser that points to the event to copy
 *
 * @throws JacksonIOException if there is an underlying I/O problem (reading or writing)
 * @throws StreamReadException for problems with decoding of token stream
 * @throws StreamWriteException for problems in encoding token stream
 *
 * @see #copyCurrentEvent
 */
public void copyCurrentEventExact(JsonParser p) throws JacksonException
{
    JsonToken t = p.currentToken();
    final int token = (t == null) ? ID_NOT_AVAILABLE : t.id();
    switch (token) {
    case ID_NOT_AVAILABLE:
        _reportError("No current event to copy");
        break; // never gets here
    case ID_START_OBJECT:
        writeStartObject();
        break;
    case ID_END_OBJECT:
        writeEndObject();
        break;
    case ID_START_ARRAY:
        writeStartArray();
        break;
    case ID_END_ARRAY:
        writeEndArray();
        break;
    case ID_PROPERTY_NAME:
        writeName(p.currentName());
        break;
    case ID_STRING:
        _copyCurrentStringValue(p);
        break;
    case ID_NUMBER_INT:
        _copyCurrentIntValue(p);
        break;
    case ID_NUMBER_FLOAT:
        // Different from "copyCurrentEvent"!
        _copyCurrentFloatValueExact(p);
        break;
    case ID_TRUE:
        writeBoolean(true);
        break;
    case ID_FALSE:
        writeBoolean(false);
        break;
    case ID_NULL:
        writeNull();
        break;
    case ID_EMBEDDED_OBJECT:
        writePOJO(p.getEmbeddedObject());
        break;
    default:
        throw new IllegalStateException("Internal error: unknown current token, "+t);
    }
}
/**
 * Method for copying the current event <b>and all events it encloses</b>
 * from the given parser.
 *<p>
 * Enclosing events and what gets copied along with them:
 *<ul>
 * <li>{@link JsonToken#START_OBJECT}:
 *   all events up to and including the matching (closing)
 *   {@link JsonToken#END_OBJECT}
 * </li>
 * <li>{@link JsonToken#START_ARRAY}:
 *   all events up to and including the matching (closing)
 *   {@link JsonToken#END_ARRAY}
 * </li>
 * <li>{@link JsonToken#PROPERTY_NAME}: the name along with its complete
 *   logical value (a single scalar, or a full structured value for JSON
 *   Arrays and Objects) -- that is, the whole <b>Object property</b>
 * </li>
 *</ul>
 *<p>
 * After calling this method, the parser points to the <b>last event</b>
 * that was copied: either the event it originally pointed to (if there
 * were no enclosed events), or the last enclosed event copied.
 *
 * @param p Parser that points to the value to copy
 *
 * @throws JacksonIOException if there is an underlying I/O problem (reading or writing)
 * @throws StreamReadException for problems with decoding of token stream
 * @throws StreamWriteException for problems in encoding token stream
 */
public void copyCurrentStructure(JsonParser p) throws JacksonException
{
    JsonToken t = p.currentToken();
    int id = (t == null) ? ID_NOT_AVAILABLE : t.id();
    // Property name gets copied first; then advance so its value is copied below
    if (id == ID_PROPERTY_NAME) {
        writeName(p.currentName());
        t = p.nextToken();
        id = (t == null) ? ID_NOT_AVAILABLE : t.id();
    }
    if (id == ID_START_OBJECT) {
        writeStartObject();
        _copyCurrentContents(p);
    } else if (id == ID_START_ARRAY) {
        writeStartArray();
        _copyCurrentContents(p);
    } else {
        // scalar (or unavailable): single-event copy handles it
        copyCurrentEvent(p);
    }
}
/**
 * Same as {@link #copyCurrentStructure} with the exception that copying of numeric
 * values tries to avoid any conversion losses; in particular for floating-point
 * numbers. This usually matters when transcoding from textual format like JSON
 * to a binary format.
 * See {@link #_copyCurrentFloatValueExact} for details.
 *
 * @param p Parser that points to the value to copy
 *
 * @throws JacksonIOException if there is an underlying I/O problem (reading or writing)
 * @throws StreamReadException for problems with decoding of token stream
 * @throws StreamWriteException for problems in encoding token stream
 */
public void copyCurrentStructureExact(JsonParser p) throws JacksonException
{
    JsonToken t = p.currentToken();
    int id = (t == null) ? ID_NOT_AVAILABLE : t.id();
    // Property name gets copied first; then advance so its value is copied below
    if (id == ID_PROPERTY_NAME) {
        writeName(p.currentName());
        t = p.nextToken();
        id = (t == null) ? ID_NOT_AVAILABLE : t.id();
    }
    if (id == ID_START_OBJECT) {
        writeStartObject();
        _copyCurrentContentsExact(p);
    } else if (id == ID_START_ARRAY) {
        writeStartArray();
        _copyCurrentContentsExact(p);
    } else {
        // scalar (or unavailable): single-event copy handles it
        copyCurrentEventExact(p);
    }
}
/**
 * Helper method for copying contents of a structured (Object or Array)
 * value the given parser points into -- the START token itself must
 * already have been copied by the caller -- up to and including the
 * matching END token.
 *
 * @param p Parser positioned inside the structured value being copied
 *
 * @throws JacksonException if there is a problem reading from {@code p}
 *    or writing to this generator
 */
protected void _copyCurrentContents(JsonParser p) throws JacksonException
{
    // depth tracks nesting relative to the already-copied START token
    int depth = 1;
    JsonToken t;
    // Mostly copied from `copyCurrentEvent()`, but with added nesting counts
    while ((t = p.nextToken()) != null) {
        switch (t.id()) {
        case ID_PROPERTY_NAME:
            writeName(p.currentName());
            break;
        case ID_START_ARRAY:
            writeStartArray();
            ++depth;
            break;
        case ID_START_OBJECT:
            writeStartObject();
            ++depth;
            break;
        case ID_END_ARRAY:
            writeEndArray();
            if (--depth == 0) {
                return;
            }
            break;
        case ID_END_OBJECT:
            writeEndObject();
            if (--depth == 0) {
                return;
            }
            break;
        case ID_STRING:
            _copyCurrentStringValue(p);
            break;
        case ID_NUMBER_INT:
            _copyCurrentIntValue(p);
            break;
        case ID_NUMBER_FLOAT:
            _copyCurrentFloatValue(p);
            break;
        case ID_TRUE:
            writeBoolean(true);
            break;
        case ID_FALSE:
            writeBoolean(false);
            break;
        case ID_NULL:
            writeNull();
            break;
        case ID_EMBEDDED_OBJECT:
            writePOJO(p.getEmbeddedObject());
            break;
        default:
            throw new IllegalStateException("Internal error: unknown current token, "+t);
        }
    }
}
protected void _copyCurrentContentsExact(JsonParser p) throws JacksonException
{
int depth = 1;
JsonToken t;
// Mostly copied from `copyCurrentEventExact()`, but with added nesting counts
while ((t = p.nextToken()) != null) {
switch (t.id()) {
case ID_PROPERTY_NAME:
writeName(p.currentName());
break;
case ID_START_ARRAY:
writeStartArray();
++depth;
break;
case ID_START_OBJECT:
writeStartObject();
++depth;
break;
case ID_END_ARRAY:
writeEndArray();
if (--depth == 0) {
return;
}
break;
case ID_END_OBJECT:
writeEndObject();
if (--depth == 0) {
return;
}
break;
case ID_STRING:
_copyCurrentStringValue(p);
break;
case ID_NUMBER_INT:
_copyCurrentIntValue(p);
break;
case ID_NUMBER_FLOAT:
_copyCurrentFloatValueExact(p);
break;
case ID_TRUE:
writeBoolean(true);
break;
case ID_FALSE:
writeBoolean(false);
break;
case ID_NULL:
writeNull();
break;
case ID_EMBEDDED_OBJECT:
writePOJO(p.getEmbeddedObject());
break;
default:
throw new IllegalStateException("Internal error: unknown current token, "+t);
}
}
}
/**
* Method for copying current {@link JsonToken#VALUE_NUMBER_FLOAT} value;
* overridable by format backend implementations.
* Implementation checks
* {@link JsonParser#getNumberType()} for declared type and uses matching
* accessors: this may cause inexact conversion for some textual formats
* (depending on settings). If this is problematic, use
* {@link #_copyCurrentFloatValueExact} instead (note that doing so may add
* overhead).
*
* @param p Parser that points to the value to copy
*/
protected void _copyCurrentFloatValue(JsonParser p) throws JacksonException
{
NumberType t = p.getNumberType();
if (t == NumberType.BIG_DECIMAL) {
writeNumber(p.getDecimalValue());
} else if (t == NumberType.FLOAT) {
writeNumber(p.getFloatValue());
} else {
writeNumber(p.getDoubleValue());
}
}
/**
* Method for copying current {@link JsonToken#VALUE_NUMBER_FLOAT} value;
* overridable by format backend implementations.
* Implementation ensures it uses most accurate accessors necessary to retain
* exact value in case of possible numeric conversion: in practice this means
* that {@link BigDecimal} is usually used as the representation accessed from
* {@link JsonParser}, regardless of whether {@link Double} might be accurate
* (since detecting lossy conversion is not possible to do efficiently).
* If minimal overhead is desired, use {@link #_copyCurrentFloatValue} instead.
*
* @param p Parser that points to the value to copy
*
* @since 2.15
*/
protected void _copyCurrentFloatValueExact(JsonParser p) throws JacksonException
{
Number n = p.getNumberValueExact();
if (n instanceof BigDecimal) {
writeNumber((BigDecimal) n);
} else if (n instanceof Double) {
writeNumber(n.doubleValue());
} else {
writeNumber(n.floatValue());
}
}
/**
* Method for copying current {@link JsonToken#VALUE_NUMBER_FLOAT} value;
* overridable by format backend implementations.
*
* @param p Parser that points to the value to copy
*/
protected void _copyCurrentIntValue(JsonParser p) throws JacksonException
{
NumberType n = p.getNumberType();
if (n == NumberType.INT) {
writeNumber(p.getIntValue());
} else if (n == NumberType.LONG) {
writeNumber(p.getLongValue());
} else {
writeNumber(p.getBigIntegerValue());
}
}
/**
* Method for copying current {@link JsonToken#VALUE_STRING} value;
* overridable by format backend implementations.
*
* @param p Parser that points to the value to copy
*/
protected void _copyCurrentStringValue(JsonParser p) throws JacksonException
{
if (p.hasStringCharacters()) {
writeString(p.getStringCharacters(), p.getStringOffset(), p.getStringLength());
} else {
writeString(p.getString());
}
}
/*
/**********************************************************************
/* Public API, buffer handling
/**********************************************************************
*/
/**
* Method called to flush any buffered content to the underlying
* target (output stream, writer), and to flush the target itself
* as well.
*/
@Override
public abstract void flush();
/**
* Method that can be called to determine whether this generator
* is closed or not. If it is closed, no more output can be done.
*
* @return {@code True} if this generator has been closed; {@code false} if not
*/
public abstract boolean isClosed();
/*
/**********************************************************************
/* Closeable implementation
/**********************************************************************
*/
/**
* Method called to close this generator, so that no more content
* can be written.
*<p>
* Whether the underlying target (stream, writer) gets closed depends
* on whether this generator either manages the target (i.e. is the
* only one with access to the target -- case if caller passes a
* reference to the resource such as File, but not stream); or
* has feature {@link StreamWriteFeature#AUTO_CLOSE_TARGET} enabled.
* If either of above is true, the target is also closed. Otherwise
* (not managing, feature not enabled), target is not closed.
*/
@Override
public abstract void close();
/*
/**********************************************************************
/* Helper methods for sub-classes
/*
/* NOTE: some could be moved out in 3.0 if there was "JsonGeneratorMinimalBase"
/**********************************************************************
*/
/**
* Helper method used for constructing and throwing
* {@link StreamWriteException} with given message.
*<p>
* Note that sub-classes may override this method to add more detail
* or use a {@link StreamWriteException} sub-class.
*
* @param <T> Bogus type parameter to "return anything" so that compiler
* won't complain when chaining calls
*
* @param msg Message to construct exception with
*
* @return Does not return at all as exception is always thrown, but nominally returns "anything"
*
* @throws StreamWriteException that was constructed with given message
*/
protected <T> T _reportError(String msg) throws StreamWriteException {
throw _constructWriteException(msg);
}
protected <T> T _reportUnsupportedOperation() {
return _reportUnsupportedOperation("Operation not supported by `JsonGenerator` of type "+getClass().getName());
}
protected <T> T _reportUnsupportedOperation(String msg) {
throw new UnsupportedOperationException(msg);
}
/**
* Helper method used for constructing and throwing
* {@link StreamWriteException} with given message, in cases where
* argument(s) used for a call (usually one of {@code writeXxx()} methods)
* is invalid.
* Default implementation simply delegates to {@link #_reportError(String)}.
*
* @param <T> Bogus type parameter to "return anything" so that compiler
* won't complain when chaining calls
*
* @param msg Message to construct exception with
*
* @return Does not return at all as exception is always thrown, but nominally returns "anything"
*
* @throws StreamWriteException that was constructed with given message
*/
protected <T> T _reportArgumentError(String msg) throws StreamWriteException {
return _reportError(msg);
}
// @since 3.0
protected StreamWriteException _constructWriteException(String msg) {
return new StreamWriteException(this, msg);
}
protected StreamWriteException _constructWriteException(String msg, Object arg) {
return _constructWriteException(String.format(msg, arg));
}
protected StreamWriteException _constructWriteException(String msg, Object arg1, Object arg2) {
return _constructWriteException(String.format(msg, arg1, arg2));
}
protected StreamWriteException _constructWriteException(String msg, Throwable t) {
return new StreamWriteException(this, msg, t);
}
// @since 3.0
protected JacksonException _wrapIOFailure(IOException e) {
return JacksonIOException.construct(e, this);
}
protected final void _verifyOffsets(int arrayLength, int offset, int length)
{
if ((offset < 0) || (offset + length) > arrayLength) {
throw new IllegalArgumentException(String.format(
"invalid argument(s) (offset=%d, length=%d) for input array of %d element",
offset, length, arrayLength));
}
}
}
| JsonGenerator |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FunctionGraphEndpointBuilderFactory.java | {
"start": 12089,
"end": 13852
} | interface ____ {
/**
* Huawei FunctionGraph (camel-huaweicloud-functiongraph)
* To call serverless functions on Huawei Cloud
*
* Category: cloud,serverless
* Since: 3.11
* Maven coordinates: org.apache.camel:camel-huaweicloud-functiongraph
*
* Syntax: <code>hwcloud-functiongraph:operation</code>
*
* Path parameter: operation (required)
* Operation to be performed
*
* @param path operation
* @return the dsl builder
*/
default FunctionGraphEndpointBuilder hwcloudFunctiongraph(String path) {
return FunctionGraphEndpointBuilderFactory.endpointBuilder("hwcloud-functiongraph", path);
}
/**
* Huawei FunctionGraph (camel-huaweicloud-functiongraph)
* To call serverless functions on Huawei Cloud
*
* Category: cloud,serverless
* Since: 3.11
* Maven coordinates: org.apache.camel:camel-huaweicloud-functiongraph
*
* Syntax: <code>hwcloud-functiongraph:operation</code>
*
* Path parameter: operation (required)
* Operation to be performed
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path operation
* @return the dsl builder
*/
default FunctionGraphEndpointBuilder hwcloudFunctiongraph(String componentName, String path) {
return FunctionGraphEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static FunctionGraphEndpointBuilder endpointBuilder(String componentName, String path) {
| FunctionGraphBuilders |
java | spring-projects__spring-boot | module/spring-boot-persistence/src/test/java/org/springframework/boot/persistence/autoconfigure/EntityScannerTests.java | {
"start": 2117,
"end": 6139
} | class ____ {
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenContextIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> new EntityScanner(null))
.withMessageContaining("'context' must not be null");
}
@Test
void scanShouldScanFromSinglePackage() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ScanConfig.class);
EntityScanner scanner = new EntityScanner(context);
Set<Class<?>> scanned = scanner.scan(Entity.class);
assertThat(scanned).containsOnly(EntityA.class, EntityB.class, EntityC.class);
context.close();
}
@Test
void scanShouldScanFromResolvedPlaceholderPackage() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
TestPropertyValues.of("com.example.entity-package=org.springframework.boot.persistence.autoconfigure.scan")
.applyTo(context);
context.register(ScanPlaceholderConfig.class);
context.refresh();
EntityScanner scanner = new EntityScanner(context);
Set<Class<?>> scanned = scanner.scan(Entity.class);
assertThat(scanned).containsOnly(EntityA.class, EntityB.class, EntityC.class);
context.close();
}
@Test
void scanShouldScanFromMultiplePackages() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ScanAConfig.class,
ScanBConfig.class);
EntityScanner scanner = new EntityScanner(context);
Set<Class<?>> scanned = scanner.scan(Entity.class);
assertThat(scanned).containsOnly(EntityA.class, EntityB.class);
context.close();
}
@Test
void scanShouldFilterOnAnnotation() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ScanConfig.class);
EntityScanner scanner = new EntityScanner(context);
assertThat(scanner.scan(Entity.class)).containsOnly(EntityA.class, EntityB.class, EntityC.class);
assertThat(scanner.scan(Embeddable.class)).containsOnly(EmbeddableA.class, EmbeddableB.class,
EmbeddableC.class);
assertThat(scanner.scan(Entity.class, Embeddable.class)).containsOnly(EntityA.class, EntityB.class,
EntityC.class, EmbeddableA.class, EmbeddableB.class, EmbeddableC.class);
context.close();
}
@Test
void scanShouldUseCustomCandidateComponentProvider() throws ClassNotFoundException {
ClassPathScanningCandidateComponentProvider candidateComponentProvider = mock(
ClassPathScanningCandidateComponentProvider.class);
given(candidateComponentProvider
.findCandidateComponents("org.springframework.boot.persistence.autoconfigure.scan"))
.willReturn(Collections.emptySet());
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ScanConfig.class);
TestEntityScanner scanner = new TestEntityScanner(context, candidateComponentProvider);
scanner.scan(Entity.class);
then(candidateComponentProvider).should()
.addIncludeFilter(
assertArg((typeFilter) -> assertThat(typeFilter).isInstanceOfSatisfying(AnnotationTypeFilter.class,
(filter) -> assertThat(filter.getAnnotationType()).isEqualTo(Entity.class))));
then(candidateComponentProvider).should()
.findCandidateComponents("org.springframework.boot.persistence.autoconfigure.scan");
then(candidateComponentProvider).shouldHaveNoMoreInteractions();
}
@Test
void scanShouldScanCommaSeparatedPackagesInPlaceholderPackage() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
TestPropertyValues
.of("com.example.entity-package=org.springframework.boot.persistence.autoconfigure.scan.a,org.springframework.boot.persistence.autoconfigure.scan.b")
.applyTo(context);
context.register(ScanPlaceholderConfig.class);
context.refresh();
EntityScanner scanner = new EntityScanner(context);
Set<Class<?>> scanned = scanner.scan(Entity.class);
assertThat(scanned).containsOnly(EntityA.class, EntityB.class);
context.close();
}
private static | EntityScannerTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java | {
"start": 33470,
"end": 33865
} | class ____ encapsulates the functionality required to
* auto renew Kerbeors TGT. The concrete implementations of this class
* are expected to provide implementation required to perform actual
* TGT renewal (see {@code TicketCacheRenewalRunnable} and
* {@code KeytabRenewalRunnable}).
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
@VisibleForTesting
abstract | which |
java | google__guice | core/src/com/google/inject/internal/BindingProcessor.java | {
"start": 1493,
"end": 12162
} | class ____ extends AbstractBindingProcessor {
private final Initializer initializer;
BindingProcessor(
Errors errors, Initializer initializer, ProcessedBindingData processedBindingData) {
super(errors, processedBindingData);
this.initializer = initializer;
}
@Override
public <T> Boolean visit(Binding<T> command) {
Class<?> rawType = command.getKey().getTypeLiteral().getRawType();
if (Void.class.equals(rawType)) {
if (command instanceof ProviderInstanceBinding
&& ((ProviderInstanceBinding) command).getUserSuppliedProvider()
instanceof ProviderMethod) {
errors.voidProviderMethod();
} else {
errors.missingConstantValues();
}
return true;
}
if (rawType == Provider.class) {
errors.bindingToProvider();
return true;
}
return command.acceptTargetVisitor(
new Processor<T, Boolean>((BindingImpl<T>) command) {
@Override
public Boolean visit(ConstructorBinding<? extends T> binding) {
prepareBinding();
try {
ConstructorBindingImpl<T> onInjector =
ConstructorBindingImpl.create(
injector,
key,
binding.getConstructor(),
source,
scoping,
errors,
false,
false);
scheduleInitialization(onInjector);
putBinding(onInjector);
} catch (ErrorsException e) {
errors.merge(e.getErrors());
putBinding(invalidBinding(injector, key, source));
}
return true;
}
@Override
public Boolean visit(InstanceBinding<? extends T> binding) {
prepareBinding();
Set<InjectionPoint> injectionPoints = binding.getInjectionPoints();
T instance = binding.getInstance();
// Note: We cannot use type=`binding.getKey().getTypeLiteral()`, even though it would
// be more accurate, because it's very likely that people do:
// bind(new TypeLiteral<Foo>() {}).toInstance(someFooBarInstance);
// bind(new TypeLiteral<Bar>() {}).toInstance(someFooBarInstance);
// ... and if we captured the exact type when passing to requestInjection, then we'd
// fail because the same instance requested injection from two different types.
@SuppressWarnings("unchecked") // safe because processor was constructed w/ it
Binding<T> bindingT = (Binding<T>) binding;
Optional<Initializable<T>> ref =
initializer.requestInjection(
injector,
/* type= */ null,
instance,
bindingT,
source,
injectionPoints,
errors);
InternalFactory<? extends T> factory =
ref.isPresent()
? new InitializableFactory<>(ref.get())
: ConstantFactory.create(instance, source);
InternalFactory<? extends T> scopedFactory =
Scoping.scope(key, injector, factory, source, scoping);
putBinding(
new InstanceBindingImpl<T>(
injector, key, source, scopedFactory, injectionPoints, instance));
return true;
}
@Override
public Boolean visit(ProviderInstanceBinding<? extends T> binding) {
prepareBinding();
@SuppressWarnings("unchecked") // always visited with Binding<T>
var provider = (jakarta.inject.Provider<T>) binding.getUserSuppliedProvider();
// This is how we support ProviderMethods and native multibinders. By detecting them
// here, we can leverage faster `InternalFactory` implementations.
if (provider instanceof InternalProviderInstanceBindingImpl.Factory) {
InternalProviderInstanceBindingImpl.Factory<T> asProviderMethod =
(InternalProviderInstanceBindingImpl.Factory<T>) provider;
// Try to claim it exclusively, otherwise we will treat it as a normal provider
// instance binding. This prevents undefined behavior where multiple injectors
// race to initialize the binding based on different settings.
if (asProviderMethod.bindToInjector(injector)) {
return visitInternalProviderInstanceBindingFactory(asProviderMethod);
}
}
Set<InjectionPoint> injectionPoints = binding.getInjectionPoints();
Optional<Initializable<jakarta.inject.Provider<T>>> initializable =
initializer.requestInjection(
injector, /* type= */ null, provider, null, source, injectionPoints, errors);
@SuppressWarnings("unchecked") // always visited with Binding<T>
ProvisionListenerStackCallback<T> listener =
injector.provisionListenerStore.get((Binding<T>) binding);
int circularFactoryId = injector.circularFactoryIdFactory.next();
Class<? super T> rawType = key.getTypeLiteral().getRawType();
InternalFactory<T> factory =
(initializable.isPresent()
? new InternalFactoryToInitializableAdapter<T>(
rawType, initializable.get(), source, listener, circularFactoryId)
: new ConstantProviderInternalFactory<T>(
rawType, provider, source, listener, circularFactoryId));
InternalFactory<? extends T> scopedFactory =
Scoping.scope(key, injector, factory, source, scoping);
putBinding(
new ProviderInstanceBindingImpl<T>(
injector, key, source, scopedFactory, scoping, provider, injectionPoints));
return true;
}
@Override
public Boolean visit(ProviderKeyBinding<? extends T> binding) {
prepareBinding();
Key<? extends jakarta.inject.Provider<? extends T>> providerKey =
binding.getProviderKey();
// always visited with Binding<T>
@SuppressWarnings("unchecked")
BoundProviderFactory<T> boundProviderFactory =
new BoundProviderFactory<T>(
key.getTypeLiteral().getRawType(),
injector,
providerKey,
source,
injector.provisionListenerStore.get((ProviderKeyBinding<T>) binding));
processedBindingData.addCreationListener(boundProviderFactory);
InternalFactory<? extends T> scopedFactory =
Scoping.scope(
key,
injector,
(InternalFactory<? extends T>) boundProviderFactory,
source,
scoping);
putBinding(
new LinkedProviderBindingImpl<T>(
injector, key, source, scopedFactory, scoping, providerKey));
return true;
}
@Override
public Boolean visit(LinkedKeyBinding<? extends T> binding) {
prepareBinding();
Key<? extends T> linkedKey = binding.getLinkedKey();
if (key.equals(linkedKey)) {
// TODO: b/168656899 check for transitive recursive binding
errors.recursiveBinding(key, linkedKey);
}
FactoryProxy<T> factory = new FactoryProxy<>(injector, key, linkedKey, source);
processedBindingData.addCreationListener(factory);
InternalFactory<? extends T> scopedFactory =
Scoping.scope(key, injector, factory, source, scoping);
putBinding(
new LinkedBindingImpl<T>(injector, key, source, scopedFactory, scoping, linkedKey));
return true;
}
/** Handle ProviderMethods specially. */
private Boolean visitInternalProviderInstanceBindingFactory(
InternalProviderInstanceBindingImpl.Factory<T> provider) {
InternalProviderInstanceBindingImpl<T> binding =
new InternalProviderInstanceBindingImpl<T>(
injector,
key,
source,
provider,
Scoping.scope(key, injector, provider, source, scoping),
scoping);
switch (binding.getInitializationTiming()) {
case DELAYED:
scheduleDelayedInitialization(binding);
break;
case EAGER:
scheduleInitialization(binding);
break;
default:
throw new AssertionError();
}
putBinding(binding);
return true;
}
@Override
public Boolean visit(UntargettedBinding<? extends T> untargetted) {
return false;
}
@Override
public Boolean visit(ExposedBinding<? extends T> binding) {
throw new IllegalArgumentException("Cannot apply a non-module element");
}
@Override
public Boolean visit(ConvertedConstantBinding<? extends T> binding) {
throw new IllegalArgumentException("Cannot apply a non-module element");
}
@Override
public Boolean visit(ProviderBinding<? extends T> binding) {
throw new IllegalArgumentException("Cannot apply a non-module element");
}
@Override
protected Boolean visitOther(Binding<? extends T> binding) {
throw new IllegalStateException("BindingProcessor should override all visitations");
}
});
}
@Override
public Boolean visit(PrivateElements privateElements) {
for (Key<?> key : privateElements.getExposedKeys()) {
bindExposed(privateElements, key);
}
return false; // leave the private elements for the PrivateElementsProcessor to handle
}
private <T> void bindExposed(PrivateElements privateElements, Key<T> key) {
Object source = privateElements.getExposedSource(key);
ExposedKeyFactory<T> exposedKeyFactory = new ExposedKeyFactory<>(key, source, privateElements);
processedBindingData.addCreationListener(exposedKeyFactory);
putBinding(
new ExposedBindingImpl<T>(injector, source, key, exposedKeyFactory, privateElements));
}
}
| BindingProcessor |
java | quarkusio__quarkus | extensions/oidc/runtime-dev/src/main/java/io/quarkus/oidc/runtime/dev/ui/OidcDevServicesUtils.java | {
"start": 797,
"end": 11253
} | class ____ {
private static final Logger LOG = Logger.getLogger(OidcDevServicesUtils.class);
private static final String APPLICATION_JSON = "application/json";
private OidcDevServicesUtils() {
}
public static WebClient createWebClient(Vertx vertx) {
WebClientOptions options = new WebClientOptions();
options.setTrustAll(true);
options.setVerifyHost(false);
Config config = ConfigProvider.getConfig();
config.getOptionalValue("quarkus.oidc.proxy.host", String.class)
.ifPresent(proxyHost -> {
OidcCommonConfig.Proxy proxyConf = new OidcCommonConfig.Proxy() {
@Override
public Optional<String> host() {
return Optional.of(proxyHost);
}
@Override
public int port() {
return config.getOptionalValue("quarkus.oidc.proxy.port", Integer.class).orElse(80);
}
@Override
public Optional<String> username() {
return config.getOptionalValue("quarkus.oidc.proxy.username", String.class);
}
@Override
public Optional<String> password() {
return config.getOptionalValue("quarkus.oidc.proxy.password", String.class);
}
};
options.setProxyOptions(OidcCommonUtils.toProxyOptions(proxyConf).orElse(null));
});
return WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options);
}
public static Uni<String> getPasswordAccessToken(WebClient client,
String tokenUrl,
String clientId,
String clientSecret,
String userName,
String userPassword,
Map<String, String> passwordGrantOptions) {
HttpRequest<Buffer> request = client.postAbs(tokenUrl);
request.putHeader(HttpHeaders.CONTENT_TYPE.toString(), HttpHeaders.APPLICATION_X_WWW_FORM_URLENCODED.toString());
io.vertx.mutiny.core.MultiMap props = new io.vertx.mutiny.core.MultiMap(MultiMap.caseInsensitiveMultiMap());
props.add("client_id", clientId);
if (clientSecret != null) {
props.add("client_secret", clientSecret);
}
props.add("username", userName);
props.add("password", userPassword);
props.add("grant_type", "password");
if (passwordGrantOptions != null) {
props.addAll(passwordGrantOptions);
}
return request.sendBuffer(OidcCommonUtils.encodeForm(props)).onItem()
.transform(resp -> getAccessTokenFromJson(resp))
.onFailure()
.retry()
.withBackOff(Duration.ofSeconds(2), Duration.ofSeconds(2))
.expireIn(10 * 1000);
}
public static Uni<String> getClientCredAccessToken(WebClient client,
String tokenUrl,
String clientId,
String clientSecret,
Map<String, String> clientCredGrantOptions) {
HttpRequest<Buffer> request = client.postAbs(tokenUrl);
request.putHeader(HttpHeaders.CONTENT_TYPE.toString(), HttpHeaders.APPLICATION_X_WWW_FORM_URLENCODED.toString());
io.vertx.mutiny.core.MultiMap props = new io.vertx.mutiny.core.MultiMap(MultiMap.caseInsensitiveMultiMap());
props.add("client_id", clientId);
if (clientSecret != null) {
props.add("client_secret", clientSecret);
}
props.add("grant_type", "client_credentials");
if (clientCredGrantOptions != null) {
props.addAll(clientCredGrantOptions);
}
return request.sendBuffer(OidcCommonUtils.encodeForm(props)).onItem()
.transform(resp -> getAccessTokenFromJson(resp));
}
public static Uni<String> getTokens(String tokenUrl, String clientId, String clientSecret,
String authorizationCode, String redirectUri,
Vertx vertxInstance, Map<String, String> grantOptions) {
WebClient client = createWebClient(vertxInstance);
LOG.infof("Using authorization_code grant to get a token from '%s' with client id '%s'",
tokenUrl, clientId);
HttpRequest<Buffer> request = client.postAbs(tokenUrl);
request.putHeader(HttpHeaders.CONTENT_TYPE.toString(), HttpHeaders.APPLICATION_X_WWW_FORM_URLENCODED.toString());
request.putHeader(HttpHeaders.ACCEPT.toString(), APPLICATION_JSON);
io.vertx.mutiny.core.MultiMap props = new io.vertx.mutiny.core.MultiMap(MultiMap.caseInsensitiveMultiMap());
props.add("client_id", clientId);
if (clientSecret != null && !clientSecret.isBlank()) {
props.add("client_secret", clientSecret);
}
props.add("grant_type", "authorization_code");
props.add("code", authorizationCode);
props.add("redirect_uri", redirectUri);
if (grantOptions != null) {
props.addAll(grantOptions);
}
return request
.sendBuffer(OidcCommonUtils.encodeForm(props))
.map(OidcDevServicesUtils::getBodyAsString)
.onFailure().invoke(t -> LOG.errorf("Token can not be acquired from OpenId Connect provider: %s", t.toString()))
.eventually(client::close);
}
public static Uni<Integer> testServiceWithToken(String serviceUrl, String token, Vertx vertxInstance) {
LOG.infof("Test token: %s", token);
LOG.infof("Sending token to '%s'", serviceUrl);
WebClient client = createWebClient(vertxInstance);
return client.getAbs(serviceUrl)
.putHeader(HttpHeaders.AUTHORIZATION.toString(), "Bearer " + token)
.send()
.map(HttpResponse::statusCode)
.invoke(statusCode -> LOG.infof("Result: %d", statusCode))
.onFailure().invoke(t -> LOG.errorf("Token can not be sent to the service: %s", t.toString()))
.eventually(client::close);
}
public static Uni<String> testServiceWithClientCred(String tokenUrl, String serviceUrl, String clientId,
String clientSecret, Vertx vertxInstance, Duration timeout,
Map<String, String> clientCredGrantOptions) {
WebClient client = OidcDevServicesUtils.createWebClient(vertxInstance);
LOG.infof("Using a client_credentials grant to get a token token from '%s' with client id '%s'",
tokenUrl, clientId);
Uni<String> token = OidcDevServicesUtils.getClientCredAccessToken(client, tokenUrl, clientId, clientSecret,
clientCredGrantOptions)
.ifNoItem().after(timeout).fail()
.invoke(t -> LOG.infof("Test token: %s", t))
.onFailure()
.invoke(t -> LOG.errorf("Token can not be acquired from OpenId Connect provider: %s", t.toString()));
// no service url -> only token is required
if (serviceUrl != null) {
token = testServiceInternal(client, serviceUrl, token);
}
return token.eventually(client::close);
}
public static Uni<String> testServiceWithPassword(String tokenUrl, String serviceUrl, String clientId,
String clientSecret, String username, String password,
Vertx vertxInstance, Duration timeout,
Map<String, String> passwordGrantOptions,
Map<String, String> usernameToPassword) {
WebClient client = OidcDevServicesUtils.createWebClient(vertxInstance);
LOG.infof("Using a password grant to get a token from '%s' for user '%s' with client id '%s'",
tokenUrl, username, clientId);
// user-defined password has preference over known passwords
if (password == null || password.isBlank()) {
password = usernameToPassword.get("password");
if (password == null) {
return Uni.createFrom().failure(
new IllegalArgumentException("Can't request access token as password is missing"));
}
}
Uni<String> token = OidcDevServicesUtils.getPasswordAccessToken(client, tokenUrl,
clientId, clientSecret, username, password, passwordGrantOptions)
.ifNoItem().after(timeout).fail()
.invoke(t -> LOG.infof("Test token: %s", t))
.onFailure()
.invoke(t -> LOG.errorf("Token can not be acquired from OpenId Connect provider: %s", t.toString()));
// no service url -> only token is required
if (serviceUrl != null) {
token = testServiceInternal(client, serviceUrl, token);
}
return token.eventually(client::close);
}
private static Uni<String> testServiceInternal(WebClient client, String serviceUrl, Uni<String> tokenUni) {
return tokenUni
.flatMap(token -> {
LOG.infof("Sending token to '%s'", serviceUrl);
return client
.getAbs(serviceUrl)
.putHeader(HttpHeaders.AUTHORIZATION.toString(), "Bearer " + token)
.send()
.map(HttpResponse::statusCode)
.map(Object::toString)
.invoke(statusCode -> LOG.infof("Result: %s", statusCode))
.onFailure().invoke(t2 -> LOG.errorf("Token can not be sent to the service: %s",
t2.toString()));
});
}
private static String getBodyAsString(HttpResponse<Buffer> resp) {
if (resp.statusCode() == 200) {
return resp.bodyAsString();
} else {
String errorMessage = resp.bodyAsString();
throw new RuntimeException(errorMessage);
}
}
private static String getAccessTokenFromJson(HttpResponse<Buffer> resp) {
if (resp.statusCode() == 200) {
JsonObject json = resp.bodyAsJsonObject();
return json.getString("access_token");
} else {
String errorMessage = resp.bodyAsString();
throw new RuntimeException(errorMessage);
}
}
}
| OidcDevServicesUtils |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/StaticNestedBeforeAllAndAfterAllMethodsTests.java | {
"start": 963,
"end": 1662
} | class ____ extends AbstractJupiterTestEngineTests {
private static final List<String> methodsInvoked = new ArrayList<>();
@DisplayName("static @BeforeAll and @AfterAll methods in @Nested test class")
@Test
void staticBeforeAllAndAfterAllMethodsInNestedTestClass() {
executeTestsForClass(TestCase.class).testEvents().assertStatistics(stats -> stats.started(2).succeeded(2));
assertThat(methodsInvoked).containsExactly(//
"@BeforeAll: top-level", //
"@Test: top-level", //
"@BeforeAll: nested", //
"@Test: nested", //
"@AfterAll: nested", //
"@AfterAll: top-level"//
);
}
@SuppressWarnings("JUnitMalformedDeclaration")
static | StaticNestedBeforeAllAndAfterAllMethodsTests |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FtpsEndpointBuilderFactory.java | {
"start": 208664,
"end": 232099
} | interface ____
extends
FtpsEndpointConsumerBuilder,
FtpsEndpointProducerBuilder {
default AdvancedFtpsEndpointBuilder advanced() {
return (AdvancedFtpsEndpointBuilder) this;
}
/**
* Specifies the file transfer mode, BINARY or ASCII. Default is ASCII
* (false).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param binary the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder binary(boolean binary) {
doSetProperty("binary", binary);
return this;
}
/**
* Specifies the file transfer mode, BINARY or ASCII. Default is ASCII
* (false).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param binary the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder binary(String binary) {
doSetProperty("binary", binary);
return this;
}
/**
* This option is used to specify the encoding of the file. You can use
* this on the consumer, to specify the encodings of the files, which
* allow Camel to know the charset it should load the file content in
* case the file content is being accessed. Likewise when writing a
* file, you can use this option to specify which charset to write the
* file as well. Do mind that when writing the file Camel may have to
* read the message content into memory to be able to convert the data
* into the configured charset, so do not use this if you have big
* messages.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param charset the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder charset(String charset) {
doSetProperty("charset", charset);
return this;
}
/**
* Whether or not to disconnect from remote FTP server right after use.
* Disconnect will only disconnect the current connection to the FTP
* server. If you have a consumer which you want to stop, then you need
* to stop the consumer/route instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disconnect the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder disconnect(boolean disconnect) {
doSetProperty("disconnect", disconnect);
return this;
}
/**
* Whether or not to disconnect from remote FTP server right after use.
* Disconnect will only disconnect the current connection to the FTP
* server. If you have a consumer which you want to stop, then you need
* to stop the consumer/route instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disconnect the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder disconnect(String disconnect) {
doSetProperty("disconnect", disconnect);
return this;
}
/**
* Producer: If provided, then Camel will write a 2nd done file when the
* original file has been written. The done file will be empty. This
* option configures what file name to use. Either you can specify a
* fixed name. Or you can use dynamic placeholders. The done file will
* always be written in the same folder as the original file. Consumer:
* If provided, Camel will only consume files if a done file exists.
* This option configures what file name to use. Either you can specify
* a fixed name. Or you can use dynamic placeholders.The done file is
* always expected in the same folder as the original file. Only
* ${file.name} and ${file.name.next} is supported as dynamic
* placeholders.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param doneFileName the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder doneFileName(String doneFileName) {
doSetProperty("doneFileName", doneFileName);
return this;
}
/**
* Use Expression such as File Language to dynamically set the filename.
* For consumers, it's used as a filename filter. For producers, it's
* used to evaluate the filename to write. If an expression is set, it
* take precedence over the CamelFileName header. (Note: The header
* itself can also be an Expression). The expression options support
* both String and Expression types. If the expression is a String type,
* it is always evaluated using the File Language. If the expression is
* an Expression type, the specified Expression type is used - this
* allows you, for instance, to use OGNL expressions. For the consumer,
* you can use it to filter filenames, so you can for instance consume
* today's file using the File Language syntax:
* mydata-${date:now:yyyyMMdd}.txt. The producers support the
* CamelOverruleFileName header which takes precedence over any existing
* CamelFileName header; the CamelOverruleFileName is a header that is
* used only once, and makes it easier as this avoids to temporary store
* CamelFileName and have to restore it afterwards.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param fileName the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder fileName(String fileName) {
doSetProperty("fileName", fileName);
return this;
}
/**
* Sets passive mode connections. Default is active mode connections.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param passiveMode the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder passiveMode(boolean passiveMode) {
doSetProperty("passiveMode", passiveMode);
return this;
}
/**
* Sets passive mode connections. Default is active mode connections.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param passiveMode the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder passiveMode(String passiveMode) {
doSetProperty("passiveMode", passiveMode);
return this;
}
/**
* Sets the path separator to be used. UNIX = Uses unix style path
* separator Windows = Uses windows style path separator Auto = (is
* default) Use existing path separator in file name.
*
* The option is a:
* <code>org.apache.camel.component.file.remote.RemoteFileConfiguration.PathSeparator</code> type.
*
* Default: UNIX
* Group: common
*
* @param separator the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder separator(org.apache.camel.component.file.remote.RemoteFileConfiguration.PathSeparator separator) {
doSetProperty("separator", separator);
return this;
}
/**
* Sets the path separator to be used. UNIX = Uses unix style path
* separator Windows = Uses windows style path separator Auto = (is
* default) Use existing path separator in file name.
*
* The option will be converted to a
* <code>org.apache.camel.component.file.remote.RemoteFileConfiguration.PathSeparator</code> type.
*
* Default: UNIX
* Group: common
*
* @param separator the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder separator(String separator) {
doSetProperty("separator", separator);
return this;
}
/**
* Configures the interval in seconds to use when logging the progress
* of upload and download operations that are in-flight. This is used
* for logging progress when operations take a longer time.
*
* The option is a: <code>int</code> type.
*
* Default: 5
* Group: common
*
* @param transferLoggingIntervalSeconds the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder transferLoggingIntervalSeconds(int transferLoggingIntervalSeconds) {
doSetProperty("transferLoggingIntervalSeconds", transferLoggingIntervalSeconds);
return this;
}
/**
* Configures the interval in seconds to use when logging the progress
* of upload and download operations that are in-flight. This is used
* for logging progress when operations take a longer time.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 5
* Group: common
*
* @param transferLoggingIntervalSeconds the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder transferLoggingIntervalSeconds(String transferLoggingIntervalSeconds) {
doSetProperty("transferLoggingIntervalSeconds", transferLoggingIntervalSeconds);
return this;
}
/**
* Configure the logging level to use when logging the progress of
* upload and download operations.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: DEBUG
* Group: common
*
* @param transferLoggingLevel the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder transferLoggingLevel(org.apache.camel.LoggingLevel transferLoggingLevel) {
doSetProperty("transferLoggingLevel", transferLoggingLevel);
return this;
}
/**
* Configure the logging level to use when logging the progress of
* upload and download operations.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: DEBUG
* Group: common
*
* @param transferLoggingLevel the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder transferLoggingLevel(String transferLoggingLevel) {
doSetProperty("transferLoggingLevel", transferLoggingLevel);
return this;
}
/**
* Configures whether perform verbose (fine-grained) logging of the
* progress of upload and download operations.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param transferLoggingVerbose the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder transferLoggingVerbose(boolean transferLoggingVerbose) {
doSetProperty("transferLoggingVerbose", transferLoggingVerbose);
return this;
}
/**
* Configures whether perform verbose (fine-grained) logging of the
* progress of upload and download operations.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param transferLoggingVerbose the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder transferLoggingVerbose(String transferLoggingVerbose) {
doSetProperty("transferLoggingVerbose", transferLoggingVerbose);
return this;
}
/**
* Account to use for login.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param account the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder account(String account) {
doSetProperty("account", account);
return this;
}
/**
* Use this option to disable default options when using secure data
* channel. This allows you to be in full control what the execPbsz and
* execProt setting should be used. Default is false.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param disableSecureDataChannelDefaults the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder disableSecureDataChannelDefaults(boolean disableSecureDataChannelDefaults) {
doSetProperty("disableSecureDataChannelDefaults", disableSecureDataChannelDefaults);
return this;
}
/**
* Use this option to disable default options when using secure data
* channel. This allows you to be in full control what the execPbsz and
* execProt setting should be used. Default is false.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param disableSecureDataChannelDefaults the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder disableSecureDataChannelDefaults(String disableSecureDataChannelDefaults) {
doSetProperty("disableSecureDataChannelDefaults", disableSecureDataChannelDefaults);
return this;
}
/**
* When using secure data channel you can set the exec protection buffer
* size.
*
* The option is a: <code>java.lang.Long</code> type.
*
* Group: security
*
* @param execPbsz the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder execPbsz(Long execPbsz) {
doSetProperty("execPbsz", execPbsz);
return this;
}
/**
* When using secure data channel you can set the exec protection buffer
* size.
*
* The option will be converted to a <code>java.lang.Long</code> type.
*
* Group: security
*
* @param execPbsz the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder execPbsz(String execPbsz) {
doSetProperty("execPbsz", execPbsz);
return this;
}
/**
* The exec protection level PROT command. C - Clear S - Safe(SSL
* protocol only) E - Confidential(SSL protocol only) P - Private.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param execProt the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder execProt(String execProt) {
doSetProperty("execProt", execProt);
return this;
}
/**
* Set the key store parameters. This is a multi-value option with
* prefix: ftpClient.keyStore.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* ftpClientKeyStoreParameters(String, Object) method to add a value
* (call the method multiple times to set more values).
*
* Group: security
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default FtpsEndpointBuilder ftpClientKeyStoreParameters(String key, Object value) {
doSetMultiValueProperty("ftpClientKeyStoreParameters", "ftpClient.keyStore." + key, value);
return this;
}
/**
* Set the key store parameters. This is a multi-value option with
* prefix: ftpClient.keyStore.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* ftpClientKeyStoreParameters(String, Object) method to add a value
* (call the method multiple times to set more values).
*
* Group: security
*
* @param values the values
* @return the dsl builder
*/
default FtpsEndpointBuilder ftpClientKeyStoreParameters(Map values) {
doSetMultiValueProperties("ftpClientKeyStoreParameters", "ftpClient.keyStore.", values);
return this;
}
/**
* Set the trust store parameters. This is a multi-value option with
* prefix: ftpClient.trustStore.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* ftpClientTrustStoreParameters(String, Object) method to add a value
* (call the method multiple times to set more values).
*
* Group: security
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default FtpsEndpointBuilder ftpClientTrustStoreParameters(String key, Object value) {
doSetMultiValueProperty("ftpClientTrustStoreParameters", "ftpClient.trustStore." + key, value);
return this;
}
/**
* Set the trust store parameters. This is a multi-value option with
* prefix: ftpClient.trustStore.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* ftpClientTrustStoreParameters(String, Object) method to add a value
* (call the method multiple times to set more values).
*
* Group: security
*
* @param values the values
* @return the dsl builder
*/
default FtpsEndpointBuilder ftpClientTrustStoreParameters(Map values) {
doSetMultiValueProperties("ftpClientTrustStoreParameters", "ftpClient.trustStore.", values);
return this;
}
/**
* Set the security mode (Implicit/Explicit). true - Implicit Mode /
* False - Explicit Mode.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param implicit the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder implicit(boolean implicit) {
doSetProperty("implicit", implicit);
return this;
}
/**
* Set the security mode (Implicit/Explicit). true - Implicit Mode /
* False - Explicit Mode.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param implicit the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder implicit(String implicit) {
doSetProperty("implicit", implicit);
return this;
}
/**
* Password to use for login.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Set the underlying security protocol.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: TLSv1.3
* Group: security
*
* @param securityProtocol the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder securityProtocol(String securityProtocol) {
doSetProperty("securityProtocol", securityProtocol);
return this;
}
/**
* Gets the JSSE configuration that overrides any settings in
* FtpsEndpoint#ftpClientKeyStoreParameters,
* ftpClientTrustStoreParameters, and
* FtpsConfiguration#getSecurityProtocol().
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder sslContextParameters(org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Gets the JSSE configuration that overrides any settings in
* FtpsEndpoint#ftpClientKeyStoreParameters,
* ftpClientTrustStoreParameters, and
* FtpsConfiguration#getSecurityProtocol().
*
* The option will be converted to a
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder sslContextParameters(String sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Username to use for login.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default FtpsEndpointBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint for the FTPS component.
*/
public | FtpsEndpointBuilder |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongFloatTupleBlockSourceOperator.java | {
"start": 762,
"end": 2653
} | class ____ extends AbstractBlockSourceOperator {
private static final int DEFAULT_MAX_PAGE_POSITIONS = 8 * 1024;
private final List<Tuple<Long, Float>> values;
public LongFloatTupleBlockSourceOperator(BlockFactory blockFactory, Stream<Tuple<Long, Float>> values) {
this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS);
}
public LongFloatTupleBlockSourceOperator(BlockFactory blockFactory, Stream<Tuple<Long, Float>> values, int maxPagePositions) {
super(blockFactory, maxPagePositions);
this.values = values.toList();
}
public LongFloatTupleBlockSourceOperator(BlockFactory blockFactory, List<Tuple<Long, Float>> values) {
this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS);
}
public LongFloatTupleBlockSourceOperator(BlockFactory blockFactory, List<Tuple<Long, Float>> values, int maxPagePositions) {
super(blockFactory, maxPagePositions);
this.values = values;
}
@Override
protected Page createPage(int positionOffset, int length) {
var blockBuilder1 = blockFactory.newLongBlockBuilder(length);
var blockBuilder2 = blockFactory.newFloatBlockBuilder(length);
for (int i = 0; i < length; i++) {
Tuple<Long, Float> item = values.get(positionOffset + i);
if (item.v1() == null) {
blockBuilder1.appendNull();
} else {
blockBuilder1.appendLong(item.v1());
}
if (item.v2() == null) {
blockBuilder2.appendNull();
} else {
blockBuilder2.appendFloat(item.v2());
}
}
currentPosition += length;
return new Page(blockBuilder1.build(), blockBuilder2.build());
}
@Override
protected int remaining() {
return values.size() - currentPosition;
}
}
| LongFloatTupleBlockSourceOperator |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/aot/TestContextGenerationContext.java | {
"start": 1542,
"end": 3071
} | class ____
* @param generatedFiles the generated files
* @param runtimeHints the runtime hints
*/
TestContextGenerationContext(
ClassNameGenerator classNameGenerator, GeneratedFiles generatedFiles, RuntimeHints runtimeHints) {
super(classNameGenerator, generatedFiles, runtimeHints);
this.featureName = null;
}
/**
* Create a new {@link TestContextGenerationContext} instance based on the
* supplied {@code existing} context and feature name.
* @param existing the existing context upon which to base the new one
* @param featureName the feature name to use
*/
private TestContextGenerationContext(TestContextGenerationContext existing, String featureName) {
super(existing, featureName);
this.featureName = featureName;
}
/**
* Create a new {@link TestContextGenerationContext} instance using the specified
* feature name to qualify generated assets for a dedicated round of code generation.
* <p>If <em>this</em> {@code TestContextGenerationContext} has a configured feature
* name, the existing feature name will be prepended to the supplied feature name in
* order to avoid naming collisions.
* @param featureName the feature name to use
* @return a specialized {@link TestContextGenerationContext} for the specified
* feature name
*/
@Override
public TestContextGenerationContext withName(String featureName) {
if (this.featureName != null) {
featureName = this.featureName + featureName;
}
return new TestContextGenerationContext(this, featureName);
}
}
| names |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SftpComponentBuilderFactory.java | {
"start": 1378,
"end": 1808
} | interface ____ {
/**
* SFTP (camel-ftp)
* Upload and download files to/from SFTP servers.
*
* Category: file
* Since: 1.1
* Maven coordinates: org.apache.camel:camel-ftp
*
* @return the dsl builder
*/
static SftpComponentBuilder sftp() {
return new SftpComponentBuilderImpl();
}
/**
* Builder for the SFTP component.
*/
| SftpComponentBuilderFactory |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/array/BooleanPrimitiveArrayComparator.java | {
"start": 1093,
"end": 2181
} | class ____
extends PrimitiveArrayComparator<boolean[], BooleanComparator> {
public BooleanPrimitiveArrayComparator(boolean ascending) {
super(ascending, new BooleanComparator(ascending));
}
@Override
public int hash(boolean[] record) {
int result = 0;
for (boolean field : record) {
result += field ? 1231 : 1237;
}
return result;
}
@Override
public int compare(boolean[] first, boolean[] second) {
for (int x = 0; x < min(first.length, second.length); x++) {
int cmp = (second[x] == first[x] ? 0 : (first[x] ? 1 : -1));
if (cmp != 0) {
return ascending ? cmp : -cmp;
}
}
int cmp = first.length - second.length;
return ascending ? cmp : -cmp;
}
@Override
public TypeComparator<boolean[]> duplicate() {
BooleanPrimitiveArrayComparator dupe = new BooleanPrimitiveArrayComparator(this.ascending);
dupe.setReference(this.reference);
return dupe;
}
}
| BooleanPrimitiveArrayComparator |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 13156,
"end": 13307
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.supportsWait();
}
}
public static | SupportsWait |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/InMemoryWindowStore.java | {
"start": 24781,
"end": 26695
} | class ____
extends InMemoryWindowStoreIteratorWrapper
implements KeyValueIterator<Windowed<Bytes>, byte[]> {
private final long windowSize;
WrappedWindowedKeyValueIterator(final Bytes keyFrom,
final Bytes keyTo,
final Iterator<Map.Entry<Long, ConcurrentNavigableMap<Bytes, byte[]>>> segmentIterator,
final ClosingCallback callback,
final boolean retainDuplicates,
final long windowSize,
final boolean forward) {
super(keyFrom, keyTo, segmentIterator, callback, retainDuplicates, forward);
this.windowSize = windowSize;
}
public Windowed<Bytes> peekNextKey() {
if (!hasNext()) {
throw new NoSuchElementException();
}
return getWindowedKey();
}
public KeyValue<Windowed<Bytes>, byte[]> next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
final KeyValue<Windowed<Bytes>, byte[]> result = new KeyValue<>(getWindowedKey(), super.next.value);
super.next = null;
return result;
}
private Windowed<Bytes> getWindowedKey() {
final Bytes key = super.retainDuplicates ? getKey(super.next.key) : super.next.key;
long endTime = super.currentTime + windowSize;
if (endTime < 0) {
LOG.warn("Warning: window end time was truncated to Long.MAX");
endTime = Long.MAX_VALUE;
}
final TimeWindow timeWindow = new TimeWindow(super.currentTime, endTime);
return new Windowed<>(key, timeWindow);
}
}
}
| WrappedWindowedKeyValueIterator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/TruthIncompatibleTypeTest.java | {
"start": 24968,
"end": 25206
} | class ____ extends TestParameterValuesProvider {
@Override
public ImmutableList<Method> provideValues(Context context) {
return getAssertionMethods(IterableSubject.class);
}
}
private static final | IterableSubjectMethods |
java | playframework__playframework | dev-mode/play-build-link/src/main/java/play/core/BuildLink.java | {
"start": 1977,
"end": 2618
} | class ____ find the source for.
* @param line The line number the exception was thrown at.
* @return Either:
* <ul>
* <li>[File, Integer] - The source file, and the passed in line number, if the source
* wasn't generated, or if it was generated, and the line number could be mapped, then
* the original source file and the mapped line number.
* <li>[File, null] - If the source was generated but the line number couldn't be mapped,
* then just the original source file and null for the unmappable line number.
* <li>null - If no source file could be found for the | to |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/shareddata/LocalMap.java | {
"start": 1252,
"end": 1434
} | class ____ the {@link Map} interface. However some methods are only accessible in Java.
*
* @author <a href="http://tfox.org">Tim Fox</a>
* @see Shareable
*/
@VertxGen
public | extends |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/ha/autoswitch/BrokerMetadata.java | {
"start": 933,
"end": 3256
} | class ____ extends MetadataFile {
protected String clusterName;
protected String brokerName;
protected Long brokerId;
public BrokerMetadata(String filePath) {
this.filePath = filePath;
}
public void updateAndPersist(String clusterName, String brokerName, Long brokerId) throws Exception {
this.clusterName = clusterName;
this.brokerName = brokerName;
this.brokerId = brokerId;
writeToFile();
}
@Override
public String encodeToStr() {
StringBuilder sb = new StringBuilder();
sb.append(clusterName).append("#");
sb.append(brokerName).append("#");
sb.append(brokerId);
return sb.toString();
}
@Override
public void decodeFromStr(String dataStr) {
if (dataStr == null) return;
String[] dataArr = dataStr.split("#");
this.clusterName = dataArr[0];
this.brokerName = dataArr[1];
this.brokerId = Long.valueOf(dataArr[2]);
}
@Override
public boolean isLoaded() {
return StringUtils.isNotEmpty(this.clusterName) && StringUtils.isNotEmpty(this.brokerName) && brokerId != null;
}
@Override
public void clearInMem() {
this.clusterName = null;
this.brokerName = null;
this.brokerId = null;
}
public String getBrokerName() {
return brokerName;
}
public Long getBrokerId() {
return brokerId;
}
public String getClusterName() {
return clusterName;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BrokerMetadata that = (BrokerMetadata) o;
return Objects.equals(clusterName, that.clusterName) && Objects.equals(brokerName, that.brokerName) && Objects.equals(brokerId, that.brokerId);
}
@Override
public int hashCode() {
return Objects.hash(clusterName, brokerName, brokerId);
}
@Override
public String toString() {
return "BrokerMetadata{" +
"clusterName='" + clusterName + '\'' +
", brokerName='" + brokerName + '\'' +
", brokerId=" + brokerId +
", filePath='" + filePath + '\'' +
'}';
}
}
| BrokerMetadata |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/BindingResult.java | {
"start": 1543,
"end": 6108
} | interface ____ extends Errors {
/**
* Prefix for the name of the BindingResult instance in a model,
* followed by the object name.
*/
String MODEL_KEY_PREFIX = BindingResult.class.getName() + ".";
/**
* Return the wrapped target object, which may be a bean, an object with
* public fields, a Map - depending on the concrete binding strategy.
*/
@Nullable Object getTarget();
/**
* Return a model Map for the obtained state, exposing a BindingResult
* instance as '{@link #MODEL_KEY_PREFIX MODEL_KEY_PREFIX} + objectName'
* and the object itself as 'objectName'.
* <p>Note that the Map is constructed every time you're calling this method.
* Adding things to the map and then re-calling this method will not work.
* <p>The attributes in the model Map returned by this method are usually
* included in the {@link org.springframework.web.servlet.ModelAndView}
* for a form view that uses Spring's {@code bind} tag in a JSP,
* which needs access to the BindingResult instance. Spring's pre-built
* form controllers will do this for you when rendering a form view.
* When building the ModelAndView instance yourself, you need to include
* the attributes from the model Map returned by this method.
* @see #getObjectName()
* @see #MODEL_KEY_PREFIX
* @see org.springframework.web.servlet.ModelAndView
* @see org.springframework.web.servlet.tags.BindTag
*/
Map<String, Object> getModel();
/**
* Extract the raw field value for the given field.
* Typically used for comparison purposes.
* @param field the field to check
* @return the current value of the field in its raw form, or {@code null} if not known
*/
@Nullable Object getRawFieldValue(String field);
/**
* Find a custom property editor for the given type and property.
* @param field the path of the property (name or nested path), or
* {@code null} if looking for an editor for all properties of the given type
* @param valueType the type of the property (can be {@code null} if a property
* is given but should be specified in any case for consistency checking)
* @return the registered editor, or {@code null} if none
*/
@Nullable PropertyEditor findEditor(@Nullable String field, @Nullable Class<?> valueType);
/**
* Return the underlying PropertyEditorRegistry.
* @return the PropertyEditorRegistry, or {@code null} if none
* available for this BindingResult
*/
@Nullable PropertyEditorRegistry getPropertyEditorRegistry();
/**
* Resolve the given error code into message codes.
* <p>Calls the configured {@link MessageCodesResolver} with appropriate parameters.
* @param errorCode the error code to resolve into message codes
* @return the resolved message codes
*/
String[] resolveMessageCodes(String errorCode);
/**
* Resolve the given error code into message codes for the given field.
* <p>Calls the configured {@link MessageCodesResolver} with appropriate parameters.
* @param errorCode the error code to resolve into message codes
* @param field the field to resolve message codes for
* @return the resolved message codes
*/
String[] resolveMessageCodes(String errorCode, String field);
/**
* Add a custom {@link ObjectError} or {@link FieldError} to the errors list.
* <p>Intended to be used by cooperating strategies such as {@link BindingErrorProcessor}.
* @see ObjectError
* @see FieldError
* @see BindingErrorProcessor
*/
void addError(ObjectError error);
/**
* Record the given value for the specified field.
* <p>To be used when a target object cannot be constructed, making
* the original field values available through {@link #getFieldValue}.
* In case of a registered error, the rejected value will be exposed
* for each affected field.
* @param field the field to record the value for
* @param type the type of the field
* @param value the original value
* @since 5.0.4
*/
default void recordFieldValue(String field, Class<?> type, @Nullable Object value) {
}
/**
* Mark the specified disallowed field as suppressed.
* <p>The data binder invokes this for each field value that was
* detected to target a disallowed field.
* @see DataBinder#setAllowedFields
*/
default void recordSuppressedField(String field) {
}
/**
* Return the list of fields that were suppressed during the bind process.
* <p>Can be used to determine whether any field values were targeting
* disallowed fields.
* @see DataBinder#setAllowedFields
*/
default String[] getSuppressedFields() {
return new String[0];
}
}
| BindingResult |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/origin/TextResourceOrigin.java | {
"start": 3822,
"end": 5058
} | class ____ {
private final int line;
private final int column;
/**
* Create a new {@link Location} instance.
* @param line the line number (zero indexed)
* @param column the column number (zero indexed)
*/
public Location(int line, int column) {
this.line = line;
this.column = column;
}
/**
* Return the line of the text resource where the property originated.
* @return the line number (zero indexed)
*/
public int getLine() {
return this.line;
}
/**
* Return the column of the text resource where the property originated.
* @return the column number (zero indexed)
*/
public int getColumn() {
return this.column;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Location other = (Location) obj;
boolean result = true;
result = result && this.line == other.line;
result = result && this.column == other.column;
return result;
}
@Override
public int hashCode() {
return (31 * this.line) + this.column;
}
@Override
public String toString() {
return (this.line + 1) + ":" + (this.column + 1);
}
}
}
| Location |
java | junit-team__junit5 | junit-vintage-engine/src/test/java/org/junit/vintage/engine/VintageTestEngineExecutionTests.java | {
"start": 39429,
"end": 43638
} | class ____ reflection to avoid compilation errors in Eclipse IDE.
String testClassName = "org.junit.vintage.engine.samples.spock.SpockTestCaseWithUnrolledAndRegularFeatureMethods";
Class<?> testClass = ReflectionUtils.loadRequiredClass(testClassName, getClass().getClassLoader());
var request = LauncherDiscoveryRequestBuilder.request() //
.selectors(selectMethod(testClass, "regular")) //
.enableImplicitConfigurationParameters(false) //
.build();
execute(request).allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(testClass), started()), //
event(test("regular"), started()), //
event(test("regular"), finishedSuccessfully()), //
event(container(testClass), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void executesIgnoredJUnit3TestCase() {
var suiteClass = IgnoredJUnit3TestCase.class;
execute(suiteClass).allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(suiteClass), skippedWithReason(isEqual("testing"))), //
event(engine(), finishedSuccessfully()));
}
@Test
void executesJUnit4SuiteWithIgnoredJUnit3TestCase() {
var suiteClass = JUnit4SuiteWithIgnoredJUnit3TestCase.class;
var testClass = IgnoredJUnit3TestCase.class;
execute(suiteClass).allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(suiteClass), started()), //
event(container(testClass), skippedWithReason(isEqual("testing"))), //
event(container(suiteClass), finishedSuccessfully()), //
event(engine(), finishedSuccessfully()));
}
@Test
void supportsCancellation() {
CancellingTestCase.cancellationToken = CancellationToken.create();
try {
var results = vintageTestEngine() //
.selectors(selectClass(CancellingTestCase.class),
selectClass(PlainJUnit4TestCaseWithSingleTestWhichFails.class)) //
.cancellationToken(CancellingTestCase.cancellationToken) //
.execute();
results.allEvents().assertEventsMatchExactly( //
event(engine(), started()), //
event(container(CancellingTestCase.class), started()), //
event(test(), started()), //
event(test(), finishedWithFailure()), //
event(test(), skippedWithReason("Execution cancelled")), //
event(container(CancellingTestCase.class), abortedWithReason(instanceOf(StoppedByUserException.class))), //
event(container(PlainJUnit4TestCaseWithSingleTestWhichFails.class),
skippedWithReason("Execution cancelled")), //
event(engine(), finishedSuccessfully()));
}
finally {
CancellingTestCase.cancellationToken = null;
}
}
private static EngineExecutionResults execute(Class<?> testClass) {
return execute(request(testClass));
}
@SuppressWarnings("deprecation")
private static EngineExecutionResults execute(LauncherDiscoveryRequest request) {
return EngineTestKit.execute(new VintageTestEngine(), request);
}
@SuppressWarnings("deprecation")
private static EngineTestKit.Builder vintageTestEngine() {
return EngineTestKit.engine(new VintageTestEngine()) //
.enableImplicitConfigurationParameters(false);
}
@SuppressWarnings("deprecation")
private static void execute(Class<?> testClass, EngineExecutionListener listener) {
var testEngine = new VintageTestEngine();
var engineTestDescriptor = testEngine.discover(request(testClass), UniqueId.forEngine(testEngine.getId()));
ExecutionRequest executionRequest = mock();
when(executionRequest.getRootTestDescriptor()).thenReturn(engineTestDescriptor);
when(executionRequest.getEngineExecutionListener()).thenReturn(listener);
when(executionRequest.getConfigurationParameters()).thenReturn(mock());
when(executionRequest.getCancellationToken()).thenReturn(CancellationToken.disabled());
testEngine.execute(executionRequest);
}
private static LauncherDiscoveryRequest request(Class<?> testClass) {
return LauncherDiscoveryRequestBuilder.request() //
.selectors(selectClass(testClass)) //
.enableImplicitConfigurationParameters(false) //
.build();
}
private static boolean atLeastJUnit4_13() {
return JUnit4VersionCheck.parseVersion(Version.id()).compareTo(new BigDecimal("4.13")) >= 0;
}
}
| via |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/serialization/entity/AnEntity.java | {
"start": 159,
"end": 356
} | class ____ be in a package that is different from the test
* so that the test does not have access to private field,
* and the protected getter and setter.
*
* @author Gail Badner
*/
public | should |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/batch/RankBatchRestoreTest.java | {
"start": 1213,
"end": 2065
} | class ____ extends BatchRestoreTestBase {
public RankBatchRestoreTest() {
super(BatchExecRank.class);
}
@Override
public List<TableTestProgram> programs() {
return Arrays.asList(
RankTestPrograms.RANK_TEST_APPEND_FAST_STRATEGY,
// org.apache.flink.table.api.TableException: Querying a table in batch mode is
// currently only possible for INSERT-only table sources. But the source for table
// 'default_catalog.default_database.MyTable' produces other changelog messages than
// just INSERT.
// RankTestPrograms.RANK_TEST_RETRACT_STRATEGY,
RankTestPrograms.RANK_TEST_UPDATE_FAST_STRATEGY,
RankTestPrograms.RANK_N_TEST,
RankTestPrograms.RANK_2_TEST);
}
}
| RankBatchRestoreTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OAIPMHEndpointBuilderFactory.java | {
"start": 46668,
"end": 48728
} | interface ____ {
/**
* OAI-PMH (camel-oaipmh)
* Harvest metadata using OAI-PMH protocol
*
* Category: search
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-oaipmh
*
* @return the dsl builder for the headers' name.
*/
default OAIPMHHeaderNameBuilder oaipmh() {
return OAIPMHHeaderNameBuilder.INSTANCE;
}
/**
* OAI-PMH (camel-oaipmh)
* Harvest metadata using OAI-PMH protocol
*
* Category: search
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-oaipmh
*
* Syntax: <code>oaipmh:baseUrl</code>
*
* Path parameter: baseUrl (required)
* Base URL of the repository to which the request is made through the
* OAI-PMH protocol
*
* @param path baseUrl
* @return the dsl builder
*/
default OAIPMHEndpointBuilder oaipmh(String path) {
return OAIPMHEndpointBuilderFactory.endpointBuilder("oaipmh", path);
}
/**
* OAI-PMH (camel-oaipmh)
* Harvest metadata using OAI-PMH protocol
*
* Category: search
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-oaipmh
*
* Syntax: <code>oaipmh:baseUrl</code>
*
* Path parameter: baseUrl (required)
* Base URL of the repository to which the request is made through the
* OAI-PMH protocol
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path baseUrl
* @return the dsl builder
*/
default OAIPMHEndpointBuilder oaipmh(String componentName, String path) {
return OAIPMHEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the OAI-PMH component.
*/
public static | OAIPMHBuilders |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/extension/ExtensionLoader.java | {
"start": 40740,
"end": 47626
} | class ____ found,skip try to load resources
ClassUtils.forName(oldType);
loadDirectoryInternal(extensionClasses, strategy, oldType);
} catch (ClassNotFoundException classNotFoundException) {
}
}
}
/**
* extract and cache default extension name if exists
*/
private void cacheDefaultExtensionName() {
final SPI defaultAnnotation = type.getAnnotation(SPI.class);
if (defaultAnnotation == null) {
return;
}
String value = defaultAnnotation.value();
if ((value = value.trim()).length() > 0) {
String[] names = NAME_SEPARATOR.split(value);
if (names.length > 1) {
throw new IllegalStateException("More than 1 default extension name on extension " + type.getName()
+ ": " + Arrays.toString(names));
}
if (names.length == 1) {
cachedDefaultName = names[0];
}
}
}
private void loadDirectoryInternal(
Map<String, Class<?>> extensionClasses, LoadingStrategy loadingStrategy, String type)
throws InterruptedException {
String fileName = loadingStrategy.directory() + type;
try {
List<ClassLoader> classLoadersToLoad = new LinkedList<>();
// try to load from ExtensionLoader's ClassLoader first
if (loadingStrategy.preferExtensionClassLoader()) {
ClassLoader extensionLoaderClassLoader = ExtensionLoader.class.getClassLoader();
if (ClassLoader.getSystemClassLoader() != extensionLoaderClassLoader) {
classLoadersToLoad.add(extensionLoaderClassLoader);
}
}
if (specialSPILoadingStrategyMap.containsKey(type)) {
String internalDirectoryType = specialSPILoadingStrategyMap.get(type);
// skip to load spi when name don't match
if (!LoadingStrategy.ALL.equals(internalDirectoryType)
&& !internalDirectoryType.equals(loadingStrategy.getName())) {
return;
}
classLoadersToLoad.clear();
classLoadersToLoad.add(ExtensionLoader.class.getClassLoader());
} else {
// load from scope model
Set<ClassLoader> classLoaders = scopeModel.getClassLoaders();
if (CollectionUtils.isEmpty(classLoaders)) {
Enumeration<java.net.URL> resources = ClassLoader.getSystemResources(fileName);
if (resources != null) {
while (resources.hasMoreElements()) {
loadResource(
extensionClasses,
null,
resources.nextElement(),
loadingStrategy.overridden(),
loadingStrategy.includedPackages(),
loadingStrategy.excludedPackages(),
loadingStrategy.onlyExtensionClassLoaderPackages());
}
}
} else {
classLoadersToLoad.addAll(classLoaders);
}
}
Map<ClassLoader, Set<java.net.URL>> resources =
ClassLoaderResourceLoader.loadResources(fileName, classLoadersToLoad);
resources.forEach(((classLoader, urls) -> {
loadFromClass(
extensionClasses,
loadingStrategy.overridden(),
urls,
classLoader,
loadingStrategy.includedPackages(),
loadingStrategy.excludedPackages(),
loadingStrategy.onlyExtensionClassLoaderPackages());
}));
} catch (InterruptedException e) {
throw e;
} catch (Throwable t) {
logger.error(
COMMON_ERROR_LOAD_EXTENSION,
"",
"",
"Exception occurred when loading extension class (interface: " + type + ", description file: "
+ fileName + ").",
t);
}
}
private void loadFromClass(
Map<String, Class<?>> extensionClasses,
boolean overridden,
Set<java.net.URL> urls,
ClassLoader classLoader,
String[] includedPackages,
String[] excludedPackages,
String[] onlyExtensionClassLoaderPackages) {
if (CollectionUtils.isNotEmpty(urls)) {
for (java.net.URL url : urls) {
loadResource(
extensionClasses,
classLoader,
url,
overridden,
includedPackages,
excludedPackages,
onlyExtensionClassLoaderPackages);
}
}
}
private void loadResource(
Map<String, Class<?>> extensionClasses,
ClassLoader classLoader,
java.net.URL resourceURL,
boolean overridden,
String[] includedPackages,
String[] excludedPackages,
String[] onlyExtensionClassLoaderPackages) {
try {
List<String> newContentList = getResourceContent(resourceURL);
String clazz;
for (String line : newContentList) {
try {
String name = null;
int i = line.indexOf('=');
if (i > 0) {
name = line.substring(0, i).trim();
clazz = line.substring(i + 1).trim();
} else {
clazz = line;
}
if (StringUtils.isNotEmpty(clazz)
&& !isExcluded(clazz, excludedPackages)
&& isIncluded(clazz, includedPackages)
&& !isExcludedByClassLoader(clazz, classLoader, onlyExtensionClassLoaderPackages)) {
loadClass(
classLoader,
extensionClasses,
resourceURL,
Class.forName(clazz, true, classLoader),
name,
overridden);
}
} catch (Throwable t) {
IllegalStateException e = new IllegalStateException(
"Failed to load extension class (interface: " + type + ", | not |
java | google__gson | gson/src/test/java/com/google/gson/functional/CollectionTest.java | {
"start": 15838,
"end": 16522
} | class ____ {
int value;
Entry(int value) {
this.value = value;
}
}
@Test
public void testSetSerialization() {
Set<Entry> set = new HashSet<>();
set.add(new Entry(1));
set.add(new Entry(2));
String json = gson.toJson(set);
assertThat(json).contains("1");
assertThat(json).contains("2");
}
@Test
public void testSetDeserialization() {
String json = "[{value:1},{value:2}]";
Type type = new TypeToken<Set<Entry>>() {}.getType();
Set<Entry> set = gson.fromJson(json, type);
assertThat(set.size()).isEqualTo(2);
for (Entry entry : set) {
assertThat(entry.value).isAnyOf(1, 2);
}
}
private static | Entry |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/TableChange.java | {
"start": 39177,
"end": 40329
} | class ____ implements ColumnPosition {
private final String column;
private After(String column) {
this.column = column;
}
public String column() {
return column;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof After)) {
return false;
}
After after = (After) o;
return Objects.equals(column, after.column);
}
@Override
public int hashCode() {
return Objects.hash(column);
}
@Override
public String toString() {
return String.format("AFTER %s", EncodingUtils.escapeIdentifier(column));
}
}
// --------------------------------------------------------------------------------------------
// Catalog table change
// --------------------------------------------------------------------------------------------
/** {@link CatalogTableChange} represents the modification of the CatalogTable. */
@PublicEvolving
| After |
java | quarkusio__quarkus | integration-tests/virtual-http-resteasy/src/test/java/io/quarkus/it/virtual/HttpResponseMessageMock.java | {
"start": 402,
"end": 1242
} | class ____ implements HttpResponseMessage {
private int httpStatusCode;
private HttpStatusType httpStatus;
private Object body;
private Map<String, String> headers;
public HttpResponseMessageMock(final HttpStatusType status, final Map<String, String> headers, final Object body) {
this.httpStatus = status;
this.httpStatusCode = status.value();
this.headers = headers;
this.body = body;
}
@Override
public HttpStatusType getStatus() {
return this.httpStatus;
}
@Override
public int getStatusCode() {
return httpStatusCode;
}
@Override
public String getHeader(String key) {
return this.headers.get(key);
}
@Override
public Object getBody() {
return this.body;
}
public static | HttpResponseMessageMock |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/persister/collection/CollectionPersister.java | {
"start": 2335,
"end": 3603
} | interface ____ a contract between the persistence strategy and
* the actual {@linkplain PersistentCollection persistent collection framework}
* and {@link org.hibernate.engine.spi.SessionImplementor session}. It does
* not define operations that are required for querying collections, nor
* for loading by outer join. Implementations are highly coupled to the
* {@link PersistentCollection} hierarchy, since double dispatch is used to
* load and update collection elements.
* <p>
* Unless a custom {@link org.hibernate.persister.spi.PersisterFactory} is
* used, it is expected that implementations of {@link CollectionPersister}
* define a constructor accepting the following arguments:
* <ol>
* <li>
* {@link org.hibernate.mapping.Collection} - The metadata about
* the collection to be handled by the persister,
* </li>
* <li>
* {@link CollectionDataAccess} - the second level caching strategy
* for this collection, and
* </li>
* <li>
* {@link RuntimeModelCreationContext} - access to additional
* information useful while constructing the persister.
* </li>
* </ol>
*
* @see PersistentCollection
* @see PluralAttributeMapping
*
* @author Gavin King
*/
public | defines |
java | apache__camel | components/camel-test/camel-test-junit5/src/main/java/org/apache/camel/test/junit5/DebugBreakpoint.java | {
"start": 1164,
"end": 2889
} | class ____ extends BreakpointSupport {
@Override
public void beforeProcess(Exchange exchange, Processor processor, NamedNode definition) {
debugBefore(exchange, processor, (ProcessorDefinition<?>) definition, definition.getId(),
definition.getLabel());
}
@Override
public void afterProcess(Exchange exchange, Processor processor, NamedNode definition, long timeTaken) {
debugAfter(exchange, processor, (ProcessorDefinition<?>) definition, definition.getId(),
definition.getLabel(), timeTaken);
}
/**
* Single step debugs and Camel invokes this method before entering the given processor
*
* @param exchange the {@link Exchange}
* @param processor the {@link Processor} which was processed
* @param definition the {@link ProcessorDefinition} definition of the processor
* @param id the definition ID
* @param label the definition label
*/
protected abstract void debugBefore(
Exchange exchange, Processor processor, ProcessorDefinition<?> definition, String id, String label);
/**
* Single step debugs and Camel invokes this method after processing the given processor
*
* @param exchange the {@link Exchange}
* @param processor the {@link Processor} which was processed
* @param definition the {@link ProcessorDefinition} definition of the processor
* @param id the definition ID
* @param label the definition label
*/
protected abstract void debugAfter(
Exchange exchange, Processor processor, ProcessorDefinition<?> definition, String id, String label,
long timeTaken);
}
| DebugBreakpoint |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/ResourceSupport.java | {
"start": 1396,
"end": 10455
} | class ____ {
/**
* Try to get the {@linkplain Resource resources} for the supplied classpath
* resource name.
*
* <p>The name of a <em>classpath resource</em> must follow the semantics
* for resource paths as defined in {@link ClassLoader#getResource(String)}.
*
* <p>If the supplied classpath resource name is prefixed with a slash
* ({@code /}), the slash will be removed.
*
* @param classpathResourceName the name of the resource to load; never
* {@code null} or blank
* @return a successful {@code Try} containing the set of loaded resources
* (potentially empty) or a failed {@code Try} containing the exception in
* case a failure occurred while trying to list resources; never
* {@code null}
* @see #tryToGetResources(String, ClassLoader)
* @see ReflectionSupport#tryToLoadClass(String)
*/
public static Try<Set<Resource>> tryToGetResources(String classpathResourceName) {
return ReflectionUtils.tryToGetResources(classpathResourceName);
}
/**
* Try to load the {@linkplain Resource resources} for the supplied classpath
* resource name, using the supplied {@link ClassLoader}.
*
* <p>The name of a <em>classpath resource</em> must follow the semantics
* for resource paths as defined in {@link ClassLoader#getResource(String)}.
*
* <p>If the supplied classpath resource name is prefixed with a slash
* ({@code /}), the slash will be removed.
*
* @param classpathResourceName the name of the resource to load; never
* {@code null} or blank
* @param classLoader the {@code ClassLoader} to use; never {@code null}
* @return a successful {@code Try} containing the set of loaded resources
* (potentially empty) or a failed {@code Try} containing the exception in
* case a failure occurred while trying to list resources; never
* {@code null}
* @see #tryToGetResources(String)
* @see ReflectionSupport#tryToLoadClass(String, ClassLoader)
*/
public static Try<Set<Resource>> tryToGetResources(String classpathResourceName, ClassLoader classLoader) {
return ReflectionUtils.tryToGetResources(classpathResourceName, classLoader);
}
/**
* Find all {@linkplain Resource resources} in the supplied classpath {@code root}
* that match the specified {@code resourceFilter}.
*
* <p>The classpath scanning algorithm searches recursively in subpackages
* beginning with the root of the classpath.
*
* @param root the URI for the classpath root in which to scan; never
* {@code null}
* @param resourceFilter the resource type filter; never {@code null}
* @return an immutable list of all such resources found; never {@code null}
* but potentially empty
* @see #findAllResourcesInPackage(String, ResourceFilter)
* @see #findAllResourcesInModule(String, ResourceFilter)
* @see ReflectionSupport#findAllClassesInClasspathRoot(URI, Predicate, Predicate)
*/
public static List<Resource> findAllResourcesInClasspathRoot(URI root, ResourceFilter resourceFilter) {
return ReflectionUtils.findAllResourcesInClasspathRoot(root, resourceFilter);
}
/**
* Find all {@linkplain Resource resources} in the supplied classpath {@code root}
* that match the specified {@code resourceFilter}.
*
* <p>The classpath scanning algorithm searches recursively in subpackages
* beginning with the root of the classpath.
*
* @param root the URI for the classpath root in which to scan; never
* {@code null}
* @param resourceFilter the resource type filter; never {@code null}
* @return a stream of all such classes found; never {@code null}
* but potentially empty
* @see #streamAllResourcesInPackage(String, ResourceFilter)
* @see #streamAllResourcesInModule(String, ResourceFilter)
* @see ReflectionSupport#streamAllClassesInClasspathRoot(URI, Predicate, Predicate)
*/
public static Stream<Resource> streamAllResourcesInClasspathRoot(URI root, ResourceFilter resourceFilter) {
return ReflectionUtils.streamAllResourcesInClasspathRoot(root, resourceFilter);
}
/**
* Find all {@linkplain Resource resources} in the supplied {@code basePackageName}
* that match the specified {@code resourceFilter}.
*
* <p>The classpath scanning algorithm searches recursively in subpackages
* beginning within the supplied base package. The resulting list may include
* identically named resources from different classpath roots.
*
* @param basePackageName the name of the base package in which to start
* scanning; must not be {@code null} and must be valid in terms of Java
* syntax
* @param resourceFilter the resource type filter; never {@code null}
* @return an immutable list of all such classes found; never {@code null}
* but potentially empty
* @see #findAllResourcesInClasspathRoot(URI, ResourceFilter)
* @see #findAllResourcesInModule(String, ResourceFilter)
* @see ReflectionSupport#findAllClassesInPackage(String, Predicate, Predicate)
*/
public static List<Resource> findAllResourcesInPackage(String basePackageName, ResourceFilter resourceFilter) {
return ReflectionUtils.findAllResourcesInPackage(basePackageName, resourceFilter);
}
/**
* Find all {@linkplain Resource resources} in the supplied {@code basePackageName}
* that match the specified {@code resourceFilter}.
*
* <p>The classpath scanning algorithm searches recursively in subpackages
* beginning within the supplied base package. The resulting stream may
* include identically named resources from different classpath roots.
*
* @param basePackageName the name of the base package in which to start
* scanning; must not be {@code null} and must be valid in terms of Java
* syntax
* @param resourceFilter the resource type filter; never {@code null}
* @return a stream of all such resources found; never {@code null}
* but potentially empty
* @see #streamAllResourcesInClasspathRoot(URI, ResourceFilter)
* @see #streamAllResourcesInModule(String, ResourceFilter)
* @see ReflectionSupport#streamAllClassesInPackage(String, Predicate, Predicate)
*/
public static Stream<Resource> streamAllResourcesInPackage(String basePackageName, ResourceFilter resourceFilter) {
return ReflectionUtils.streamAllResourcesInPackage(basePackageName, resourceFilter);
}
/**
* Find all {@linkplain Resource resources} in the supplied {@code moduleName}
* that match the specified {@code resourceFilter}.
*
* <p>The module-path scanning algorithm searches recursively in all
* packages contained in the module.
*
* @param moduleName the name of the module to scan; never {@code null} or
* <em>empty</em>
* @param resourceFilter the resource type filter; never {@code null}
* @return an immutable list of all such resources found; never {@code null}
* but potentially empty
* @see #findAllResourcesInClasspathRoot(URI, ResourceFilter)
* @see #findAllResourcesInPackage(String, ResourceFilter)
* @see ReflectionSupport#findAllClassesInModule(String, Predicate, Predicate)
*/
public static List<Resource> findAllResourcesInModule(String moduleName, ResourceFilter resourceFilter) {
return ReflectionUtils.findAllResourcesInModule(moduleName, resourceFilter);
}
/**
* Find all {@linkplain Resource resources} in the supplied {@code module}
* that match the specified {@code resourceFilter}.
*
* <p>The module-path scanning algorithm searches recursively in all
* packages contained in the module.
*
* @param module the module to scan; never {@code null} or <em>unnamed</em>
* @param resourceFilter the resource type filter; never {@code null}
* @return an immutable list of all such resources found; never {@code null}
* but potentially empty
* @since 6.1
* @see #findAllResourcesInClasspathRoot(URI, ResourceFilter)
* @see #findAllResourcesInPackage(String, ResourceFilter)
* @see ReflectionSupport#findAllClassesInModule(String, Predicate, Predicate)
*/
@API(status = EXPERIMENTAL, since = "6.1")
public static List<Resource> findAllResourcesInModule(Module module, ResourceFilter resourceFilter) {
return ReflectionUtils.findAllResourcesInModule(module, resourceFilter);
}
/**
* Find all {@linkplain Resource resources} in the supplied {@code moduleName}
* that match the specified {@code resourceFilter}.
*
* <p>The module-path scanning algorithm searches recursively in all
* packages contained in the module.
*
* @param moduleName the name of the module to scan; never {@code null} or
* <em>empty</em>
* @param resourceFilter the resource type filter; never {@code null}
* @return a stream of all such resources found; never {@code null}
* but potentially empty
* @see #streamAllResourcesInClasspathRoot(URI, ResourceFilter)
* @see #streamAllResourcesInPackage(String, ResourceFilter)
* @see ReflectionSupport#streamAllClassesInModule(String, Predicate, Predicate)
*/
public static Stream<Resource> streamAllResourcesInModule(String moduleName, ResourceFilter resourceFilter) {
return ReflectionUtils.streamAllResourcesInModule(moduleName, resourceFilter);
}
private ResourceSupport() {
}
}
| ResourceSupport |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/compress/Lz4BlockOutputStream.java | {
"start": 1367,
"end": 9664
} | class ____ extends OutputStream {
public static final int MAGIC = 0x184D2204;
public static final int LZ4_FRAME_INCOMPRESSIBLE_MASK = 0x80000000;
public static final String CLOSED_STREAM = "The stream is already closed";
public static final int BLOCKSIZE_64KB = 4;
private final LZ4Compressor compressor;
private final XXHash32 checksum;
private final boolean useBrokenFlagDescriptorChecksum;
private final FLG flg;
private final BD bd;
private final int maxBlockSize;
private OutputStream out;
private byte[] buffer;
private byte[] compressedBuffer;
private int bufferOffset;
private boolean finished;
/**
* Create a new {@link OutputStream} that will compress data using the LZ4 algorithm.
*
* @param out The output stream to compress
* @param blockSize Default: 4. The block size used during compression. 4=64kb, 5=256kb, 6=1mb, 7=4mb. All other
* values will generate an exception
* @param level The compression level to use
* @param blockChecksum Default: false. When true, a XXHash32 checksum is computed and appended to the stream for
* every block of data
* @param useBrokenFlagDescriptorChecksum Default: false. When true, writes an incorrect FrameDescriptor checksum
* compatible with older kafka clients.
* @throws IOException
*/
public Lz4BlockOutputStream(OutputStream out, int blockSize, int level, boolean blockChecksum, boolean useBrokenFlagDescriptorChecksum) throws IOException {
this.out = out;
/*
* lz4-java provides two types of compressors; fastCompressor, which requires less memory but fast compression speed (with default compression level only),
* and highCompressor which requires more memory and slower speed but compresses more efficiently (with various compression level).
*
* For backward compatibility, Lz4BlockOutputStream uses fastCompressor with default compression level but, with the other level, it uses highCompressor.
*/
compressor = level == CompressionType.LZ4.defaultLevel() ? LZ4Factory.fastestInstance().fastCompressor() : LZ4Factory.fastestInstance().highCompressor(level);
checksum = XXHashFactory.fastestInstance().hash32();
this.useBrokenFlagDescriptorChecksum = useBrokenFlagDescriptorChecksum;
bd = new BD(blockSize);
flg = new FLG(blockChecksum);
bufferOffset = 0;
maxBlockSize = bd.getBlockMaximumSize();
buffer = new byte[maxBlockSize];
compressedBuffer = new byte[compressor.maxCompressedLength(maxBlockSize)];
finished = false;
writeHeader();
}
/**
* Create a new {@link OutputStream} that will compress data using the LZ4 algorithm.
*
* @param out The output stream to compress
* @param level The compression level to use
* @param useBrokenFlagDescriptorChecksum Default: false. When true, writes an incorrect FrameDescriptor checksum
* compatible with older kafka clients.
* @throws IOException
*/
public Lz4BlockOutputStream(OutputStream out, int level, boolean useBrokenFlagDescriptorChecksum) throws IOException {
this(out, BLOCKSIZE_64KB, level, false, useBrokenFlagDescriptorChecksum);
}
/**
* Check whether KafkaLZ4BlockInputStream is configured to write an
* incorrect Frame Descriptor checksum, which is useful for
* compatibility with old client implementations.
*/
public boolean useBrokenFlagDescriptorChecksum() {
return this.useBrokenFlagDescriptorChecksum;
}
/**
* Writes the magic number and frame descriptor to the underlying {@link OutputStream}.
*
* @throws IOException
*/
private void writeHeader() throws IOException {
ByteUtils.writeUnsignedIntLE(buffer, 0, MAGIC);
bufferOffset = 4;
buffer[bufferOffset++] = flg.toByte();
buffer[bufferOffset++] = bd.toByte();
// TODO write uncompressed content size, update flg.validate()
// compute checksum on all descriptor fields
int offset = 4;
int len = bufferOffset - offset;
if (this.useBrokenFlagDescriptorChecksum) {
len += offset;
offset = 0;
}
byte hash = (byte) ((checksum.hash(buffer, offset, len, 0) >> 8) & 0xFF);
buffer[bufferOffset++] = hash;
// write out frame descriptor
out.write(buffer, 0, bufferOffset);
bufferOffset = 0;
}
/**
* Compresses buffered data, optionally computes an XXHash32 checksum, and writes the result to the underlying
* {@link OutputStream}.
*
* @throws IOException
*/
private void writeBlock() throws IOException {
if (bufferOffset == 0) {
return;
}
int compressedLength = compressor.compress(buffer, 0, bufferOffset, compressedBuffer, 0);
byte[] bufferToWrite = compressedBuffer;
int compressMethod = 0;
// Store block uncompressed if compressed length is greater (incompressible)
if (compressedLength >= bufferOffset) {
bufferToWrite = buffer;
compressedLength = bufferOffset;
compressMethod = LZ4_FRAME_INCOMPRESSIBLE_MASK;
}
// Write content
ByteUtils.writeUnsignedIntLE(out, compressedLength | compressMethod);
out.write(bufferToWrite, 0, compressedLength);
// Calculate and write block checksum
if (flg.isBlockChecksumSet()) {
int hash = checksum.hash(bufferToWrite, 0, compressedLength, 0);
ByteUtils.writeUnsignedIntLE(out, hash);
}
bufferOffset = 0;
}
/**
* Similar to the {@link #writeBlock()} method. Writes a 0-length block (without block checksum) to signal the end
* of the block stream.
*
* @throws IOException
*/
private void writeEndMark() throws IOException {
ByteUtils.writeUnsignedIntLE(out, 0);
// TODO implement content checksum, update flg.validate()
}
@Override
public void write(int b) throws IOException {
ensureNotFinished();
if (bufferOffset == maxBlockSize) {
writeBlock();
}
buffer[bufferOffset++] = (byte) b;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
net.jpountz.util.SafeUtils.checkRange(b, off, len);
ensureNotFinished();
int bufferRemainingLength = maxBlockSize - bufferOffset;
// while b will fill the buffer
while (len > bufferRemainingLength) {
// fill remaining space in buffer
System.arraycopy(b, off, buffer, bufferOffset, bufferRemainingLength);
bufferOffset = maxBlockSize;
writeBlock();
// compute new offset and length
off += bufferRemainingLength;
len -= bufferRemainingLength;
bufferRemainingLength = maxBlockSize;
}
System.arraycopy(b, off, buffer, bufferOffset, len);
bufferOffset += len;
}
@Override
public void flush() throws IOException {
if (!finished) {
writeBlock();
}
if (out != null) {
out.flush();
}
}
/**
* A simple state check to ensure the stream is still open.
*/
private void ensureNotFinished() {
if (finished) {
throw new IllegalStateException(CLOSED_STREAM);
}
}
@Override
public void close() throws IOException {
try {
if (!finished) {
// basically flush the buffer writing the last block
writeBlock();
// write the end block
writeEndMark();
}
} finally {
try {
if (out != null) {
try (OutputStream outStream = out) {
outStream.flush();
}
}
} finally {
out = null;
buffer = null;
compressedBuffer = null;
finished = true;
}
}
}
public static | Lz4BlockOutputStream |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/bean/BeanDeserializerTest.java | {
"start": 3429,
"end": 3901
} | class ____ extends BeanDeserializer
{
protected static int propCount;
public Issue476Deserializer(BeanDeserializer src) {
super(src);
}
@Override
public ValueDeserializer<?> createContextual(DeserializationContext ctxt,
BeanProperty property)
{
super.createContextual(ctxt, property);
propCount++;
return this;
}
}
public | Issue476Deserializer |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/ast/FunctionReference.java | {
"start": 2446,
"end": 12742
} | class ____ extends SpelNodeImpl {
private final String name;
// Captures the most recently used method for the function invocation *if* the method
// can safely be used for compilation (i.e. no argument conversion is going on)
private volatile @Nullable Method method;
public FunctionReference(String functionName, int startPos, int endPos, SpelNodeImpl... arguments) {
super(startPos, endPos, arguments);
this.name = functionName;
}
@Override
public TypedValue getValueInternal(ExpressionState state) throws EvaluationException {
TypedValue value = state.lookupVariable(this.name);
if (value == TypedValue.NULL) {
throw new SpelEvaluationException(getStartPosition(), SpelMessage.FUNCTION_NOT_DEFINED, this.name);
}
Object function = value.getValue();
// Static Java method registered via a Method.
// Note: "javaMethod" cannot be named "method" due to a bug in Checkstyle.
if (function instanceof Method javaMethod) {
try {
return executeFunctionViaMethod(state, javaMethod);
}
catch (SpelEvaluationException ex) {
ex.setPosition(getStartPosition());
throw ex;
}
}
// Function registered via a MethodHandle.
if (function instanceof MethodHandle methodHandle) {
try {
return executeFunctionViaMethodHandle(state, methodHandle);
}
catch (SpelEvaluationException ex) {
ex.setPosition(getStartPosition());
throw ex;
}
}
// Neither a Method nor a MethodHandle?
throw new SpelEvaluationException(
SpelMessage.FUNCTION_REFERENCE_CANNOT_BE_INVOKED, this.name, value.getClass());
}
/**
* Execute a function represented as a {@link Method}.
* @param state the expression evaluation state
* @param method the method to invoke
* @return the return value of the invoked Java method
* @throws EvaluationException if there is any problem invoking the method
*/
private TypedValue executeFunctionViaMethod(ExpressionState state, Method method) throws EvaluationException {
@Nullable Object[] functionArgs = getArguments(state);
if (!method.isVarArgs()) {
int declaredParamCount = method.getParameterCount();
if (declaredParamCount != functionArgs.length) {
throw new SpelEvaluationException(SpelMessage.INCORRECT_NUMBER_OF_ARGUMENTS_TO_FUNCTION,
this.name, functionArgs.length, declaredParamCount);
}
}
if (!Modifier.isStatic(method.getModifiers())) {
throw new SpelEvaluationException(getStartPosition(),
SpelMessage.FUNCTION_MUST_BE_STATIC, ClassUtils.getQualifiedMethodName(method), this.name);
}
// Convert arguments if necessary and remap them for varargs if required
TypeConverter converter = state.getEvaluationContext().getTypeConverter();
boolean argumentConversionOccurred = ReflectionHelper.convertAllArguments(converter, functionArgs, method);
if (method.isVarArgs()) {
functionArgs = ReflectionHelper.setupArgumentsForVarargsInvocation(
method.getParameterTypes(), functionArgs);
}
boolean compilable = false;
try {
ReflectionUtils.makeAccessible(method);
Object result = method.invoke(method.getClass(), functionArgs);
compilable = !argumentConversionOccurred;
return new TypedValue(result, new TypeDescriptor(new MethodParameter(method, -1)).narrow(result));
}
catch (Exception ex) {
Throwable cause = ((ex instanceof InvocationTargetException ite && ite.getCause() != null) ?
ite.getCause() : ex);
throw new SpelEvaluationException(getStartPosition(), cause, SpelMessage.EXCEPTION_DURING_FUNCTION_CALL,
this.name, cause.getMessage());
}
finally {
if (compilable) {
this.exitTypeDescriptor = CodeFlow.toDescriptor(method.getReturnType());
this.method = method;
}
else {
this.exitTypeDescriptor = null;
this.method = null;
}
}
}
/**
* Execute a function represented as {@link MethodHandle}.
* <p>Method types that take no arguments (fully bound handles or static methods
* with no parameters) can use {@link MethodHandle#invoke(Object...)} which is the most
* efficient. Otherwise, {@link MethodHandle#invokeWithArguments(Object...)} is used.
* @param state the expression evaluation state
* @param methodHandle the method handle to invoke
* @return the return value of the invoked Java method
* @throws EvaluationException if there is any problem invoking the method
* @since 6.1
*/
private TypedValue executeFunctionViaMethodHandle(ExpressionState state, MethodHandle methodHandle) throws EvaluationException {
@Nullable Object[] functionArgs = getArguments(state);
MethodType declaredParams = methodHandle.type();
int spelParamCount = functionArgs.length;
int declaredParamCount = declaredParams.parameterCount();
// We don't use methodHandle.isVarargsCollector(), because a MethodHandle created via
// MethodHandle#bindTo() is "never a variable-arity method handle, even if the original
// target method handle was." Thus, we merely assume/suspect that varargs are supported
// if the last parameter type is an array.
boolean isSuspectedVarargs = declaredParams.lastParameterType().isArray();
if (isSuspectedVarargs) {
if (spelParamCount < declaredParamCount - 1) {
// Varargs, but the number of provided arguments (potentially 0) is insufficient
// for a varargs invocation for the number of declared parameters.
//
// As stated in the Javadoc for MethodHandle#asVarargsCollector(), "the caller
// must supply, at a minimum, N-1 arguments, where N is the arity of the target."
throw new SpelEvaluationException(SpelMessage.INCORRECT_NUMBER_OF_ARGUMENTS_TO_FUNCTION,
this.name, spelParamCount, (declaredParamCount - 1) + " or more");
}
}
else if (spelParamCount != declaredParamCount) {
// Incorrect number and not varargs. Perhaps a subset of arguments was provided,
// but the MethodHandle wasn't bound?
throw new SpelEvaluationException(SpelMessage.INCORRECT_NUMBER_OF_ARGUMENTS_TO_FUNCTION,
this.name, spelParamCount, declaredParamCount);
}
// simplest case: the MethodHandle is fully bound or represents a static method with no params:
if (declaredParamCount == 0) {
try {
return new TypedValue(methodHandle.invoke());
}
catch (Throwable ex) {
throw new SpelEvaluationException(getStartPosition(), ex, SpelMessage.EXCEPTION_DURING_FUNCTION_CALL,
this.name, ex.getMessage());
}
finally {
// Note: we consider MethodHandles not compilable
this.exitTypeDescriptor = null;
this.method = null;
}
}
// more complex case, we need to look at conversion and varargs repackaging
Integer varArgPosition = null;
if (isSuspectedVarargs) {
varArgPosition = declaredParamCount - 1;
}
TypeConverter converter = state.getEvaluationContext().getTypeConverter();
ReflectionHelper.convertAllMethodHandleArguments(converter, functionArgs, methodHandle, varArgPosition);
if (isSuspectedVarargs) {
if (declaredParamCount == 1 && !methodHandle.isVarargsCollector()) {
// We only repackage the arguments if the MethodHandle accepts a single
// argument AND the MethodHandle is not a "varargs collector" -- for example,
// when we are dealing with a bound MethodHandle.
functionArgs = ReflectionHelper.setupArgumentsForVarargsInvocation(
methodHandle.type().parameterArray(), functionArgs);
}
else if (spelParamCount == declaredParamCount) {
// If the varargs were supplied already packaged in an array, we have to create
// a new array, add the non-varargs arguments to the beginning of that array,
// and add the unpackaged varargs arguments to the end of that array. The reason
// is that MethodHandle.invokeWithArguments(Object...) does not expect varargs
// to be packaged in an array, in contrast to how method invocation works with
// reflection.
int actualVarargsIndex = functionArgs.length - 1;
if (actualVarargsIndex >= 0 && functionArgs[actualVarargsIndex] instanceof Object[] argsToUnpack) {
Object[] newArgs = new Object[actualVarargsIndex + argsToUnpack.length];
System.arraycopy(functionArgs, 0, newArgs, 0, actualVarargsIndex);
System.arraycopy(argsToUnpack, 0, newArgs, actualVarargsIndex, argsToUnpack.length);
functionArgs = newArgs;
}
}
}
try {
return new TypedValue(methodHandle.invokeWithArguments(functionArgs));
}
catch (Throwable ex) {
throw new SpelEvaluationException(getStartPosition(), ex, SpelMessage.EXCEPTION_DURING_FUNCTION_CALL,
this.name, ex.getMessage());
}
finally {
// Note: we consider MethodHandles not compilable
this.exitTypeDescriptor = null;
this.method = null;
}
}
@Override
public String toStringAST() {
StringJoiner sj = new StringJoiner(",", "(", ")");
for (int i = 0; i < getChildCount(); i++) {
sj.add(getChild(i).toStringAST());
}
return '#' + this.name + sj;
}
/**
* Compute the arguments to the function, they are the children of this expression node.
* @return an array of argument values for the function call
*/
private @Nullable Object[] getArguments(ExpressionState state) throws EvaluationException {
// Compute arguments to the function
@Nullable Object[] arguments = new Object[getChildCount()];
for (int i = 0; i < arguments.length; i++) {
arguments[i] = this.children[i].getValueInternal(state).getValue();
}
return arguments;
}
@Override
public boolean isCompilable() {
Method method = this.method;
if (method == null) {
return false;
}
int methodModifiers = method.getModifiers();
if (!Modifier.isStatic(methodModifiers) || !Modifier.isPublic(methodModifiers) ||
!Modifier.isPublic(method.getDeclaringClass().getModifiers())) {
return false;
}
for (SpelNodeImpl child : this.children) {
if (!child.isCompilable()) {
return false;
}
}
return true;
}
@Override
public void generateCode(MethodVisitor mv, CodeFlow cf) {
Method method = this.method;
Assert.state(method != null, "No method handle");
String classDesc = method.getDeclaringClass().getName().replace('.', '/');
generateCodeForArguments(mv, cf, method, this.children);
mv.visitMethodInsn(INVOKESTATIC, classDesc, method.getName(),
CodeFlow.createSignatureDescriptor(method), false);
cf.pushDescriptor(this.exitTypeDescriptor);
}
}
| FunctionReference |
java | apache__flink | flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/source/reader/fetcher/SplitFetcherManager.java | {
"start": 2311,
"end": 2763
} | class ____ with the {@link SourceReaderBase}.
*
* <p>The split fetcher manager could be used to support different threading models by implementing
* the {@link #addSplits(List)} method differently. For example, a single thread split fetcher
* manager would only start a single fetcher and assign all the splits to it. A one-thread-per-split
* fetcher may spawn a new thread every time a new split is assigned.
*/
@PublicEvolving
public abstract | works |
java | apache__kafka | group-coordinator/src/main/java/org/apache/kafka/coordinator/group/classic/ClassicGroupMember.java | {
"start": 2336,
"end": 12829
} | class ____ {
/**
* An empty assignment.
*/
public static final byte[] EMPTY_ASSIGNMENT = Bytes.EMPTY;
/**
* The member id.
*/
private final String memberId;
/**
* The group instance id.
*/
private final Optional<String> groupInstanceId;
/**
* The client id.
*/
private final String clientId;
/**
* The client host.
*/
private final String clientHost;
/**
* The rebalance timeout in milliseconds.
*/
private int rebalanceTimeoutMs;
/**
* The session timeout in milliseconds.
*/
private int sessionTimeoutMs;
/**
* The protocol type.
*/
private final String protocolType;
/**
* The list of supported protocols.
*/
private JoinGroupRequestProtocolCollection supportedProtocols;
/**
* The assignment stored by the client assignor.
*/
private byte[] assignment;
/**
* The future that is invoked once this member joins the group.
*/
private CompletableFuture<JoinGroupResponseData> awaitingJoinFuture = null;
/**
* The future that is invoked once this member completes the sync group phase.
*/
private CompletableFuture<SyncGroupResponseData> awaitingSyncFuture = null;
/**
* Indicates whether the member is a new member of the group.
*/
private boolean isNew = false;
public ClassicGroupMember(
String memberId,
Optional<String> groupInstanceId,
String clientId,
String clientHost,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
String protocolType,
JoinGroupRequestProtocolCollection supportedProtocols
) {
this(
memberId,
groupInstanceId,
clientId,
clientHost,
rebalanceTimeoutMs,
sessionTimeoutMs,
protocolType,
supportedProtocols,
EMPTY_ASSIGNMENT
);
}
public ClassicGroupMember(
String memberId,
Optional<String> groupInstanceId,
String clientId,
String clientHost,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
String protocolType,
JoinGroupRequestProtocolCollection supportedProtocols,
byte[] assignment
) {
this.memberId = memberId;
this.groupInstanceId = groupInstanceId;
this.clientId = clientId;
this.clientHost = clientHost;
this.rebalanceTimeoutMs = rebalanceTimeoutMs;
this.sessionTimeoutMs = sessionTimeoutMs;
this.protocolType = protocolType;
this.supportedProtocols = supportedProtocols;
this.assignment = assignment;
}
/**
* @return true if the member is utilizing static membership, false otherwise.
*/
public boolean isStaticMember() {
return groupInstanceId.isPresent();
}
/**
* @return whether the member is awaiting join.
*/
public boolean isAwaitingJoin() {
return awaitingJoinFuture != null;
}
/**
* @return whether the member is awaiting sync.
*/
public boolean isAwaitingSync() {
return awaitingSyncFuture != null;
}
/**
* Get the metadata corresponding to the provided protocol.
*/
public byte[] metadata(String protocolName) {
for (JoinGroupRequestProtocol protocol : supportedProtocols) {
if (protocol.name().equals(protocolName)) {
return protocol.metadata();
}
}
throw new IllegalArgumentException("Member does not support protocol " +
protocolName);
}
/**
* The heartbeat is always considered satisfied when an existing member has made a
* successful join/sync group request during a rebalance.
*
* @return true if heartbeat was satisfied; false otherwise.
*/
public boolean hasSatisfiedHeartbeat() {
if (isNew) {
// New members can be expired even while awaiting join, so we check this first
return false;
} else {
// Members that are awaiting a rebalance automatically satisfy expected heartbeats
return isAwaitingJoin() || isAwaitingSync();
}
}
/**
* Compare the given list of protocols with the member's supported protocols.
*
* @param protocols list of protocols to match.
* @return true if the given list matches the member's list of supported protocols,
* false otherwise.
*/
public boolean matches(JoinGroupRequestProtocolCollection protocols) {
return protocols.equals(this.supportedProtocols);
}
/**
* Vote for one of the potential group protocols. This takes into account the protocol preference as
* indicated by the order of supported protocols and returns the first one also contained in the set
*
* @param candidates The protocol names that this member can vote for
* @return the first supported protocol that matches one of the candidates
*/
public String vote(Set<String> candidates) {
for (JoinGroupRequestProtocol protocol : supportedProtocols) {
if (candidates.contains(protocol.name())) {
return protocol.name();
}
}
throw new IllegalArgumentException("Member does not support any of the candidate protocols");
}
/**
* Transform protocols into their respective names.
*
* @param supportedProtocols list of supported protocols.
* @return a set of protocol names from the given list of supported protocols.
*/
public static Set<String> plainProtocolSet(
JoinGroupRequestProtocolCollection supportedProtocols
) {
Set<String> protocolNames = new HashSet<>();
for (JoinGroupRequestProtocol protocol : supportedProtocols) {
protocolNames.add(protocol.name());
}
return protocolNames;
}
/**
* @return whether the member has an assignment set.
*/
public boolean hasAssignment() {
return assignment != null && assignment.length > 0;
}
/**
* @return the member id.
*/
public String memberId() {
return memberId;
}
/**
* @return the group instance id.
*/
public Optional<String> groupInstanceId() {
return groupInstanceId;
}
/**
* @return the client id.
*/
public String clientId() {
return clientId;
}
/**
* @return the client host.
*/
public String clientHost() {
return clientHost;
}
/**
* @return the rebalance timeout in milliseconds.
*/
public int rebalanceTimeoutMs() {
return rebalanceTimeoutMs;
}
/**
* @return the session timeout in milliseconds.
*/
public int sessionTimeoutMs() {
return sessionTimeoutMs;
}
/**
* @return the protocol type.
*/
public String protocolType() {
return protocolType;
}
/**
* @return the list of supported protocols.
*/
public JoinGroupRequestProtocolCollection supportedProtocols() {
return supportedProtocols;
}
/**
* @return the member's assignment.
*/
public byte[] assignment() {
return assignment;
}
/**
* @return the awaiting join future.
*/
public CompletableFuture<JoinGroupResponseData> awaitingJoinFuture() {
return awaitingJoinFuture;
}
/**
* @return the awaiting sync future.
*/
public CompletableFuture<SyncGroupResponseData> awaitingSyncFuture() {
return awaitingSyncFuture;
}
/**
* @return true if the member is new, false otherwise.
*/
public boolean isNew() {
return isNew;
}
/**
* @return The described group member without metadata.
*/
public DescribeGroupsResponseData.DescribedGroupMember describeNoMetadata() {
return new DescribeGroupsResponseData.DescribedGroupMember()
.setMemberId(memberId())
.setGroupInstanceId(groupInstanceId().orElse(null))
.setClientId(clientId())
.setClientHost(clientHost())
.setMemberAssignment(assignment());
}
/**
* @return The described group member with metadata corresponding to the provided protocol.
*/
public DescribeGroupsResponseData.DescribedGroupMember describe(String protocolName) {
return describeNoMetadata().setMemberMetadata(metadata(protocolName));
}
/**
* @param value the new rebalance timeout in milliseconds.
*/
public void setRebalanceTimeoutMs(int value) {
this.rebalanceTimeoutMs = value;
}
/**
* @param value the new session timeout in milliseconds.
*/
public void setSessionTimeoutMs(int value) {
this.sessionTimeoutMs = value;
}
/**
* @param value the new list of supported protocols.
*/
public void setSupportedProtocols(JoinGroupRequestProtocolCollection value) {
this.supportedProtocols = value;
}
/**
* @param value the new assignment.
*/
public void setAssignment(byte[] value) {
this.assignment = value;
}
/**
* @param value the updated join future.
*/
public void setAwaitingJoinFuture(CompletableFuture<JoinGroupResponseData> value) {
this.awaitingJoinFuture = value;
}
/**
* @param value the updated sync future.
*/
public void setAwaitingSyncFuture(CompletableFuture<SyncGroupResponseData> value) {
this.awaitingSyncFuture = value;
}
/**
* @param value true if the member is new, false otherwise.
*/
public void setIsNew(boolean value) {
this.isNew = value;
}
@Override
public String toString() {
return "ClassicGroupMember(" +
"memberId='" + memberId + '\'' +
", groupInstanceId='" + groupInstanceId.orElse("") + '\'' +
", clientId='" + clientId + '\'' +
", clientHost='" + clientHost + '\'' +
", rebalanceTimeoutMs=" + rebalanceTimeoutMs +
", sessionTimeoutMs=" + sessionTimeoutMs +
", protocolType='" + protocolType + '\'' +
", supportedProtocols=" + supportedProtocols.stream()
.map(JoinGroupRequestProtocol::name)
.toList() +
')';
}
}
| ClassicGroupMember |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/export/assembler/MethodExclusionMBeanInfoAssemblerTests.java | {
"start": 1067,
"end": 2831
} | class ____ extends AbstractJmxAssemblerTests {
private static final String OBJECT_NAME = "bean:name=testBean5";
@Override
protected String getObjectName() {
return OBJECT_NAME;
}
@Override
protected int getExpectedOperationCount() {
return 9;
}
@Override
protected int getExpectedAttributeCount() {
return 4;
}
@Override
protected String getApplicationContextPath() {
return "org/springframework/jmx/export/assembler/methodExclusionAssembler.xml";
}
@Override
protected MBeanInfoAssembler getAssembler() {
MethodExclusionMBeanInfoAssembler assembler = new MethodExclusionMBeanInfoAssembler();
assembler.setIgnoredMethods(new String[] {"dontExposeMe", "setSuperman"});
return assembler;
}
@Test
void testSupermanIsReadOnly() throws Exception {
ModelMBeanInfo info = getMBeanInfoFromAssembler();
ModelMBeanAttributeInfo attr = info.getAttribute("Superman");
assertThat(attr.isReadable()).isTrue();
assertThat(attr.isWritable()).isFalse();
}
/*
* https://opensource.atlassian.com/projects/spring/browse/SPR-2754
*/
@Test
void testIsNotIgnoredDoesntIgnoreUnspecifiedBeanMethods() throws Exception {
final String beanKey = "myTestBean";
MethodExclusionMBeanInfoAssembler assembler = new MethodExclusionMBeanInfoAssembler();
Properties ignored = new Properties();
ignored.setProperty(beanKey, "dontExposeMe,setSuperman");
assembler.setIgnoredMethodMappings(ignored);
Method method = JmxTestBean.class.getMethod("dontExposeMe");
assertThat(assembler.isNotIgnored(method, beanKey)).isFalse();
// this bean does not have any ignored methods on it, so must obviously not be ignored...
assertThat(assembler.isNotIgnored(method, "someOtherBeanKey")).isTrue();
}
}
| MethodExclusionMBeanInfoAssemblerTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/DeadLetterChannelOnExceptionOnRedeliveryTest.java | {
"start": 3834,
"end": 4622
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) {
// the message is being redelivered so we can alter it
// we just append the redelivery counter to the body
// you can of course do all kind of stuff instead
String body = exchange.getIn().getBody(String.class);
int count = exchange.getIn().getHeader("CamelRedeliveryCounter", Integer.class);
exchange.getIn().setBody(body + count);
}
}
// END SNIPPET: e3
// START SNIPPET: e4
// This is our processor that is executed before IOException redeliver
// attempt
// here we can do what we want in the java code, such as altering the
// message
public static | MyRedeliverProcessor |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/EndpointSchemaGeneratorMojo.java | {
"start": 44391,
"end": 64788
} | class ____ as description
String doc = getDocComment(endpointClassElement);
if (doc != null) {
// need to sanitize the description first (we only want a
// summary)
doc = JavadocHelper.sanitizeDescription(doc, true);
// the javadoc may actually be empty, so only change the doc if
// we got something
if (!Strings.isNullOrEmpty(doc)) {
model.setDescription(doc);
}
}
// project.getDescription may fallback and use parent description
if ("Camel Components".equalsIgnoreCase(model.getDescription()) || Strings.isNullOrEmpty(model.getDescription())) {
throw new IllegalStateException(
"Cannot find description to use for component: " + scheme
+ ". Add <description> to Maven pom.xml or javadoc to the endpoint: "
+ endpointClassElement);
}
return model;
}
protected void findComponentClassProperties(
ComponentModel componentModel, Class<?> classElement,
String prefix, String nestedTypeName, String nestedFieldName) {
final Class<?> orgClassElement = classElement;
Set<String> excludes = new HashSet<>();
while (true) {
processMetadataClassAnnotation(componentModel, classElement, excludes);
List<Method> methods = findCandidateClassMethods(classElement);
// if the component has options with annotations then we only want to generate options that are annotated
// as ideally components should favour doing this, so we can control what is an option and what is not
List<Field> fields = Stream.of(classElement.getDeclaredFields()).toList();
boolean annotationBasedOptions = fields.stream().anyMatch(f -> f.getAnnotation(Metadata.class) != null)
|| methods.stream().anyMatch(m -> m.getAnnotation(Metadata.class) != null);
if (!methods.isEmpty() && !annotationBasedOptions) {
getLog().warn("Component class " + classElement.getName() + " has not been marked up with @Metadata for "
+ methods.size() + " options.");
}
for (Method method : methods) {
String methodName = method.getName();
Metadata metadata = method.getAnnotation(Metadata.class);
boolean deprecated = method.getAnnotation(Deprecated.class) != null;
String deprecationNote = null;
if (metadata != null) {
deprecationNote = metadata.deprecationNote();
}
// we usually favor putting the @Metadata annotation on the
// field instead of the setter, so try to use it if its there
String fieldName = methodName.substring(3);
fieldName = fieldName.substring(0, 1).toLowerCase() + fieldName.substring(1);
Field fieldElement = getFieldElement(classElement, fieldName);
if (fieldElement != null && metadata == null) {
metadata = fieldElement.getAnnotation(Metadata.class);
}
if (metadata != null && metadata.skip()) {
continue;
}
// skip methods/fields which has no annotation if we only look for annotation based
if (annotationBasedOptions && metadata == null) {
continue;
}
// if the field type is a nested parameter then iterate
// through its fields
if (fieldElement != null) {
Class<?> fieldTypeElement = fieldElement.getType();
String fieldTypeName = getTypeName(GenericsUtil.resolveType(orgClassElement, fieldElement));
UriParams fieldParams = fieldTypeElement.getAnnotation(UriParams.class);
if (fieldParams != null) {
String nestedPrefix = prefix;
String extraPrefix = fieldParams.prefix();
if (!Strings.isNullOrEmpty(extraPrefix)) {
nestedPrefix += extraPrefix;
}
nestedTypeName = fieldTypeName;
nestedFieldName = fieldElement.getName();
findClassProperties(componentModel, fieldTypeElement, Collections.emptySet(), nestedPrefix,
nestedTypeName, nestedFieldName, true);
nestedTypeName = null;
nestedFieldName = null;
// we also want to include the configuration itself so continue and add ourselves
}
}
boolean required = metadata != null && metadata.required();
String label = metadata != null ? metadata.label() : null;
boolean secret = metadata != null && metadata.secret();
boolean autowired = metadata != null && metadata.autowired();
boolean supportFileReference = metadata != null && metadata.supportFileReference();
boolean largeInput = metadata != null && metadata.largeInput();
String inputLanguage = metadata != null ? metadata.inputLanguage() : null;
boolean important = metadata != null && metadata.important();
// we do not yet have default values / notes / as no annotation
// support yet
// String defaultValueNote = param.defaultValueNote();
Object defaultValue = metadata != null ? metadata.defaultValue() : "";
String defaultValueNote = null;
String name = prefix + fieldName;
String displayName = metadata != null ? metadata.displayName() : null;
// compute a display name if we don't have anything
if (Strings.isNullOrEmpty(displayName)) {
displayName = Strings.asTitle(name);
}
Class<?> fieldType = method.getParameters()[0].getType();
String fieldTypeName = getTypeName(GenericsUtil.resolveParameterTypes(orgClassElement, method)[0]);
String docComment = findJavaDoc(method, fieldName, name, classElement, false);
if (Strings.isNullOrEmpty(docComment)) {
docComment = metadata != null ? metadata.description() : null;
}
if (Strings.isNullOrEmpty(docComment)) {
// apt cannot grab javadoc from camel-core, only from
// annotations
if ("setHeaderFilterStrategy".equals(methodName)) {
docComment = HEADER_FILTER_STRATEGY_JAVADOC;
} else {
docComment = "";
}
}
// gather enums
List<String> enums = getEnums(metadata, fieldType);
// the field type may be overloaded by another type
boolean isDuration = false;
if (metadata != null && !Strings.isNullOrEmpty(metadata.javaType())) {
String mjt = metadata.javaType();
if ("java.time.Duration".equals(mjt)) {
isDuration = true;
} else {
fieldTypeName = mjt;
}
}
// generics for collection types
String nestedType = null;
String desc = fieldTypeName;
if (desc.contains("<") && desc.contains(">")) {
desc = Strings.between(desc, "<", ">");
// if it has additional nested types, then we only want the outer type
int pos = desc.indexOf('<');
if (pos != -1) {
desc = desc.substring(0, pos);
}
// if its a map then it has a key/value, so we only want the last part
pos = desc.indexOf(',');
if (pos != -1) {
desc = desc.substring(pos + 1);
}
desc = desc.replace('$', '.');
desc = desc.trim();
// skip if the type is generic or a wildcard
if (!desc.isEmpty() && desc.indexOf('?') == -1 && !desc.contains(" extends ")) {
nestedType = desc;
}
}
// prepare default value so its value is correct according to its type
defaultValue = getDefaultValue(defaultValue, fieldTypeName, isDuration);
String group = EndpointHelper.labelAsGroupName(label, componentModel.isConsumerOnly(),
componentModel.isProducerOnly());
// filter out consumer/producer only
boolean accept = !excludes.contains(name);
if (componentModel.isConsumerOnly() && "producer".equals(group)) {
accept = false;
} else if (componentModel.isProducerOnly() && "consumer".equals(group)) {
accept = false;
}
if (accept) {
Optional<ComponentOptionModel> prev = componentModel.getComponentOptions().stream()
.filter(opt -> name.equals(opt.getName())).findAny();
if (prev.isPresent()) {
String prv = prev.get().getJavaType();
String cur = fieldTypeName;
if (prv.equals("java.lang.String")
|| prv.equals("java.lang.String[]") && cur.equals("java.util.Collection<java.lang.String>")) {
componentModel.getComponentOptions().remove(prev.get());
} else {
accept = false;
}
}
}
if (accept) {
ComponentOptionModel option = new ComponentOptionModel();
option.setKind("property");
option.setName(name);
option.setDisplayName(displayName);
option.setType(MojoHelper.getType(fieldTypeName, enums != null && !enums.isEmpty(), isDuration));
option.setJavaType(fieldTypeName);
option.setRequired(required);
option.setDefaultValue(defaultValue);
option.setDefaultValueNote(defaultValueNote);
option.setDescription(docComment.trim());
option.setDeprecated(deprecated);
option.setDeprecationNote(deprecationNote);
option.setSecret(secret);
option.setAutowired(autowired);
option.setGroup(group);
option.setLabel(label);
option.setEnums(enums);
option.setNestedType(nestedType);
option.setConfigurationClass(nestedTypeName);
option.setConfigurationField(nestedFieldName);
option.setSupportFileReference(supportFileReference);
option.setLargeInput(largeInput);
option.setInputLanguage(inputLanguage);
option.setImportant(important);
componentModel.addComponentOption(option);
}
}
// check super classes which may also have fields
Class<?> superclass = classElement.getSuperclass();
if (superclass != null && superclass != Object.class) {
classElement = superclass;
} else {
break;
}
}
}
private List<String> getEnums(Metadata metadata, Class<?> fieldType) {
List<String> enums = null;
if (metadata != null && !Strings.isNullOrEmpty(metadata.enums())) {
String[] values = metadata.enums().split(",");
enums = Stream.of(values).map(String::trim).toList();
} else if (fieldType != null && fieldType.isEnum()) {
enums = new ArrayList<>();
for (Object val : fieldType.getEnumConstants()) {
String str = val.toString();
if (!enums.contains(str)) {
enums.add(str);
}
}
}
return enums;
}
private Field getFieldElement(Class<?> classElement, String fieldName) {
Field fieldElement;
try {
fieldElement = classElement.getDeclaredField(fieldName);
} catch (NoSuchFieldException e) {
fieldElement = null;
}
return fieldElement;
}
private List<Method> findCandidateClassMethods(Class<?> classElement) {
return Stream.of(classElement.getDeclaredMethods()).filter(method -> {
Metadata metadata = method.getAnnotation(Metadata.class);
String methodName = method.getName();
if (metadata != null && metadata.skip()) {
return false;
}
if (method.isSynthetic() || !Modifier.isPublic(method.getModifiers())) {
return false;
}
// must be the setter
boolean isSetter = methodName.startsWith("set")
&& method.getParameters().length == 1
&& method.getReturnType() == Void.TYPE;
if (!isSetter) {
return false;
}
// skip unwanted methods as they are inherited from default
// component and are not intended for end users to configure
if ("setEndpointClass".equals(methodName) || "setCamelContext".equals(methodName)
|| "setEndpointHeaderFilterStrategy".equals(methodName) || "setApplicationContext".equals(methodName)) {
return false;
}
if (isGroovyMetaClassProperty(method)) {
return false;
}
return true;
}).collect(Collectors.toList());
}
private void processMetadataClassAnnotation(ComponentModel componentModel, Class<?> classElement, Set<String> excludes) {
Metadata componentAnnotation = classElement.getAnnotation(Metadata.class);
if (componentAnnotation != null) {
if (Objects.equals("verifiers", componentAnnotation.label())) {
componentModel.setVerifiers(componentAnnotation.enums());
}
Collections.addAll(excludes, componentAnnotation.excludeProperties().split(","));
}
}
protected void findClassProperties(
ComponentModel componentModel, Class<?> classElement,
Set<String> excludes, String prefix,
String nestedTypeName, String nestedFieldName, boolean componentOption) {
final Class<?> orgClassElement = classElement;
excludes = new HashSet<>(excludes);
while (true) {
String apiName = null;
boolean apiOption = false;
// only check for api if component is API based
ApiParams apiParams = null;
if (componentModel.isApi()) {
apiParams = classElement.getAnnotation(ApiParams.class);
if (apiParams != null) {
apiName = apiParams.apiName();
apiOption = !Strings.isNullOrEmpty(apiName);
}
}
collectExcludes(classElement, excludes);
Metadata metadata;
for (final Field fieldElement : classElement.getDeclaredFields()) {
metadata = fieldElement.getAnnotation(Metadata.class);
if (metadata != null && metadata.skip()) {
continue;
}
boolean deprecated = fieldElement.getAnnotation(Deprecated.class) != null;
String deprecationNote = null;
if (metadata != null) {
deprecationNote = metadata.deprecationNote();
}
Boolean secret = metadata != null ? metadata.secret() : null;
if (collectUriPathProperties(componentModel, classElement, excludes, prefix, nestedTypeName, nestedFieldName,
componentOption, orgClassElement, metadata, fieldElement, deprecated, deprecationNote, secret)) {
continue;
}
String fieldName;
UriParam param = fieldElement.getAnnotation(UriParam.class);
if (param != null) {
fieldName = fieldElement.getName();
String name = prefix + (Strings.isNullOrEmpty(param.name()) ? fieldName : param.name());
// should we exclude the name?
if (excludes.contains(name)) {
continue;
}
String paramOptionalPrefix = param.optionalPrefix();
String paramPrefix = param.prefix();
boolean multiValue = param.multiValue();
Object defaultValue = param.defaultValue();
if (isNullOrEmpty(defaultValue) && metadata != null) {
defaultValue = metadata.defaultValue();
}
String defaultValueNote = param.defaultValueNote();
boolean required = metadata != null && metadata.required();
String label = param.label();
if (Strings.isNullOrEmpty(label) && metadata != null) {
label = metadata.label();
}
String displayName = param.displayName();
if (Strings.isNullOrEmpty(displayName)) {
displayName = metadata != null ? metadata.displayName() : null;
}
// compute a display name if we don't have anything
if (Strings.isNullOrEmpty(displayName)) {
displayName = Strings.asTitle(name);
}
// if the field type is a nested parameter then iterate
// through its fields
Class<?> fieldTypeElement = fieldElement.getType();
String fieldTypeName = getTypeName(GenericsUtil.resolveType(orgClassElement, fieldElement));
UriParams fieldParams = fieldTypeElement.getAnnotation(UriParams.class);
if (fieldParams != null) {
String nestedPrefix = prefix;
String extraPrefix = fieldParams.prefix();
if (!Strings.isNullOrEmpty(extraPrefix)) {
nestedPrefix += extraPrefix;
}
nestedTypeName = fieldTypeName;
nestedFieldName = fieldElement.getName();
findClassProperties(componentModel, fieldTypeElement, excludes, nestedPrefix, nestedTypeName,
nestedFieldName, componentOption);
nestedTypeName = null;
nestedFieldName = null;
} else {
ApiParam apiParam = fieldElement.getAnnotation(ApiParam.class);
collectNonNestedField(componentModel, classElement, nestedTypeName, nestedFieldName, componentOption,
apiName, apiOption, apiParams, metadata, fieldElement, deprecated, deprecationNote, secret,
fieldName, param, apiParam, name, paramOptionalPrefix, paramPrefix, multiValue, defaultValue,
defaultValueNote, required, label, displayName, fieldTypeElement, fieldTypeName);
}
}
}
if (apiOption) {
// include extra methods that has no parameters and are only included in the | javadoc |
java | quarkusio__quarkus | extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/BuildTimeConstBuildItem.java | {
"start": 282,
"end": 1677
} | class ____ extends AbstractDevUIBuildItem {
private final Map<String, BuildTimeData> buildTimeData;
public BuildTimeConstBuildItem() {
this(new HashMap<>());
}
public BuildTimeConstBuildItem(Map<String, BuildTimeData> buildTimeData) {
super();
this.buildTimeData = buildTimeData;
}
public BuildTimeConstBuildItem(String customIdentifier) {
this(customIdentifier, new HashMap<>());
}
public BuildTimeConstBuildItem(String customIdentifier, Map<String, BuildTimeData> buildTimeData) {
super(customIdentifier);
this.buildTimeData = buildTimeData;
}
public void addBuildTimeData(String fieldName, Object fieldData) {
this.addBuildTimeData(fieldName, fieldData, null, false);
}
public void addBuildTimeData(String fieldName, Object fieldData, String description, boolean mcpEnabledAsDefault) {
this.buildTimeData.put(fieldName, new BuildTimeData(fieldData, description, mcpEnabledAsDefault));
}
public void addAllBuildTimeData(Map<String, BuildTimeData> buildTimeData) {
this.buildTimeData.putAll(buildTimeData);
}
public Map<String, BuildTimeData> getBuildTimeData() {
return this.buildTimeData;
}
public boolean hasBuildTimeData() {
return this.buildTimeData != null && !this.buildTimeData.isEmpty();
}
}
| BuildTimeConstBuildItem |
java | apache__camel | components/camel-debezium/camel-debezium-common/camel-debezium-common-component/src/main/java/org/apache/camel/component/debezium/DebeziumEndpoint.java | {
"start": 1584,
"end": 6604
} | class ____<C extends EmbeddedDebeziumConfiguration> extends DefaultEndpoint {
protected DebeziumEndpoint(String uri, DebeziumComponent<C> component) {
super(uri, component);
}
protected DebeziumEndpoint() {
}
@Override
public Producer createProducer() throws Exception {
throw new UnsupportedOperationException(
"Cannot produce from a DebeziumEndpoint: "
+ getEndpointUri());
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
DebeziumConsumer consumer = new DebeziumConsumer(this, processor);
configureConsumer(consumer);
return consumer;
}
public ExecutorService createExecutor(Object source) {
return getCamelContext().getExecutorServiceManager().newSingleThreadExecutor(source,
"DebeziumConsumer");
}
public Exchange createDbzExchange(DebeziumConsumer consumer, final SourceRecord sourceRecord) {
final Exchange exchange;
if (consumer != null) {
exchange = consumer.createExchange(false);
} else {
exchange = super.createExchange();
}
final Message message = exchange.getIn();
final Schema valueSchema = sourceRecord.valueSchema();
final Object value = sourceRecord.value();
// extract values from SourceRecord
final Map<String, Object> sourceMetadata = extractSourceMetadataValueFromValueStruct(valueSchema, value);
final Object operation = extractValueFromValueStruct(valueSchema, value, Envelope.FieldName.OPERATION);
final Object before = extractValueFromValueStruct(valueSchema, value, Envelope.FieldName.BEFORE);
final Object body = extractBodyValueFromValueStruct(valueSchema, value);
final Object timestamp = extractValueFromValueStruct(valueSchema, value, Envelope.FieldName.TIMESTAMP);
final Object ddl = extractValueFromValueStruct(valueSchema, value, HistoryRecord.Fields.DDL_STATEMENTS);
// set message headers
message.setHeader(DebeziumConstants.HEADER_IDENTIFIER, sourceRecord.topic());
message.setHeader(DebeziumConstants.HEADER_KEY, sourceRecord.key());
message.setHeader(DebeziumConstants.HEADER_SOURCE_METADATA, sourceMetadata);
message.setHeader(DebeziumConstants.HEADER_OPERATION, operation);
message.setHeader(DebeziumConstants.HEADER_BEFORE, before);
message.setHeader(DebeziumConstants.HEADER_TIMESTAMP, timestamp);
message.setHeader(DebeziumConstants.HEADER_DDL_SQL, ddl);
message.setHeader(Exchange.MESSAGE_TIMESTAMP, timestamp);
message.setBody(body);
return exchange;
}
public abstract C getConfiguration();
public abstract void setConfiguration(C configuration);
protected Object extractBodyValueFromValueStruct(final Schema schema, final Object value) {
// by default, we will extract the value from field `after`, however if other connector needs different field, this method needs to be overriden
return extractFieldValueFromValueStruct(schema, value, Envelope.FieldName.AFTER);
}
protected Object extractFieldValueFromValueStruct(final Schema schema, final Object value, final String fieldName) {
// first we try with normal extraction from value struct
final Object valueExtracted = extractValueFromValueStruct(schema, value, fieldName);
if (valueExtracted == null && !isSchemaAStructSchema(schema)) { // we could have anything other than struct, we just return that
return value;
}
return valueExtracted;
}
private Map<String, Object> extractSourceMetadataValueFromValueStruct(final Schema schema, final Object value) {
// we want to convert metadata to map since it facilitate usage and also struct structure is not needed for the metadata
final Object valueExtracted = extractValueFromValueStruct(schema, value, Envelope.FieldName.SOURCE);
if (valueExtracted != null) {
return DebeziumTypeConverter.toMap((Struct) valueExtracted);
}
return null;
}
private Object extractValueFromValueStruct(final Schema schema, final Object value, final String fieldName) {
// first we check if we have a value and a schema of struct type
if (isSchemaAStructSchema(schema) && value != null) {
// now we return our desired fieldName
try {
final Struct valueStruct = (Struct) value;
return valueStruct.get(fieldName);
} catch (DataException e) {
// we return null instead since this exception thrown when no value set or field doesn't exist
return null;
}
}
return null;
}
private boolean isSchemaAStructSchema(final Schema schema) {
return schema != null && schema.type().equals(Schema.Type.STRUCT);
}
}
| DebeziumEndpoint |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/RemoteConnectionStrategy.java | {
"start": 2907,
"end": 14620
} | enum ____ {
initial,
reconnect
}
private final int maxPendingConnectionListeners;
protected final Logger logger = LogManager.getLogger(getClass());
private final AtomicBoolean closed = new AtomicBoolean(false);
private final Object mutex = new Object();
private List<ActionListener<Void>> listeners = new ArrayList<>();
private final AtomicBoolean initialConnectionAttempted = new AtomicBoolean(false);
private final LongCounter connectionAttemptFailures;
protected final TransportService transportService;
protected final RemoteConnectionManager connectionManager;
protected final ProjectId originProjectId;
protected final ProjectId linkedProjectId;
protected final String clusterAlias;
RemoteConnectionStrategy(LinkedProjectConfig config, TransportService transportService, RemoteConnectionManager connectionManager) {
this.originProjectId = config.originProjectId();
this.linkedProjectId = config.linkedProjectId();
this.clusterAlias = config.linkedProjectAlias();
this.transportService = transportService;
this.connectionManager = connectionManager;
this.maxPendingConnectionListeners = config.maxPendingConnectionListeners();
this.connectionAttemptFailures = lookupConnectionFailureMetric(transportService.getTelemetryProvider());
connectionManager.addListener(this);
}
private LongCounter lookupConnectionFailureMetric(TelemetryProvider telemetryProvider) {
final var meterRegistry = telemetryProvider == null ? null : telemetryProvider.getMeterRegistry();
return meterRegistry == null ? null : meterRegistry.getLongCounter(RemoteClusterService.CONNECTION_ATTEMPT_FAILURES_COUNTER_NAME);
}
static ConnectionProfile buildConnectionProfile(LinkedProjectConfig config, String transportProfile) {
ConnectionProfile.Builder builder = new ConnectionProfile.Builder().setConnectTimeout(config.transportConnectTimeout())
.setHandshakeTimeout(config.transportConnectTimeout())
.setCompressionEnabled(config.connectionCompression())
.setCompressionScheme(config.connectionCompressionScheme())
.setPingInterval(config.clusterPingSchedule())
.addConnections(
0,
TransportRequestOptions.Type.BULK,
TransportRequestOptions.Type.STATE,
TransportRequestOptions.Type.RECOVERY,
TransportRequestOptions.Type.PING
)
.addConnections(config.connectionStrategy().getNumberOfChannels(), TransportRequestOptions.Type.REG)
.setTransportProfile(transportProfile);
return builder.build();
}
static InetSocketAddress parseConfiguredAddress(String configuredAddress) {
final String host = parseHost(configuredAddress);
final int port = parsePort(configuredAddress);
InetAddress hostAddress;
try {
hostAddress = InetAddress.getByName(host);
} catch (UnknownHostException e) {
throw new IllegalArgumentException("unknown host [" + host + "]", e);
}
return new InetSocketAddress(hostAddress, port);
}
static String parseHost(final String configuredAddress) {
return configuredAddress.substring(0, indexOfPortSeparator(configuredAddress));
}
static int parsePort(String remoteHost) {
try {
int port = Integer.parseInt(remoteHost.substring(indexOfPortSeparator(remoteHost) + 1));
if (port <= 0) {
throw new IllegalArgumentException("port number must be > 0 but was: [" + port + "]");
}
return port;
} catch (NumberFormatException e) {
throw new IllegalArgumentException("failed to parse port", e);
}
}
private static int indexOfPortSeparator(String remoteHost) {
int portSeparator = remoteHost.lastIndexOf(':'); // in case we have a IPv6 address ie. [::1]:9300
if (portSeparator == -1 || portSeparator == remoteHost.length()) {
throw new IllegalArgumentException("remote hosts need to be configured as [host:port], found [" + remoteHost + "] instead");
}
return portSeparator;
}
/**
* Triggers a connect round unless there is one running already. If there is a connect round running, the listener will either
* be queued or rejected and failed.
*/
void connect(ActionListener<Void> connectListener) {
boolean runConnect = false;
final ActionListener<Void> listener = ContextPreservingActionListener.wrapPreservingContext(
connectListener,
transportService.getThreadPool().getThreadContext()
);
boolean isCurrentlyClosed;
synchronized (mutex) {
isCurrentlyClosed = this.closed.get();
if (isCurrentlyClosed) {
assert listeners.isEmpty();
} else {
if (listeners.size() >= maxPendingConnectionListeners) {
assert listeners.size() == maxPendingConnectionListeners;
listener.onFailure(new EsRejectedExecutionException("connect listener queue is full"));
return;
} else {
listeners.add(listener);
}
runConnect = listeners.size() == 1;
}
}
if (isCurrentlyClosed) {
connectListener.onFailure(new AlreadyClosedException("connect handler is already closed"));
return;
}
if (runConnect) {
ExecutorService executor = transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT);
executor.submit(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
ActionListener.onFailure(getAndClearListeners(), e);
}
@Override
protected void doRun() {
connectImpl(new ActionListener<>() {
@Override
public void onResponse(Void aVoid) {
connectionAttemptCompleted(null);
ActionListener.onResponse(getAndClearListeners(), aVoid);
}
@Override
public void onFailure(Exception e) {
connectionAttemptCompleted(e);
ActionListener.onFailure(getAndClearListeners(), e);
}
});
}
});
}
}
private void connectionAttemptCompleted(@Nullable Exception e) {
final boolean isInitialAttempt = initialConnectionAttempted.compareAndSet(false, true);
final org.apache.logging.log4j.util.Supplier<String> msgSupplier = () -> format(
"Origin project [%s] %s linked project [%s] with alias [%s] on %s attempt",
originProjectId,
e == null ? "successfully connected to" : "failed to connect to",
linkedProjectId,
clusterAlias,
isInitialAttempt ? "the initial connection" : "a reconnection"
);
if (e == null) {
logger.debug(msgSupplier);
} else {
logger.warn(msgSupplier, e);
if (connectionAttemptFailures != null) {
final var attributesMap = new HashMap<String, Object>();
attributesMap.put("linked_project_id", linkedProjectId.toString());
attributesMap.put("linked_project_alias", clusterAlias);
attributesMap.put("attempt", (isInitialAttempt ? ConnectionAttempt.initial : ConnectionAttempt.reconnect).toString());
attributesMap.put("strategy", strategyType().toString());
addStrategySpecificConnectionErrorMetricAttributes(attributesMap);
connectionAttemptFailures.incrementBy(1, attributesMap);
}
}
}
boolean shouldRebuildConnection(LinkedProjectConfig config) {
return config.connectionStrategy().equals(strategyType()) == false
|| connectionProfileChanged(config)
|| strategyMustBeRebuilt(config);
}
protected abstract boolean strategyMustBeRebuilt(LinkedProjectConfig config);
protected abstract ConnectionStrategy strategyType();
/**
* Add strategy-specific attributes for a new connection error metric record. The default implementation is a no-op.
*/
protected void addStrategySpecificConnectionErrorMetricAttributes(Map<String, Object> attributesMap) {}
@Override
public void onNodeDisconnected(DiscoveryNode node, @Nullable Exception closeException) {
if (shouldOpenMoreConnections()) {
// try to reconnect and fill up the slot of the disconnected node
connect(
ActionListener.wrap(
ignore -> logger.trace("[{}] successfully connected after disconnect of {}", clusterAlias, node),
e -> logger.debug(() -> format("[%s] failed to connect after disconnect of %s", clusterAlias, node), e)
)
);
}
}
@Override
public void close() {
final List<ActionListener<Void>> toNotify;
synchronized (mutex) {
if (closed.compareAndSet(false, true)) {
connectionManager.removeListener(this);
toNotify = listeners;
listeners = Collections.emptyList();
} else {
toNotify = Collections.emptyList();
}
}
ActionListener.onFailure(toNotify, new AlreadyClosedException("connect handler is already closed"));
}
public boolean isClosed() {
return closed.get();
}
// for testing only
boolean assertNoRunningConnections() {
synchronized (mutex) {
assert listeners.isEmpty();
}
return true;
}
protected abstract boolean shouldOpenMoreConnections();
protected abstract void connectImpl(ActionListener<Void> listener);
protected abstract RemoteConnectionInfo.ModeInfo getModeInfo();
protected static boolean isRetryableException(Exception e) {
// ISE if we fail the handshake with a version incompatible node
return e instanceof ConnectTransportException || e instanceof IOException || e instanceof IllegalStateException;
}
private List<ActionListener<Void>> getAndClearListeners() {
final List<ActionListener<Void>> result;
synchronized (mutex) {
if (listeners.isEmpty()) {
result = Collections.emptyList();
} else {
result = listeners;
listeners = new ArrayList<>();
}
}
return result;
}
private boolean connectionProfileChanged(LinkedProjectConfig config) {
final var oldProfile = connectionManager.getConnectionProfile();
final var newProfile = new ConnectionProfile.Builder(oldProfile).setCompressionEnabled(config.connectionCompression())
.setCompressionScheme(config.connectionCompressionScheme())
.setPingInterval(config.clusterPingSchedule())
.build();
return Objects.equals(oldProfile.getCompressionEnabled(), newProfile.getCompressionEnabled()) == false
|| Objects.equals(oldProfile.getPingInterval(), newProfile.getPingInterval()) == false
|| Objects.equals(oldProfile.getCompressionScheme(), newProfile.getCompressionScheme()) == false;
}
}
| ConnectionAttempt |
java | apache__maven | its/core-it-suite/src/test/resources/mng-4331/maven-it-plugin-dependency-collection/src/main/java/org/apache/maven/plugin/coreit/AggregateTestMojo.java | {
"start": 1607,
"end": 2063
} | class ____ extends AbstractDependencyMojo {
/**
* The path to the output file for the project artifacts, relative to the project base directory. Each line of this
* UTF-8 encoded file specifies an artifact identifier. If not specified, the artifact list will not be written to
* disk. Unlike the test artifacts, the collection of project artifacts additionally contains those artifacts that
* do not contribute to the | AggregateTestMojo |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java | {
"start": 755,
"end": 9151
} | interface ____ extends Block permits BooleanArrayBlock, BooleanVectorBlock, ConstantNullBlock, BooleanBigArrayBlock {
/**
* Retrieves the boolean value stored at the given value index.
*
* <p> Values for a given position are between getFirstValueIndex(position) (inclusive) and
* getFirstValueIndex(position) + getValueCount(position) (exclusive).
*
* @param valueIndex the value index
* @return the data value (as a boolean)
*/
boolean getBoolean(int valueIndex);
/**
* Checks if this block has the given value at position. If at this index we have a
* multivalue, then it returns true if any values match.
*
* @param position the index at which we should check the value(s)
* @param value the value to check against
*/
default boolean hasValue(int position, boolean value) {
final var count = getValueCount(position);
final var startIndex = getFirstValueIndex(position);
for (int index = startIndex; index < startIndex + count; index++) {
if (value == getBoolean(index)) {
return true;
}
}
return false;
}
@Override
BooleanVector asVector();
/**
* Convert this to a {@link BooleanVector "mask"} that's appropriate for
* passing to {@link #keepMask}. Null and multivalued positions will be
* converted to {@code false}.
*/
ToMask toMask();
@Override
BooleanBlock filter(int... positions);
/**
* Make a deep copy of this {@link Block} using the provided {@link BlockFactory},
* likely copying all data.
*/
@Override
default BooleanBlock deepCopy(BlockFactory blockFactory) {
try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(getPositionCount())) {
builder.copyFrom(this, 0, getPositionCount());
builder.mvOrdering(mvOrdering());
return builder.build();
}
}
@Override
BooleanBlock keepMask(BooleanVector mask);
@Override
ReleasableIterator<? extends BooleanBlock> lookup(IntBlock positions, ByteSizeValue targetBlockSize);
@Override
BooleanBlock expand();
static BooleanBlock readFrom(BlockStreamInput in) throws IOException {
final byte serializationType = in.readByte();
return switch (serializationType) {
case SERIALIZE_BLOCK_VALUES -> BooleanBlock.readValues(in);
case SERIALIZE_BLOCK_VECTOR -> BooleanVector.readFrom(in.blockFactory(), in).asBlock();
case SERIALIZE_BLOCK_ARRAY -> BooleanArrayBlock.readArrayBlock(in.blockFactory(), in);
case SERIALIZE_BLOCK_BIG_ARRAY -> BooleanBigArrayBlock.readArrayBlock(in.blockFactory(), in);
default -> {
assert false : "invalid block serialization type " + serializationType;
throw new IllegalStateException("invalid serialization type " + serializationType);
}
};
}
private static BooleanBlock readValues(BlockStreamInput in) throws IOException {
final int positions = in.readVInt();
try (BooleanBlock.Builder builder = in.blockFactory().newBooleanBlockBuilder(positions)) {
for (int i = 0; i < positions; i++) {
if (in.readBoolean()) {
builder.appendNull();
} else {
final int valueCount = in.readVInt();
builder.beginPositionEntry();
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
builder.appendBoolean(in.readBoolean());
}
builder.endPositionEntry();
}
}
return builder.build();
}
}
@Override
default void writeTo(StreamOutput out) throws IOException {
BooleanVector vector = asVector();
final var version = out.getTransportVersion();
if (vector != null) {
out.writeByte(SERIALIZE_BLOCK_VECTOR);
vector.writeTo(out);
} else if (this instanceof BooleanArrayBlock b) {
out.writeByte(SERIALIZE_BLOCK_ARRAY);
b.writeArrayBlock(out);
} else if (this instanceof BooleanBigArrayBlock b) {
out.writeByte(SERIALIZE_BLOCK_BIG_ARRAY);
b.writeArrayBlock(out);
} else {
out.writeByte(SERIALIZE_BLOCK_VALUES);
BooleanBlock.writeValues(this, out);
}
}
private static void writeValues(BooleanBlock block, StreamOutput out) throws IOException {
final int positions = block.getPositionCount();
out.writeVInt(positions);
for (int pos = 0; pos < positions; pos++) {
if (block.isNull(pos)) {
out.writeBoolean(true);
} else {
out.writeBoolean(false);
final int valueCount = block.getValueCount(pos);
out.writeVInt(valueCount);
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
out.writeBoolean(block.getBoolean(block.getFirstValueIndex(pos) + valueIndex));
}
}
}
}
/**
* Compares the given object with this block for equality. Returns {@code true} if and only if the
* given object is a BooleanBlock, and both blocks are {@link #equals(BooleanBlock, BooleanBlock) equal}.
*/
@Override
boolean equals(Object obj);
/** Returns the hash code of this block, as defined by {@link #hash(BooleanBlock)}. */
@Override
int hashCode();
/**
* Returns {@code true} if the given blocks are equal to each other, otherwise {@code false}.
* Two blocks are considered equal if they have the same position count, and contain the same
* values (including absent null values) in the same order. This definition ensures that the
* equals method works properly across different implementations of the BooleanBlock interface.
*/
static boolean equals(BooleanBlock block1, BooleanBlock block2) {
if (block1 == block2) {
return true;
}
final int positions = block1.getPositionCount();
if (positions != block2.getPositionCount()) {
return false;
}
for (int pos = 0; pos < positions; pos++) {
if (block1.isNull(pos) || block2.isNull(pos)) {
if (block1.isNull(pos) != block2.isNull(pos)) {
return false;
}
} else {
final int valueCount = block1.getValueCount(pos);
if (valueCount != block2.getValueCount(pos)) {
return false;
}
final int b1ValueIdx = block1.getFirstValueIndex(pos);
final int b2ValueIdx = block2.getFirstValueIndex(pos);
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
if (block1.getBoolean(b1ValueIdx + valueIndex) != block2.getBoolean(b2ValueIdx + valueIndex)) {
return false;
}
}
}
}
return true;
}
/**
* Generates the hash code for the given block. The hash code is computed from the block's values.
* This ensures that {@code block1.equals(block2)} implies that {@code block1.hashCode()==block2.hashCode()}
* for any two blocks, {@code block1} and {@code block2}, as required by the general contract of
* {@link Object#hashCode}.
*/
static int hash(BooleanBlock block) {
final int positions = block.getPositionCount();
int result = 1;
for (int pos = 0; pos < positions; pos++) {
if (block.isNull(pos)) {
result = 31 * result - 1;
} else {
final int valueCount = block.getValueCount(pos);
result = 31 * result + valueCount;
final int firstValueIdx = block.getFirstValueIndex(pos);
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
result = 31 * result + Boolean.hashCode(block.getBoolean(firstValueIdx + valueIndex));
}
}
}
return result;
}
/**
* Builder for {@link BooleanBlock}
*/
sealed | BooleanBlock |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e2/a/DependentId.java | {
"start": 271,
"end": 444
} | class ____ implements Serializable {
String name; // matches name of @Id attribute
@Embedded
EmployeeId emp; //matches name of attribute and type of Employee PK
}
| DependentId |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/BitFieldArgs.java | {
"start": 19819,
"end": 19887
} | class ____ bitfield subcommands.
*/
private abstract static | for |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_106_hints.java | {
"start": 919,
"end": 5875
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "\n" +
"select sum(CASE WHEN a.purchase_times=1 THEN 1 else 0 END ) oneCustomersNum,"
+ " sum(CASE WHEN a.purchase_times=1 THEN a.payment else 0 END ) onceMoney, "
+ "sum(CASE WHEN a.purchase_times=1 THEN a.interval_day else 0 END ) oneIntervalDay, "
+ "sum(CASE WHEN a.purchase_times=2 THEN 1 else 0 END ) twoCustomersNum, "
+ "sum(CASE WHEN a.purchase_times=2 THEN a.payment else 0 END ) twoMoney,"
+ " sum(CASE WHEN a.purchase_times=2 THEN a.interval_day else 0 END ) twoIntervalDay, "
+ "sum(CASE WHEN a.purchase_times=3 THEN 1 else 0 END ) threeCustomersNum, "
+ "sum(CASE WHEN a.purchase_times=3 THEN a.payment else 0 END ) threeMoney, "
+ "sum(CASE WHEN a.purchase_times=3 THEN a.interval_day else 0 END ) threeIntervalDay, "
+ "sum(CASE WHEN a.purchase_times=4 THEN 1 else 0 END ) fourCustomersNum, "
+ "sum(CASE WHEN a.purchase_times=4 THEN a.payment else 0 END ) fourMoney, "
+ "sum(CASE WHEN a.purchase_times=4 THEN a.interval_day else 0 END ) fourIntervalDay,"
+ " sum(CASE WHEN a.purchase_times=5 THEN 1 else 0 END ) fiveCustomersNum, "
+ "sum(CASE WHEN a.purchase_times=5 THEN a.payment else 0 END ) fiveMoney, "
+ "sum(CASE WHEN a.purchase_times=5 THEN a.interval_day else 0 END ) fiveIntervalDay "
+ "from t_buyer_day a force index (sellerId_during) WHERE a.sellerId = 3234284498 "
+ "and a.pay_trades>0 and ( a.during = str_to_date('2018-01-10', '%Y-%m-%d') );";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
assertEquals(1, statementList.size());
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals("SELECT sum(CASE\n" +
"\t\tWHEN a.purchase_times = 1 THEN 1\n" +
"\t\tELSE 0\n" +
"\tEND) AS oneCustomersNum\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 1 THEN a.payment\n" +
"\t\tELSE 0\n" +
"\tEND) AS onceMoney\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 1 THEN a.interval_day\n" +
"\t\tELSE 0\n" +
"\tEND) AS oneIntervalDay\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 2 THEN 1\n" +
"\t\tELSE 0\n" +
"\tEND) AS twoCustomersNum\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 2 THEN a.payment\n" +
"\t\tELSE 0\n" +
"\tEND) AS twoMoney\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 2 THEN a.interval_day\n" +
"\t\tELSE 0\n" +
"\tEND) AS twoIntervalDay\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 3 THEN 1\n" +
"\t\tELSE 0\n" +
"\tEND) AS threeCustomersNum\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 3 THEN a.payment\n" +
"\t\tELSE 0\n" +
"\tEND) AS threeMoney\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 3 THEN a.interval_day\n" +
"\t\tELSE 0\n" +
"\tEND) AS threeIntervalDay\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 4 THEN 1\n" +
"\t\tELSE 0\n" +
"\tEND) AS fourCustomersNum\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 4 THEN a.payment\n" +
"\t\tELSE 0\n" +
"\tEND) AS fourMoney\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 4 THEN a.interval_day\n" +
"\t\tELSE 0\n" +
"\tEND) AS fourIntervalDay\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 5 THEN 1\n" +
"\t\tELSE 0\n" +
"\tEND) AS fiveCustomersNum\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 5 THEN a.payment\n" +
"\t\tELSE 0\n" +
"\tEND) AS fiveMoney\n" +
"\t, sum(CASE\n" +
"\t\tWHEN a.purchase_times = 5 THEN a.interval_day\n" +
"\t\tELSE 0\n" +
"\tEND) AS fiveIntervalDay\n" +
"FROM t_buyer_day a FORCE INDEX (sellerId_during)\n" +
"WHERE a.sellerId = 3234284498\n" +
"\tAND a.pay_trades > 0\n" +
"\tAND (a.during = str_to_date('2018-01-10', '%Y-%m-%d'));", stmt.toString());
}
}
| MySqlSelectTest_106_hints |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java | {
"start": 9038,
"end": 10102
} | class ____ extends SnappyDecompressor implements
DirectDecompressor {
@Override
public boolean finished() {
return (endOfInput && super.finished());
}
@Override
public void reset() {
super.reset();
endOfInput = true;
}
private boolean endOfInput;
@Override
public void decompress(ByteBuffer src, ByteBuffer dst)
throws IOException {
assert dst.isDirect() : "dst.isDirect()";
assert src.isDirect() : "src.isDirect()";
assert dst.remaining() > 0 : "dst.remaining() > 0";
this.decompressDirect(src, dst);
endOfInput = !src.hasRemaining();
}
@Override
public void setDictionary(byte[] b, int off, int len) {
throw new UnsupportedOperationException(
"byte[] arrays are not supported for DirectDecompressor");
}
@Override
public int decompress(byte[] b, int off, int len) {
throw new UnsupportedOperationException(
"byte[] arrays are not supported for DirectDecompressor");
}
}
}
| SnappyDirectDecompressor |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/transform/TransformingClassLoader.java | {
"start": 827,
"end": 1305
} | class ____ extends AbstractClassLoader {
private final ClassTransformerFactory t;
public TransformingClassLoader(ClassLoader parent, ClassFilter filter, ClassTransformerFactory t) {
super(parent, parent, filter);
this.t = t;
}
@Override
protected ClassGenerator getGenerator(ClassReader r) {
ClassTransformer t2 = t.newInstance();
return new TransformingClassGenerator(super.getGenerator(r), t2);
}
}
| TransformingClassLoader |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/engineconfigurations/parserhook/CustomParserHookRuntimeTest.java | {
"start": 502,
"end": 1029
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(
root -> root.addClasses(CustomParserHook.class)
.addAsResource(new StringAsset("{foo}"), "templates/foo.html"));
@Inject
Engine engine;
@Test
public void testParserHook() {
assertEquals("42", engine.getTemplate("foo").data("bar", 42).render());
}
@EngineConfiguration
public static | CustomParserHookRuntimeTest |
java | spring-projects__spring-boot | module/spring-boot-hibernate/src/main/java/org/springframework/boot/hibernate/SpringImplicitNamingStrategy.java | {
"start": 1336,
"end": 1698
} | class ____ extends ImplicitNamingStrategyJpaCompliantImpl {
@Override
public Identifier determineJoinTableName(ImplicitJoinTableNameSource source) {
String name = source.getOwningPhysicalTableName() + "_"
+ source.getAssociationOwningAttributePath().getProperty();
return toIdentifier(name, source.getBuildingContext());
}
}
| SpringImplicitNamingStrategy |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/SystemOutTest.java | {
"start": 2679,
"end": 2963
} | class ____ {
void f() {
new Exception().printStackTrace(new PrintStream((OutputStream) null));
new Exception().printStackTrace(new PrintWriter((OutputStream) null));
}
}
""")
.doTest();
}
}
| Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CacheLoaderNullTest.java | {
"start": 1522,
"end": 2130
} | class ____ extends CacheLoader<String, String> {}
new MyCacheLoader() {
@Override
public String load(String key) {
// BUG: Diagnostic contains:
return null;
}
};
}
}
""")
.doTest();
}
@Test
public void negative() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.cache.CacheLoader;
import java.util.function.Supplier;
| MyCacheLoader |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/reflect/InstantiationUtils.java | {
"start": 1645,
"end": 1761
} | class ____ {
/**
* Try to instantiate the given class.
*
* @param name The | InstantiationUtils |
java | apache__camel | components/camel-workday/src/generated/java/org/apache/camel/component/workday/WorkdayEndpointConfigurer.java | {
"start": 734,
"end": 4533
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
WorkdayEndpoint target = (WorkdayEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "clientid":
case "clientId": target.getWorkdayConfiguration().setClientId(property(camelContext, java.lang.String.class, value)); return true;
case "clientsecret":
case "clientSecret": target.getWorkdayConfiguration().setClientSecret(property(camelContext, java.lang.String.class, value)); return true;
case "host": target.getWorkdayConfiguration().setHost(property(camelContext, java.lang.String.class, value)); return true;
case "httpconnectionmanager":
case "httpConnectionManager": target.getWorkdayConfiguration().setHttpConnectionManager(property(camelContext, org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "reportformat":
case "reportFormat": target.getWorkdayConfiguration().setReportFormat(property(camelContext, java.lang.String.class, value)); return true;
case "tenant": target.getWorkdayConfiguration().setTenant(property(camelContext, java.lang.String.class, value)); return true;
case "tokenrefresh":
case "tokenRefresh": target.getWorkdayConfiguration().setTokenRefresh(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "clientid":
case "clientId": return java.lang.String.class;
case "clientsecret":
case "clientSecret": return java.lang.String.class;
case "host": return java.lang.String.class;
case "httpconnectionmanager":
case "httpConnectionManager": return org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "reportformat":
case "reportFormat": return java.lang.String.class;
case "tenant": return java.lang.String.class;
case "tokenrefresh":
case "tokenRefresh": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
WorkdayEndpoint target = (WorkdayEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "clientid":
case "clientId": return target.getWorkdayConfiguration().getClientId();
case "clientsecret":
case "clientSecret": return target.getWorkdayConfiguration().getClientSecret();
case "host": return target.getWorkdayConfiguration().getHost();
case "httpconnectionmanager":
case "httpConnectionManager": return target.getWorkdayConfiguration().getHttpConnectionManager();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "reportformat":
case "reportFormat": return target.getWorkdayConfiguration().getReportFormat();
case "tenant": return target.getWorkdayConfiguration().getTenant();
case "tokenrefresh":
case "tokenRefresh": return target.getWorkdayConfiguration().getTokenRefresh();
default: return null;
}
}
}
| WorkdayEndpointConfigurer |
java | elastic__elasticsearch | libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java | {
"start": 12232,
"end": 13861
} | class ____", checkerMethodName));
}
return new ParsedCheckerMethod(targetClassName, targetMethodName, targetMethodIsStatic, targetMethodIsCtor);
}
static MethodKey parseCheckerMethodSignature(String checkerMethodName, Type[] checkerMethodArgumentTypes) {
ParsedCheckerMethod checkerMethod = parseCheckerMethodName(checkerMethodName);
final List<String> targetParameterTypes;
if (checkerMethod.targetMethodIsStatic() || checkerMethod.targetMethodIsCtor()) {
if (checkerMethodArgumentTypes.length < 1 || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Checker method %s has incorrect argument types. " + "It must have a first argument of Class<?> type.",
checkerMethodName
)
);
}
targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(1).map(Type::getInternalName).toList();
} else {
if (checkerMethodArgumentTypes.length < 2
|| CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false
|| checkerMethodArgumentTypes[1].getSort() != Type.OBJECT) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Checker method %s has incorrect argument types. "
+ "It must have a first argument of Class<?> type, and a second argument of the | name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.