language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Issue96.java | {
"start": 1164,
"end": 1495
} | class ____<T> {
public Page(){
super();
}
public Page(T sub){
super();
this.sub = sub;
}
T sub;
public T getSub() {
return sub;
}
public void setSub(T sub) {
this.sub = sub;
}
}
static | Page |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/BasicDeserializerFactory.java | {
"start": 1233,
"end": 1388
} | class ____ can provide deserializers for standard
* JDK classes, including collection classes and simple heuristics for
* "up-casting" common collection | that |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/ConfigConfig.java | {
"start": 858,
"end": 2936
} | interface ____ {
/**
* A comma separated list of profiles that will be active when Quarkus launches.
*/
Optional<List<String>> profile();
/**
* Accepts a single configuration profile name. If a configuration property cannot be found in the current active
* profile, the config performs the same lookup in the profile set by this configuration.
*/
@WithName("config.profile.parent")
Optional<String> profileParent();
/**
* Additional config locations to be loaded with the Config. The configuration support multiple locations
* separated by a comma and each must represent a valid {@link java.net.URI}.
*/
@WithName("config.locations")
Optional<List<URI>> locations();
/**
* Validates that a <code>@ConfigMapping</code> maps every available configuration name contained in the mapping
* prefix.
*/
@WithName("config.mapping.validate-unknown")
Optional<Boolean> mappingValidateUnknown();
/**
* Enable logging of configuration values lookup in DEBUG log level.
* <br>
* The log of configuration values require the category set to <code>DEBUG</code> in the
* <code>io.smallrye.config</code> category: <code>quarkus.log.category."io.smallrye.config".level=DEBUG</code>.
*/
@WithName("config.log.values")
Optional<Boolean> logValues();
/**
* What should happen if the application is started with a different build time configuration than it was compiled
* against. This may be useful to prevent misconfiguration.
* <p>
* If this is set to {@code warn} the application will warn at start up.
* <p>
* If this is set to {@code fail} the application will fail at start up.
* <p>
* Native tests leveraging<code>@io.quarkus.test.junit.TestProfile</code> are always run with
* {@code quarkus.config.build-time-mismatch-at-runtime = fail}.
*/
@WithName("config.build-time-mismatch-at-runtime")
@WithDefault("warn")
BuildTimeMismatchAtRuntime buildTimeMismatchAtRuntime();
| ConfigConfig |
java | google__dagger | javatests/dagger/internal/codegen/DaggerSuperficialValidationTest.java | {
"start": 29868,
"end": 30091
} | class ____ extends MissingType {}",
" Foo getFoo() { return null; }",
"}"),
CompilerTests.kotlinSource(
"test.Outer.kt",
"package test;",
"",
" | Baz |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/annotations/StaticInitSafe.java | {
"start": 1110,
"end": 2103
} | class ____ {
* static {
* Config config = ConfigProvider.getConfig();
* String url = config.getValue("database.url", String.class);
* String login = config.getValue("database.login", String.class);
* String password = config.getValue("database.password", String.class);
*
* initDatabase(url, login, password);
* }
* }
* </pre>
*
* Please check <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/initial.html">Initializing Fields</a> for
* more information about static blocks.
*
* Since Config initializes first, some ConfigSources may not be suited to be initialized at static init. Consider the
* previous code example and a ConfigSource that requires database access. In this case, it is impossible to properly
* initialize such ConfigSource, because the database services are not yet available so the ConfigSource in unusable.
*/
@Target({ TYPE, FIELD, PARAMETER })
@Retention(RUNTIME)
@Documented
public @ | Application |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/ConfInfo.java | {
"start": 1765,
"end": 2277
} | class ____ {
private String name;
private String value;
public ConfItem() {
// JAXB needs this
}
public ConfItem(String name, String value){
this.name = name;
this.value = value;
}
public String getKey() {
return name;
}
public String getValue() {
return value;
}
}
public String getSubClusterId() {
return subClusterId;
}
public void setSubClusterId(String subClusterId) {
this.subClusterId = subClusterId;
}
}
| ConfItem |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/android/testdata/stubs/android/os/Parcelable.java | {
"start": 830,
"end": 948
} | interface ____<T> extends Creator<T> {
T createFromParcel(Parcel source, ClassLoader loader);
}
}
| ClassLoaderCreator |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java | {
"start": 14128,
"end": 21795
} | class ____ implements Writeable {
private final int foo;
private final int bar;
private FooBar(final int foo, final int bar) {
this.foo = foo;
this.bar = bar;
}
private FooBar(final StreamInput in) throws IOException {
this.foo = in.readInt();
this.bar = in.readInt();
}
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeInt(foo);
out.writeInt(bar);
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final FooBar that = (FooBar) o;
return foo == that.foo && bar == that.bar;
}
@Override
public int hashCode() {
return Objects.hash(foo, bar);
}
}
runWriteReadCollectionTest(
() -> new FooBar(randomInt(), randomInt()),
StreamOutput::writeCollection,
in -> in.readCollectionAsList(FooBar::new)
);
runWriteReadCollectionTest(
() -> new FooBar(randomInt(), randomInt()),
StreamOutput::writeOptionalCollection,
in -> in.readOptionalCollectionAsList(FooBar::new)
);
runWriteReadOptionalCollectionWithNullInput(
out -> out.writeOptionalCollection(null),
in -> in.readOptionalCollectionAsList(FooBar::new)
);
}
public void testStringCollection() throws IOException {
runWriteReadCollectionTest(
() -> randomUnicodeOfLength(16),
StreamOutput::writeStringCollection,
StreamInput::readStringCollectionAsList
);
}
public void testOptionalStringCollection() throws IOException {
runWriteReadCollectionTest(
() -> randomUnicodeOfLength(16),
StreamOutput::writeOptionalStringCollection,
StreamInput::readOptionalStringCollectionAsList
);
}
public void testOptionalStringCollectionWithNullInput() throws IOException {
runWriteReadOptionalCollectionWithNullInput(
out -> out.writeOptionalStringCollection(null),
StreamInput::readOptionalStringCollectionAsList
);
}
private <T> void runWriteReadCollectionTest(
final Supplier<T> supplier,
final CheckedBiConsumer<StreamOutput, Collection<T>, IOException> writer,
final CheckedFunction<StreamInput, Collection<T>, IOException> reader
) throws IOException {
final int length = randomIntBetween(0, 10);
final Collection<T> collection = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
collection.add(supplier.get());
}
try (BytesStreamOutput out = new BytesStreamOutput()) {
writer.accept(out, collection);
try (StreamInput in = getStreamInput(out.bytes())) {
assertThat(collection, equalTo(reader.apply(in)));
}
}
}
private <T> void runWriteReadOptionalCollectionWithNullInput(
final CheckedConsumer<StreamOutput, IOException> nullWriter,
final CheckedFunction<StreamInput, Collection<T>, IOException> reader
) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
nullWriter.accept(out);
try (StreamInput in = getStreamInput(out.bytes())) {
assertNull(reader.apply(in));
}
}
}
public void testSetOfLongs() throws IOException {
final int size = randomIntBetween(0, 6);
final Set<Long> sourceSet = Sets.newHashSetWithExpectedSize(size);
for (int i = 0; i < size; i++) {
sourceSet.add(randomLongBetween(i * 1000, (i + 1) * 1000 - 1));
}
assertThat(sourceSet, iterableWithSize(size));
final BytesStreamOutput out = new BytesStreamOutput();
out.writeCollection(sourceSet, StreamOutput::writeLong);
final Set<Long> targetSet = getStreamInput(out.bytes()).readCollectionAsSet(StreamInput::readLong);
assertThat(targetSet, equalTo(sourceSet));
}
public void testInstantSerialization() throws IOException {
final Instant instant = Instant.now();
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeInstant(instant);
try (StreamInput in = getStreamInput(out.bytes())) {
final Instant serialized = in.readInstant();
assertEquals(instant, serialized);
}
}
}
public void testDurationSerialization() throws IOException {
Stream.generate(AbstractStreamTests::randomDuration).limit(100).forEach(this::assertDurationSerialization);
}
void assertDurationSerialization(Duration duration) {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeGenericValue(duration);
try (StreamInput in = getStreamInput(out.bytes())) {
final Duration deserialized = (Duration) in.readGenericValue();
assertEquals(duration, deserialized);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public void testPeriodSerialization() {
Stream.generate(AbstractStreamTests::randomPeriod).limit(100).forEach(this::assertPeriodSerialization);
}
void assertPeriodSerialization(Period period) {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeGenericValue(period);
try (StreamInput in = getStreamInput(out.bytes())) {
final Period deserialized = (Period) in.readGenericValue();
assertEquals(period, deserialized);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
static Duration randomDuration() {
return randomFrom(
List.of(
Duration.ofNanos(randomIntBetween(1, 100_000)),
Duration.ofMillis(randomIntBetween(1, 1_000)),
Duration.ofSeconds(randomIntBetween(1, 100)),
Duration.ofHours(randomIntBetween(1, 10)),
Duration.ofDays(randomIntBetween(1, 5))
)
);
}
static Period randomPeriod() {
return randomFrom(
List.of(
Period.ofDays(randomIntBetween(1, 31)),
Period.ofWeeks(randomIntBetween(1, 52)),
Period.ofMonths(randomIntBetween(1, 12)),
Period.ofYears(randomIntBetween(1, 1000))
)
);
}
public void testOptionalInstantSerialization() throws IOException {
final Instant instant = Instant.now();
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeOptionalInstant(instant);
try (StreamInput in = getStreamInput(out.bytes())) {
final Instant serialized = in.readOptionalInstant();
assertEquals(instant, serialized);
}
}
final Instant missing = null;
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeOptionalInstant(missing);
try (StreamInput in = getStreamInput(out.bytes())) {
final Instant serialized = in.readOptionalInstant();
assertEquals(missing, serialized);
}
}
}
static final | FooBar |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java | {
"start": 30279,
"end": 30327
} | class ____ YARN API protocol records.
*/
public | for |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/GetNamenodeRegistrationsResponse.java | {
"start": 1326,
"end": 2051
} | class ____ {
public static GetNamenodeRegistrationsResponse newInstance()
throws IOException {
return StateStoreSerializer.newRecord(
GetNamenodeRegistrationsResponse.class);
}
public static GetNamenodeRegistrationsResponse newInstance(
List<MembershipState> records) throws IOException {
GetNamenodeRegistrationsResponse response = newInstance();
response.setNamenodeMemberships(records);
return response;
}
@Public
@Unstable
public abstract List<MembershipState> getNamenodeMemberships()
throws IOException;
@Public
@Unstable
public abstract void setNamenodeMemberships(
List<MembershipState> records) throws IOException;
} | GetNamenodeRegistrationsResponse |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ForOverrideCheckerTest.java | {
"start": 6158,
"end": 6686
} | class ____ extends test.ExtendMe {",
" @Override",
" protected int overrideMe() {",
// This is identical to the above, with a slightly less common explicit qualification
" return Test.super.overrideMe();",
" }",
"}")
.doTest();
}
@Test
public void userCannotCallSuperFromNonOverriddenMethod() {
compilationHelper
.addSourceLines(
"test/Test.java",
"""
package test2;
public | Test |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java | {
"start": 1398,
"end": 8408
} | class ____ extends NlpTokenizer {
public static final String UNKNOWN_TOKEN = "[UNK]";
public static final String SEPARATOR_TOKEN = "[SEP]";
public static final String PAD_TOKEN = "[PAD]";
public static final String CLASS_TOKEN = "[CLS]";
public static final String MASK_TOKEN = BertTokenization.MASK_TOKEN;
private static final Set<String> NEVER_SPLIT = Set.of(MASK_TOKEN);
private final WordPieceAnalyzer wordPieceAnalyzer;
protected final List<String> originalVocab;
protected final boolean withSpecialTokens;
private final int maxSequenceLength;
private final int sepTokenId;
private final int clsTokenId;
private final String padToken;
private final int padTokenId;
private final String maskToken;
private final OptionalInt maskTokenId;
private final String unknownToken;
protected BertTokenizer(
List<String> originalVocab,
SortedMap<String, Integer> vocab,
boolean doLowerCase,
boolean doTokenizeCjKChars,
boolean doStripAccents,
boolean withSpecialTokens,
int maxSequenceLength,
Set<String> neverSplit
) {
this(
originalVocab,
vocab,
doLowerCase,
doTokenizeCjKChars,
doStripAccents,
withSpecialTokens,
maxSequenceLength,
Sets.union(neverSplit, NEVER_SPLIT),
SEPARATOR_TOKEN,
CLASS_TOKEN,
PAD_TOKEN,
MASK_TOKEN,
UNKNOWN_TOKEN
);
}
protected BertTokenizer(
List<String> originalVocab,
SortedMap<String, Integer> vocab,
boolean doLowerCase,
boolean doTokenizeCjKChars,
boolean doStripAccents,
boolean withSpecialTokens,
int maxSequenceLength,
Set<String> neverSplit,
String sepToken,
String clsToken,
String padToken,
String maskToken,
String unknownToken
) {
wordPieceAnalyzer = createWordPieceAnalyzer(
originalVocab,
new ArrayList<>(neverSplit),
doLowerCase,
doTokenizeCjKChars,
doStripAccents,
unknownToken
);
this.originalVocab = originalVocab;
this.withSpecialTokens = withSpecialTokens;
this.maxSequenceLength = maxSequenceLength;
if (vocab.containsKey(unknownToken) == false) {
throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", unknownToken);
}
if (vocab.containsKey(padToken) == false) {
throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", padToken);
}
this.padTokenId = vocab.get(padToken);
if (withSpecialTokens) {
Set<String> missingSpecialTokens = Sets.difference(Set.of(sepToken, clsToken), vocab.keySet());
if (missingSpecialTokens.isEmpty() == false) {
throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required {} token(s)", missingSpecialTokens);
}
this.sepTokenId = vocab.get(sepToken);
this.clsTokenId = vocab.get(clsToken);
} else {
this.sepTokenId = -1;
this.clsTokenId = -1;
}
this.padToken = padToken;
this.maskToken = maskToken;
this.maskTokenId = vocab.containsKey(maskToken) ? OptionalInt.of(vocab.get(maskToken)) : OptionalInt.empty();
this.unknownToken = unknownToken;
}
protected WordPieceAnalyzer createWordPieceAnalyzer(
List<String> vocabulary,
List<String> neverSplit,
boolean doLowerCase,
boolean doTokenizeCjKChars,
boolean doStripAccents,
String unknownToken
) {
return new WordPieceAnalyzer(
vocabulary,
new ArrayList<>(neverSplit),
doLowerCase,
doTokenizeCjKChars,
doStripAccents,
unknownToken
);
}
@Override
int sepTokenId() {
return sepTokenId;
}
@Override
int maxSequenceLength() {
return maxSequenceLength;
}
@Override
boolean isWithSpecialTokens() {
return withSpecialTokens;
}
@Override
int getNumExtraTokensForSeqPair() {
return 3;
}
@Override
int numExtraTokensForSingleSequence() {
return 2;
}
@Override
int clsTokenId() {
return clsTokenId;
}
public String getPadToken() {
return padToken;
}
public String getUnknownToken() {
return unknownToken;
}
@Override
public OptionalInt getPadTokenId() {
return OptionalInt.of(padTokenId);
}
@Override
public OptionalInt getMaskTokenId() {
return maskTokenId;
}
@Override
public String getMaskToken() {
return maskToken;
}
@Override
public List<String> getVocabulary() {
return originalVocab;
}
@Override
public TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokens> tokenizations) {
return new BertTokenizationResult(originalVocab, tokenizations, padTokenId);
}
TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) {
return new BertTokenizationResult.BertTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId);
}
@Override
public NlpTask.RequestBuilder requestBuilder() {
return (inputs, requestId, truncate, span, windowSize) -> buildTokenizationResult(
IntStream.range(0, inputs.size())
.boxed()
.flatMap(seqId -> tokenize(inputs.get(seqId), truncate, span, seqId, windowSize).stream())
.collect(Collectors.toList())
).buildRequest(requestId, truncate);
}
/**
* @param seq cannot be null
* @return InnerTokenization
*/
@Override
public InnerTokenization innerTokenize(String seq) {
List<Integer> tokenPositionMap = new ArrayList<>();
try (TokenStream ts = wordPieceAnalyzer.tokenStream("input", seq)) {
ts.reset();
PositionIncrementAttribute tokenPos = ts.addAttribute(PositionIncrementAttribute.class);
int currPos = -1;
while (ts.incrementToken()) {
currPos += tokenPos.getPositionIncrement();
tokenPositionMap.add(currPos);
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
return new InnerTokenization(new ArrayList<>(wordPieceAnalyzer.getTokens()), tokenPositionMap);
}
@Override
public void close() {
wordPieceAnalyzer.close();
}
public static Builder builder(List<String> vocab, Tokenization tokenization) {
return new Builder(vocab, tokenization);
}
public static | BertTokenizer |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java | {
"start": 2492,
"end": 5038
} | class ____ extends BinaryScalarFunction implements SpatialEvaluatorFactory.SpatialSourceResolution {
private static final TransportVersion ESQL_SERIALIZE_SOURCE_FUNCTIONS_WARNINGS = TransportVersion.fromName(
"esql_serialize_source_functions_warnings"
);
private final SpatialTypeResolver spatialTypeResolver;
private SpatialCrsType crsType;
protected final boolean leftDocValues;
protected final boolean rightDocValues;
protected BinarySpatialFunction(
Source source,
Expression left,
Expression right,
boolean leftDocValues,
boolean rightDocValues,
boolean pointsOnly,
boolean supportsGrid
) {
super(source, left, right);
this.leftDocValues = leftDocValues;
this.rightDocValues = rightDocValues;
this.spatialTypeResolver = new SpatialTypeResolver(this, pointsOnly, supportsGrid);
}
protected BinarySpatialFunction(StreamInput in, boolean leftDocValues, boolean rightDocValues, boolean pointsOnly, boolean supportsGrid)
throws IOException {
// The doc-values fields are only used on data nodes local planning, and therefor never serialized
this(
in.getTransportVersion().supports(ESQL_SERIALIZE_SOURCE_FUNCTIONS_WARNINGS)
? Source.readFrom((PlanStreamInput) in)
: Source.EMPTY,
in.readNamedWriteable(Expression.class),
in.readNamedWriteable(Expression.class),
leftDocValues,
rightDocValues,
pointsOnly,
supportsGrid
);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getTransportVersion().supports(ESQL_SERIALIZE_SOURCE_FUNCTIONS_WARNINGS)) {
source().writeTo(out);
}
out.writeNamedWriteable(left());
out.writeNamedWriteable(right());
// The doc-values fields are only used on data nodes local planning, and therefor never serialized
// The CRS type is re-resolved from the combination of left and right fields, and also not necessary to serialize
}
/**
* Mark the function as expecting the specified fields to arrive as doc-values.
*/
public abstract BinarySpatialFunction withDocValues(boolean foundLeft, boolean foundRight);
@Override
public int hashCode() {
// NB: the hashcode is currently used for key generation so
// to avoid clashes between aggs with the same arguments, add the | BinarySpatialFunction |
java | quarkusio__quarkus | independent-projects/qute/debug/src/main/java/io/quarkus/qute/debug/DebuggerStoppedException.java | {
"start": 39,
"end": 165
} | class ____ extends DebuggerException {
public DebuggerStoppedException() {
super();
}
}
| DebuggerStoppedException |
java | apache__avro | lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java | {
"start": 8904,
"end": 9252
} | class ____ be automatically set to
* {@link org.apache.avro.mapred.AvroValue}.
* </p>
*
* @param valueClass The value class.
* @return This options instance.
*/
public Options withValueClass(Class<?> valueClass) {
if (null == valueClass) {
throw new IllegalArgumentException("Value | will |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/CompactStringObjectMap.java | {
"start": 493,
"end": 4767
} | class ____
implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
/**
* Shared instance that can be used when there are no contents to Map.
*/
private final static CompactStringObjectMap EMPTY = new CompactStringObjectMap(1, 0,
new Object[4]);
private final int _hashMask, _spillCount;
private final Object[] _hashArea;
private CompactStringObjectMap(int hashMask, int spillCount, Object[] hashArea)
{
_hashMask = hashMask;
_spillCount = spillCount;
_hashArea = hashArea;
}
public static CompactStringObjectMap construct(Map<String,?> all)
{
if (all.isEmpty()) { // can this happen?
return EMPTY;
}
// First: calculate size of primary hash area
final int size = findSize(all.size());
final int mask = size-1;
// and allocate enough to contain primary/secondary, expand for spillovers as need be
int alloc = (size + (size>>1)) * 2;
Object[] hashArea = new Object[alloc];
int spillCount = 0;
for (Map.Entry<String,?> entry : all.entrySet()) {
String key = entry.getKey();
// 09-Sep-2019, tatu: [databind#2309] skip `null`s if any included
if (key == null) {
continue;
}
int slot = key.hashCode() & mask;
int ix = slot+slot;
// primary slot not free?
if (hashArea[ix] != null) {
// secondary?
ix = (size + (slot >> 1)) << 1;
if (hashArea[ix] != null) {
// ok, spill over.
ix = ((size + (size >> 1) ) << 1) + spillCount;
spillCount += 2;
if (ix >= hashArea.length) {
hashArea = Arrays.copyOf(hashArea, hashArea.length + 4);
}
}
}
hashArea[ix] = key;
hashArea[ix+1] = entry.getValue();
}
return new CompactStringObjectMap(mask, spillCount, hashArea);
}
private final static int findSize(int size)
{
if (size <= 5) {
return 8;
}
if (size <= 12) {
return 16;
}
int needed = size + (size >> 2); // at most 80% full
int result = 32;
while (result < needed) {
result += result;
}
return result;
}
public Object find(String key)
{
int slot = key.hashCode() & _hashMask;
int ix = (slot<<1);
Object match = _hashArea[ix];
if ((match == key) || key.equals(match)) {
return _hashArea[ix+1];
}
return _find2(key, slot, match);
}
private final Object _find2(String key, int slot, Object match)
{
if (match == null) {
return null;
}
int hashSize = _hashMask+1;
int ix = (hashSize + (slot>>1)) << 1;
match = _hashArea[ix];
if (key.equals(match)) {
return _hashArea[ix+1];
}
if (match != null) { // _findFromSpill(...)
int i = (hashSize + (hashSize>>1)) << 1;
for (int end = i + _spillCount; i < end; i += 2) {
match = _hashArea[i];
if ((match == key) || key.equals(match)) {
return _hashArea[i+1];
}
}
}
return null;
}
public Object findCaseInsensitive(String key) {
for (int i = 0, end = _hashArea.length; i < end; i += 2) {
Object k2 = _hashArea[i];
if (k2 != null) {
String s = (String) k2;
if (s.equalsIgnoreCase(key)) {
return _hashArea[i+1]; // lgtm [java/index-out-of-bounds]
}
}
}
return null;
}
public List<String> keys() {
final int end = _hashArea.length;
List<String> keys = new ArrayList<String>(end >> 2);
for (int i = 0; i < end; i += 2) {
Object key = _hashArea[i];
if (key != null) {
keys.add((String) key);
}
}
return keys;
}
}
| CompactStringObjectMap |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java | {
"start": 5607,
"end": 6548
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final Function<DriverContext, BreakingBytesRefBuilder> scratch;
private final EvalOperator.ExpressionEvaluator.Factory[] values;
public Factory(Source source, Function<DriverContext, BreakingBytesRefBuilder> scratch,
EvalOperator.ExpressionEvaluator.Factory[] values) {
this.source = source;
this.scratch = scratch;
this.values = values;
}
@Override
public ConcatEvaluator get(DriverContext context) {
EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new);
return new ConcatEvaluator(source, scratch.apply(context), values, context);
}
@Override
public String toString() {
return "ConcatEvaluator[" + "values=" + Arrays.toString(values) + "]";
}
}
}
| Factory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java | {
"start": 21563,
"end": 22500
} | class ____ extends ByteDenseVectorFunction implements CosineSimilarityInterface {
public ByteCosineSimilarity(ScoreScript scoreScript, DenseVectorDocValuesField field, List<Number> queryVector) {
super(scoreScript, field, queryVector, true, ElementType.BYTE, ElementType.FLOAT);
}
public ByteCosineSimilarity(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) {
super(scoreScript, field, queryVector);
}
public double cosineSimilarity() {
setNextVector();
if (floatQueryVector != null) {
// float vector is already normalized by the superclass constructor
return field.get().cosineSimilarity(floatQueryVector, false);
} else {
return field.get().cosineSimilarity(byteQueryVector, qvMagnitude);
}
}
}
public static | ByteCosineSimilarity |
java | playframework__playframework | web/play-java-forms/src/main/java/play/data/validation/Constraints.java | {
"start": 2452,
"end": 6848
} | class ____ {
private final Lang lang;
private final Messages messages;
private final TypedMap attrs;
private final Config config;
public ValidationPayload(
final Lang lang, final Messages messages, final TypedMap attrs, final Config config) {
this.lang = lang;
this.messages = messages;
this.attrs = attrs;
this.config = config;
}
/**
* @return if validation happens during a Http Request the lang of that request, otherwise null
*/
public Lang getLang() {
return this.lang;
}
/**
* @return if validation happens during a Http Request the messages for the lang of that
* request, otherwise null
*/
public Messages getMessages() {
return this.messages;
}
/**
* @return if validation happens during a Http Request the request attributes of that request,
* otherwise null
*/
public TypedMap getAttrs() {
return this.attrs;
}
/**
* @return the current application configuration, will always be set, even when accessed outside
* a Http Request
*/
public Config getConfig() {
return this.config;
}
}
/**
* Converts a set of constraints to human-readable values. Does not guarantee the order of the
* returned constraints.
*
* <p>This method calls {@code displayableConstraint} under the hood.
*
* @param constraints the set of constraint descriptors.
* @return a list of pairs of tuples assembled from displayableConstraint.
*/
public static List<Tuple<String, List<Object>>> displayableConstraint(
Set<ConstraintDescriptor<?>> constraints) {
return constraints
.parallelStream()
.filter(c -> c.getAnnotation().annotationType().isAnnotationPresent(Display.class))
.map(Constraints::displayableConstraint)
.collect(Collectors.toList());
}
/**
* Converts a set of constraints to human-readable values in guaranteed order. Only constraints
* that have an annotation that intersect with the {@code orderedAnnotations} parameter will be
* considered. The order of the returned constraints corresponds to the order of the {@code
* orderedAnnotations parameter}.
*
* @param constraints the set of constraint descriptors.
* @param orderedAnnotations the array of annotations
* @return a list of tuples showing readable constraints.
*/
public static List<Tuple<String, List<Object>>> displayableConstraint(
Set<ConstraintDescriptor<?>> constraints, Annotation[] orderedAnnotations) {
final List<Annotation> constraintAnnot =
constraints.stream()
.map(ConstraintDescriptor::getAnnotation)
.collect(Collectors.<Annotation>toList());
return Stream.of(orderedAnnotations)
.filter(
constraintAnnot
::contains) // only use annotations for which we actually have a constraint
.filter(a -> a.annotationType().isAnnotationPresent(Display.class))
.map(
a ->
displayableConstraint(
constraints
.parallelStream()
.filter(c -> c.getAnnotation().equals(a))
.findFirst()
.get()))
.collect(Collectors.toList());
}
/**
* Converts a constraint to a human-readable value.
*
* @param constraint the constraint descriptor.
* @return A tuple containing the constraint's display name and the constraint attributes.
*/
public static Tuple<String, List<Object>> displayableConstraint(
ConstraintDescriptor<?> constraint) {
final Display displayAnnotation =
constraint.getAnnotation().annotationType().getAnnotation(Display.class);
return Tuple(
displayAnnotation.name(),
Collections.unmodifiableList(
Stream.of(displayAnnotation.attributes())
.map(attr -> constraint.getAttributes().get(attr))
.collect(Collectors.toList())));
}
// --- Required
/** Defines a field as required. */
@Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE})
@Retention(RUNTIME)
@Constraint(validatedBy = RequiredValidator.class)
@Repeatable(play.data.validation.Constraints.Required.List.class)
@Display(name = "constraint.required")
public @ | ValidationPayload |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/version/SemanticVersion.java | {
"start": 653,
"end": 745
} | class ____ comparing semantic versions.
*
* @author Graeme Rocher
* @since 1.0
*/
public | for |
java | grpc__grpc-java | netty/src/main/java/io/grpc/netty/InternalProtocolNegotiators.java | {
"start": 3576,
"end": 4297
} | class ____ implements InternalProtocolNegotiator.ProtocolNegotiator {
@Override
public AsciiString scheme() {
return negotiator.scheme();
}
@Override
public ChannelHandler newHandler(GrpcHttp2ConnectionHandler grpcHandler) {
return negotiator.newHandler(grpcHandler);
}
@Override
public void close() {
negotiator.close();
}
}
return new ServerTlsNegotiator();
}
/** Returns a {@link ProtocolNegotiator} for plaintext client channel. */
public static InternalProtocolNegotiator.ProtocolNegotiator plaintext() {
final io.grpc.netty.ProtocolNegotiator negotiator = ProtocolNegotiators.plaintext();
final | ServerTlsNegotiator |
java | reactor__reactor-core | reactor-test/src/main/java/reactor/test/ValueFormatters.java | {
"start": 3891,
"end": 8858
} | interface ____<CONTAINER> extends Predicate<Object>, BiFunction<Object, Function<Object, String>, String> {
/**
* Return the targeted container {@link Class}. The {@link BiFunction} shouldn't be
* applied to objects that are not of that class, although it will default to using
* {@link String#valueOf(Object)} on them. This verification is included in {@link #test(Object)}.
*
* @return the target container {@link Class}
*/
Class<CONTAINER> getTargetClass();
/**
* An additional test to perform on a matching container to further decide to convert
* it or not. The {@link BiFunction} shouldn't be applied to container that do not
* match that predicate, although it will default to using {@link String#valueOf(Object)}
* on them. This verification is included in {@link #test(Object)}.
* <p>
* Defaults to always matching instances of the {@link #getTargetClass() target Class}.
*
* @param value the candidate container
* @return true if it can be extracted and converted, false otherwise
*/
default boolean matches(CONTAINER value) {
return true;
}
/**
* Return the prefix to use in the container's {@link String} representation, given
* the original container.
* <p>
* Defaults to {@code "["}.
*
* @param original the original container
* @return the prefix to use
*/
default String prefix(CONTAINER original) {
return "[";
}
/**
* Return the suffix to use in the container's {@link String} representation, given
* the original container.
* <p>
* Defaults to {@code "]"}.
*
* @param original the original container
* @return the suffix to use
*/
default String suffix(CONTAINER original) {
return "]";
}
/**
* Explode the container into a {@link Stream} of {@link Object}, each of which
* is a candidate for individual {@link String} conversion by a {@link ToStringConverter}
* when applied as a {@link BiFunction}.
*
* @param original the container to extract contents from
* @return the {@link Stream} of elements contained in the container
*/
Stream<Object> explode(CONTAINER original);
/**
* Test if an object is a container that can be extracted and converted by this
* {@link Extractor}. Defaults to testing {@link #getTargetClass()} and {@link #matches(Object)}.
* The {@link BiFunction} shouldn't be applied to objects that do not match this
* test, although it will default to using {@link String#valueOf(Object)} on them.
*
* @param o the arbitrary object to test.
* @return true if the object can be extracted and converted to {@link String}
*/
@Override
default boolean test(Object o) {
Class<CONTAINER> containerClass = getTargetClass();
if (containerClass.isInstance(o)) {
CONTAINER container = containerClass.cast(o);
return matches(container);
}
return false;
}
/**
* Given an arbitrary object and a {@link ToStringConverter}, if the object passes
* the {@link #test(Object)}, extract elements from it and convert them using the
* {@link ToStringConverter}, joining the result together to obtain a customized
* {@link String} representation of both the container and its contents.
* Any object that doesn't match this {@link Extractor} is naively transformed
* using {@link String#valueOf(Object)}, use {@link #test(Object)} to avoid that
* when choosing between multiple {@link Extractor}.
*
* @param target the arbitrary object to potentially convert.
* @param contentFormatter the {@link ToStringConverter} to apply on each element
* contained in the target
* @return the {@link String} representation of the target, customized as needed
*/
default @Nullable String apply(Object target, Function<Object, String> contentFormatter) {
Class<CONTAINER> containerClass = getTargetClass();
if (containerClass.isInstance(target)) {
CONTAINER container = containerClass.cast(target);
if (matches(container)) {
return explode(container)
.map(contentFormatter)
.collect(Collectors.joining(", ", prefix(container), suffix(container)));
}
}
return String.valueOf(target);
}
}
/**
* Create a value formatter that is specific to a particular {@link Class}, applying
* the given String conversion {@link Function} provided the object is an instance of
* that Class.
*
* @param tClass the {@link Class} to convert
* @param tToString the {@link String} conversion {@link Function} for objects of that class
* @param <T> the generic type of the matching objects
* @return the class-specific formatter
*/
public static <T> ToStringConverter forClass(Class<T> tClass,
Function<T, String> tToString) {
return new ClassBasedToStringConverter<>(tClass, t -> true, tToString);
}
/**
* Create a value formatter that is specific to a particular {@link Class} and filters
* based on a provided {@link Predicate}. All objects of said | Extractor |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/nestedsource/exceptions/Bucket.java | {
"start": 263,
"end": 458
} | class ____ {
String userId;
public Bucket(String userId) {
this.userId = userId;
}
public User getUser() throws NoSuchUser {
throw new NoSuchUser();
}
}
| Bucket |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/StreamResetException.java | {
"start": 638,
"end": 1062
} | class ____ extends VertxException {
private final long code;
public StreamResetException(long code) {
super("Stream reset: " + code, true);
this.code = code;
}
public StreamResetException(long code, Throwable cause) {
super("Stream reset: " + code, cause, true);
this.code = code;
}
/**
* @return the reset error code
*/
public long getCode() {
return code;
}
}
| StreamResetException |
java | elastic__elasticsearch | client/rest/src/main/java/org/elasticsearch/client/RestClient.java | {
"start": 41722,
"end": 42378
} | class ____ extends GzipCompressingEntity {
public ContentCompressingEntity(HttpEntity entity) {
super(entity);
}
@Override
public InputStream getContent() throws IOException {
ByteArrayInputOutputStream out = new ByteArrayInputOutputStream(1024);
try (GZIPOutputStream gzipOut = new GZIPOutputStream(out)) {
wrappedEntity.writeTo(gzipOut);
}
return out.asInput();
}
}
/**
* A ByteArrayOutputStream that can be turned into an input stream without copying the underlying buffer.
*/
private static | ContentCompressingEntity |
java | google__jimfs | jimfs/src/main/java/com/google/common/jimfs/AbstractWatchService.java | {
"start": 9181,
"end": 9235
} | enum ____ {
READY,
SIGNALLED
}
}
}
| State |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ResourceElementResolver.java | {
"start": 7105,
"end": 8605
} | class ____ extends ResourceElementResolver {
private final String fieldName;
public ResourceFieldResolver(String name, boolean defaultName, String fieldName) {
super(name, defaultName);
this.fieldName = fieldName;
}
@Override
public void resolveAndSet(RegisteredBean registeredBean, Object instance) {
Assert.notNull(registeredBean, "'registeredBean' must not be null");
Assert.notNull(instance, "'instance' must not be null");
Field field = getField(registeredBean);
Object resolved = resolve(registeredBean);
ReflectionUtils.makeAccessible(field);
ReflectionUtils.setField(field, instance, resolved);
}
@Override
protected DependencyDescriptor createDependencyDescriptor(RegisteredBean registeredBean) {
Field field = getField(registeredBean);
return new LookupDependencyDescriptor(field, field.getType(), isLazyLookup(registeredBean));
}
@Override
protected Class<?> getLookupType(RegisteredBean registeredBean) {
return getField(registeredBean).getType();
}
@Override
protected AnnotatedElement getAnnotatedElement(RegisteredBean registeredBean) {
return getField(registeredBean);
}
private Field getField(RegisteredBean registeredBean) {
Field field = ReflectionUtils.findField(registeredBean.getBeanClass(), this.fieldName);
Assert.notNull(field,
() -> "No field '" + this.fieldName + "' found on " + registeredBean.getBeanClass().getName());
return field;
}
}
private static final | ResourceFieldResolver |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/main/java/org/apache/camel/test/spring/junit5/CamelAnnotationsHandler.java | {
"start": 5635,
"end": 6772
} | class ____ executed
*/
public static void handleRouteCoverageEnable(
ConfigurableApplicationContext context, Class<?> testClass, Function testMethod)
throws Exception {
boolean enabled = isRouteCoverageEnabled(testClass.isAnnotationPresent(EnableRouteCoverage.class));
if (enabled) {
CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {
@Override
public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
LOGGER.info("Enabling @RouteCoverage");
EventNotifier notifier = new RouteCoverageEventNotifier(testClass.getName(), testMethod);
camelContext.addService(notifier, true);
camelContext.getManagementStrategy().addEventNotifier(notifier);
}
});
}
}
/**
* Handles enabling route dump based on {@link EnableRouteDump}.
*
* @param context the initialized Spring context
* @param testClass the test | being |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/ssl/ClientSslConfiguration.java | {
"start": 3662,
"end": 4071
} | class ____ extends KeyConfiguration {
}
/**
* The default {@link io.micronaut.http.ssl.SslConfiguration.KeyStoreConfiguration}.
*/
@SuppressWarnings("WeakerAccess")
@ConfigurationProperties(KeyStoreConfiguration.PREFIX)
@BootstrapContextCompatible
@Requires(property = ClientSslConfiguration.PREFIX + "." + KeyStoreConfiguration.PREFIX)
public static | DefaultKeyConfiguration |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SCMUploaderCanUploadRequestPBImpl.java | {
"start": 1179,
"end": 2578
} | class ____
extends SCMUploaderCanUploadRequest {
SCMUploaderCanUploadRequestProto proto =
SCMUploaderCanUploadRequestProto.getDefaultInstance();
SCMUploaderCanUploadRequestProto.Builder builder = null;
boolean viaProto = false;
public SCMUploaderCanUploadRequestPBImpl() {
builder = SCMUploaderCanUploadRequestProto.newBuilder();
}
public SCMUploaderCanUploadRequestPBImpl(
SCMUploaderCanUploadRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public SCMUploaderCanUploadRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public String getResourceKey() {
SCMUploaderCanUploadRequestProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasResourceKey()) ? p.getResourceKey() : null;
}
@Override
public void setResourceKey(String key) {
maybeInitBuilder();
if (key == null) {
builder.clearResourceKey();
return;
}
builder.setResourceKey(key);
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = SCMUploaderCanUploadRequestProto.newBuilder(proto);
}
viaProto = false;
}
}
| SCMUploaderCanUploadRequestPBImpl |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java | {
"start": 10839,
"end": 10925
} | class ____<X extends A & B> implements Repo<X> {
}
public static | MultiBoundedRepo |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/groovy/GroovyMarkupViewResolver.java | {
"start": 1445,
"end": 2649
} | class ____ extends AbstractTemplateViewResolver {
/**
* Sets the default {@link #setViewClass view class} to {@link #requiredViewClass}:
* by default {@link GroovyMarkupView}.
*/
public GroovyMarkupViewResolver() {
setViewClass(requiredViewClass());
}
/**
* A convenience constructor that allows for specifying the {@link #setPrefix prefix}
* and {@link #setSuffix suffix} as constructor arguments.
* @param prefix the prefix that gets prepended to view names when building a URL
* @param suffix the suffix that gets appended to view names when building a URL
* @since 4.3
*/
public GroovyMarkupViewResolver(String prefix, String suffix) {
this();
setPrefix(prefix);
setSuffix(suffix);
}
@Override
protected Class<?> requiredViewClass() {
return GroovyMarkupView.class;
}
@Override
protected AbstractUrlBasedView instantiateView() {
return (getViewClass() == GroovyMarkupView.class ? new GroovyMarkupView() : super.instantiateView());
}
/**
* This resolver supports i18n, so cache keys should contain the locale.
*/
@Override
protected Object getCacheKey(String viewName, Locale locale) {
return viewName + '_' + locale;
}
}
| GroovyMarkupViewResolver |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java | {
"start": 1934,
"end": 2590
} | class ____ {
private static final String CONFIG_ROOTS_LIST = "META-INF/quarkus-config-roots.list";
private static List<Class<?>> collectConfigRoots(ClassLoader classLoader) throws IOException, ClassNotFoundException {
Assert.checkNotNullParam("classLoader", classLoader);
// populate with all known types
List<Class<?>> roots = new ArrayList<>();
for (Class<?> clazz : ServiceUtil.classesNamedIn(classLoader, CONFIG_ROOTS_LIST)) {
if (!clazz.isInterface()) {
throw new IllegalArgumentException(
"The configuration " + clazz + " must be an | BuildTimeConfigurationReader |
java | apache__camel | components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileSendDynamicAware.java | {
"start": 1062,
"end": 4721
} | class ____ extends SendDynamicAwareSupport {
public static final String PROP_FILE_NAME = "fileName";
public static final String PROP_TEMP_FILE_NAME = "tempFileName";
public static final String PROP_IDEMPOTENT_KEY = "idempotentKey";
public static final String PROP_MOVE = "move";
public static final String PROP_MOVE_FAILED = "moveFailed";
public static final String PROP_PRE_MOVE = "preMove";
public static final String PROP_MOVE_EXISTING = "moveExisting";
@Override
public boolean isLenientProperties() {
return false;
}
@Override
public DynamicAwareEntry prepare(Exchange exchange, String uri, String originalUri) throws Exception {
Map<String, Object> properties = endpointProperties(exchange, uri);
return new DynamicAwareEntry(uri, originalUri, properties, null);
}
@Override
public String resolveStaticUri(Exchange exchange, DynamicAwareEntry entry) throws Exception {
String uri = entry.getUri();
// windows path problems such as C:\temp was by simple language evaluated \t as a tab character
// which should then be reversed
uri = uri.replace("\t", "\\\\t");
boolean fileName = entry.getProperties().containsKey(PROP_FILE_NAME);
boolean tempFileName = entry.getProperties().containsKey(PROP_TEMP_FILE_NAME);
boolean idempotentKey = entry.getProperties().containsKey(PROP_IDEMPOTENT_KEY);
boolean move = entry.getProperties().containsKey(PROP_MOVE);
boolean moveFailed = entry.getProperties().containsKey(PROP_MOVE_FAILED);
boolean preMove = entry.getProperties().containsKey(PROP_PRE_MOVE);
boolean moveExisting = entry.getProperties().containsKey(PROP_MOVE_EXISTING);
// if any of the above are in use, then they should not be pre evaluated
// and we need to rebuild a new uri with them as-is
if (fileName || tempFileName || idempotentKey || move || moveFailed || preMove || moveExisting) {
Map<String, Object> params = entry.getProperties();
Map<String, Object> originalParams = URISupport.parseQuery(URISupport.extractQuery(entry.getOriginalUri()));
if (fileName) {
compute(originalParams, PROP_FILE_NAME, params);
}
if (tempFileName) {
compute(originalParams, PROP_TEMP_FILE_NAME, params);
}
if (idempotentKey) {
compute(originalParams, PROP_IDEMPOTENT_KEY, params);
}
if (move) {
compute(originalParams, PROP_MOVE, params);
}
if (moveFailed) {
compute(originalParams, PROP_MOVE_FAILED, params);
}
if (preMove) {
compute(originalParams, PROP_PRE_MOVE, params);
}
if (moveExisting) {
compute(originalParams, PROP_MOVE_EXISTING, params);
}
return asEndpointUri(exchange, uri, params);
} else {
return uri;
}
}
private static void compute(Map<String, Object> originalParams, String propFileName, Map<String, Object> params) {
Object val = originalParams.get(propFileName);
if (val != null) {
params.put(propFileName, val.toString());
}
}
@Override
public Processor createPreProcessor(Exchange exchange, DynamicAwareEntry entry) throws Exception {
return null;
}
@Override
public Processor createPostProcessor(Exchange exchange, DynamicAwareEntry entry) throws Exception {
return null;
}
}
| GenericFileSendDynamicAware |
java | quarkusio__quarkus | extensions/hal/runtime/src/main/java/io/quarkus/hal/HalService.java | {
"start": 818,
"end": 2418
} | class ____ the objects in the collection. If null, it will not resolve the links for these objects.
* @return The Hal collection wrapper instance.
*/
public <T> HalCollectionWrapper<T> toHalCollectionWrapper(Collection<T> collection, String collectionName,
Class<?> entityClass) {
List<HalEntityWrapper<T>> items = new ArrayList<>();
for (T entity : collection) {
items.add(toHalWrapper(entity));
}
Map<String, HalLink> classLinks = Collections.emptyMap();
if (entityClass != null) {
classLinks = getClassLinks(entityClass);
}
return new HalCollectionWrapper<>(items, collectionName, classLinks);
}
/**
* Wrap an entity into a Hal instance by including the entity itself and the Hal links.
*
* @param entity The entity to wrap.
* @return The Hal entity wrapper.
*/
public <T> HalEntityWrapper<T> toHalWrapper(T entity) {
return new HalEntityWrapper<>(entity, getInstanceLinks(entity));
}
/**
* Get the HREF link with reference `self` from the Hal links of the entity instance.
*
* @param entity The entity instance where to get the Hal links.
* @return the HREF link with rel `self`.
*/
public String getSelfLink(Object entity) {
HalLink halLink = getInstanceLinks(entity).get(SELF_REF);
if (halLink != null) {
return halLink.getHref();
}
return null;
}
/**
* Get the Hal links using the entity type class.
*
* @param entityClass The entity | of |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/slot/TaskSlotTable.java | {
"start": 2079,
"end": 12290
} | interface ____<T extends TaskSlotPayload>
extends TimeoutListener<AllocationID>, AutoCloseableAsync {
/**
* Start the task slot table with the given slot actions.
*
* @param initialSlotActions to use for slot actions
* @param mainThreadExecutor {@link ComponentMainThreadExecutor} to schedule internal calls to
* the main thread
*/
void start(SlotActions initialSlotActions, ComponentMainThreadExecutor mainThreadExecutor);
/**
* Returns the all {@link AllocationID} for the given job.
*
* @param jobId for which to return the set of {@link AllocationID}.
* @return Set of {@link AllocationID} for the given job
*/
Set<AllocationID> getAllocationIdsPerJob(JobID jobId);
/**
* Returns the {@link AllocationID} of any active task listed in this {@code TaskSlotTable}.
*
* @return The {@code AllocationID} of any active task.
*/
Set<AllocationID> getActiveTaskSlotAllocationIds();
/**
* Returns the {@link AllocationID} of active {@link TaskSlot}s attached to the job with the
* given {@link JobID}.
*
* @param jobId The {@code JobID} of the job for which the {@code AllocationID}s of the attached
* active {@link TaskSlot}s shall be returned.
* @return A set of {@code AllocationID}s that belong to active {@code TaskSlot}s having the
* passed {@code JobID}.
*/
Set<AllocationID> getActiveTaskSlotAllocationIdsPerJob(JobID jobId);
SlotReport createSlotReport(ResourceID resourceId);
/**
* Allocate the slot with the given index for the given job and allocation id. If negative index
* is given, a new auto increasing index will be generated. Returns true if the slot could be
* allocated. Otherwise it returns false.
*
* @param index of the task slot to allocate, use negative value for dynamic slot allocation
* @param jobId to allocate the task slot for
* @param allocationId identifying the allocation
* @param slotTimeout until the slot times out
* @throws SlotAllocationException if allocating the slot failed.
*/
@VisibleForTesting
void allocateSlot(int index, JobID jobId, AllocationID allocationId, Duration slotTimeout)
throws SlotAllocationException;
/**
* Allocate the slot with the given index for the given job and allocation id. If negative index
* is given, a new auto increasing index will be generated. Returns true if the slot could be
* allocated. Otherwise it returns false.
*
* @param index of the task slot to allocate, use negative value for dynamic slot allocation
* @param jobId to allocate the task slot for
* @param allocationId identifying the allocation
* @param resourceProfile of the requested slot, used only for dynamic slot allocation and will
* be ignored otherwise
* @param slotTimeout until the slot times out
* @throws SlotAllocationException if allocating the slot failed.
*/
void allocateSlot(
int index,
JobID jobId,
AllocationID allocationId,
ResourceProfile resourceProfile,
Duration slotTimeout)
throws SlotAllocationException;
/**
* Marks the slot under the given allocation id as active. If the slot could not be found, then
* a {@link SlotNotFoundException} is thrown.
*
* @param allocationId to identify the task slot to mark as active
* @throws SlotNotFoundException if the slot could not be found for the given allocation id
* @return True if the slot could be marked active; otherwise false
*/
boolean markSlotActive(AllocationID allocationId) throws SlotNotFoundException;
/**
* Marks the slot under the given allocation id as inactive. If the slot could not be found,
* then a {@link SlotNotFoundException} is thrown.
*
* @param allocationId to identify the task slot to mark as inactive
* @param slotTimeout until the slot times out
* @throws SlotNotFoundException if the slot could not be found for the given allocation id
* @return True if the slot could be marked inactive
*/
boolean markSlotInactive(AllocationID allocationId, Duration slotTimeout)
throws SlotNotFoundException;
/**
* Try to free the slot. If the slot is empty it will set the state of the task slot to free and
* return its index. If the slot is not empty, then it will set the state of the task slot to
* releasing, fail all tasks and return -1.
*
* @param allocationId identifying the task slot to be freed
* @throws SlotNotFoundException if there is not task slot for the given allocation id
* @return Index of the freed slot if the slot could be freed; otherwise -1
*/
default int freeSlot(AllocationID allocationId) throws SlotNotFoundException {
return freeSlot(allocationId, new Exception("The task slot of this task is being freed."));
}
/**
* Tries to free the slot. If the slot is empty it will set the state of the task slot to free
* and return its index. If the slot is not empty, then it will set the state of the task slot
* to releasing, fail all tasks and return -1.
*
* @param allocationId identifying the task slot to be freed
* @param cause to fail the tasks with if slot is not empty
* @throws SlotNotFoundException if there is not task slot for the given allocation id
* @return Index of the freed slot if the slot could be freed; otherwise -1
*/
int freeSlot(AllocationID allocationId, Throwable cause) throws SlotNotFoundException;
/**
* Check whether the timeout with ticket is valid for the given allocation id.
*
* @param allocationId to check against
* @param ticket of the timeout
* @return True if the timeout is valid; otherwise false
*/
boolean isValidTimeout(AllocationID allocationId, UUID ticket);
/**
* Check whether the slot for the given index is allocated for the given job and allocation id.
*
* @param index of the task slot
* @param jobId for which the task slot should be allocated
* @param allocationId which should match the task slot's allocation id
* @return True if the given task slot is allocated for the given job and allocation id
*/
boolean isAllocated(int index, JobID jobId, AllocationID allocationId);
/**
* Try to mark the specified slot as active if it has been allocated by the given job.
*
* @param jobId of the allocated slot
* @param allocationId identifying the allocation
* @return True if the task slot could be marked active.
*/
boolean tryMarkSlotActive(JobID jobId, AllocationID allocationId);
/**
* Check whether the task slot with the given index is free.
*
* @param index of the task slot
* @return True if the task slot is free; otherwise false
*/
boolean isSlotFree(int index);
/**
* Check whether the job has allocated (not active) slots.
*
* @param jobId for which to check for allocated slots
* @return True if there are allocated slots for the given job id.
*/
boolean hasAllocatedSlots(JobID jobId);
/**
* Return an iterator of allocated slots for the given job id.
*
* @param jobId for which to return the allocated slots
* @return Iterator of allocated slots.
*/
Iterator<TaskSlot<T>> getAllocatedSlots(JobID jobId);
/**
* Returns the owning job of the {@link TaskSlot} identified by the given {@link AllocationID}.
*
* @param allocationId identifying the slot for which to retrieve the owning job
* @return Owning job of the specified {@link TaskSlot} or null if there is no slot for the
* given allocation id or if the slot has no owning job assigned
*/
@Nullable
JobID getOwningJob(AllocationID allocationId);
/**
* Add the given task to the slot identified by the task's allocation id.
*
* @param task to add to the task slot with the respective allocation id
* @throws SlotNotFoundException if there was no slot for the given allocation id
* @throws SlotNotActiveException if there was no slot active for task's job and allocation id
* @return True if the task could be added to the task slot; otherwise false
*/
boolean addTask(T task) throws SlotNotFoundException, SlotNotActiveException;
/**
* Remove the task with the given execution attempt id from its task slot. If the owning task
* slot is in state releasing and empty after removing the task, the slot is freed via the slot
* actions.
*
* @param executionAttemptID identifying the task to remove
* @return The removed task if there is any for the given execution attempt id; otherwise null
*/
T removeTask(ExecutionAttemptID executionAttemptID);
/**
* Get the task for the given execution attempt id. If none could be found, then return null.
*
* @param executionAttemptID identifying the requested task
* @return The task for the given execution attempt id if it exist; otherwise null
*/
T getTask(ExecutionAttemptID executionAttemptID);
/**
* Return an iterator over all tasks for a given job.
*
* @param jobId identifying the job of the requested tasks
* @return Iterator over all task for a given job
*/
Iterator<T> getTasks(JobID jobId);
/**
* Get the current allocation for the task slot with the given index.
*
* @param index identifying the slot for which the allocation id shall be retrieved
* @return Allocation id of the specified slot if allocated; otherwise null
*/
AllocationID getCurrentAllocation(int index);
/**
* Get the memory manager of the slot allocated for the task.
*
* @param allocationID allocation id of the slot allocated for the task
* @return the memory manager of the slot allocated for the task
*/
MemoryManager getTaskMemoryManager(AllocationID allocationID) throws SlotNotFoundException;
}
| TaskSlotTable |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java | {
"start": 1920,
"end": 6586
} | class ____ {
private static final String FILE = "file";
private static final Logger LOG =
LoggerFactory.getLogger(TestViewFileSystemOverloadSchemeLocalFileSystem.class);
private FileSystem fsTarget;
private Configuration conf;
private Path targetTestRoot;
private FileSystemTestHelper fileSystemTestHelper =
new FileSystemTestHelper();
@BeforeEach
public void setUp() throws Exception {
conf = new Configuration();
conf.set(String.format("fs.%s.impl", FILE),
ViewFileSystemOverloadScheme.class.getName());
conf.set(String.format(
FsConstants.FS_VIEWFS_OVERLOAD_SCHEME_TARGET_FS_IMPL_PATTERN, FILE),
LocalFileSystem.class.getName());
fsTarget = new LocalFileSystem();
fsTarget.initialize(new URI("file:///"), conf);
// create the test root on local_fs
targetTestRoot = fileSystemTestHelper.getAbsoluteTestRootPath(fsTarget);
fsTarget.delete(targetTestRoot, true);
fsTarget.mkdirs(targetTestRoot);
}
/**
* Adds the given mount links to config. sources contains mount link src and
* the respective index location in targets contains the target uri.
*/
void addMountLinks(String mountTable, String[] sources, String[] targets,
Configuration config) throws IOException, URISyntaxException {
ViewFsTestSetup.addMountLinksToConf(mountTable, sources, targets, config);
}
/**
* Tests write file and read file with ViewFileSystemOverloadScheme.
*/
@Test
public void testLocalTargetLinkWriteSimple()
throws IOException, URISyntaxException {
LOG.info("Starting testLocalTargetLinkWriteSimple");
final String testString = "Hello Local!...";
final Path lfsRoot = new Path("/lfsRoot");
addMountLinks(null, new String[] {lfsRoot.toString() },
new String[] {targetTestRoot + "/local" }, conf);
try (FileSystem lViewFs = FileSystem.get(URI.create("file:///"), conf)) {
final Path testPath = new Path(lfsRoot, "test.txt");
try (FSDataOutputStream fsDos = lViewFs.create(testPath)) {
fsDos.writeUTF(testString);
}
try (FSDataInputStream lViewIs = lViewFs.open(testPath)) {
assertEquals(testString, lViewIs.readUTF());
}
}
}
/**
* Tests create file and delete file with ViewFileSystemOverloadScheme.
*/
@Test
public void testLocalFsCreateAndDelete() throws Exception {
LOG.info("Starting testLocalFsCreateAndDelete");
addMountLinks("mt", new String[] {"/lfsroot" },
new String[] {targetTestRoot + "/wd2" }, conf);
final URI mountURI = URI.create("file://mt/");
try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) {
Path testPath = new Path(mountURI.toString() + "/lfsroot/test");
lViewFS.createNewFile(testPath);
assertTrue(lViewFS.exists(testPath));
lViewFS.delete(testPath, true);
assertFalse(lViewFS.exists(testPath));
}
}
/**
* Tests root level file with linkMergeSlash with
* ViewFileSystemOverloadScheme.
*/
@Test
public void testLocalFsLinkSlashMerge() throws Exception {
LOG.info("Starting testLocalFsLinkSlashMerge");
addMountLinks("mt",
new String[] {Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH },
new String[] {targetTestRoot + "/wd2" }, conf);
final URI mountURI = URI.create("file://mt/");
try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) {
Path fileOnRoot = new Path(mountURI.toString() + "/NewFile");
lViewFS.createNewFile(fileOnRoot);
assertTrue(lViewFS.exists(fileOnRoot));
}
}
/**
* Tests with linkMergeSlash and other mounts in
* ViewFileSystemOverloadScheme.
*/
@Test
public void testLocalFsLinkSlashMergeWithOtherMountLinks() throws Exception {
assertThrows(IOException.class, ()->{
LOG.info("Starting testLocalFsLinkSlashMergeWithOtherMountLinks");
addMountLinks("mt",
new String[] {"/lfsroot", Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH },
new String[] {targetTestRoot + "/wd2", targetTestRoot + "/wd2" }, conf);
final URI mountURI = URI.create("file://mt/");
FileSystem.get(mountURI, conf);
fail("A merge slash cannot be configured with other mount links.");
});
}
@AfterEach
public void tearDown() throws Exception {
if (null != fsTarget) {
fsTarget.delete(fileSystemTestHelper.getTestRootPath(fsTarget), true);
fsTarget.close();
}
}
/**
* Returns the test root dir.
*/
public Path getTestRoot() {
return this.targetTestRoot;
}
/**
* Returns the conf.
*/
public Configuration getConf() {
return this.conf;
}
}
| TestViewFileSystemOverloadSchemeLocalFileSystem |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/WrappingScheduledExecutorServiceTest.java | {
"start": 3686,
"end": 7620
} | class ____ implements ScheduledExecutorService {
String lastMethodCalled = "";
long lastInitialDelay;
long lastDelay;
TimeUnit lastUnit;
void assertLastMethodCalled(String method, long delay, TimeUnit unit) {
assertEquals(method, lastMethodCalled);
assertEquals(delay, lastDelay);
assertEquals(unit, lastUnit);
}
void assertLastMethodCalled(String method, long initialDelay, long delay, TimeUnit unit) {
assertEquals(method, lastMethodCalled);
assertEquals(initialDelay, lastInitialDelay);
assertEquals(delay, lastDelay);
assertEquals(unit, lastUnit);
}
@Override
public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
assertThat(command).isInstanceOf(WrappedRunnable.class);
lastMethodCalled = "scheduleRunnable";
lastDelay = delay;
lastUnit = unit;
return null;
}
@Override
public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) {
assertThat(callable).isInstanceOf(WrappedCallable.class);
lastMethodCalled = "scheduleCallable";
lastDelay = delay;
lastUnit = unit;
return null;
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(
Runnable command, long initialDelay, long period, TimeUnit unit) {
assertThat(command).isInstanceOf(WrappedRunnable.class);
lastMethodCalled = "scheduleAtFixedRate";
lastInitialDelay = initialDelay;
lastDelay = period;
lastUnit = unit;
return null;
}
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(
Runnable command, long initialDelay, long delay, TimeUnit unit) {
assertThat(command).isInstanceOf(WrappedRunnable.class);
lastMethodCalled = "scheduleWithFixedDelay";
lastInitialDelay = initialDelay;
lastDelay = delay;
lastUnit = unit;
return null;
}
// No need to test these methods as they are handled by WrappingExecutorServiceTest
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) {
throw new UnsupportedOperationException();
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> List<Future<T>> invokeAll(
Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
throws ExecutionException, InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws ExecutionException, InterruptedException, TimeoutException {
throw new UnsupportedOperationException();
}
@Override
public boolean isShutdown() {
throw new UnsupportedOperationException();
}
@Override
public boolean isTerminated() {
throw new UnsupportedOperationException();
}
@Override
public void shutdown() {
throw new UnsupportedOperationException();
}
@Override
public List<Runnable> shutdownNow() {
throw new UnsupportedOperationException();
}
@Override
public <T> Future<T> submit(Callable<T> task) {
throw new UnsupportedOperationException();
}
@Override
public Future<?> submit(Runnable task) {
throw new UnsupportedOperationException();
}
@Override
public <T> Future<T> submit(Runnable task, T result) {
throw new UnsupportedOperationException();
}
@Override
public void execute(Runnable command) {
throw new UnsupportedOperationException();
}
}
}
| MockExecutor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CanIgnoreReturnValueSuggesterTest.java | {
"start": 30290,
"end": 30737
} | interface ____ {
Builder setName(String name);
String build();
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void daggerSubcomponentBuilder_b318407972() {
helper
.addInputLines(
"Builder.java",
"""
package com.google.frobber;
import dagger.Subcomponent;
@Subcomponent.Builder
| Builder |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/IncomingMessageEntity.java | {
"start": 1209,
"end": 2382
} | class ____ {
private String type;
private Integer offset;
private Integer length;
private String url;
private User user;
public IncomingMessageEntity() {
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public Integer getOffset() {
return offset;
}
public void setOffset(Integer offset) {
this.offset = offset;
}
public Integer getLength() {
return length;
}
public void setLength(Integer length) {
this.length = length;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
@Override
public String toString() {
return "MessageEntity{"
+ "type='" + type + '\''
+ ", offset=" + offset
+ ", length=" + length
+ ", url='" + url + '\''
+ ", user=" + user
+ '}';
}
}
| IncomingMessageEntity |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/pivot/HistogramGroupSourceTests.java | {
"start": 604,
"end": 3168
} | class ____ extends AbstractXContentSerializingTestCase<HistogramGroupSource> {
public static HistogramGroupSource randomHistogramGroupSource() {
return randomHistogramGroupSource(TransformConfigVersion.CURRENT);
}
public static HistogramGroupSource randomHistogramGroupSourceNoScript() {
return randomHistogramGroupSource(TransformConfigVersion.CURRENT, false);
}
public static HistogramGroupSource randomHistogramGroupSourceNoScript(String fieldPrefix) {
return randomHistogramGroupSource(TransformConfigVersion.CURRENT, false, fieldPrefix);
}
public static HistogramGroupSource randomHistogramGroupSource(TransformConfigVersion version) {
return randomHistogramGroupSource(version, randomBoolean());
}
public static HistogramGroupSource randomHistogramGroupSource(TransformConfigVersion version, boolean withScript) {
return randomHistogramGroupSource(version, withScript, "");
}
public static HistogramGroupSource randomHistogramGroupSource(TransformConfigVersion version, boolean withScript, String fieldPrefix) {
ScriptConfig scriptConfig = null;
String field;
// either a field or a script must be specified, it's possible to have both, but disallowed to have none
if (version.onOrAfter(TransformConfigVersion.V_7_7_0) && withScript) {
scriptConfig = ScriptConfigTests.randomScriptConfig();
field = randomBoolean() ? null : fieldPrefix + randomAlphaOfLengthBetween(1, 20);
} else {
field = fieldPrefix + randomAlphaOfLengthBetween(1, 20);
}
boolean missingBucket = version.onOrAfter(TransformConfigVersion.V_7_10_0) ? randomBoolean() : false;
double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false);
return new HistogramGroupSource(field, scriptConfig, missingBucket, interval);
}
@Override
protected HistogramGroupSource doParseInstance(XContentParser parser) throws IOException {
return HistogramGroupSource.fromXContent(parser, false);
}
@Override
protected HistogramGroupSource createTestInstance() {
return randomHistogramGroupSource();
}
@Override
protected HistogramGroupSource mutateInstance(HistogramGroupSource instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<HistogramGroupSource> instanceReader() {
return HistogramGroupSource::new;
}
}
| HistogramGroupSourceTests |
java | junit-team__junit5 | junit-platform-launcher/src/main/java/org/junit/platform/launcher/PostDiscoveryFilter.java | {
"start": 1091,
"end": 1156
} | interface ____ extends Filter<TestDescriptor> {
}
| PostDiscoveryFilter |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/function/FailableLongPredicate.java | {
"start": 1112,
"end": 3546
} | interface ____<E extends Throwable> {
/** FALSE singleton */
@SuppressWarnings("rawtypes")
FailableLongPredicate FALSE = t -> false;
/** TRUE singleton */
@SuppressWarnings("rawtypes")
FailableLongPredicate TRUE = t -> true;
/**
* Gets the FALSE singleton.
*
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <E extends Throwable> FailableLongPredicate<E> falsePredicate() {
return FALSE;
}
/**
* Gets the TRUE singleton.
*
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <E extends Throwable> FailableLongPredicate<E> truePredicate() {
return TRUE;
}
/**
* Returns a composed {@link FailableLongPredicate} like {@link LongPredicate#and(LongPredicate)}.
*
* @param other a predicate that will be logically-ANDed with this predicate.
* @return a composed {@link FailableLongPredicate} like {@link LongPredicate#and(LongPredicate)}.
* @throws NullPointerException if other is null
*/
default FailableLongPredicate<E> and(final FailableLongPredicate<E> other) {
Objects.requireNonNull(other);
return t -> test(t) && other.test(t);
}
/**
* Returns a predicate that negates this predicate.
*
* @return a predicate that negates this predicate.
*/
default FailableLongPredicate<E> negate() {
return t -> !test(t);
}
/**
* Returns a composed {@link FailableLongPredicate} like {@link LongPredicate#and(LongPredicate)}.
*
* @param other a predicate that will be logically-ORed with this predicate.
* @return a composed {@link FailableLongPredicate} like {@link LongPredicate#and(LongPredicate)}.
* @throws NullPointerException if other is null
*/
default FailableLongPredicate<E> or(final FailableLongPredicate<E> other) {
Objects.requireNonNull(other);
return t -> test(t) || other.test(t);
}
/**
* Tests the predicate.
*
* @param value the parameter for the predicate to accept.
* @return {@code true} if the input argument matches the predicate, {@code false} otherwise.
* @throws E Thrown when the consumer fails.
*/
boolean test(long value) throws E;
}
| FailableLongPredicate |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/ModelInitializerTests.java | {
"start": 9058,
"end": 10249
} | class ____ {
private @Nullable Validator validator;
void setValidator(Validator validator) {
this.validator = validator;
}
@InitBinder
public void initDataBinder(WebDataBinder dataBinder) {
if (this.validator != null) {
dataBinder.addValidators(this.validator);
}
}
@ModelAttribute("bean")
public TestBean returnValue() {
return new TestBean("Bean");
}
@ModelAttribute("monoBean")
public Mono<TestBean> returnValueMono() {
return Mono.just(new TestBean("Mono Bean"));
}
@ModelAttribute("singleBean")
public Single<TestBean> returnValueSingle() {
return Single.just(new TestBean("Single Bean"));
}
@ModelAttribute
public void voidMethodBean(Model model) {
model.addAttribute("voidMethodBean", new TestBean("Void Method Bean"));
}
@ModelAttribute
public Mono<Void> voidMonoMethodBean(Model model) {
return Mono.just("Void Mono Method Bean")
.doOnNext(name -> model.addAttribute("voidMonoMethodBean", new TestBean(name)))
.then();
}
@GetMapping
public void handleGet() {}
@PostMapping
public void handlePost(@ModelAttribute("missing-bean") TestBean testBean) {}
}
private static | TestController |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java | {
"start": 2082,
"end": 9404
} | class ____ {
@VisibleForTesting
static final String NA = "N/A";
protected long submitTime;
protected long startTime;
protected long finishTime;
protected String id;
protected String name;
protected String queue;
protected String user;
protected String state;
protected int mapsTotal;
protected int mapsCompleted;
protected int reducesTotal;
protected int reducesCompleted;
protected Boolean uberized;
protected String diagnostics;
protected Long avgMapTime;
protected Long avgReduceTime;
protected Long avgShuffleTime;
protected Long avgMergeTime;
protected Integer failedReduceAttempts;
protected Integer killedReduceAttempts;
protected Integer successfulReduceAttempts;
protected Integer failedMapAttempts;
protected Integer killedMapAttempts;
protected Integer successfulMapAttempts;
protected ArrayList<ConfEntryInfo> acls;
@XmlTransient
protected int numMaps;
@XmlTransient
protected int numReduces;
public JobInfo() {
}
public JobInfo(Job job) {
this.id = MRApps.toString(job.getID());
JobReport report = job.getReport();
this.mapsTotal = job.getTotalMaps();
this.mapsCompleted = job.getCompletedMaps();
this.reducesTotal = job.getTotalReduces();
this.reducesCompleted = job.getCompletedReduces();
this.submitTime = report.getSubmitTime();
this.startTime = report.getStartTime();
this.finishTime = report.getFinishTime();
this.name = job.getName().toString();
this.queue = job.getQueueName();
this.user = job.getUserName();
this.state = job.getState().toString();
this.acls = new ArrayList<ConfEntryInfo>();
if (job instanceof CompletedJob) {
avgMapTime = 0l;
avgReduceTime = 0l;
avgShuffleTime = 0l;
avgMergeTime = 0l;
failedReduceAttempts = 0;
killedReduceAttempts = 0;
successfulReduceAttempts = 0;
failedMapAttempts = 0;
killedMapAttempts = 0;
successfulMapAttempts = 0;
countTasksAndAttempts(job);
this.uberized = job.isUber();
this.diagnostics = "";
List<String> diagnostics = job.getDiagnostics();
if (diagnostics != null && !diagnostics.isEmpty()) {
StringBuilder b = new StringBuilder();
for (String diag : diagnostics) {
b.append(diag);
}
this.diagnostics = b.toString();
}
Map<JobACL, AccessControlList> allacls = job.getJobACLs();
if (allacls != null) {
for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
this.acls.add(new ConfEntryInfo(entry.getKey().getAclName(), entry
.getValue().getAclString()));
}
}
}
}
public long getNumMaps() {
return numMaps;
}
public long getNumReduces() {
return numReduces;
}
public Long getAvgMapTime() {
return avgMapTime;
}
public Long getAvgReduceTime() {
return avgReduceTime;
}
public Long getAvgShuffleTime() {
return avgShuffleTime;
}
public Long getAvgMergeTime() {
return avgMergeTime;
}
public Integer getFailedReduceAttempts() {
return failedReduceAttempts;
}
public Integer getKilledReduceAttempts() {
return killedReduceAttempts;
}
public Integer getSuccessfulReduceAttempts() {
return successfulReduceAttempts;
}
public Integer getFailedMapAttempts() {
return failedMapAttempts;
}
public Integer getKilledMapAttempts() {
return killedMapAttempts;
}
public Integer getSuccessfulMapAttempts() {
return successfulMapAttempts;
}
public ArrayList<ConfEntryInfo> getAcls() {
return acls;
}
public int getReducesCompleted() {
return this.reducesCompleted;
}
public int getReducesTotal() {
return this.reducesTotal;
}
public int getMapsCompleted() {
return this.mapsCompleted;
}
public int getMapsTotal() {
return this.mapsTotal;
}
public String getState() {
return this.state;
}
public String getUserName() {
return this.user;
}
public String getName() {
return this.name;
}
public String getQueueName() {
return this.queue;
}
public String getId() {
return this.id;
}
public long getSubmitTime() {
return this.submitTime;
}
public long getStartTime() {
return this.startTime;
}
public String getFormattedStartTimeStr(final DateFormat dateFormat) {
String str = NA;
if (startTime >= 0) {
str = dateFormat.format(new Date(startTime));
}
return str;
}
public String getStartTimeStr() {
String str = NA;
if (startTime >= 0) {
str = new Date(startTime).toString();
}
return str;
}
public long getFinishTime() {
return this.finishTime;
}
public Boolean isUber() {
return this.uberized;
}
public String getDiagnostics() {
return this.diagnostics;
}
/**
* Go through a job and update the member variables with counts for
* information to output in the page.
*
* @param job
* the job to get counts for.
*/
private void countTasksAndAttempts(Job job) {
numReduces = 0;
numMaps = 0;
final Map<TaskId, Task> tasks = job.getTasks();
if (tasks == null) {
return;
}
for (Task task : tasks.values()) {
// Attempts counts
Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
int successful, failed, killed;
for (TaskAttempt attempt : attempts.values()) {
successful = 0;
failed = 0;
killed = 0;
if (TaskAttemptStateUI.NEW.correspondsTo(attempt.getState())) {
// Do Nothing
} else if (TaskAttemptStateUI.RUNNING.correspondsTo(attempt.getState())) {
// Do Nothing
} else if (TaskAttemptStateUI.SUCCESSFUL.correspondsTo(attempt
.getState())) {
++successful;
} else if (TaskAttemptStateUI.FAILED.correspondsTo(attempt.getState())) {
++failed;
} else if (TaskAttemptStateUI.KILLED.correspondsTo(attempt.getState())) {
++killed;
}
switch (task.getType()) {
case MAP:
successfulMapAttempts += successful;
failedMapAttempts += failed;
killedMapAttempts += killed;
if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
numMaps++;
avgMapTime += (attempt.getFinishTime() - attempt.getLaunchTime());
}
break;
case REDUCE:
successfulReduceAttempts += successful;
failedReduceAttempts += failed;
killedReduceAttempts += killed;
if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
numReduces++;
avgShuffleTime += (attempt.getShuffleFinishTime() - attempt
.getLaunchTime());
avgMergeTime += attempt.getSortFinishTime()
- attempt.getShuffleFinishTime();
avgReduceTime += (attempt.getFinishTime() - attempt
.getSortFinishTime());
}
break;
}
}
}
if (numMaps > 0) {
avgMapTime = avgMapTime / numMaps;
}
if (numReduces > 0) {
avgReduceTime = avgReduceTime / numReduces;
avgShuffleTime = avgShuffleTime / numReduces;
avgMergeTime = avgMergeTime / numReduces;
}
}
}
| JobInfo |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java | {
"start": 131288,
"end": 142697
} | class ____
extends RMContainerAllocator {
public RMContainerAllocatorForFinishedContainer(ClientService clientService,
AppContext context, AMPreemptionPolicy preemptionPolicy) {
super(clientService, context, preemptionPolicy);
}
@Override
protected AssignedRequests createAssignedRequests() {
AssignedRequests assignedReqs = mock(AssignedRequests.class);
TaskAttemptId taskAttempt = mock(TaskAttemptId.class);
when(assignedReqs.get(any(ContainerId.class))).thenReturn(taskAttempt);
return assignedReqs;
}
}
@Test
public void testAvoidAskMoreReducersWhenReducerPreemptionIsRequired()
throws Exception {
LOG.info("Running testAvoidAskMoreReducersWhenReducerPreemptionIsRequired");
Configuration conf = new Configuration();
MyResourceManager rm = new MyResourceManager(conf);
rm.start();
// Submit the application
RMApp app = MockRMAppSubmitter.submitWithMemory(1024, rm);
rm.drainEvents();
MockNM amNodeManager = rm.registerNode("amNM:1234", 1260);
amNodeManager.nodeHeartbeat(true);
rm.drainEvents();
ApplicationAttemptId appAttemptId = app.getCurrentAppAttempt()
.getAppAttemptId();
rm.sendAMLaunched(appAttemptId);
rm.drainEvents();
JobId jobId = MRBuilderUtils.newJobId(appAttemptId.getApplicationId(), 0);
Job mockJob = mock(Job.class);
when(mockJob.getReport()).thenReturn(
MRBuilderUtils.newJobReport(jobId, "job", "user", JobState.RUNNING, 0,
0, 0, 0, 0, 0, 0, "jobfile", null, false, ""));
MyContainerAllocator allocator = new MyContainerAllocator(rm, conf,
appAttemptId, mockJob);
// Use a controlled clock to advance time for test.
ControlledClock clock = (ControlledClock)allocator.getContext().getClock();
clock.setTime(System.currentTimeMillis());
// Register nodes to RM.
MockNM nodeManager = rm.registerNode("h1:1234", 1024);
rm.drainEvents();
// Request 2 maps and 1 reducer(sone on nodes which are not registered).
ContainerRequestEvent event1 =
ContainerRequestCreator.createRequest(jobId, 1,
Resource.newInstance(1024, 1),
new String[]{"h1"});
allocator.sendRequest(event1);
ContainerRequestEvent event2 =
ContainerRequestCreator.createRequest(jobId, 2,
Resource.newInstance(1024, 1),
new String[]{"h2"});
allocator.sendRequest(event2);
ContainerRequestEvent event3 =
createRequest(jobId, 3, Resource.newInstance(1024, 1),
new String[]{"h2"}, false, true);
allocator.sendRequest(event3);
// This will tell the scheduler about the requests but there will be no
// allocations as nodes are not added.
allocator.schedule();
rm.drainEvents();
// Advance clock so that maps can be considered as hanging.
clock.setTime(System.currentTimeMillis() + 500000L);
// Request for another reducer on h3 which has not registered.
ContainerRequestEvent event4 =
createRequest(jobId, 4, Resource.newInstance(1024, 1),
new String[]{"h3"}, false, true);
allocator.sendRequest(event4);
allocator.schedule();
rm.drainEvents();
// Update resources in scheduler through node heartbeat from h1.
nodeManager.nodeHeartbeat(true);
rm.drainEvents();
rm.getMyFifoScheduler().forceResourceLimit(Resource.newInstance(1024, 1));
allocator.schedule();
rm.drainEvents();
// One map is assigned.
assertEquals(1, allocator.getAssignedRequests().maps.size());
// Send deallocate request for map so that no maps are assigned after this.
ContainerAllocatorEvent deallocate = createDeallocateEvent(jobId, 1, false);
allocator.sendDeallocate(deallocate);
// Now one reducer should be scheduled and one should be pending.
assertEquals(1, allocator.getScheduledRequests().reduces.size());
assertEquals(1, allocator.getNumOfPendingReduces());
// No map should be assigned and one should be scheduled.
assertEquals(1, allocator.getScheduledRequests().maps.size());
assertEquals(0, allocator.getAssignedRequests().maps.size());
assertEquals(6, allocator.getAsk().size());
for (ResourceRequest req : allocator.getAsk()) {
boolean isReduce =
req.getPriority().equals(RMContainerAllocator.PRIORITY_REDUCE);
if (isReduce) {
// 1 reducer each asked on h2, * and default-rack
assertTrue((req.getResourceName().equals("*") ||
req.getResourceName().equals("/default-rack") ||
req.getResourceName().equals("h2")) && req.getNumContainers() == 1);
} else { //map
// 0 mappers asked on h1 and 1 each on * and default-rack
assertTrue(((req.getResourceName().equals("*") ||
req.getResourceName().equals("/default-rack")) &&
req.getNumContainers() == 1) || (req.getResourceName().equals("h1")
&& req.getNumContainers() == 0));
}
}
clock.setTime(System.currentTimeMillis() + 500000L + 10 * 60 * 1000);
// On next allocate request to scheduler, headroom reported will be 2048.
rm.getMyFifoScheduler().forceResourceLimit(Resource.newInstance(2048, 0));
allocator.schedule();
rm.drainEvents();
// After allocate response from scheduler, all scheduled reduces are ramped
// down and move to pending. 3 asks are also updated with 0 containers to
// indicate ramping down of reduces to scheduler.
assertEquals(0, allocator.getScheduledRequests().reduces.size());
assertEquals(2, allocator.getNumOfPendingReduces());
assertEquals(3, allocator.getAsk().size());
for (ResourceRequest req : allocator.getAsk()) {
assertEquals(
RMContainerAllocator.PRIORITY_REDUCE, req.getPriority());
assertTrue(req.getResourceName().equals("*") ||
req.getResourceName().equals("/default-rack") ||
req.getResourceName().equals("h2"));
assertEquals(Resource.newInstance(1024, 1), req.getCapability());
assertEquals(0, req.getNumContainers());
}
}
/**
* Tests whether scheduled reducers are excluded from headroom while
* calculating headroom.
*/
@Test
public void testExcludeSchedReducesFromHeadroom() throws Exception {
LOG.info("Running testExcludeSchedReducesFromHeadroom");
Configuration conf = new Configuration();
conf.setInt(MRJobConfig.MR_JOB_REDUCER_UNCONDITIONAL_PREEMPT_DELAY_SEC, -1);
MyResourceManager rm = new MyResourceManager(conf);
rm.start();
// Submit the application
RMApp app = MockRMAppSubmitter.submitWithMemory(1024, rm);
rm.drainEvents();
MockNM amNodeManager = rm.registerNode("amNM:1234", 1260);
amNodeManager.nodeHeartbeat(true);
rm.drainEvents();
ApplicationAttemptId appAttemptId = app.getCurrentAppAttempt()
.getAppAttemptId();
rm.sendAMLaunched(appAttemptId);
rm.drainEvents();
JobId jobId = MRBuilderUtils.newJobId(appAttemptId.getApplicationId(), 0);
Job mockJob = mock(Job.class);
when(mockJob.getReport()).thenReturn(
MRBuilderUtils.newJobReport(jobId, "job", "user", JobState.RUNNING, 0,
0, 0, 0, 0, 0, 0, "jobfile", null, false, ""));
Task mockTask = mock(Task.class);
TaskAttempt mockTaskAttempt = mock(TaskAttempt.class);
when(mockJob.getTask((TaskId)any())).thenReturn(mockTask);
when(mockTask.getAttempt((TaskAttemptId)any())).thenReturn(mockTaskAttempt);
when(mockTaskAttempt.getProgress()).thenReturn(0.01f);
MyContainerAllocator allocator = new MyContainerAllocator(rm, conf,
appAttemptId, mockJob);
MockNM nodeManager = rm.registerNode("h1:1234", 4096);
rm.drainEvents();
// Register nodes to RM.
MockNM nodeManager2 = rm.registerNode("h2:1234", 1024);
rm.drainEvents();
// Request 2 maps and 1 reducer(sone on nodes which are not registered).
ContainerRequestEvent event1 =
ContainerRequestCreator.createRequest(jobId, 1,
Resource.newInstance(1024, 1),
new String[]{"h1"});
allocator.sendRequest(event1);
ContainerRequestEvent event2 =
ContainerRequestCreator.createRequest(jobId, 2,
Resource.newInstance(1024, 1),
new String[]{"h2"});
allocator.sendRequest(event2);
ContainerRequestEvent event3 =
createRequest(jobId, 3,
Resource.newInstance(1024, 1),
new String[]{"h1"}, false, true);
allocator.sendRequest(event3);
// This will tell the scheduler about the requests but there will be no
// allocations as nodes are not added.
allocator.schedule();
rm.drainEvents();
// Request for another reducer on h3 which has not registered.
ContainerRequestEvent event4 =
createRequest(jobId, 4, Resource.newInstance(1024, 1),
new String[] {"h3"}, false, true);
allocator.sendRequest(event4);
allocator.schedule();
rm.drainEvents();
// Update resources in scheduler through node heartbeat from h1.
nodeManager.nodeHeartbeat(true);
rm.drainEvents();
rm.getMyFifoScheduler().forceResourceLimit(Resource.newInstance(3072, 3));
allocator.schedule();
rm.drainEvents();
// Two maps are assigned.
assertEquals(2, allocator.getAssignedRequests().maps.size());
// Send deallocate request for map so that no maps are assigned after this.
ContainerAllocatorEvent deallocate1 = createDeallocateEvent(jobId, 1, false);
allocator.sendDeallocate(deallocate1);
ContainerAllocatorEvent deallocate2 = createDeallocateEvent(jobId, 2, false);
allocator.sendDeallocate(deallocate2);
// No map should be assigned.
assertEquals(0, allocator.getAssignedRequests().maps.size());
nodeManager.nodeHeartbeat(true);
rm.drainEvents();
rm.getMyFifoScheduler().forceResourceLimit(Resource.newInstance(1024, 1));
allocator.schedule();
rm.drainEvents();
// h2 heartbeats.
nodeManager2.nodeHeartbeat(true);
rm.drainEvents();
// Send request for one more mapper.
ContainerRequestEvent event5 =
ContainerRequestCreator.createRequest(jobId, 5,
Resource.newInstance(1024, 1),
new String[]{"h1"});
allocator.sendRequest(event5);
rm.getMyFifoScheduler().forceResourceLimit(Resource.newInstance(2048, 2));
allocator.schedule();
rm.drainEvents();
// One reducer is assigned and one map is scheduled
assertEquals(1, allocator.getScheduledRequests().maps.size());
assertEquals(1, allocator.getAssignedRequests().reduces.size());
// Headroom enough to run a mapper if headroom is taken as it is but wont be
// enough if scheduled reducers resources are deducted.
rm.getMyFifoScheduler().forceResourceLimit(Resource.newInstance(1260, 2));
allocator.schedule();
rm.drainEvents();
// After allocate response, the one assigned reducer is preempted and killed
assertEquals(1, MyContainerAllocator.getTaskAttemptKillEvents().size());
assertEquals(RMContainerAllocator.RAMPDOWN_DIAGNOSTIC,
MyContainerAllocator.getTaskAttemptKillEvents().get(0).getMessage());
assertEquals(1, allocator.getNumOfPendingReduces());
}
private static | RMContainerAllocatorForFinishedContainer |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/Mockito.java | {
"start": 117955,
"end": 118390
} | interface ____
* want to mock.
* @return mock controller
* @since 5.21.0
*/
@SafeVarargs
public static <T> MockedStatic<T> mockStatic(
@SuppressWarnings("rawtypes") Answer defaultAnswer, T... reified) {
return mockStatic(withSettings().defaultAnswer(defaultAnswer), reified);
}
/**
* Creates a thread-local mock controller for all static methods of the given | you |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/Attribute.java | {
"start": 20890,
"end": 21186
} | class ____ flags and
* signature, in the given byte vector. This includes the 6 header bytes (attribute_name_index and
* attribute_length) per attribute.
*
* @param symbolTable where the constants used in the attributes must be stored.
* @param accessFlags some field, method or | access |
java | dropwizard__dropwizard | dropwizard-hibernate/src/test/java/io/dropwizard/hibernate/JerseyIntegrationTest.java | {
"start": 1905,
"end": 2387
} | class ____ extends AbstractDAO<Person> {
public PersonDAO(SessionFactory sessionFactory) {
super(sessionFactory);
}
public Optional<Person> findByName(String name) {
return Optional.ofNullable(get(name));
}
@Override
public Person persist(Person entity) {
return super.persist(entity);
}
}
@Path("/people/{name}")
@Produces(MediaType.APPLICATION_JSON)
public static | PersonDAO |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sql/exec/HqlDeleteExecutionTests.java | {
"start": 1134,
"end": 6164
} | class ____ {
@Test
public void testSimpleDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete BasicEntity" ).executeUpdate()
);
}
@Test
public void testSimpleRestrictedDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete BasicEntity where data = :filter" )
.setParameter( "filter", "abc" )
.executeUpdate()
);
}
@Test
public void testSimpleMultiTableDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete SimpleEntityWithSecondaryTables" )
.executeUpdate()
);
}
@Test
public void testSimpleMultiTableRestrictedDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete SimpleEntityWithSecondaryTables where data = :filter" )
.setParameter( "filter", "abc" )
.executeUpdate()
);
}
@Test
public void testSimpleMultiTableRestrictedDeleteResults(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist(
new SecondaryTableTests.SimpleEntityWithSecondaryTables(
1,
"first",
Date.from( Instant.now() ),
"1 - cfdjdjvokfobkofbvovoijjbvoijofjdbiof"
)
);
session.persist(
new SecondaryTableTests.SimpleEntityWithSecondaryTables(
2,
"second",
Date.from( Instant.now() ),
"2 - s3o2rj9 fcojv9j gj9jfv943jv29j9j4"
)
);
session.persist(
new SecondaryTableTests.SimpleEntityWithSecondaryTables(
3,
"third",
Date.from( Instant.now() ),
"abc"
)
);
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete SimpleEntityWithSecondaryTables where data = :filter" )
.setParameter( "filter", "abc" )
.executeUpdate();
assertThat( rows, is ( 1 ) );
}
);
scope.inTransaction(
session -> session.createQuery( "delete SimpleEntityWithSecondaryTables" ).executeUpdate()
);
}
@Test
public void testJoinedSubclassRootDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete Customer" ).executeUpdate()
);
}
@Test
public void testJoinedSubclassRootRestrictedDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete Customer where name = 'abc'" ).executeUpdate()
);
}
@Test
public void testJoinedSubclassRootRestrictedDeleteResults(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist(
new JoinedInheritanceTest.ForeignCustomer( 1, "Adventures Abroad", "123" )
);
session.persist(
new JoinedInheritanceTest.DomesticCustomer( 2, "Domestic Wonders", "456" )
);
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete Customer where name = 'Adventures Abroad'" ).executeUpdate();
assertThat( rows, is( 1 ) );
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete from Customer" ).executeUpdate();
assertThat( rows, is( 1 ) );
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete from Customer" ).executeUpdate();
assertThat( rows, is( 0 ) );
}
);
}
@Test
public void testJoinedSubclassLeafDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete ForeignCustomer" ).executeUpdate()
);
scope.inTransaction(
session -> session.createQuery( "delete DomesticCustomer" ).executeUpdate()
);
}
@Test
public void testJoinedSubclassLeafRestrictedDelete(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.createQuery( "delete ForeignCustomer where name = 'abc'" ).executeUpdate()
);
scope.inTransaction(
session -> session.createQuery( "delete DomesticCustomer where name = 'abc'" ).executeUpdate()
);
}
@Test
public void testJoinedSubclassLeafRestrictedDeleteResult(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist(
new JoinedInheritanceTest.ForeignCustomer( 1, "Adventures Abroad", "123" )
);
session.persist(
new JoinedInheritanceTest.DomesticCustomer( 2, "Domestic Wonders", "456" )
);
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete ForeignCustomer where name = 'Adventures Abroad'" )
.executeUpdate();
assertThat( rows, is( 1 ) );
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete DomesticCustomer where name = 'Domestic Wonders'" )
.executeUpdate();
assertThat( rows, is( 1 ) );
}
);
scope.inTransaction(
session -> {
final int rows = session.createQuery( "delete Customer" )
.executeUpdate();
assertThat( rows, is( 0 ) );
}
);
}
}
| HqlDeleteExecutionTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryIndexFieldData.java | {
"start": 2945,
"end": 3502
} | class ____ implements IndexFieldData.Builder {
private final String name;
private final ValuesSourceType valuesSourceType;
public Builder(String name, ValuesSourceType valuesSourceType) {
this.name = name;
this.valuesSourceType = valuesSourceType;
}
@Override
public IndexFieldData<?> build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
// Ignore breaker
return new BytesBinaryIndexFieldData(name, valuesSourceType);
}
}
}
| Builder |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-expression/src/main/java/org/apache/maven/plugin/coreit/EvalMojo.java | {
"start": 2813,
"end": 6884
} | class ____ extends AbstractMojo {
/**
* The project's base directory, used for manual path translation.
*/
@Parameter(defaultValue = "${basedir", readonly = true)
private File basedir;
/**
* The path to the output file for the properties with the expression values. For each expression given by the
* parameter {@link #expressions}, a similar named properties key will be used to save the expression value. If an
* expression evaluated to <code>null</code>, there will be no corresponding key in the properties file.
*/
@Parameter(property = "expression.outputFile")
private File outputFile;
/**
* The set of expressions to evaluate.
*/
@Parameter
private String[] expressions;
/**
* The comma separated set of expressions to evaluate.
*/
@Parameter(property = "expression.expressions")
private String expressionList;
/**
* The current Maven project against which expressions are evaluated.
*/
@Parameter(defaultValue = "${project}", readonly = true)
private Object project;
/**
* The forked Maven project against which expressions are evaluated.
*/
@Parameter(defaultValue = "${executedProject}", readonly = true)
private Object executedProject;
/**
* The merged user/global settings of the current build against which expressions are evaluated.
*/
@Parameter(defaultValue = "${settings}", readonly = true)
private Object settings;
/**
* The session context of the current build against which expressions are evaluated.
*/
@Parameter(defaultValue = "${session}", readonly = true)
private Object session;
/**
* The local repository of the current build against which expressions are evaluated.
*/
@Parameter(defaultValue = "${session.request.localRepositoryPath}", readonly = true)
private Object localRepositoryBasedir;
/**
* Runs this mojo.
*
* @throws MojoExecutionException If the output file could not be created.
* @throws MojoFailureException If the output file has not been set.
*/
public void execute() throws MojoExecutionException, MojoFailureException {
if (outputFile == null) {
throw new MojoFailureException("Path name for output file has not been specified");
}
/*
* NOTE: We don't want to test path translation here.
*/
if (!outputFile.isAbsolute()) {
outputFile = new File(basedir, outputFile.getPath()).getAbsoluteFile();
}
getLog().info("[MAVEN-CORE-IT-LOG] Creating output file: " + outputFile);
Properties expressionProperties = new Properties();
if (expressionList != null && expressionList.length() > 0) {
expressions = expressionList.split(",");
}
if (expressions != null && expressions.length > 0) {
Map contexts = new HashMap();
contexts.put("project", project);
contexts.put("executedProject", executedProject);
contexts.put("pom", project);
contexts.put("settings", settings);
contexts.put("session", session);
contexts.put("localRepositoryBasedir", localRepositoryBasedir);
for (String expression : expressions) {
Map values = ExpressionUtil.evaluate(expression, contexts);
for (Object key : values.keySet()) {
Object value = values.get(key);
PropertyUtil.store(expressionProperties, key.toString().replace('/', '.'), value);
}
}
}
try {
PropertyUtil.write(expressionProperties, outputFile);
} catch (IOException e) {
throw new MojoExecutionException("Output file could not be created: " + outputFile, e);
}
getLog().info("[MAVEN-CORE-IT-LOG] Created output file: " + outputFile);
}
public void setOutputFile(File outputFile) {
this.outputFile = outputFile;
}
}
| EvalMojo |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/ParsedHistogramConverter.java | {
"start": 1127,
"end": 6069
} | interface ____ be more expensive and complex than just duplicating the logic.
List<Double> centroids = new ArrayList<>(); // sorted from descending to ascending
List<Long> counts = new ArrayList<>();
List<IndexWithCount> neg = expHisto.negativeBuckets();
for (int i = neg.size() - 1; i >= 0; i--) {
appendBucketCentroid(centroids, counts, neg.get(i), expHisto.scale(), -1);
}
if (expHisto.zeroCount() > 0) {
centroids.add(0.0);
counts.add(expHisto.zeroCount());
}
for (IndexWithCount positiveBucket : expHisto.positiveBuckets()) {
appendBucketCentroid(centroids, counts, positiveBucket, expHisto.scale(), 1);
}
assert centroids.size() == counts.size();
assert centroids.stream().sorted().toList().equals(centroids);
return new HistogramParser.ParsedHistogram(centroids, counts);
}
/**
* Converts t-digest histograms to exponential histograms, trying to do the inverse
* of {@link #exponentialToTDigest(ExponentialHistogramParser.ParsedExponentialHistogram)}
* as accurately as possible.
* <br>
* On a round-trip conversion from exponential histogram to T-Digest and back,
* the bucket centers will be preserved, however the bucket widths are lost.
* The conversion algorithm works by generating tiny buckets (scale set to MAX_SCALE)
* containing the T-Digest centroids.
*
* @param tDigest the t-digest histogram to convert
* @return the resulting exponential histogram
*/
public static ExponentialHistogramParser.ParsedExponentialHistogram tDigestToExponential(HistogramParser.ParsedHistogram tDigest) {
List<Double> centroids = tDigest.values();
List<Long> counts = tDigest.counts();
int numNegativeCentroids = 0;
while (numNegativeCentroids < centroids.size() && centroids.get(numNegativeCentroids) < 0) {
numNegativeCentroids++;
}
// iterate negative centroids from closest to zero to furthest away,
// which corresponds to ascending exponential histogram bucket indices
int scale = MAX_SCALE;
List<IndexWithCount> negativeBuckets = new ArrayList<>();
for (int i = numNegativeCentroids - 1; i >= 0; i--) {
double centroid = centroids.get(i);
long count = counts.get(i);
assert centroid < 0;
appendCentroidWithCountAsBucket(centroid, count, scale, negativeBuckets);
}
long zeroCount = 0;
int firstPositiveIndex = numNegativeCentroids;
if (firstPositiveIndex < centroids.size() && centroids.get(firstPositiveIndex) == 0) {
// we have a zero-centroid, which we'll map to the zero bucket
zeroCount = counts.get(firstPositiveIndex);
firstPositiveIndex++;
}
List<IndexWithCount> positiveBuckets = new ArrayList<>();
for (int i = firstPositiveIndex; i < centroids.size(); i++) {
double centroid = centroids.get(i);
long count = counts.get(i);
assert centroid > 0;
appendCentroidWithCountAsBucket(centroid, count, scale, positiveBuckets);
}
return new ExponentialHistogramParser.ParsedExponentialHistogram(
scale,
0.0,
zeroCount,
negativeBuckets,
positiveBuckets,
null, // sum, min, max will be estimated
null,
null
);
}
private static void appendCentroidWithCountAsBucket(double centroid, long count, int scale, List<IndexWithCount> outputBuckets) {
if (count == 0) {
return; // zero counts are allowed in T-Digests but not in exponential histograms
}
long index = ExponentialScaleUtils.computeIndex(centroid, scale);
assert outputBuckets.isEmpty() || outputBuckets.getLast().index() < index;
outputBuckets.add(new IndexWithCount(index, count));
}
private static void appendBucketCentroid(
List<Double> centroids,
List<Long> counts,
IndexWithCount expHistoBucket,
int scale,
int sign
) {
double lowerBound = ExponentialScaleUtils.getLowerBucketBoundary(expHistoBucket.index(), scale);
double upperBound = ExponentialScaleUtils.getUpperBucketBoundary(expHistoBucket.index(), scale);
double center = sign * (lowerBound + upperBound) / 2.0;
// the index + scale representation is higher precision than the centroid representation,
// so we can have multiple exp histogram buckets map to the same centroid.
if (centroids.isEmpty() == false && centroids.getLast() == center) {
counts.add(counts.removeLast() + expHistoBucket.count());
} else {
centroids.add(center);
counts.add(expHistoBucket.count());
}
}
}
| would |
java | spring-projects__spring-framework | spring-aop/src/test/java/org/springframework/aop/support/AopUtilsTests.java | {
"start": 1656,
"end": 2244
} | class ____ extends StaticMethodMatcherPointcut {
@Override
public boolean matches(Method method, @Nullable Class<?> clazzy) {
return false;
}
}
Pointcut no = new TestPointcut();
assertThat(AopUtils.canApply(no, Object.class)).isFalse();
}
@Test
void testPointcutAlwaysApplies() {
assertThat(AopUtils.canApply(new DefaultPointcutAdvisor(new NopInterceptor()), Object.class)).isTrue();
assertThat(AopUtils.canApply(new DefaultPointcutAdvisor(new NopInterceptor()), TestBean.class)).isTrue();
}
@Test
void testPointcutAppliesToOneMethodOnObject() {
| TestPointcut |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestPathOutputCommitterFactory.java | {
"start": 4335,
"end": 14214
} | class
____.set(COMMITTER_FACTORY_CLASS, "unknown");
intercept(RuntimeException.class,
() -> getCommitterFactory(HDFS_PATH, conf));
}
/**
* Verify that if the committer output path is null, you get back
* a FileOutputCommitter with null output & work paths.
*/
@Test
public void testCommitterNullOutputPath() throws Throwable {
// bind http to schema
Configuration conf = newBondedConfiguration();
// then ask committers for a null path
FileOutputCommitter committer = createCommitter(
FileOutputCommitterFactory.class,
FileOutputCommitter.class,
null, conf);
assertNull(committer.getOutputPath());
assertNull(committer.getWorkPath());
}
/**
* Verify that if you explicitly name a committer, that takes priority
* over any filesystem committer.
*/
@Test
public void testNamedCommitterFactory() throws Throwable {
Configuration conf = new Configuration();
// set up for the schema factory
conf.set(COMMITTER_FACTORY_CLASS, NAMED_COMMITTER_FACTORY);
conf.set(NAMED_COMMITTER_CLASS, SimpleCommitter.class.getName());
SimpleCommitter sc = createCommitter(
NamedCommitterFactory.class,
SimpleCommitter.class, HDFS_PATH, conf);
assertEquals(HDFS_PATH, sc.getOutputPath(), "Wrong output path from " + sc);
}
/**
* Verify that if you explicitly name a committer and there's no
* path, the committer is picked up.
*/
@Test
public void testNamedCommitterFactoryNullPath() throws Throwable {
Configuration conf = new Configuration();
// set up for the schema factory
conf.set(COMMITTER_FACTORY_CLASS, NAMED_COMMITTER_FACTORY);
conf.set(NAMED_COMMITTER_CLASS, SimpleCommitter.class.getName());
SimpleCommitter sc = createCommitter(
NamedCommitterFactory.class,
SimpleCommitter.class,
null, conf);
assertNull(sc.getOutputPath());
}
/**
* Verify that if you explicitly name a committer and there's no
* path, the committer is picked up.
*/
@Test
public void testNamedCommitterNullPath() throws Throwable {
Configuration conf = new Configuration();
// set up for the schema factory
conf.set(COMMITTER_FACTORY_CLASS, NAMED_COMMITTER_FACTORY);
conf.set(NAMED_COMMITTER_CLASS, SimpleCommitter.class.getName());
SimpleCommitter sc = createCommitter(
SimpleCommitter.class,
null, taskAttempt(conf));
assertNull(sc.getOutputPath());
}
/**
* Create a factory then a committer, validating the type of both.
* @param <T> type of factory
* @param <U> type of committer
* @param factoryClass expected factory class
* @param committerClass expected committer class
* @param path output path (may be null)
* @param conf configuration
* @return the committer
* @throws IOException failure to create
*/
private <T extends PathOutputCommitterFactory, U extends PathOutputCommitter>
U createCommitter(Class<T> factoryClass,
Class<U> committerClass,
Path path,
Configuration conf) throws IOException {
T f = createCommitterFactory(factoryClass, path, conf);
PathOutputCommitter committer = f.createOutputCommitter(path,
taskAttempt(conf));
assertEquals(committerClass, committer.getClass(),
" Wrong committer for path " + path + " from factory " + f);
return (U) committer;
}
/**
* Create a committer from a task context, via
* {@link PathOutputCommitterFactory#createCommitter(Path, TaskAttemptContext)}.
* @param <U> type of committer
* @param committerClass expected committer class
* @param path output path (may be null)
* @param context task attempt context
* @return the committer
* @throws IOException failure to create
*/
private <U extends PathOutputCommitter> U createCommitter(
Class<U> committerClass,
Path path,
TaskAttemptContext context) throws IOException {
PathOutputCommitter committer = PathOutputCommitterFactory
.createCommitter(path, context);
assertEquals(committerClass, committer.getClass(),
" Wrong committer for path " + path);
return (U) committer;
}
/**
* Create a factory then a committer, validating its type.
* @param factoryClass expected factory class
* @param path output path (may be null)
* @param conf configuration
* @param <T> type of factory
* @return the factory
*/
private <T extends PathOutputCommitterFactory> T createCommitterFactory(
Class<T> factoryClass,
Path path,
Configuration conf) {
PathOutputCommitterFactory factory = getCommitterFactory(path, conf);
assertEquals(factoryClass, factory.getClass(),
" Wrong factory for path " + path);
return (T)factory;
}
/**
* Create a new task attempt context.
* @param conf config
* @return a new context
*/
private TaskAttemptContext taskAttempt(Configuration conf) {
return new TaskAttemptContextImpl(conf, taskAttemptID);
}
/**
* Verify that if you explicitly name a committer, that takes priority
* over any filesystem committer.
*/
@Test
public void testFileOutputCommitterFactory() throws Throwable {
Configuration conf = new Configuration();
// set up for the schema factory
conf.set(COMMITTER_FACTORY_CLASS, FILE_COMMITTER_FACTORY);
conf.set(NAMED_COMMITTER_CLASS, SimpleCommitter.class.getName());
getCommitterFactory(HDFS_PATH, conf);
createCommitter(
FileOutputCommitterFactory.class,
FileOutputCommitter.class, null, conf);
}
/**
* Follow the entire committer chain down and create a new committer from
* the output format.
* @throws Throwable on a failure.
*/
@Test
public void testFileOutputFormatBinding() throws Throwable {
Configuration conf = newBondedConfiguration();
conf.set(FileOutputFormat.OUTDIR, HTTP_PATH.toUri().toString());
TextOutputFormat<String, String> off = new TextOutputFormat<>();
SimpleCommitter committer = (SimpleCommitter)
off.getOutputCommitter(taskAttempt(conf));
assertEquals(HTTP_PATH,
committer.getOutputPath(), "Wrong output path from "+ committer);
}
/**
* Follow the entire committer chain down and create a new committer from
* the output format.
* @throws Throwable on a failure.
*/
@Test
public void testFileOutputFormatBindingNoPath() throws Throwable {
Configuration conf = new Configuration();
conf.unset(FileOutputFormat.OUTDIR);
// set up for the schema factory
conf.set(COMMITTER_FACTORY_CLASS, NAMED_COMMITTER_FACTORY);
conf.set(NAMED_COMMITTER_CLASS, SimpleCommitter.class.getName());
httpToSimpleFactory(conf);
TextOutputFormat<String, String> off = new TextOutputFormat<>();
SimpleCommitter committer = (SimpleCommitter)
off.getOutputCommitter(taskAttempt(conf));
assertNull(committer.getOutputPath(), "Output path from "+ committer);
}
/**
* Bind the http schema CommitterFactory to {@link SimpleCommitterFactory}.
* @param conf config to patch
*/
private Configuration httpToSimpleFactory(Configuration conf) {
conf.set(HTTP_COMMITTER_FACTORY, SimpleCommitterFactory.class.getName());
return conf;
}
/**
* Create a configuration with the http schema bonded to the simple factory.
* @return a new, patched configuration
*/
private Configuration newBondedConfiguration() {
return httpToSimpleFactory(new Configuration());
}
/**
* Extract the (mandatory) cause of an exception.
* @param ex exception
* @param clazz expected class
* @return the cause, which will be of the expected type
* @throws AssertionError if there is a problem
*/
private <E extends Throwable> E verifyCauseClass(Throwable ex,
Class<E> clazz) throws AssertionError {
Throwable cause = ex.getCause();
if (cause == null) {
throw new AssertionError("No cause", ex);
}
if (!cause.getClass().equals(clazz)) {
throw new AssertionError("Wrong cause class", cause);
}
return (E)cause;
}
@Test
public void testBadCommitterFactory() throws Throwable {
expectFactoryConstructionFailure(HTTP_COMMITTER_FACTORY);
}
@Test
public void testBoundCommitterWithSchema() throws Throwable {
// this verifies that a bound committer relays to the underlying committer
Configuration conf = newBondedConfiguration();
TestPathOutputCommitter.TaskContext tac
= new TestPathOutputCommitter.TaskContext(conf);
BindingPathOutputCommitter committer
= new BindingPathOutputCommitter(HTTP_PATH, tac);
intercept(IOException.class, "setupJob",
() -> committer.setupJob(tac));
}
@Test
public void testBoundCommitterWithDefault() throws Throwable {
// this verifies that a bound committer relays to the underlying committer
Configuration conf = newBondedConfiguration();
TestPathOutputCommitter.TaskContext tac
= new TestPathOutputCommitter.TaskContext(conf);
BindingPathOutputCommitter committer
= new BindingPathOutputCommitter(HDFS_PATH, tac);
assertEquals(FileOutputCommitter.class,
committer.getCommitter().getClass());
}
/**
* Set the specific key to a string which is not a factory class; expect
* a failure.
* @param key key to set
* @throws Throwable on a failure
*/
@SuppressWarnings("ThrowableNotThrown")
protected void expectFactoryConstructionFailure(String key) throws Throwable {
Configuration conf = new Configuration();
conf.set(key, "Not a factory");
RuntimeException ex = intercept(RuntimeException.class,
() -> getCommitterFactory(HTTP_PATH, conf));
verifyCauseClass(
verifyCauseClass(ex, RuntimeException.class),
ClassNotFoundException.class);
}
/**
* A simple committer.
*/
public static final | conf |
java | grpc__grpc-java | grpclb/src/test/java/io/grpc/grpclb/SecretGrpclbNameResolverProviderTest.java | {
"start": 1195,
"end": 3456
} | class ____ {
private final SynchronizationContext syncContext = new SynchronizationContext(
new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
throw new AssertionError(e);
}
});
private final NameResolver.Args args = NameResolver.Args.newBuilder()
.setDefaultPort(8080)
.setProxyDetector(GrpcUtil.DEFAULT_PROXY_DETECTOR)
.setSynchronizationContext(syncContext)
.setServiceConfigParser(mock(ServiceConfigParser.class))
.setChannelLogger(mock(ChannelLogger.class))
.build();
private SecretGrpclbNameResolverProvider.Provider provider =
new SecretGrpclbNameResolverProvider.Provider();
@Test
public void isAvailable() {
assertThat(provider.isAvailable()).isTrue();
}
@Test
public void priority_shouldBeHigherThanDefaultDnsNameResolver() {
DnsNameResolverProvider defaultDnsNameResolver = new DnsNameResolverProvider();
assertThat(provider.priority()).isGreaterThan(defaultDnsNameResolver.priority());
}
@Test
public void newNameResolver() {
assertThat(provider.newNameResolver(URI.create("dns:///localhost:443"), args))
.isInstanceOf(GrpclbNameResolver.class);
assertThat(provider.newNameResolver(URI.create("notdns:///localhost:443"), args)).isNull();
}
@Test
public void invalidDnsName() throws Exception {
testInvalidUri(new URI("dns", null, "/[invalid]", null));
}
@Test
public void validIpv6() throws Exception {
testValidUri(new URI("dns", null, "/[::1]", null));
}
@Test
public void validDnsNameWithoutPort() throws Exception {
testValidUri(new URI("dns", null, "/foo.googleapis.com", null));
}
@Test
public void validDnsNameWithPort() throws Exception {
testValidUri(new URI("dns", null, "/foo.googleapis.com:456", null));
}
private void testInvalidUri(URI uri) {
try {
provider.newNameResolver(uri, args);
fail("Should have failed");
} catch (IllegalArgumentException e) {
// expected
}
}
private void testValidUri(URI uri) {
GrpclbNameResolver resolver = provider.newNameResolver(uri, args);
assertThat(resolver).isNotNull();
}
}
| SecretGrpclbNameResolverProviderTest |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/framework/CglibAopProxy.java | {
"start": 7016,
"end": 8907
} | class ____ be available for creating a CGLIB proxy");
Class<?> proxySuperClass = rootClass;
if (rootClass.getName().contains(ClassUtils.CGLIB_CLASS_SEPARATOR)) {
proxySuperClass = rootClass.getSuperclass();
Class<?>[] additionalInterfaces = rootClass.getInterfaces();
for (Class<?> additionalInterface : additionalInterfaces) {
this.advised.addInterface(additionalInterface);
}
}
// Validate the class, writing log messages as necessary.
validateClassIfNecessary(proxySuperClass, classLoader);
// Configure CGLIB Enhancer...
Enhancer enhancer = createEnhancer();
if (classLoader != null) {
enhancer.setClassLoader(classLoader);
if (classLoader instanceof SmartClassLoader smartClassLoader &&
smartClassLoader.isClassReloadable(proxySuperClass)) {
enhancer.setUseCache(false);
}
}
enhancer.setSuperclass(proxySuperClass);
enhancer.setInterfaces(AopProxyUtils.completeProxiedInterfaces(this.advised));
enhancer.setNamingPolicy(SpringNamingPolicy.INSTANCE);
enhancer.setAttemptLoad(enhancer.getUseCache() && AotDetector.useGeneratedArtifacts());
enhancer.setStrategy(KotlinDetector.isKotlinType(proxySuperClass) ?
new ClassLoaderAwareGeneratorStrategy(classLoader) :
new ClassLoaderAwareGeneratorStrategy(classLoader, undeclaredThrowableStrategy)
);
Callback[] callbacks = getCallbacks(rootClass);
Class<?>[] types = new Class<?>[callbacks.length];
for (int x = 0; x < types.length; x++) {
types[x] = callbacks[x].getClass();
}
// fixedInterceptorMap only populated at this point, after getCallbacks call above
ProxyCallbackFilter filter = new ProxyCallbackFilter(
this.advised.getConfigurationOnlyCopy(), this.fixedInterceptorMap, this.fixedInterceptorOffset);
enhancer.setCallbackFilter(filter);
enhancer.setCallbackTypes(types);
// Generate the proxy | must |
java | apache__kafka | connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorCheckpointConfigTest.java | {
"start": 1360,
"end": 2582
} | class ____ {
@Test
public void testTaskConfigConsumerGroups() {
List<String> groups = List.of("consumer-1", "consumer-2", "consumer-3");
MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps());
Map<String, String> props = config.taskConfigForConsumerGroups(groups, 1);
MirrorCheckpointTaskConfig taskConfig = new MirrorCheckpointTaskConfig(props);
assertEquals(taskConfig.taskConsumerGroups(), new HashSet<>(groups),
"Setting consumer groups property configuration failed");
}
@Test
public void testGroupMatching() {
MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps("groups", "group1"));
assertTrue(config.groupFilter().shouldReplicateGroup("group1"),
"topic1 group matching property configuration failed");
assertFalse(config.groupFilter().shouldReplicateGroup("group2"),
"topic2 group matching property configuration failed");
}
@Test
public void testNonMutationOfConfigDef() {
// Sanity check to make sure that these properties are actually defined for the task config,
// and that the task config | MirrorCheckpointConfigTest |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterCatalogReset.java | {
"start": 1417,
"end": 2459
} | class ____ extends SqlAlterCatalog {
private final SqlNodeList propertyKeyList;
public SqlAlterCatalogReset(
SqlParserPos position, SqlIdentifier catalogName, SqlNodeList propertyKeyList) {
super(position, catalogName);
this.propertyKeyList = requireNonNull(propertyKeyList, "propertyKeyList cannot be null");
}
@Override
public List<SqlNode> getOperandList() {
return ImmutableNullableList.of(name, propertyKeyList);
}
public SqlNodeList getPropertyList() {
return propertyKeyList;
}
public Set<String> getResetKeys() {
return propertyKeyList.getList().stream()
.map(SqlParseUtils::extractString)
.collect(Collectors.toSet());
}
@Override
public void unparseAlterOperation(SqlWriter writer, int leftPrec, int rightPrec) {
super.unparseAlterOperation(writer, leftPrec, rightPrec);
SqlUnparseUtils.unparseResetOptions(propertyKeyList, writer, leftPrec, rightPrec);
}
}
| SqlAlterCatalogReset |
java | apache__camel | components/camel-telemetry/src/test/java/org/apache/camel/telemetry/mock/MockTracer.java | {
"start": 1429,
"end": 1745
} | class ____ extends Tracer {
MockSpanLifecycleManager slcm;
@Override
protected void initTracer() {
this.slcm = new MockSpanLifecycleManager();
setSpanLifecycleManager(this.slcm);
}
public Map<String, MockTrace> traces() {
return slcm.traces();
}
private | MockTracer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilter.java | {
"start": 2725,
"end": 2930
} | class ____ {
private String proxyHost = "localhost";
private String proxyUri = "http://bogus";
private String doFilterRequest;
private AmIpServletRequestWrapper servletWrapper;
private | TestAmFilter |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsRequest.java | {
"start": 618,
"end": 1038
} | class ____ extends BaseNodesRequest {
private boolean includeStats;
public SqlStatsRequest() {
super((String[]) null);
}
public boolean includeStats() {
return includeStats;
}
public void includeStats(boolean includeStats) {
this.includeStats = includeStats;
}
@Override
public String toString() {
return "sql_stats";
}
static | SqlStatsRequest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchAndEmbeddedIdId2Test.java | {
"start": 1730,
"end": 4762
} | class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
for ( int i = 0; i < 10; i++ ) {
Parent parent = new Parent( (long) i );
Child child = new Child( (long) ( i + 1 ), parent );
Child child2 = new Child( (long) ( i + 2 ), parent );
Child child3 = new Child( (long) ( i + 3 ), parent );
Child child4 = new Child( (long) ( i + 4 ), parent );
Child child5 = new Child( (long) ( i + 5 ), parent );
Child child6 = new Child( (long) ( i + 6 ), parent );
Child child7 = new Child( (long) ( i + 7 ), parent );
Child child8 = new Child( (long) ( i + 8 ), parent );
Child child9 = new Child( (long) ( i + 9 ), parent );
Child child10 = new Child( (long) ( i + 10 ), parent );
Child child11 = new Child( (long) ( i + 11 ), parent );
session.persist( parent );
}
}
);
}
@Test
public void testFind(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent parent = session.find( Parent.class, 1L );
assertThat( parent.getChildren().size() ).isEqualTo( 11 );
}
);
}
@Test
public void testSelectChild(SessionFactoryScope scope) {
SQLStatementInspector statementInspector = (SQLStatementInspector) scope.getStatementInspector();
scope.inTransaction(
session -> {
statementInspector.clear();
List<Child> children = session.createQuery( "select c from Child c", Child.class ).getResultList();
statementInspector.assertExecutedCount( 3 );
if ( scope.getSessionFactory().getJdbcServices().getDialect().useArrayForMultiValuedParameters() ) {
Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" );
Assertions.assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "?" );
}
else {
Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "in (?,?,?,?,?)" );
Assertions.assertThat( statementInspector.getSqlQueries().get( 2 ) ).containsOnlyOnce( "in (?,?,?,?,?)" );
}
statementInspector.clear();
for ( Child c : children ) {
c.getParent().getName();
}
statementInspector.assertExecutedCount( 0 );
}
);
}
@Test
public void testGetReference(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent parent = session.getReference( Parent.class, 1l );
Parent parent1 = session.getReference( Parent.class, 2l );
Parent parent2 = session.getReference( Parent.class, 3l );
assertFalse( Hibernate.isInitialized( parent ) );
assertFalse( Hibernate.isInitialized( parent1 ) );
assertFalse( Hibernate.isInitialized( parent2 ) );
parent.getName();
assertTrue( Hibernate.isInitialized( parent ) );
assertTrue( Hibernate.isInitialized( parent1 ) );
assertTrue( Hibernate.isInitialized( parent2 ) );
}
);
}
@Entity(name = "Child")
@Table(name = "child_tablle")
public static | BatchAndEmbeddedIdId2Test |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 1110209,
"end": 1112342
} | class ____ extends YamlDeserializerBase<SwiftMxDataFormat> {
public SwiftMxDataFormatDeserializer() {
super(SwiftMxDataFormat.class);
}
@Override
protected SwiftMxDataFormat newInstance() {
return new SwiftMxDataFormat();
}
@Override
protected boolean setProperty(SwiftMxDataFormat target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "readConfig": {
String val = asText(node);
target.setReadConfig(val);
break;
}
case "readMessageId": {
String val = asText(node);
target.setReadMessageId(val);
break;
}
case "writeConfig": {
String val = asText(node);
target.setWriteConfig(val);
break;
}
case "writeInJson": {
String val = asText(node);
target.setWriteInJson(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "syslog",
types = org.apache.camel.model.dataformat.SyslogDataFormat.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Syslog",
description = "Marshall SyslogMessages to RFC3164 and RFC5424 messages and back.",
deprecated = false,
properties = @YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id")
)
public static | SwiftMxDataFormatDeserializer |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 91461,
"end": 92111
} | class ____ extends AggregateFunction<Integer, MyState> {
public void accumulate(
@StateHint(name = "myAcc") MyState acc, @ArgumentHint(name = "i") Integer i) {}
@Override
public Integer getValue(MyState accumulator) {
return null;
}
@Override
public MyState createAccumulator() {
return new MyState();
}
}
@FunctionHint(
state = {@StateHint(name = "myAcc", type = @DataTypeHint(bridgedTo = MyState.class))},
arguments = {@ArgumentHint(name = "i", type = @DataTypeHint("INT"))})
private static | StateHintAggregateFunction |
java | junit-team__junit5 | junit-platform-console/src/main/java/org/junit/platform/console/command/DiscoveryRequestCreator.java | {
"start": 2457,
"end": 7575
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(DiscoveryRequestCreator.class);
static LauncherDiscoveryRequestBuilder toDiscoveryRequestBuilder(TestDiscoveryOptions options) {
LauncherDiscoveryRequestBuilder requestBuilder = request();
List<? extends DiscoverySelector> selectors = createDiscoverySelectors(options);
requestBuilder.selectors(selectors);
addFilters(requestBuilder, options, selectors);
requestBuilder.configurationParameters(options.getConfigurationParameters());
requestBuilder.configurationParametersResources(
options.getConfigurationParametersResources().toArray(new String[0]));
return requestBuilder;
}
private static List<? extends DiscoverySelector> createDiscoverySelectors(TestDiscoveryOptions options) {
List<DiscoverySelector> explicitSelectors = options.getExplicitSelectors();
if (options.isScanClasspath()) {
Preconditions.condition(explicitSelectors.isEmpty(),
"Scanning the classpath and using explicit selectors at the same time is not supported");
return createClasspathRootSelectors(options);
}
if (options.isScanModulepath()) {
Preconditions.condition(explicitSelectors.isEmpty(),
"Scanning the module-path and using explicit selectors at the same time is not supported");
return selectModules(ModuleUtils.findAllNonSystemBootModuleNames());
}
return Preconditions.notEmpty(explicitSelectors,
"Please specify an explicit selector option or use --scan-class-path or --scan-modules");
}
private static List<ClasspathRootSelector> createClasspathRootSelectors(TestDiscoveryOptions options) {
Set<Path> classpathRoots = validateAndLogInvalidRoots(determineClasspathRoots(options));
return selectClasspathRoots(classpathRoots);
}
private static Set<Path> determineClasspathRoots(TestDiscoveryOptions options) {
var selectedClasspathEntries = Preconditions.notNull(options.getSelectedClasspathEntries(),
() -> "No classpath entries selected");
if (selectedClasspathEntries.isEmpty()) {
Set<Path> rootDirs = new LinkedHashSet<>(ReflectionUtils.getAllClasspathRootDirectories());
rootDirs.addAll(options.getAdditionalClasspathEntries());
return rootDirs;
}
return new LinkedHashSet<>(selectedClasspathEntries);
}
private static Set<Path> validateAndLogInvalidRoots(Set<Path> roots) {
LinkedHashSet<Path> valid = new LinkedHashSet<>();
HashSet<Path> seen = new HashSet<>();
for (Path root : roots) {
if (!seen.add(root)) {
continue;
}
if (Files.exists(root)) {
valid.add(root);
}
else {
logger.warn(() -> "Ignoring nonexistent classpath root: %s".formatted(root));
}
}
return valid;
}
private static void addFilters(LauncherDiscoveryRequestBuilder requestBuilder, TestDiscoveryOptions options,
List<? extends DiscoverySelector> selectors) {
requestBuilder.filters(includedClassNamePatterns(options, selectors));
if (!options.getExcludedClassNamePatterns().isEmpty()) {
requestBuilder.filters(
excludeClassNamePatterns(options.getExcludedClassNamePatterns().toArray(new String[0])));
}
if (!options.getIncludedPackages().isEmpty()) {
requestBuilder.filters(includePackageNames(options.getIncludedPackages()));
}
if (!options.getExcludedPackages().isEmpty()) {
requestBuilder.filters(excludePackageNames(options.getExcludedPackages()));
}
if (!options.getIncludedMethodNamePatterns().isEmpty()) {
requestBuilder.filters(includeMethodNamePatterns(options.getIncludedMethodNamePatterns()));
}
if (!options.getExcludedMethodNamePatterns().isEmpty()) {
requestBuilder.filters(excludeMethodNamePatterns(options.getExcludedMethodNamePatterns()));
}
if (!options.getIncludedTagExpressions().isEmpty()) {
requestBuilder.filters(includeTags(options.getIncludedTagExpressions()));
}
if (!options.getExcludedTagExpressions().isEmpty()) {
requestBuilder.filters(excludeTags(options.getExcludedTagExpressions()));
}
if (!options.getIncludedEngines().isEmpty()) {
requestBuilder.filters(includeEngines(options.getIncludedEngines()));
}
if (!options.getExcludedEngines().isEmpty()) {
requestBuilder.filters(excludeEngines(options.getExcludedEngines()));
}
}
private static ClassNameFilter includedClassNamePatterns(TestDiscoveryOptions options,
List<? extends DiscoverySelector> selectors) {
Stream<String> patternStreams = Stream.concat( //
options.getIncludedClassNamePatterns().stream(), //
selectors.stream() //
.map(selector -> selector instanceof IterationSelector iterationSelector
? iterationSelector.getParentSelector()
: selector) //
.map(selector -> {
if (selector instanceof ClassSelector classSelector) {
return classSelector.getClassName();
}
if (selector instanceof MethodSelector methodSelector) {
return methodSelector.getClassName();
}
return null;
}) //
.filter(Objects::nonNull) //
.map(Pattern::quote));
return includeClassNamePatterns(patternStreams.toArray(String[]::new));
}
private DiscoveryRequestCreator() {
}
}
| DiscoveryRequestCreator |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MockitoUtil.java | {
"start": 1033,
"end": 2533
} | class ____ {
/**
* Return a mock object for an IPC protocol. This special
* method is necessary, since the IPC proxies have to implement
* Closeable in addition to their protocol interface.
* @param clazz the protocol class
*/
public static <T> T mockProtocol(Class<T> clazz) {
return Mockito.mock(clazz,
Mockito.withSettings().extraInterfaces(Closeable.class));
}
/**
* Throw an exception from the mock/spy only in the case that the
* call stack at the time the method has a line which matches the given
* pattern.
*
* @param t the Throwable to throw
* @param pattern the pattern against which to match the call stack trace
* @return the stub in progress
*/
public static Stubber doThrowWhenCallStackMatches(
final Throwable t, final String pattern) {
return Mockito.doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
t.setStackTrace(Thread.currentThread().getStackTrace());
for (StackTraceElement elem : t.getStackTrace()) {
if (elem.toString().matches(pattern)) {
throw t;
}
}
return invocation.callRealMethod();
}
});
}
/**
* Verifies that there were no interactions with the given mock objects.
*
* @param mocks the mock objects to verify
*/
public static void verifyZeroInteractions(Object... mocks) {
Mockito.verifyNoInteractions(mocks);
}
}
| MockitoUtil |
java | google__gson | test-shrinker/src/main/java/com/example/ClassWithNamedFields.java | {
"start": 29,
"end": 181
} | class ____ {
public int myField;
public short notAccessedField = -1;
public ClassWithNamedFields(int i) {
myField = i;
}
}
| ClassWithNamedFields |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/FilterDatasourceConnectAndRecycleFilterTest.java | {
"start": 2083,
"end": 3117
} | class ____ extends FilterAdapter {
private AtomicLong dataSourceConnectCount = new AtomicLong();
private AtomicLong dataSourceRecycleCount = new AtomicLong();
@Override
public void dataSource_releaseConnection(FilterChain chain, DruidPooledConnection connection) throws SQLException {
chain.dataSource_recycle(connection);
dataSourceRecycleCount.incrementAndGet();
}
@Override
public DruidPooledConnection dataSource_getConnection(FilterChain chain, DruidDataSource dataSource,
long maxWaitMillis) throws SQLException {
dataSourceConnectCount.incrementAndGet();
return chain.dataSource_connect(dataSource, maxWaitMillis);
}
public long getDataSourceConnectCount() {
return dataSourceConnectCount.get();
}
public long getDataSourceRecycleCount() {
return dataSourceRecycleCount.get();
}
}
}
| TestFilter |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/testdata/input/PlaceholderAllowedVarsTemplateExample.java | {
"start": 656,
"end": 1078
} | class ____ {
public void shouldMatch() {
String accum = "foo";
if (!"foo".equals("bar")) {
System.out.println("in if");
accum += "bar";
}
System.out.println("foo");
}
public void shouldNotMatch() {
String accum = "foo";
if (!"foo".equals("bar")) {
System.out.println(accum);
accum += "bar";
}
System.out.println("foo");
}
}
| PlaceholderAllowedVarsTemplateExample |
java | dropwizard__dropwizard | dropwizard-testing/src/test/java/io/dropwizard/testing/junit5/ReuseDropwizardAppExtensionTestSuite.java | {
"start": 755,
"end": 1341
} | class ____ {
static final DropwizardAppExtension<TestConfiguration> EXTENSION = ReuseDropwizardAppExtensionTestSuite.EXTENSION;
@Test
void clientHasNotBeenClosed() {
final String response = EXTENSION.client()
.target("http://localhost:" + EXTENSION.getAdminPort() + "/tasks/echo")
.request()
.post(Entity.entity("Custom message", MediaType.TEXT_PLAIN), String.class);
assertThat(response).isEqualTo("Custom message");
}
}
@ExtendWith(DropwizardExtensionsSupport.class)
| DropwizardAppExtensionTestSuiteFooTest |
java | apache__avro | lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java | {
"start": 4996,
"end": 7291
} | class ____ {
/**
* A magic value representing the default for buffer size, block size, and
* replication factor.
*/
private static final short DEFAULT = -1;
private FileSystem mFileSystem;
private Configuration mConf;
private Path mOutputPath;
private Class<?> mKeyClass;
private Schema mKeyWriterSchema;
private Class<?> mValueClass;
private Schema mValueWriterSchema;
private int mBufferSizeBytes;
private short mReplicationFactor;
private long mBlockSizeBytes;
private Progressable mProgressable;
private CompressionType mCompressionType;
private CompressionCodec mCompressionCodec;
private Metadata mMetadata;
/**
* Creates a new <code>Options</code> instance with default values.
*/
public Options() {
mBufferSizeBytes = DEFAULT;
mReplicationFactor = DEFAULT;
mBlockSizeBytes = DEFAULT;
mCompressionType = CompressionType.NONE;
mMetadata = new Metadata();
}
/**
* Sets the filesystem the SequenceFile should be written to.
*
* @param fileSystem The filesystem.
* @return This options instance.
*/
public Options withFileSystem(FileSystem fileSystem) {
if (null == fileSystem) {
throw new IllegalArgumentException("Filesystem may not be null");
}
mFileSystem = fileSystem;
return this;
}
/**
* Sets the Hadoop configuration.
*
* @param conf The configuration.
* @return This options instance.
*/
public Options withConfiguration(Configuration conf) {
if (null == conf) {
throw new IllegalArgumentException("Configuration may not be null");
}
mConf = conf;
return this;
}
/**
* Sets the output path for the SequenceFile.
*
* @param outputPath The output path.
* @return This options instance.
*/
public Options withOutputPath(Path outputPath) {
if (null == outputPath) {
throw new IllegalArgumentException("Output path may not be null");
}
mOutputPath = outputPath;
return this;
}
/**
* Sets the | Options |
java | elastic__elasticsearch | modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java | {
"start": 1618,
"end": 11475
} | class ____ extends ESTestCase {
private MustacheScriptEngine qe;
private MustacheFactory factory;
@Before
public void setup() {
qe = new MustacheScriptEngine(Settings.builder().put(MustacheScriptEngine.MUSTACHE_RESULT_SIZE_LIMIT.getKey(), "1kb").build());
factory = CustomMustacheFactory.builder().build();
}
public void testSimpleParameterReplace() {
Map<String, String> compileParams = Map.of("content_type", "application/json");
{
String template = """
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "solr"
}
}
},
"negative_boost": {{boost_val}}
}
}
}""";
Map<String, Object> vars = Map.of("boost_val", "0.3");
String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute();
assertEquals("""
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "solr"
}
}
},
"negative_boost": 0.3
}
}
}""", o);
}
{
String template = """
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "{{body_val}}"
}
}
},
"negative_boost": {{boost_val}}
}
}
}""";
Map<String, Object> vars = Map.of("boost_val", "0.3", "body_val", "\"quick brown\"");
String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute();
assertEquals("""
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "\\"quick brown\\""
}
}
},
"negative_boost": 0.3
}
}
}""", o);
}
}
public void testChangingDelimiters() {
Map<String, String> compileParams = Map.of("content_type", "application/json");
{
String template = """
GET _search
{
"query": {
"match": {
"content": "{{query_string}}"
}
},
"highlight": {
{{=<% %>=}}
"pre_tags": [
"{{{{"
],
"post_tags": [
"}}}}"
],
<%={{ }}=%>
"fields": {
"content": {},
"title": {}
}
}
}""";
Map<String, Object> vars = Map.of("query_string", "test");
String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute();
assertEquals("""
GET _search
{
"query": {
"match": {
"content": "test"
}
},
"highlight": {
\s
"pre_tags": [
"{{{{"
],
"post_tags": [
"}}}}"
],
\s
"fields": {
"content": {},
"title": {}
}
}
}""", o);
}
}
public void testSimple() throws IOException {
String templateString = """
{"source":{"match_{{template}}": {}},"params":{"template":"all"}}""";
XContentParser parser = createParser(JsonXContent.jsonXContent, templateString);
Script script = Script.parse(parser);
TemplateScript.Factory compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Map.of());
TemplateScript TemplateScript = compiled.newInstance(script.getParams());
assertThat(TemplateScript.execute(), equalTo("{\"match_all\":{}}"));
}
@SuppressWarnings("deprecation") // GeneralScriptException
public void testDetectMissingParam() {
Map<String, String> scriptOptions = Map.ofEntries(Map.entry(MustacheScriptEngine.DETECT_MISSING_PARAMS_OPTION, "true"));
// fails when a param is missing and the DETECT_MISSING_PARAMS_OPTION option is set to true.
{
String source = "{\"match\": { \"field\": \"{{query_string}}\" }";
TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions);
Map<String, Object> params = Collections.emptyMap();
GeneralScriptException e = expectThrows(GeneralScriptException.class, () -> compiled.newInstance(params).execute());
assertThat(e.getRootCause(), instanceOf(MustacheInvalidParameterException.class));
assertThat(e.getRootCause().getMessage(), startsWith("Parameter [query_string] is missing"));
}
// fails when params is null and the DETECT_MISSING_PARAMS_OPTION option is set to true.
{
String source = "{\"match\": { \"field\": \"{{query_string}}\" }";
TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions);
GeneralScriptException e = expectThrows(GeneralScriptException.class, () -> compiled.newInstance(null).execute());
assertThat(e.getRootCause(), instanceOf(MustacheInvalidParameterException.class));
assertThat(e.getRootCause().getMessage(), startsWith("Parameter [query_string] is missing"));
}
// works as expected when params are specified and the DETECT_MISSING_PARAMS_OPTION option is set to true
{
String source = "{\"match\": { \"field\": \"{{query_string}}\" }";
TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions);
Map<String, Object> params = Map.ofEntries(Map.entry("query_string", "foo"));
assertThat(compiled.newInstance(params).execute(), equalTo("{\"match\": { \"field\": \"foo\" }"));
}
// do not throw when using a missing param in the conditional when DETECT_MISSING_PARAMS_OPTION option is set to true
{
String source = "{\"match\": { \"field\": \"{{#query_string}}{{.}}{{/query_string}}\" }";
TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions);
Map<String, Object> params = Map.of();
assertThat(compiled.newInstance(params).execute(), equalTo("{\"match\": { \"field\": \"\" }"));
}
}
public void testMissingParam() {
Map<String, String> scriptOptions = Collections.emptyMap();
String source = "{\"match\": { \"field\": \"{{query_string}}\" }";
TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions);
// When the DETECT_MISSING_PARAMS_OPTION is not specified, missing variable is replaced with an empty string.
assertThat(compiled.newInstance(Collections.emptyMap()).execute(), equalTo("{\"match\": { \"field\": \"\" }"));
assertThat(compiled.newInstance(null).execute(), equalTo("{\"match\": { \"field\": \"\" }"));
}
public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException {
String templateString = """
{
"source": "{ \\"match_{{#use_it}}{{template}}{{/use_it}}\\":{} }",
"params": {
"template": "all",
"use_it": true
}
}""";
XContentParser parser = createParser(JsonXContent.jsonXContent, templateString);
Script script = Script.parse(parser);
TemplateScript.Factory compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Map.of());
TemplateScript TemplateScript = compiled.newInstance(script.getParams());
assertThat(TemplateScript.execute(), equalTo("{ \"match_all\":{} }"));
}
private static | MustacheScriptEngineTests |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/AppenderSkeleton.java | {
"start": 1010,
"end": 1119
} | class ____ Appenders in Log4j 1. Appenders constructed using this are ignored in Log4j 2.
*/
public abstract | for |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/generate/GeneratedClassTests.java | {
"start": 4602,
"end": 5028
} | class ____");
}
@Test
void generateJavaFileOnInnerClassThrowsException() {
GeneratedClass generatedClass = createGeneratedClass(TEST_CLASS_NAME)
.getOrAdd("Inner", emptyTypeCustomizer);
assertThatIllegalStateException().isThrownBy(generatedClass::generateJavaFile);
}
private static GeneratedClass createGeneratedClass(ClassName className) {
return new GeneratedClass(className, emptyTypeCustomizer);
}
}
| Second |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/ConfigurableBeanContext.java | {
"start": 1065,
"end": 1322
} | interface ____
* return results allowing inspection of the context without needing to run the context.</p>
*
* @see io.micronaut.inject.BeanDefinition
* @see #getBeanDefinition(Class)
* @see #start()
*/
void configure();
}
| will |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ConstantOverflowTest.java | {
"start": 5306,
"end": 5479
} | class ____ {
int a = 'a' + Integer.MAX_VALUE;
}
""")
.addOutputLines(
"out/Test.java",
"""
| Test |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java | {
"start": 36136,
"end": 69488
} | class ____ not available because this JRE does not support JMX. "
+ "JMX lookups will not be available, continuing configuration. "
);
// separate method so that this can be checked again after suite scoped cluster is shut down
protected static void checkStaticState() throws Exception {
// ensure no one changed the status logger level on us
assertThat(StatusLogger.getLogger().getLevel(), equalTo(Level.WARN));
synchronized (statusData) {
try {
// ensure that there are no status logger messages which would indicate a problem with our Log4j usage; we map the
// StatusData instances to Strings as otherwise their toString output is useless
assertThat(
statusData.stream().map(status -> status.getMessage().getFormattedMessage()).collect(Collectors.toList()),
anyOf(
emptyCollectionOf(String.class),
contains(startsWith(LOG_4J_MSG_PREFIXES.get(0)), startsWith(LOG_4J_MSG_PREFIXES.get(1))),
contains(startsWith(LOG_4J_MSG_PREFIXES.get(1)))
)
);
} finally {
// we clear the list so that status data from other tests do not interfere with tests within the same JVM
statusData.clear();
}
}
synchronized (loggedLeaks) {
try {
assertThat(loggedLeaks, empty());
} finally {
loggedLeaks.clear();
}
}
}
/**
* Assert that at least one leak was detected, also clear the list of detected leaks
* so the test won't fail for leaks detected up until this point.
*/
protected static void assertLeakDetected() {
synchronized (loggedLeaks) {
assertFalse("No leaks have been detected", loggedLeaks.isEmpty());
loggedLeaks.clear();
}
}
// this must be a separate method from other ensure checks above so suite scoped integ tests can call...TODO: fix that
public final void ensureAllSearchContextsReleased() throws Exception {
assertBusy(() -> MockSearchService.assertNoInFlightContext());
}
// mockdirectorywrappers currently set this boolean if checkindex fails
// TODO: can we do this cleaner???
/** MockFSDirectoryService sets this: */
public static final List<Exception> checkIndexFailures = new CopyOnWriteArrayList<>();
@Before
public final void resetCheckIndexStatus() throws Exception {
checkIndexFailures.clear();
}
public final void ensureCheckIndexPassed() {
if (checkIndexFailures.isEmpty() == false) {
final AssertionError e = new AssertionError("at least one shard failed CheckIndex");
for (Exception failure : checkIndexFailures) {
e.addSuppressed(failure);
}
throw e;
}
}
// -----------------------------------------------------------------
// Test facilities and facades for subclasses.
// -----------------------------------------------------------------
// TODO: decide on one set of naming for between/scaledBetween and remove others
// TODO: replace frequently() with usually()
/**
* Returns a "scaled" random number between min and max (inclusive).
*
* @see RandomizedTest#scaledRandomIntBetween(int, int)
*/
public static int scaledRandomIntBetween(int min, int max) {
return RandomizedTest.scaledRandomIntBetween(min, max);
}
/**
* A random integer from <code>min</code> to <code>max</code> (inclusive).
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int randomIntBetween(int min, int max) {
return RandomNumbers.randomIntBetween(random(), min, max);
}
/**
* A random long number between min (inclusive) and max (inclusive).
*/
public static long randomLongBetween(long min, long max) {
return RandomNumbers.randomLongBetween(random(), min, max);
}
/**
* @return a random instant between a min and a max value with a random nanosecond precision
*/
public static Instant randomInstantBetween(Instant minInstant, Instant maxInstant) {
long epochSecond = randomLongBetween(minInstant.getEpochSecond(), maxInstant.getEpochSecond());
long minNanos = epochSecond == minInstant.getEpochSecond() ? minInstant.getNano() : 0;
long maxNanos = epochSecond == maxInstant.getEpochSecond() ? maxInstant.getNano() : 999999999;
long nanos = randomLongBetween(minNanos, maxNanos);
return Instant.ofEpochSecond(epochSecond, nanos);
}
/**
* The maximum value that can be represented as an unsigned long.
*/
public static final BigInteger UNSIGNED_LONG_MAX = BigInteger.ONE.shiftLeft(Long.SIZE).subtract(BigInteger.ONE);
/**
* An unsigned long in a {@link BigInteger} between min (inclusive) and max (inclusive).
*/
public static BigInteger randomUnsignedLongBetween(BigInteger min, BigInteger max) {
if (min.compareTo(BigInteger.ZERO) < 0) {
throw new IllegalArgumentException("Must be between [0] and [" + UNSIGNED_LONG_MAX + "]");
}
if (0 < max.compareTo(UNSIGNED_LONG_MAX)) {
throw new IllegalArgumentException("Must be between [0] and [" + UNSIGNED_LONG_MAX + "]");
}
// Shift the min and max down into the long range
long minShifted = min.add(BigInteger.valueOf(Long.MIN_VALUE)).longValueExact();
long maxShifted = max.add(BigInteger.valueOf(Long.MIN_VALUE)).longValueExact();
// Grab a random number in that range
long randomShifted = randomLongBetween(minShifted, maxShifted);
// Shift back up into long range
return BigInteger.valueOf(randomShifted).subtract(BigInteger.valueOf(Long.MIN_VALUE));
}
/**
* Returns a "scaled" number of iterations for loops which can have a variable
* iteration count. This method is effectively
* an alias to {@link #scaledRandomIntBetween(int, int)}.
*/
public static int iterations(int min, int max) {
return scaledRandomIntBetween(min, max);
}
/**
* An alias for {@link #randomIntBetween(int, int)}.
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int between(int min, int max) {
return randomIntBetween(min, max);
}
/**
* The exact opposite of {@link #rarely()}.
*/
public static boolean frequently() {
return rarely() == false;
}
public static boolean randomBoolean() {
return random().nextBoolean();
}
public static Boolean randomOptionalBoolean() {
return randomBoolean() ? Boolean.TRUE : randomFrom(Boolean.FALSE, null);
}
public static byte randomByte() {
return (byte) random().nextInt();
}
public static byte randomNonNegativeByte() {
byte randomByte = randomByte();
return (byte) (randomByte == Byte.MIN_VALUE ? 0 : Math.abs(randomByte));
}
/**
* Helper method to create a byte array of a given length populated with random byte values
*
* @see #randomByte()
*/
public static byte[] randomByteArrayOfLength(int size) {
byte[] bytes = new byte[size];
for (int i = 0; i < size; i++) {
bytes[i] = randomByte();
}
return bytes;
}
public static byte randomByteBetween(byte minInclusive, byte maxInclusive) {
return (byte) randomIntBetween(minInclusive, maxInclusive);
}
public static void randomBytesBetween(byte[] bytes, byte minInclusive, byte maxInclusive) {
for (int i = 0, len = bytes.length; i < len;) {
bytes[i++] = randomByteBetween(minInclusive, maxInclusive);
}
}
public static BytesReference randomBytesReference(int length) {
final var slices = new ArrayList<BytesReference>();
var remaining = length;
while (remaining > 0) {
final var sliceLen = between(1, remaining);
slices.add(new BytesArray(randomByteArrayOfLength(sliceLen)));
remaining -= sliceLen;
}
return CompositeBytesReference.of(slices.toArray(BytesReference[]::new));
}
public ReleasableBytesReference randomReleasableBytesReference(int length) {
return new ReleasableBytesReference(randomBytesReference(length), LeakTracker.wrap(new AbstractRefCounted() {
@Override
protected void closeInternal() {}
}));
}
public static short randomShort() {
return (short) random().nextInt();
}
public static int randomInt() {
return random().nextInt();
}
public static IntStream randomInts() {
return random().ints();
}
public static IntStream randomInts(long streamSize) {
return random().ints(streamSize);
}
/**
* @return a <code>long</code> between <code>0</code> and <code>Long.MAX_VALUE</code> (inclusive) chosen uniformly at random.
*/
public static long randomNonNegativeLong() {
return randomLong() & Long.MAX_VALUE;
}
/**
* @return a <code>long</code> between <code>Long.MIN_VALUE</code> and <code>-1</code> (inclusive) chosen uniformly at random.
*/
public static long randomNegativeLong() {
return randomLong() | Long.MIN_VALUE;
}
/**
* @return an <code>int</code> between <code>0</code> and <code>Integer.MAX_VALUE</code> (inclusive) chosen uniformly at random.
*/
public static int randomNonNegativeInt() {
return randomInt() & Integer.MAX_VALUE;
}
/**
* @return an <code>int</code> between <code>Integer.MIN_VALUE</code> and <code>-1</code> (inclusive) chosen uniformly at random.
*/
public static int randomNegativeInt() {
return randomInt() | Integer.MIN_VALUE;
}
public static float randomFloat() {
return random().nextFloat();
}
/**
* Returns a float value in the interval [start, end) if lowerInclusive is
* set to true, (start, end) otherwise.
*
* @param start lower bound of interval to draw uniformly distributed random numbers from
* @param end upper bound
* @param lowerInclusive whether or not to include lower end of the interval
*/
public static float randomFloatBetween(float start, float end, boolean lowerInclusive) {
float result;
if (start == -Float.MAX_VALUE || end == Float.MAX_VALUE) {
// formula below does not work with very large floats
result = Float.intBitsToFloat(randomInt());
while (result < start || result > end || Double.isNaN(result)) {
result = Float.intBitsToFloat(randomInt());
}
} else {
result = randomFloat();
if (lowerInclusive == false) {
while (result <= 0.0f) {
result = randomFloat();
}
}
result = result * end + (1.0f - result) * start;
}
return result;
}
public static double randomDouble() {
return random().nextDouble();
}
public static DoubleStream randomDoubles() {
return random().doubles();
}
public static DoubleStream randomDoubles(long streamSize) {
return random().doubles(streamSize);
}
/**
* Returns a pseudo-random double from a Gaussian distribution with mean 0.0 and standard deviation 1.0
*/
public static double randomGaussianDouble() {
return random().nextGaussian();
}
/**
* Returns a double value in the interval [start, end) if lowerInclusive is
* set to true, (start, end) otherwise.
*
* @param start lower bound of interval to draw uniformly distributed random numbers from
* @param end upper bound
* @param lowerInclusive whether or not to include lower end of the interval
*/
public static double randomDoubleBetween(double start, double end, boolean lowerInclusive) {
double result = 0.0;
if (start == -Double.MAX_VALUE || end == Double.MAX_VALUE) {
// formula below does not work with very large doubles
result = Double.longBitsToDouble(randomLong());
while (result < start || result > end || Double.isNaN(result)) {
result = Double.longBitsToDouble(randomLong());
}
} else {
result = randomDouble();
if (lowerInclusive == false) {
while (result <= 0.0) {
result = randomDouble();
}
}
result = result * end + (1.0 - result) * start;
}
return result;
}
public static Double randomOptionalDouble() {
return randomFrom(randomDouble(), null);
}
public static long randomLong() {
return random().nextLong();
}
public static LongStream randomLongs() {
return random().longs();
}
public static LongStream randomLongs(long streamSize) {
return random().longs(streamSize);
}
    /**
     * Returns a random BigInteger uniformly distributed over the range 0 to (2^64 - 1) inclusive
     * (i.e. 64 uniformly random bits, per {@link BigInteger#BigInteger(int, Random)}).
     * Currently BigIntegers are only used for unsigned_long field type, where the max value is 2^64 - 1.
     * Modify this random generator if a wider range for BigIntegers is necessary.
     * @return a random bigInteger in the range [0 ; 2^64 - 1]
     */
    public static BigInteger randomBigInteger() {
        return new BigInteger(64, random());
    }
    /** A random integer from 0..max (inclusive at both ends). */
    public static int randomInt(int max) {
        return RandomizedTest.randomInt(max);
    }
    /** A random byte size value. The upper bound is Long.MAX_VALUE >> 16 — presumably to leave headroom for unit conversions downstream; confirm before widening. */
    public static ByteSizeValue randomByteSizeValue() {
        return ByteSizeValue.ofBytes(randomLongBetween(0L, Long.MAX_VALUE >> 16));
    }
    /** Pick a random object from the given array using the test's seeded random source. The array must not be empty. */
    @SafeVarargs
    @SuppressWarnings("varargs")
    public static <T> T randomFrom(T... array) {
        return randomFrom(random(), array);
    }
    /** Pick a random object from the given array using the supplied {@link Random}. The array must not be empty. */
    @SafeVarargs
    @SuppressWarnings("varargs")
    public static <T> T randomFrom(Random random, T... array) {
        return RandomPicks.randomFrom(random, array);
    }
/** Pick a random object from the given array of suppliers. The array must not be empty. */
@SafeVarargs
@SuppressWarnings("varargs")
public static <T> T randomFrom(Random random, Supplier<T>... array) {
Supplier<T> supplier = RandomPicks.randomFrom(random, array);
return supplier.get();
}
    /** Pick a random object from the given list. The list must not be empty. */
    public static <T> T randomFrom(List<T> list) {
        return RandomPicks.randomFrom(random(), list);
    }
    /** Pick a random object from the given collection using the test's seeded random source. Must not be empty. */
    public static <T> T randomFrom(Collection<T> collection) {
        return randomFrom(random(), collection);
    }
    /** Pick a random object from the given collection using the supplied {@link Random}. Must not be empty. */
    public static <T> T randomFrom(Random random, Collection<T> collection) {
        return RandomPicks.randomFrom(random, collection);
    }
    /** Random ASCII-letter string whose length (in code units) is between the given bounds, inclusive. */
    public static String randomAlphaOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
        return RandomizedTest.randomAsciiOfLengthBetween(minCodeUnits, maxCodeUnits);
    }
    /** Random ASCII-letter string of exactly {@code codeUnits} code units. */
    public static String randomAlphaOfLength(int codeUnits) {
        return RandomizedTest.randomAsciiOfLength(codeUnits);
    }
/**
* Generate a random string containing only alphanumeric characters.
* <b>The locale for the string is {@link Locale#ROOT}.</b>
* @param length the length of the string to generate
* @return the generated string
*/
public static String randomAlphanumericOfLength(int length) {
StringBuilder sb = new StringBuilder();
Random random = random();
for (int i = 0; i < length; i++) {
sb.append(ALPHANUMERIC_CHARACTERS.charAt(random.nextInt(ALPHANUMERIC_CHARACTERS.length())));
}
return sb.toString();
}
    /** Random ASCII-letter {@link SecureString} of exactly {@code codeUnits} characters. */
    public static SecureString randomSecureStringOfLength(int codeUnits) {
        var randomAlpha = randomAlphaOfLength(codeUnits);
        return new SecureString(randomAlpha.toCharArray());
    }
    /** Randomly returns {@code null} or a random ASCII-letter string of the given length. */
    public static String randomAlphaOfLengthOrNull(int codeUnits) {
        return randomBoolean() ? null : randomAlphaOfLength(codeUnits);
    }
    /** Randomly returns {@code null} or a random long. */
    public static Long randomLongOrNull() {
        return randomBoolean() ? null : randomLong();
    }
    /** Randomly returns {@code null} or a random non-negative long. */
    public static Long randomNonNegativeLongOrNull() {
        return randomBoolean() ? null : randomNonNegativeLong();
    }
    /** Randomly returns {@code null} or a random int. */
    public static Integer randomIntOrNull() {
        return randomBoolean() ? null : randomInt();
    }
    /** Randomly returns {@code null} or a random non-negative int. */
    public static Integer randomNonNegativeIntOrNull() {
        return randomBoolean() ? null : randomNonNegativeInt();
    }
    /** Randomly returns {@code null} or a random float. */
    public static Float randomFloatOrNull() {
        return randomBoolean() ? null : randomFloat();
    }
    /**
     * Creates a valid random identifier such as node id or index name:
     * 8-12 ASCII letters, lower-cased with {@link Locale#ROOT}.
     */
    public static String randomIdentifier() {
        return randomAlphaOfLengthBetween(8, 12).toLowerCase(Locale.ROOT);
    }
    /**
     * Returns a project id. This may be {@link Metadata#DEFAULT_PROJECT_ID} (50% of the time), or a randomly-generated id.
     */
    public static ProjectId randomProjectIdOrDefault() {
        return randomBoolean() ? Metadata.DEFAULT_PROJECT_ID : randomUniqueProjectId();
    }
    /**
     * Returns a new randomly-generated project id backed by a random UUID.
     */
    public static ProjectId randomUniqueProjectId() {
        return ProjectId.fromId(randomUUID());
    }
    /** Random base64-encoded UUID derived from the test's seeded random source (reproducible across reruns). */
    public static String randomUUID() {
        return UUIDs.randomBase64UUID(random());
    }
    /** Random Unicode string whose length in UTF-16 code units is between the given bounds, inclusive. */
    public static String randomUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
        return RandomizedTest.randomUnicodeOfLengthBetween(minCodeUnits, maxCodeUnits);
    }
    /** Random Unicode string of exactly {@code codeUnits} UTF-16 code units. */
    public static String randomUnicodeOfLength(int codeUnits) {
        return RandomizedTest.randomUnicodeOfLength(codeUnits);
    }
    /** Random Unicode string whose length in code points is between the given bounds, inclusive. */
    public static String randomUnicodeOfCodepointLengthBetween(int minCodePoints, int maxCodePoints) {
        return RandomizedTest.randomUnicodeOfCodepointLengthBetween(minCodePoints, maxCodePoints);
    }
    /** Random Unicode string of exactly {@code codePoints} code points. */
    public static String randomUnicodeOfCodepointLength(int codePoints) {
        return RandomizedTest.randomUnicodeOfCodepointLength(codePoints);
    }
    /** "Realistic" Unicode string (per RandomizedTest's generator) with code-unit length between the bounds, inclusive. */
    public static String randomRealisticUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
        return RandomizedTest.randomRealisticUnicodeOfLengthBetween(minCodeUnits, maxCodeUnits);
    }
    /** "Realistic" Unicode string of exactly {@code codeUnits} UTF-16 code units. */
    public static String randomRealisticUnicodeOfLength(int codeUnits) {
        return RandomizedTest.randomRealisticUnicodeOfLength(codeUnits);
    }
    /** "Realistic" Unicode string with code-point length between the bounds, inclusive. */
    public static String randomRealisticUnicodeOfCodepointLengthBetween(int minCodePoints, int maxCodePoints) {
        return RandomizedTest.randomRealisticUnicodeOfCodepointLengthBetween(minCodePoints, maxCodePoints);
    }
    /** "Realistic" Unicode string of exactly {@code codePoints} code points. */
    public static String randomRealisticUnicodeOfCodepointLength(int codePoints) {
        return RandomizedTest.randomRealisticUnicodeOfCodepointLength(codePoints);
    }
/**
* @param maxArraySize The maximum number of elements in the random array
* @param stringSize The length of each String in the array
* @param allowNull Whether the returned array may be null
* @param allowEmpty Whether the returned array may be empty (have zero elements)
*/
public static String[] generateRandomStringArray(int maxArraySize, int stringSize, boolean allowNull, boolean allowEmpty) {
if (allowNull && random().nextBoolean()) {
return null;
}
int arraySize = randomIntBetween(allowEmpty ? 0 : 1, maxArraySize);
String[] array = new String[arraySize];
for (int i = 0; i < arraySize; i++) {
array[i] = RandomStrings.randomAsciiOfLength(random(), stringSize);
}
return array;
}
    /** As {@link #generateRandomStringArray(int, int, boolean, boolean)} with empty arrays allowed. */
    public static String[] generateRandomStringArray(int maxArraySize, int stringSize, boolean allowNull) {
        return generateRandomStringArray(maxArraySize, stringSize, allowNull, true);
    }
    /** Random array with between 0 and {@code maxArraySize} elements, each produced by {@code valueConstructor}. */
    public static <T> T[] randomArray(int maxArraySize, IntFunction<T[]> arrayConstructor, Supplier<T> valueConstructor) {
        return randomArray(0, maxArraySize, arrayConstructor, valueConstructor);
    }
public static <T> T[] randomArray(int minArraySize, int maxArraySize, IntFunction<T[]> arrayConstructor, Supplier<T> valueConstructor) {
final int size = randomIntBetween(minArraySize, maxArraySize);
final T[] array = arrayConstructor.apply(size);
for (int i = 0; i < array.length; i++) {
array[i] = valueConstructor.get();
}
return array;
}
    /** Random list with between 0 and {@code maxListSize} elements, each produced by {@code valueConstructor}. */
    public static <T> List<T> randomList(int maxListSize, Supplier<T> valueConstructor) {
        return randomList(0, maxListSize, valueConstructor);
    }
public static <T> List<T> randomList(int minListSize, int maxListSize, Supplier<T> valueConstructor) {
final int size = randomIntBetween(minListSize, maxListSize);
List<T> list = new ArrayList<>();
for (int i = 0; i < size; i++) {
list.add(valueConstructor.get());
}
return list;
}
public static <K, V> Map<K, V> randomMap(int minMapSize, int maxMapSize, Supplier<Tuple<K, V>> entryConstructor) {
final int size = randomIntBetween(minMapSize, maxMapSize);
Map<K, V> list = Maps.newMapWithExpectedSize(size);
for (int i = 0; i < size; i++) {
Tuple<K, V> entry = entryConstructor.get();
list.put(entry.v1(), entry.v2());
}
return list;
}
    /** Random set built from a random list; duplicates collapse, so the result may be smaller than the drawn size. */
    public static <T> Set<T> randomSet(int minSetSize, int maxSetSize, Supplier<T> valueConstructor) {
        return new HashSet<>(randomList(minSetSize, maxSetSize, valueConstructor));
    }
    /** Random {@link TimeValue} with a duration in [lower, upper] and a unit picked from {@code units}. */
    public static TimeValue randomTimeValue(int lower, int upper, TimeUnit... units) {
        return new TimeValue(between(lower, upper), randomFrom(units));
    }
    /** Random {@link TimeValue} with a duration in [lower, upper] and any {@link TimeUnit}. */
    public static TimeValue randomTimeValue(int lower, int upper) {
        return randomTimeValue(lower, upper, TimeUnit.values());
    }
    /** Random {@link TimeValue} with a duration in [0, 1000] and any {@link TimeUnit}. */
    public static TimeValue randomTimeValue() {
        return randomTimeValue(0, 1000);
    }
    /** Random strictly-positive {@link TimeValue}: duration in [1, 1000] and any {@link TimeUnit}. */
    public static TimeValue randomPositiveTimeValue() {
        return randomTimeValue(1, 1000);
    }
    /**
     * Generate a random TimeValue that is strictly greater than the provided timeValue.
     * Chooses a random TimeUnit, adds between 1 and 1000 of that unit to {@code lowerBound}, and returns a TimeValue in that unit.
     *
     * @param lowerBound the value the result must exceed
     */
    public static TimeValue randomTimeValueGreaterThan(TimeValue lowerBound) {
        final TimeUnit randomUnit = randomFrom(TimeUnit.values());
        // This conversion might round down, but that's fine since we add at least 1 below, ensuring we still satisfy the "greater than".
        final long lowerBoundDuration = randomUnit.convert(lowerBound.duration(), lowerBound.timeUnit());
        final long duration = lowerBoundDuration + randomLongBetween(1, 1000);
        return new TimeValue(duration, randomUnit);
    }
    /**
     * Generate a random epoch millis in the range 1 to the last millisecond of 9999-12-31T23:59:59.999.
     */
    public static long randomMillisUpToYear9999() {
        return randomLongBetween(1, DateUtils.MAX_MILLIS_BEFORE_9999);
    }
    /**
     * Generate a random {@link TimeZone} from the ids available in java.util.
     */
    public static TimeZone randomTimeZone() {
        return TimeZone.getTimeZone(randomFrom(JAVA_TIMEZONE_IDS));
    }
    /**
     * Generate a random {@link ZoneId} from the ids available in java.time.
     */
    public static ZoneId randomZone() {
        return ZoneId.of(randomFrom(JAVA_ZONE_IDS));
    }
    /**
     * Generate a random valid date formatter pattern, picked from the known {@code FormatNames}.
     */
    public static String randomDateFormatterPattern() {
        return randomFrom(FormatNames.values()).getName();
    }
    /**
     * Generate a random string of at least 112 bits (14-20 ASCII letters) to satisfy the minimum
     * entropy requirement when running in FIPS mode.
     */
    public static String randomSecretKey() {
        return randomAlphaOfLengthBetween(14, 20);
    }
/**
* Randomly choose between {@link EsExecutors#DIRECT_EXECUTOR_SERVICE} (which does not fork), {@link ThreadPool#generic}, and one of the
* other named threadpool executors.
*/
public static Executor randomExecutor(ThreadPool threadPool, String... otherExecutorNames) {
final var choice = between(0, otherExecutorNames.length + 1);
if (choice < otherExecutorNames.length) {
return threadPool.executor(otherExecutorNames[choice]);
} else if (choice == otherExecutorNames.length) {
return threadPool.generic();
} else {
return EsExecutors.DIRECT_EXECUTOR_SERVICE;
}
}
    /**
     * Helper that, with 50% probability, invokes {@code consumer} with {@code value}; otherwise does nothing.
     */
    public static <T> void maybeSet(Consumer<T> consumer, T value) {
        if (randomBoolean()) {
            consumer.accept(value);
        }
    }
    /**
     * Helper to draw values from {@code randomSupplier} until one differs (by {@link Objects#equals}) from {@code input}.
     * Not for arrays — their equals is identity-based; use randomArrayOtherThan() instead (enforced by the assert).
     */
    public static <T> T randomValueOtherThan(T input, Supplier<T> randomSupplier) {
        assert input == null || input.getClass().isArray() == false
            : "randomValueOtherThan() does not work as expected with arrays, use randomArrayOtherThan() instead";
        return randomValueOtherThanMany(v -> Objects.equals(input, v), randomSupplier);
    }
    /** Draws object arrays from {@code randomSupplier} until one differs from {@code input} by {@link Arrays#equals(Object[], Object[])}. */
    public static <T> T[] randomArrayOtherThan(T[] input, Supplier<T[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws boolean arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static boolean[] randomArrayOtherThan(boolean[] input, Supplier<boolean[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws byte arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static byte[] randomArrayOtherThan(byte[] input, Supplier<byte[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws char arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static char[] randomArrayOtherThan(char[] input, Supplier<char[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws short arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static short[] randomArrayOtherThan(short[] input, Supplier<short[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws int arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static int[] randomArrayOtherThan(int[] input, Supplier<int[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws long arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static long[] randomArrayOtherThan(long[] input, Supplier<long[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws float arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static float[] randomArrayOtherThan(float[] input, Supplier<float[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
    /** Draws double arrays from {@code randomSupplier} until one differs (by content) from {@code input}. */
    public static double[] randomArrayOtherThan(double[] input, Supplier<double[]> randomSupplier) {
        return randomValueOtherThanMany(v -> Arrays.equals(input, v), randomSupplier);
    }
/**
* helper to get a random value in a certain range that's different from the input
*/
public static <T> T randomValueOtherThanMany(Predicate<T> input, Supplier<T> randomSupplier) {
T randomValue = null;
do {
randomValue = randomSupplier.get();
} while (input.test(randomValue));
return randomValue;
}
    /**
     * Runs the code block repeatedly for up to 10 seconds, waiting for no assertion to trip.
     */
    public static void assertBusy(CheckedRunnable<Exception> codeBlock) throws Exception {
        assertBusy(codeBlock, 10, TimeUnit.SECONDS);
    }
    /**
     * Runs the code block repeatedly for the provided interval, waiting for no assertions to trip. Retries on AssertionError
     * with exponential backoff (1ms, 2ms, 4ms, ...) until the provided time runs out. Any non-AssertionError exception
     * propagates immediately. On final failure, earlier AssertionErrors are attached as suppressed exceptions.
     *
     * @param codeBlock the assertions to retry
     * @param maxWaitTime total time budget
     * @param unit unit of {@code maxWaitTime}
     */
    public static void assertBusy(CheckedRunnable<Exception> codeBlock, long maxWaitTime, TimeUnit unit) throws Exception {
        long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit);
        // In case you've forgotten your high-school studies, log10(x) / log10(y) == log y(x)
        // i.e. iterations == log2(maxTimeInMillis), so doubling sleeps roughly fill the budget
        long iterations = Math.max(Math.round(Math.log10(maxTimeInMillis) / Math.log10(2)), 1);
        long timeInMillis = 1;
        long sum = 0;
        List<AssertionError> failures = new ArrayList<>();
        for (int i = 0; i < iterations; i++) {
            try {
                codeBlock.run();
                return;
            } catch (AssertionError e) {
                failures.add(e);
            }
            sum += timeInMillis;
            Thread.sleep(timeInMillis);
            timeInMillis *= 2;
        }
        // spend whatever budget remains, then make one final attempt
        timeInMillis = maxTimeInMillis - sum;
        Thread.sleep(Math.max(timeInMillis, 0));
        try {
            codeBlock.run();
        } catch (AssertionError e) {
            for (AssertionError failure : failures) {
                e.addSuppressed(failure);
            }
            throw e;
        }
    }
    /**
     * Periodically execute the supplied function until it returns true, or a timeout
     * is reached. This version uses a timeout of 10 seconds. If at all possible,
     * use {@link ESTestCase#assertBusy(CheckedRunnable)} instead.
     *
     * @param breakSupplier determines whether to return immediately or continue waiting.
     * @return the last value returned by <code>breakSupplier</code>
     */
    public static boolean waitUntil(BooleanSupplier breakSupplier) {
        return waitUntil(breakSupplier, 10, TimeUnit.SECONDS);
    }
// After 1s, we stop growing the sleep interval exponentially and just sleep 1s until maxWaitTime
private static final long AWAIT_BUSY_THRESHOLD = 1000L;
    /**
     * Periodically execute the supplied function until it returns true, or until the
     * specified maximum wait time has elapsed. Sleeps between attempts grow exponentially
     * (capped at {@code AWAIT_BUSY_THRESHOLD}); any leftover budget is slept before the
     * final check. If at all possible, use
     * {@link ESTestCase#assertBusy(CheckedRunnable)} instead.
     *
     * @param breakSupplier determines whether to return immediately or continue waiting.
     * @param maxWaitTime the maximum amount of time to wait
     * @param unit the unit of time for <code>maxWaitTime</code>
     * @return the last value returned by <code>breakSupplier</code>
     */
    public static boolean waitUntil(BooleanSupplier breakSupplier, long maxWaitTime, TimeUnit unit) {
        long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit);
        long timeInMillis = 1;
        long sum = 0;
        while (sum + timeInMillis < maxTimeInMillis) {
            if (breakSupplier.getAsBoolean()) {
                return true;
            }
            safeSleep(timeInMillis);
            sum += timeInMillis;
            timeInMillis = Math.min(AWAIT_BUSY_THRESHOLD, timeInMillis * 2);
        }
        // spend the remaining budget in one sleep, then check one last time
        timeInMillis = maxTimeInMillis - sum;
        safeSleep(Math.max(timeInMillis, 0));
        return breakSupplier.getAsBoolean();
    }
    /** Creates a {@link TestThreadPool} named after the current test, with the given extra executors. */
    protected TestThreadPool createThreadPool(ExecutorBuilder<?>... executorBuilders) {
        return new TestThreadPool(getTestName(), executorBuilders);
    }
public static boolean terminate(ExecutorService... services) {
boolean terminated = true;
for (ExecutorService service : services) {
if (service != null) {
terminated &= ThreadPool.terminate(service, 10, TimeUnit.SECONDS);
}
}
return terminated;
}
    /** Terminates the given thread pool, waiting up to 10 seconds; returns whether it terminated in time. */
    public static boolean terminate(ThreadPool threadPool) {
        return ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
    }
/**
* Returns a {@link java.nio.file.Path} pointing to the | is |
java | redisson__redisson | redisson-spring-boot-starter/src/test/java/org/redisson/spring/starter/RedissonSessionManagerAutoConfigurationTest.java | {
"start": 1374,
"end": 2400
} | class ____ {
@Container
public static final GenericContainer REDIS = new FixedHostPortGenericContainer("redis:latest")
.withFixedExposedPort(6379, 6379)
.withCommand("redis-server", "--requirepass", "123456");
@LocalServerPort
private int port;
@Autowired
private TestRestTemplate restTemplate;
@Test
public void testApp() {
List<String> cookies = this.restTemplate.getForEntity("http://localhost:" + port + "/api/set", String.class).getHeaders().get("Set-Cookie");
Assertions.assertThat(cookies).isNotEmpty();
HttpHeaders requestHeaders = new HttpHeaders();
requestHeaders.put(HttpHeaders.COOKIE, cookies);
HttpEntity<Void> request = new HttpEntity<>(requestHeaders);
ResponseEntity<String> response = restTemplate.exchange("http://localhost:" + port + "/api/get", HttpMethod.GET, request, String.class);
Assertions.assertThat(response.getBody()).isEqualTo("1");
}
}
| RedissonSessionManagerAutoConfigurationTest |
java | spring-projects__spring-boot | module/spring-boot-liquibase/src/main/java/org/springframework/boot/liquibase/actuate/endpoint/LiquibaseEndpoint.java | {
"start": 4079,
"end": 4584
} | class ____ implements OperationResponseBody {
private final Map<@Nullable String, ContextLiquibaseBeansDescriptor> contexts;
private LiquibaseBeansDescriptor(Map<@Nullable String, ContextLiquibaseBeansDescriptor> contexts) {
this.contexts = contexts;
}
public Map<@Nullable String, ContextLiquibaseBeansDescriptor> getContexts() {
return this.contexts;
}
}
/**
* Description of an application context's {@link SpringLiquibase} beans.
*/
public static final | LiquibaseBeansDescriptor |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchIndexServiceTests.java | {
"start": 1770,
"end": 1922
} | class ____ extends ESSingleNodeTestCase {
private AsyncTaskIndexService<TestAsyncResponse> indexService;
public static | AsyncSearchIndexServiceTests |
java | spring-projects__spring-boot | test-support/spring-boot-test-support/src/main/java/org/springframework/boot/testsupport/system/OutputCapture.java | {
"start": 6271,
"end": 6302
} | enum ____ {
OUT, ERR
}
}
| Type |
java | apache__dubbo | dubbo-metrics/dubbo-metrics-api/src/main/java/org/apache/dubbo/metrics/utils/MetricsSupportUtil.java | {
"start": 900,
"end": 1620
} | class ____ {
public static boolean isSupportMetrics() {
return isClassPresent("io.micrometer.core.instrument.MeterRegistry");
}
public static boolean isSupportPrometheus() {
return isClassPresent("io.micrometer.prometheus.PrometheusConfig")
&& isClassPresent("io.prometheus.client.exporter.BasicAuthHttpConnectionFactory")
&& isClassPresent("io.prometheus.client.exporter.HttpConnectionFactory")
&& isClassPresent("io.prometheus.client.exporter.PushGateway");
}
private static boolean isClassPresent(String className) {
return ClassUtils.isPresent(className, MetricsSupportUtil.class.getClassLoader());
}
}
| MetricsSupportUtil |
java | apache__maven | impl/maven-impl/src/test/java/org/apache/maven/impl/model/profile/AbstractProfileActivatorTest.java | {
"start": 1756,
"end": 3110
} | class ____ extends DefaultRootLocator {
@Override
protected boolean isRootDirectory(Path dir) {
return true;
}
}
protected T activator;
@BeforeEach
abstract void setUp() throws Exception;
@AfterEach
void tearDown() throws Exception {
activator = null;
}
protected DefaultProfileActivationContext newContext() {
return new DefaultProfileActivationContext(
new DefaultPathTranslator(), new FakeRootLocator(), new DefaultInterpolator());
}
protected ProfileActivationContext newContext(
Map<String, String> userProperties, Map<String, String> systemProperties) {
return newContext()
.setUserProperties(userProperties)
.setSystemProperties(systemProperties)
.setModel(Model.newInstance());
}
protected void assertActivation(boolean active, Profile profile, ProfileActivationContext context) {
SimpleProblemCollector problems = new SimpleProblemCollector();
boolean res = activator.isActive(profile, context, problems);
assertEquals(0, problems.getErrors().size(), problems.getErrors().toString());
assertEquals(0, problems.getWarnings().size(), problems.getWarnings().toString());
assertEquals(active, res);
}
}
| FakeRootLocator |
java | grpc__grpc-java | core/src/test/java/io/grpc/internal/InternalSubchannelTest.java | {
"start": 67942,
"end": 67993
} | class ____ extends SocketAddress {}
}
| FakeSocketAddress |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/jpa/internal/util/PersistenceUtilHelper.java | {
"start": 5181,
"end": 7682
} | class ____ resolution)
entity = lazyInitializer.getImplementation();
}
sureFromUs = true;
}
else {
sureFromUs = false;
}
// we are instrumenting, but we can't assume we are the only ones
if ( isPersistentAttributeInterceptable( entity ) ) {
final BytecodeLazyAttributeInterceptor interceptor =
extractInterceptor( asPersistentAttributeInterceptable( entity ) );
final boolean isInitialized = interceptor == null || interceptor.isAttributeLoaded( attributeName );
return getLoadState( entity, attributeName, cache, isInitialized, interceptor, sureFromUs );
}
else {
return UNKNOWN;
}
}
private static LoadState getLoadState(
Object entity, String attributeName,
MetadataCache cache,
boolean isInitialized,
BytecodeLazyAttributeInterceptor interceptor,
boolean sureFromUs) {
if ( isInitialized && interceptor != null) {
// attributeName is loaded according to bytecode enhancement, but is it loaded as far as association?
// it's ours, we can read
return getLoadState( entity, attributeName, cache );
}
else if ( interceptor != null ) {
return NOT_LOADED;
}
else if ( sureFromUs ) {
// property is loaded according to bytecode enhancement, but is it loaded as far as association?
// it's ours, we can read
return getLoadState( entity, attributeName, cache );
}
else {
return UNKNOWN;
}
}
private static LoadState getLoadState(Object entity, String attributeName, MetadataCache cache) {
try {
final LoadState state = getLoadState( getAttributeValue( entity, attributeName, cache ) );
// it's ours so we know it's loaded
return state == UNKNOWN ? LOADED : state;
}
catch (AttributeExtractionException ignore) {
return UNKNOWN;
}
}
/**
* Is the given attribute (by name) loaded? This form must take care to not access the attribute (trigger
* initialization).
*
* @param entity The entity
* @param attributeName The name of the attribute to check
* @param cache The cache we maintain of attribute resolutions
*
* @return The LoadState
*/
public static LoadState isLoadedWithReference(Object entity, String attributeName, MetadataCache cache) {
final LazyInitializer lazyInitializer = extractLazyInitializer( entity );
if ( lazyInitializer != null ) {
if ( lazyInitializer.isUninitialized() ) {
// we have an uninitialized proxy, the attribute cannot be loaded
return NOT_LOADED;
}
else {
// swap the proxy with target (for proper | name |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/PropertyBindingSupport.java | {
"start": 71673,
"end": 75816
} | class ____ so we need to use the bean type for that
factoryClass = type;
type = Object.class;
}
if (parameters != null) {
Class<?> target = factoryClass != null ? factoryClass : type;
answer = newInstanceFactoryParameters(camelContext, target, factoryMethod, parameters);
} else {
answer = camelContext.getInjector().newInstance(type, factoryClass, factoryMethod);
}
if (answer == null) {
throw new IllegalStateException(
"Cannot create bean instance using factory method: " + className + "#" + factoryMethod);
}
} else if (parameters != null) {
// special to support constructor parameters
answer = newInstanceConstructorParameters(camelContext, type, parameters);
} else {
answer = camelContext.getInjector().newInstance(type);
}
if (answer == null) {
throw new IllegalStateException("Cannot create instance of class: " + className);
}
} else if (strval.startsWith("#type:")) {
// its reference by type, so lookup the actual value and use it if there is only one instance in the registry
String typeName = strval.substring(6);
Class<?> type = camelContext.getClassResolver().resolveMandatoryClass(typeName);
answer = camelContext.getRegistry().mandatoryFindSingleByType(type);
} else if (strval.startsWith("#bean:")) {
String key = strval.substring(6);
answer = CamelContextHelper.mandatoryLookup(camelContext, key);
} else if (strval.startsWith("#valueAs(")) {
String text = strval.substring(8);
String typeName = StringHelper.between(text, "(", ")");
String constant = StringHelper.after(text, ":");
if (typeName == null || constant == null) {
throw new IllegalArgumentException("Illegal syntax: " + text + " when using function #valueAs(type):value");
}
Class<?> type = camelContext.getClassResolver().resolveMandatoryClass(typeName);
answer = camelContext.getTypeConverter().mandatoryConvertTo(type, constant);
}
return answer;
}
private static String undashKey(String key) {
// as we un-dash property keys then we need to prepare this for the configurer (reflection does this automatic)
key = StringHelper.dashToCamelCase(key);
return key;
}
private static boolean isDotKey(String key) {
// we only want to know if there is a dot in OGNL path, so any map keys [iso.code] is accepted
if (key.indexOf('[') == -1 && key.indexOf('.') != -1) {
return true;
}
boolean mapKey = false;
for (char ch : key.toCharArray()) {
if (ch == '[') {
mapKey = true;
} else if (ch == ']') {
mapKey = false;
}
if (ch == '.' && !mapKey) {
return true;
}
}
return false;
}
private static String[] splitKey(String key) {
// split the key into parts separated by dot (but handle map keys [iso.code] etc.
List<String> parts = new ArrayList<>();
boolean mapKey = false;
StringBuilder sb = new StringBuilder(key.length() + 16);
for (char ch : key.toCharArray()) {
if (ch == '[') {
mapKey = true;
} else if (ch == ']') {
mapKey = false;
}
if (ch == '.' && !mapKey) {
// dont include the separator dot
parts.add(sb.toString());
sb.setLength(0);
} else {
sb.append(ch);
}
}
if (!sb.isEmpty()) {
parts.add(sb.toString());
}
return parts.toArray(new String[0]);
}
@FunctionalInterface
public | given |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/secured/ConfigurationUtilsTest.java | {
"start": 1453,
"end": 8231
} | class ____ extends OAuthBearerTest {
private static final String URL_CONFIG_NAME = "fictitious.url.config";
private static final String FILE_CONFIG_NAME = "fictitious.file.config";
@AfterEach
public void tearDown() throws Exception {
System.clearProperty(ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG);
}
@Test
public void testUrl() {
testUrl("http://www.example.com");
}
@Test
public void testUrlWithSuperfluousWhitespace() {
testUrl(String.format(" %s ", "http://www.example.com"));
}
@Test
public void testUrlCaseInsensitivity() {
testUrl("HTTPS://WWW.EXAMPLE.COM");
}
@Test
public void testUrlFile() {
assertThrowsWithMessage(ConfigException.class, () -> testFileUrl("file:///tmp/foo.txt"), "that doesn't exist");
}
@Test
public void testUrlFullPath() {
testUrl("https://myidp.example.com/oauth2/default/v1/token");
}
@Test
public void testUrlMissingProtocol() {
assertThrowsWithMessage(ConfigException.class, () -> testUrl("www.example.com"), "no protocol");
}
@Test
public void testUrlInvalidProtocol() {
assertThrowsWithMessage(ConfigException.class, () -> testFileUrl("ftp://ftp.example.com"), "invalid protocol");
}
@Test
public void testUrlNull() {
assertThrowsWithMessage(ConfigException.class, () -> testUrl(null), "is required");
}
@Test
public void testUrlEmptyString() {
assertThrowsWithMessage(ConfigException.class, () -> testUrl(""), "is required");
}
@Test
public void testUrlWhitespace() {
assertThrowsWithMessage(ConfigException.class, () -> testUrl(" "), "is required");
}
@Test
public void testFile() throws IOException {
File file = TestUtils.tempFile("some contents!");
testFile(file.getAbsolutePath());
}
@Test
public void testFileWithSuperfluousWhitespace() throws IOException {
File file = TestUtils.tempFile();
testFile(String.format(" %s ", file.getAbsolutePath()));
}
@Test
public void testFileDoesNotExist() {
assertThrowsWithMessage(ConfigException.class, () -> testFile(new File("/tmp/not/a/real/file.txt").toURI().toURL().toString()), "that doesn't exist");
}
@Test
public void testFileUnreadable() throws IOException {
File file = TestUtils.tempFile();
if (!file.setReadable(false))
throw new IllegalStateException(String.format("Can't test file permissions as test couldn't programmatically make temp file %s un-readable", file.getAbsolutePath()));
assertThrowsWithMessage(ConfigException.class, () -> testFile(file.getAbsolutePath()), "that doesn't have read permission");
}
@Test
public void testFileNull() {
assertThrowsWithMessage(ConfigException.class, () -> testFile(null), "is required");
}
@Test
public void testFileEmptyString() {
assertThrowsWithMessage(ConfigException.class, () -> testFile(""), "is required");
}
@Test
public void testFileWhitespace() {
assertThrowsWithMessage(ConfigException.class, () -> testFile(" "), "is required");
}
@Test
public void testThrowIfURLIsNotAllowed() {
String url = "http://www.example.com";
String fileUrl = "file:///etc/passwd";
ConfigurationUtils cu = new ConfigurationUtils(Map.of());
// By default, no URL is allowed
assertThrowsWithMessage(ConfigException.class, () -> cu.throwIfURLIsNotAllowed(URL_CONFIG_NAME, url),
ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG);
assertThrowsWithMessage(ConfigException.class, () -> cu.throwIfURLIsNotAllowed(FILE_CONFIG_NAME, fileUrl),
ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG);
// add one url into allowed list
System.setProperty(ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG, url);
assertDoesNotThrow(() -> cu.throwIfURLIsNotAllowed(URL_CONFIG_NAME, url));
assertThrowsWithMessage(ConfigException.class, () -> cu.throwIfURLIsNotAllowed(FILE_CONFIG_NAME, fileUrl),
ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG);
// add all urls into allowed list
System.setProperty(ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG, url + "," + fileUrl);
assertDoesNotThrow(() -> cu.throwIfURLIsNotAllowed(URL_CONFIG_NAME, url));
assertDoesNotThrow(() -> cu.throwIfURLIsNotAllowed(FILE_CONFIG_NAME, fileUrl));
}
@Test
public void testThrowIfFileIsNotAllowed() {
String file1 = "file1";
String file2 = "file2";
ConfigurationUtils cu = new ConfigurationUtils(Map.of());
// By default, no file is allowed
assertThrowsWithMessage(ConfigException.class, () -> cu.throwIfFileIsNotAllowed(FILE_CONFIG_NAME, file1),
ALLOWED_SASL_OAUTHBEARER_FILES_CONFIG);
assertThrowsWithMessage(ConfigException.class, () -> cu.throwIfFileIsNotAllowed(FILE_CONFIG_NAME, file1),
ALLOWED_SASL_OAUTHBEARER_FILES_CONFIG);
// add one file into allowed list
System.setProperty(ALLOWED_SASL_OAUTHBEARER_FILES_CONFIG, file1);
assertDoesNotThrow(() -> cu.throwIfFileIsNotAllowed(FILE_CONFIG_NAME, file1));
assertThrowsWithMessage(ConfigException.class, () -> cu.throwIfFileIsNotAllowed(FILE_CONFIG_NAME, file2),
ALLOWED_SASL_OAUTHBEARER_FILES_CONFIG);
// add all files into allowed list
System.setProperty(ALLOWED_SASL_OAUTHBEARER_FILES_CONFIG, file1 + "," + file2);
assertDoesNotThrow(() -> cu.throwIfFileIsNotAllowed(FILE_CONFIG_NAME, file1));
assertDoesNotThrow(() -> cu.throwIfFileIsNotAllowed(FILE_CONFIG_NAME, file2));
}
private void testUrl(String value) {
System.setProperty(ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG, value == null ? "" : value);
Map<String, Object> configs = Collections.singletonMap(URL_CONFIG_NAME, value);
ConfigurationUtils cu = new ConfigurationUtils(configs);
cu.validateUrl(URL_CONFIG_NAME);
}
private void testFile(String value) {
System.setProperty(ALLOWED_SASL_OAUTHBEARER_FILES_CONFIG, value == null ? "" : value);
Map<String, Object> configs = Collections.singletonMap(FILE_CONFIG_NAME, value);
ConfigurationUtils cu = new ConfigurationUtils(configs);
cu.validateFile(FILE_CONFIG_NAME);
}
private void testFileUrl(String value) {
System.setProperty(ALLOWED_SASL_OAUTHBEARER_URLS_CONFIG, value == null ? "" : value);
Map<String, Object> configs = Collections.singletonMap(URL_CONFIG_NAME, value);
ConfigurationUtils cu = new ConfigurationUtils(configs);
cu.validateFileUrl(URL_CONFIG_NAME);
}
}
| ConfigurationUtilsTest |
java | google__guava | android/guava-tests/test/com/google/common/collect/UnmodifiableTableRowMapTest.java | {
"start": 924,
"end": 1541
} | class ____ extends RowMapTests {
public UnmodifiableTableRowMapTest() {
super(false, false, false, false);
}
@Override
Table<String, Integer, Character> makeTable() {
Table<String, Integer, Character> original = HashBasedTable.create();
return unmodifiableTable(original);
}
@Override
protected Map<String, Map<Integer, Character>> makePopulatedMap() {
Table<String, Integer, Character> table = HashBasedTable.create();
table.put("foo", 1, 'a');
table.put("bar", 1, 'b');
table.put("foo", 3, 'c');
return unmodifiableTable(table).rowMap();
}
}
| UnmodifiableTableRowMapTest |
java | apache__flink | flink-yarn/src/main/java/org/apache/flink/yarn/configuration/YarnDeploymentTarget.java | {
"start": 1142,
"end": 1231
} | class ____ all the supported deployment target names for Yarn. */
@Internal
public | containing |
java | spring-projects__spring-boot | module/spring-boot-mongodb/src/test/java/org/springframework/boot/mongodb/autoconfigure/MongoAutoConfigurationTests.java | {
"start": 10783,
"end": 11080
} | class ____ {
@Bean
MongoClientSettings mongoClientSettings() {
return MongoClientSettings.builder()
.applyToSocketSettings((socketSettings) -> socketSettings.connectTimeout(300, TimeUnit.MILLISECONDS))
.build();
}
}
@Configuration(proxyBeanMethods = false)
static | SettingsConfig |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/io/StreamMultipleInputProcessorFactory.java | {
"start": 7488,
"end": 12819
} | interface ____ not supported with sorting inputs");
}
StreamTaskInput[] sortingInputs =
IntStream.range(0, inputsCount)
.filter(idx -> requiresSorting(inputConfigs[idx]))
.mapToObj(idx -> inputs[idx])
.toArray(StreamTaskInput[]::new);
KeySelector[] sortingInputKeySelectors =
IntStream.range(0, inputsCount)
.filter(idx -> requiresSorting(inputConfigs[idx]))
.mapToObj(idx -> streamConfig.getStatePartitioner(idx, userClassloader))
.toArray(KeySelector[]::new);
TypeSerializer[] sortingInputKeySerializers =
IntStream.range(0, inputsCount)
.filter(idx -> requiresSorting(inputConfigs[idx]))
.mapToObj(idx -> streamConfig.getTypeSerializerIn(idx, userClassloader))
.toArray(TypeSerializer[]::new);
StreamTaskInput[] passThroughInputs =
IntStream.range(0, inputsCount)
.filter(idx -> !requiresSorting(inputConfigs[idx]))
.mapToObj(idx -> inputs[idx])
.toArray(StreamTaskInput[]::new);
SelectableSortingInputs selectableSortingInputs =
MultiInputSortingDataInput.wrapInputs(
ownerTask,
sortingInputs,
sortingInputKeySelectors,
sortingInputKeySerializers,
streamConfig.getStateKeySerializer(userClassloader),
passThroughInputs,
memoryManager,
ioManager,
executionConfig.isObjectReuseEnabled(),
streamConfig.getManagedMemoryFractionOperatorUseCaseOfSlot(
ManagedMemoryUseCase.OPERATOR,
jobConfig,
taskManagerConfig,
userClassloader),
taskManagerConfig,
executionConfig);
StreamTaskInput<?>[] sortedInputs = selectableSortingInputs.getSortedInputs();
StreamTaskInput<?>[] passedThroughInputs =
selectableSortingInputs.getPassThroughInputs();
int sortedIndex = 0;
int passThroughIndex = 0;
for (int i = 0; i < inputs.length; i++) {
if (requiresSorting(inputConfigs[i])) {
inputs[i] = sortedInputs[sortedIndex];
sortedIndex++;
} else {
inputs[i] = passedThroughInputs[passThroughIndex];
passThroughIndex++;
}
}
inputSelectable = selectableSortingInputs.getInputSelectable();
}
for (int i = 0; i < inputsCount; i++) {
StreamConfig.InputConfig configuredInput = configuredInputs[i];
if (configuredInput instanceof StreamConfig.NetworkInputConfig) {
StreamTaskNetworkOutput dataOutput =
new StreamTaskNetworkOutput<>(
operatorChain.getFinishedOnRestoreInputOrDefault(
operatorInputs.get(i)),
inputWatermarkGauges[i],
mainOperatorRecordsIn,
networkRecordsIn);
inputProcessors[i] =
new StreamOneInputProcessor(inputs[i], dataOutput, operatorChain);
} else if (configuredInput instanceof StreamConfig.SourceInputConfig) {
StreamConfig.SourceInputConfig sourceInput =
(StreamConfig.SourceInputConfig) configuredInput;
OperatorChain.ChainedSource chainedSource =
operatorChain.getChainedSource(sourceInput);
inputProcessors[i] =
new StreamOneInputProcessor(
inputs[i],
new StreamTaskSourceOutput(
chainedSource.getSourceOutput(),
inputWatermarkGauges[i],
chainedSource
.getSourceTaskInput()
.getOperator()
.getSourceMetricGroup()),
operatorChain);
} else {
throw new UnsupportedOperationException("Unknown input type: " + configuredInput);
}
}
return new StreamMultipleInputProcessor(
new MultipleInputSelectionHandler(inputSelectable, inputsCount), inputProcessors);
}
/**
* The network data output implementation used for processing stream elements from {@link
* StreamTaskNetworkInput} in two input selective processor.
*/
private static | is |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ReflectionUtils.java | {
"start": 1415,
"end": 6268
} | class ____ {
/**
* Pre-built {@link MethodFilter} that matches all non-bridge non-synthetic methods
* which are not declared on {@code java.lang.Object}.
* @since 3.0.5
*/
public static final MethodFilter USER_DECLARED_METHODS =
(method -> !method.isBridge() && !method.isSynthetic() && (method.getDeclaringClass() != Object.class));
/**
* Pre-built FieldFilter that matches all non-static, non-final fields.
*/
public static final FieldFilter COPYABLE_FIELDS =
(field -> !(Modifier.isStatic(field.getModifiers()) || Modifier.isFinal(field.getModifiers())));
/**
* Naming prefix for CGLIB-renamed methods.
* @see #isCglibRenamedMethod
*/
private static final String CGLIB_RENAMED_METHOD_PREFIX = "CGLIB$";
private static final Class<?>[] EMPTY_CLASS_ARRAY = new Class<?>[0];
private static final Method[] EMPTY_METHOD_ARRAY = new Method[0];
private static final Field[] EMPTY_FIELD_ARRAY = new Field[0];
private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];
/**
* Cache for {@link Class#getDeclaredMethods()} plus equivalent default methods
* from Java 8 based interfaces, allowing for fast iteration.
*/
private static final Map<Class<?>, Method[]> declaredMethodsCache = new ConcurrentReferenceHashMap<>(256);
/**
* Cache for {@link Class#getDeclaredFields()}, allowing for fast iteration.
*/
private static final Map<Class<?>, Field[]> declaredFieldsCache = new ConcurrentReferenceHashMap<>(256);
// Exception handling
/**
* Handle the given reflection exception.
* <p>Should only be called if no checked exception is expected to be thrown
* by a target method, or if an error occurs while accessing a method or field.
* <p>Throws the underlying RuntimeException or Error in case of an
* InvocationTargetException with such a root cause. Throws an
* IllegalStateException with an appropriate message or
* UndeclaredThrowableException otherwise.
* @param ex the reflection exception to handle
*/
public static void handleReflectionException(Exception ex) {
if (ex instanceof NoSuchMethodException) {
throw new IllegalStateException("Method not found: " + ex.getMessage());
}
if (ex instanceof IllegalAccessException) {
throw new IllegalStateException("Could not access method or field: " + ex.getMessage());
}
if (ex instanceof InvocationTargetException invocationTargetException) {
handleInvocationTargetException(invocationTargetException);
}
if (ex instanceof RuntimeException runtimeException) {
throw runtimeException;
}
throw new UndeclaredThrowableException(ex);
}
/**
* Handle the given invocation target exception. Should only be called if no
* checked exception is expected to be thrown by the target method.
* <p>Throws the underlying RuntimeException or Error in case of such a root
* cause. Throws an UndeclaredThrowableException otherwise.
* @param ex the invocation target exception to handle
*/
public static void handleInvocationTargetException(InvocationTargetException ex) {
rethrowRuntimeException(ex.getTargetException());
}
/**
* Rethrow the given {@link Throwable exception}, which is presumably the
* <em>target exception</em> of an {@link InvocationTargetException}.
* Should only be called if no checked exception is expected to be thrown
* by the target method.
* <p>Rethrows the underlying exception cast to a {@link RuntimeException} or
* {@link Error} if appropriate; otherwise, throws an
* {@link UndeclaredThrowableException}.
* @param ex the exception to rethrow
* @throws RuntimeException the rethrown exception
*/
@Contract("_ -> fail")
public static void rethrowRuntimeException(@Nullable Throwable ex) {
if (ex instanceof RuntimeException runtimeException) {
throw runtimeException;
}
if (ex instanceof Error error) {
throw error;
}
throw new UndeclaredThrowableException(ex);
}
/**
* Rethrow the given {@link Throwable exception}, which is presumably the
* <em>target exception</em> of an {@link InvocationTargetException}.
* Should only be called if no checked exception is expected to be thrown
* by the target method.
* <p>Rethrows the underlying exception cast to an {@link Exception} or
* {@link Error} if appropriate; otherwise, throws an
* {@link UndeclaredThrowableException}.
* @param throwable the exception to rethrow
* @throws Exception the rethrown exception (in case of a checked exception)
*/
@Contract("_ -> fail")
public static void rethrowException(@Nullable Throwable throwable) throws Exception {
if (throwable instanceof Exception exception) {
throw exception;
}
if (throwable instanceof Error error) {
throw error;
}
throw new UndeclaredThrowableException(throwable);
}
// Constructor handling
/**
* Obtain an accessible constructor for the given | ReflectionUtils |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/boot/SessionFactoryNamingTests.java | {
"start": 999,
"end": 3113
} | class ____ {
@Test
@DomainModel
@ServiceRegistry( settings = {
@Setting( name = AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, value = "true" ),
@Setting( name = AvailableSettings.SESSION_FACTORY_JNDI_NAME, value = "jndi-named" )
} )
@SessionFactory()
void testExplicitJndiName(SessionFactoryScope scope, MessageKeyWatcher logWatcher) {
scope.getSessionFactory();
assertThat( logWatcher.wasTriggered() ).isTrue();
}
@Test
@DomainModel
@ServiceRegistry( settings = @Setting( name = AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, value = "true" ) )
@SessionFactory( sessionFactoryName = "named" )
void testSessionFactoryName(SessionFactoryScope scope, MessageKeyWatcher logWatcher) {
scope.getSessionFactory();
assertThat( logWatcher.wasTriggered() ).isTrue();
}
@Test
@DomainModel
@ServiceRegistry( settings = @Setting( name = AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, value = "false" ) )
@SessionFactory( sessionFactoryName = "named" )
void testNonJndiSessionFactoryName(SessionFactoryScope scope, MessageKeyWatcher logWatcher) {
scope.getSessionFactory();
assertThat( logWatcher.wasTriggered() ).isFalse();
}
@Test
@DomainModel
@ServiceRegistry( settings = {
@Setting( name = AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, value = "true" ),
// mimics the persistence.xml persistence-unit name
@Setting( name = PersistenceSettings.PERSISTENCE_UNIT_NAME, value = "named-pu" ),
} )
@SessionFactory
void testPuName(SessionFactoryScope scope, MessageKeyWatcher logWatcher) {
scope.getSessionFactory();
assertThat( logWatcher.wasTriggered() ).isFalse();
}
@Test
@DomainModel
@ServiceRegistry( settings = {
@Setting( name = AvailableSettings.SESSION_FACTORY_NAME_IS_JNDI, value = "false" ),
// mimics the persistence.xml persistence-unit name
@Setting( name = PersistenceSettings.PERSISTENCE_UNIT_NAME, value = "named-pu" ),
} )
@SessionFactory
void testNonJndiPuName(SessionFactoryScope scope, MessageKeyWatcher logWatcher) {
scope.getSessionFactory();
assertThat( logWatcher.wasTriggered() ).isFalse();
}
}
| SessionFactoryNamingTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/json/JsonQueryFunction.java | {
"start": 1967,
"end": 8300
} | class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
protected final boolean supportsJsonPathExpression;
protected final boolean supportsJsonPathPassingClause;
public JsonQueryFunction(
TypeConfiguration typeConfiguration,
boolean supportsJsonPathExpression,
boolean supportsJsonPathPassingClause) {
super(
"json_query",
FunctionKind.NORMAL,
StandardArgumentsValidators.composite(
new ArgumentTypesValidator( StandardArgumentsValidators.between( 2, 3 ), IMPLICIT_JSON, STRING, ANY )
),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON )
),
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, JSON, STRING )
);
this.supportsJsonPathExpression = supportsJsonPathExpression;
this.supportsJsonPathPassingClause = supportsJsonPathPassingClause;
}
@Override
protected <T> SelfRenderingSqmFunction<T> generateSqmFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
//noinspection unchecked
return (SelfRenderingSqmFunction<T>) new SqmJsonQueryExpression(
this,
this,
arguments,
(ReturnableType<String>) impliedResultType,
getArgumentsValidator(),
getReturnTypeResolver(),
queryEngine.getCriteriaBuilder(),
getName()
);
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
render( sqlAppender, JsonQueryArguments.extract( sqlAstArguments ), returnType, walker );
}
protected void render(
SqlAppender sqlAppender,
JsonQueryArguments arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( "json_query(" );
arguments.jsonDocument().accept( walker );
sqlAppender.appendSql( ',' );
final JsonPathPassingClause passingClause = arguments.passingClause();
if ( supportsJsonPathPassingClause || passingClause == null ) {
if ( supportsJsonPathExpression ) {
arguments.jsonPath().accept( walker );
}
else {
walker.getSessionFactory().getJdbcServices().getDialect().appendLiteral(
sqlAppender,
walker.getLiteralValue( arguments.jsonPath() )
);
}
if ( passingClause != null ) {
sqlAppender.appendSql( " passing " );
final Map<String, Expression> passingExpressions = passingClause.getPassingExpressions();
final Iterator<Map.Entry<String, Expression>> iterator = passingExpressions.entrySet().iterator();
Map.Entry<String, Expression> entry = iterator.next();
entry.getValue().accept( walker );
sqlAppender.appendSql( " as " );
sqlAppender.appendDoubleQuoteEscapedString( entry.getKey() );
while ( iterator.hasNext() ) {
entry = iterator.next();
sqlAppender.appendSql( ',' );
entry.getValue().accept( walker );
sqlAppender.appendSql( " as " );
sqlAppender.appendDoubleQuoteEscapedString( entry.getKey() );
}
}
}
else {
JsonPathHelper.appendInlinedJsonPathIncludingPassingClause(
sqlAppender,
"",
arguments.jsonPath(),
passingClause,
walker
);
}
if ( arguments.wrapMode() != null ) {
switch ( arguments.wrapMode() ) {
case WITH_WRAPPER -> sqlAppender.appendSql( " with wrapper" );
case WITHOUT_WRAPPER -> sqlAppender.appendSql( " without wrapper" );
case WITH_CONDITIONAL_WRAPPER -> sqlAppender.appendSql( " with conditional wrapper" );
}
}
if ( arguments.errorBehavior() != null ) {
switch ( arguments.errorBehavior() ) {
case ERROR -> sqlAppender.appendSql( " error on error" );
case NULL -> sqlAppender.appendSql( " null on error" );
case EMPTY_OBJECT -> sqlAppender.appendSql( " empty object on error" );
case EMPTY_ARRAY -> sqlAppender.appendSql( " empty array on error" );
}
}
if ( arguments.emptyBehavior() != null ) {
switch ( arguments.emptyBehavior() ) {
case ERROR -> sqlAppender.appendSql( " error on empty" );
case NULL -> sqlAppender.appendSql( " null on empty" );
case EMPTY_OBJECT -> sqlAppender.appendSql( " empty object on empty" );
case EMPTY_ARRAY -> sqlAppender.appendSql( " empty array on empty" );
}
}
sqlAppender.appendSql( ')' );
}
protected record JsonQueryArguments(
Expression jsonDocument,
Expression jsonPath,
boolean isJsonType,
@Nullable JsonPathPassingClause passingClause,
@Nullable JsonQueryWrapMode wrapMode,
@Nullable JsonQueryErrorBehavior errorBehavior,
@Nullable JsonQueryEmptyBehavior emptyBehavior) {
public static JsonQueryArguments extract(List<? extends SqlAstNode> sqlAstArguments) {
int nextIndex = 2;
JsonPathPassingClause passingClause = null;
JsonQueryWrapMode wrapMode = null;
JsonQueryErrorBehavior errorBehavior = null;
JsonQueryEmptyBehavior emptyBehavior = null;
if ( nextIndex < sqlAstArguments.size() ) {
final SqlAstNode node = sqlAstArguments.get( nextIndex );
if ( node instanceof JsonPathPassingClause jsonPathPassingClause ) {
passingClause = jsonPathPassingClause;
nextIndex++;
}
}
if ( nextIndex < sqlAstArguments.size() ) {
final SqlAstNode node = sqlAstArguments.get( nextIndex );
if ( node instanceof JsonQueryWrapMode jsonQueryWrapMode ) {
wrapMode = jsonQueryWrapMode;
nextIndex++;
}
}
if ( nextIndex < sqlAstArguments.size() ) {
final SqlAstNode node = sqlAstArguments.get( nextIndex );
if ( node instanceof JsonQueryErrorBehavior jsonQueryErrorBehavior ) {
errorBehavior = jsonQueryErrorBehavior;
nextIndex++;
}
}
if ( nextIndex < sqlAstArguments.size() ) {
final SqlAstNode node = sqlAstArguments.get( nextIndex );
if ( node instanceof JsonQueryEmptyBehavior jsonQueryEmptyBehavior ) {
emptyBehavior = jsonQueryEmptyBehavior;
}
}
final Expression jsonDocument = (Expression) sqlAstArguments.get( 0 );
return new JsonQueryArguments(
jsonDocument,
(Expression) sqlAstArguments.get( 1 ),
jsonDocument.getExpressionType() != null
&& jsonDocument.getExpressionType().getSingleJdbcMapping().getJdbcType().isJson(),
passingClause,
wrapMode,
errorBehavior,
emptyBehavior
);
}
}
}
| JsonQueryFunction |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java | {
"start": 503,
"end": 3666
} | class ____ extends ESTestCase {
private static final String ELASTIC_INFERENCE_SERVICE_URL = "http://elastic-inference-service";
private static final String ELASTIC_INFERENCE_SERVICE_LEGACY_URL = "http://elastic-inference-service-legacy";
public static ElasticInferenceServiceSettings create(String elasticInferenceServiceUrl) {
var settings = Settings.builder()
.put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), elasticInferenceServiceUrl)
.build();
return new ElasticInferenceServiceSettings(settings);
}
public static ElasticInferenceServiceSettings create(
String elasticInferenceServiceUrl,
TimeValue authorizationRequestInterval,
TimeValue maxJitter,
boolean periodicAuthorizationEnabled
) {
var settings = Settings.builder()
.put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), elasticInferenceServiceUrl)
.put(ElasticInferenceServiceSettings.AUTHORIZATION_REQUEST_INTERVAL.getKey(), authorizationRequestInterval)
.put(ElasticInferenceServiceSettings.MAX_AUTHORIZATION_REQUEST_JITTER.getKey(), maxJitter)
.put(ElasticInferenceServiceSettings.PERIODIC_AUTHORIZATION_ENABLED.getKey(), periodicAuthorizationEnabled)
.build();
return new ElasticInferenceServiceSettings(settings);
}
public void testGetElasticInferenceServiceUrl_WithUrlSetting() {
var settings = Settings.builder()
.put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), ELASTIC_INFERENCE_SERVICE_URL)
.build();
var eisSettings = new ElasticInferenceServiceSettings(settings);
assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_URL));
}
public void testGetElasticInferenceServiceUrl_WithLegacyUrlSetting() {
var settings = Settings.builder()
.put(ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey(), ELASTIC_INFERENCE_SERVICE_LEGACY_URL)
.build();
var eisSettings = new ElasticInferenceServiceSettings(settings);
assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_LEGACY_URL));
}
public void testGetElasticInferenceServiceUrl_WithUrlSetting_TakesPrecedenceOverLegacyUrlSetting() {
var settings = Settings.builder()
.put(ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey(), ELASTIC_INFERENCE_SERVICE_LEGACY_URL)
.put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), ELASTIC_INFERENCE_SERVICE_URL)
.build();
var eisSettings = new ElasticInferenceServiceSettings(settings);
assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_URL));
}
public void testGetElasticInferenceServiceUrl_WithoutUrlSetting() {
var eisSettings = new ElasticInferenceServiceSettings(Settings.EMPTY);
assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(""));
}
}
| ElasticInferenceServiceSettingsTests |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java | {
"start": 58457,
"end": 60217
} | interface ____ plugging in classes that will be
* notified of new metric creation. The JmxReporter is always included
* to register JMX statistics.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param metricReporters the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder metricReporters(java.lang.String metricReporters) {
doSetProperty("metricReporters", metricReporters);
return this;
}
/**
* The window of time a metrics sample is computed over.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 30000
* Group: producer
*
* @param metricsSampleWindowMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder metricsSampleWindowMs(java.lang.Integer metricsSampleWindowMs) {
doSetProperty("metricsSampleWindowMs", metricsSampleWindowMs);
return this;
}
/**
* The number of samples maintained to compute metrics.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 2
* Group: producer
*
* @param noOfMetricsSample the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder noOfMetricsSample(java.lang.Integer noOfMetricsSample) {
doSetProperty("noOfMetricsSample", noOfMetricsSample);
return this;
}
/**
* The partitioner | allows |
java | apache__camel | components/camel-google/camel-google-bigquery/src/test/java/org/apache/camel/component/google/bigquery/integration/AsyncIT.java | {
"start": 1706,
"end": 3552
} | class ____ extends BigQueryITSupport {
private static final String TABLE_ID = "asynctest";
@EndpointInject("direct:in")
private Endpoint directIn;
@EndpointInject("google-bigquery:{{project.id}}:{{bigquery.datasetId}}:" + TABLE_ID)
private Endpoint bigqueryEndpoint;
@EndpointInject("mock:sendResult")
private MockEndpoint sendResult;
@Produce("direct:in")
private ProducerTemplate producer;
@BeforeEach
public void init() throws Exception {
createBqTable(TABLE_ID);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(directIn)
.to("seda:seda");
from("seda:seda")
.routeId("Async")
.to(ExchangePattern.InOnly, bigqueryEndpoint)
.log(LoggingLevel.INFO, "To sendresult")
.to(sendResult);
}
};
}
@Test
public void sendAsync() throws Exception {
List<Map<String, String>> objects = new ArrayList<>();
for (int i = 0; i < 5; i++) {
Exchange exchange = new DefaultExchange(context);
String uuidCol1 = UUID.randomUUID().toString();
String uuidCol2 = UUID.randomUUID().toString();
Map<String, String> object = new HashMap<>();
object.put("col1", uuidCol1);
object.put("col2", uuidCol2);
objects.add(object);
exchange.getIn().setBody(object);
producer.send(exchange);
}
sendResult.expectedMessageCount(5);
sendResult.assertIsSatisfied(4000);
for (Map<String, String> object : objects) {
assertRowExist(TABLE_ID, object);
}
}
}
| AsyncIT |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java | {
"start": 24418,
"end": 34970
} | class ____ {
public final BindableService service;
public final ServerServiceDefinition definition;
GrpcServiceDefinition(BindableService service, ServerServiceDefinition definition) {
this.service = service;
this.definition = definition;
}
public String getImplementationClassName() {
return getImplementationClassName(service);
}
public static String getImplementationClassName(BindableService service) {
if (service instanceof Subclass) {
// All intercepted services are represented by a generated subclass
return service.getClass().getSuperclass().getName();
}
return service.getClass().getName();
}
}
private void devModeReload(GrpcContainer grpcContainer, Vertx vertx, GrpcServerConfiguration configuration,
GrpcBuilderProvider<?> provider, Map<String, List<String>> blockingMethodsPerService,
Map<String, List<String>> virtualMethodsPerService, ShutdownContext shutdown) {
List<GrpcServiceDefinition> services = collectServiceDefinitions(grpcContainer.getServices());
List<ServerServiceDefinition> definitions = new ArrayList<>();
Map<String, ServerMethodDefinition<?, ?>> methods = new HashMap<>();
for (GrpcServiceDefinition service : services) {
definitions.add(service.definition);
}
List<ServerInterceptor> globalInterceptors = grpcContainer.getSortedGlobalInterceptors();
List<ServerServiceDefinition> servicesWithInterceptors = new ArrayList<>();
CompressionInterceptor compressionInterceptor = prepareCompressionInterceptor(configuration);
for (GrpcServiceDefinition service : services) {
servicesWithInterceptors.add(
serviceWithInterceptors(vertx, grpcContainer, blockingMethodsPerService, virtualMethodsPerService,
compressionInterceptor, globalInterceptors, service, true));
}
// add after actual services, so we don't inspect them for interceptors, etc
servicesWithInterceptors.add(new ReflectionServiceV1(definitions).bindService());
servicesWithInterceptors.add(new ReflectionServiceV1alpha(definitions).bindService());
for (ServerServiceDefinition serviceWithInterceptors : servicesWithInterceptors) {
for (ServerMethodDefinition<?, ?> method : serviceWithInterceptors.getMethods()) {
methods.put(method.getMethodDescriptor().getFullMethodName(), method);
}
}
initHealthStorage();
List<ServerInterceptor> devModeInterceptors = new ArrayList<>();
if (provider != null) {
devModeInterceptors.add(new DevModeInterceptor(Thread.currentThread().getContextClassLoader()));
devModeInterceptors.add(new GrpcHotReplacementInterceptor());
provider.devModeReload(servicesWithInterceptors, methods, devModeInterceptors, shutdown);
} else {
devModeWrapper = new DevModeWrapper(Thread.currentThread().getContextClassLoader());
GrpcServerReloader.reinitialize(servicesWithInterceptors, methods, devModeInterceptors);
shutdown.addShutdownTask(GrpcServerReloader::reset);
}
}
public static int getVerticleCount() {
return grpcVerticleCount.get();
}
public RuntimeValue<ServerInterceptorStorage> initServerInterceptorStorage(
Map<String, Set<Class<?>>> perServiceInterceptors,
Set<Class<?>> globalInterceptors) {
return new RuntimeValue<>(new ServerInterceptorStorage(perServiceInterceptors, globalInterceptors));
}
@SuppressWarnings("rawtypes")
private Map.Entry<Integer, Server> buildServer(Vertx vertx, GrpcServerConfiguration configuration,
GrpcBuilderProvider provider, Map<String, List<String>> blockingMethodsPerService,
Map<String, List<String>> virtualMethodsPerService,
GrpcContainer grpcContainer, LaunchMode launchMode) {
int port = launchMode == LaunchMode.TEST ? configuration.testPort() : configuration.port();
AtomicBoolean usePlainText = new AtomicBoolean();
ServerBuilder<?> builder;
if (provider != null) {
builder = provider.createServerBuilder(vertx, configuration, launchMode);
} else {
builder = VertxServerBuilder.forAddress(vertx, configuration.host(), port);
}
List<ServerBuilderCustomizer<? extends ServerBuilder<?>>> serverBuilderCustomizers = Arc.container()
.select(new TypeLiteral<ServerBuilderCustomizer<?>>() {
}, Any.Literal.INSTANCE)
.stream()
.sorted(Comparator.<ServerBuilderCustomizer<?>, Integer> comparing(ServerBuilderCustomizer::priority))
.toList();
for (ServerBuilderCustomizer scb : serverBuilderCustomizers) {
scb.customize(configuration, builder);
}
// moved here - after ctor call, so it's applied after customizers
if (provider == null) {
VertxServerBuilder vsBuilder = (VertxServerBuilder) builder;
// add Vert.x specific stuff here
vsBuilder.useSsl(options -> {
try {
usePlainText.set(applySslOptions(configuration, options));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
applyNettySettings(configuration, vsBuilder);
if (launchMode == LaunchMode.DEVELOPMENT) {
vsBuilder.commandDecorator(command -> vertx.executeBlocking(
GrpcHotReplacementInterceptor::fire,
false)
.onComplete(result -> devModeWrapper.run(command)));
}
}
if (configuration.maxInboundMessageSize().isPresent()) {
builder.maxInboundMessageSize(configuration.maxInboundMessageSize().getAsInt());
}
if (configuration.maxInboundMetadataSize().isPresent()) {
builder.maxInboundMetadataSize(configuration.maxInboundMetadataSize().getAsInt());
}
Optional<Duration> handshakeTimeout = configuration.handshakeTimeout();
handshakeTimeout.ifPresent(duration -> builder.handshakeTimeout(duration.toMillis(), TimeUnit.MILLISECONDS));
applyTransportSecurityConfig(configuration, builder);
boolean reflectionServiceEnabled = configuration.enableReflectionService() || launchMode == LaunchMode.DEVELOPMENT;
List<GrpcServiceDefinition> toBeRegistered = collectServiceDefinitions(grpcContainer.getServices());
List<ServerServiceDefinition> definitions = new ArrayList<>();
CompressionInterceptor compressionInterceptor = prepareCompressionInterceptor(configuration);
List<ServerInterceptor> globalInterceptors = grpcContainer.getSortedGlobalInterceptors();
for (GrpcServiceDefinition service : toBeRegistered) {
builder.addService(
serviceWithInterceptors(vertx, grpcContainer, blockingMethodsPerService,
virtualMethodsPerService,
compressionInterceptor, globalInterceptors, service, launchMode == LaunchMode.DEVELOPMENT));
LOGGER.debugf("Registered gRPC service '%s'", service.definition.getServiceDescriptor().getName());
definitions.add(service.definition);
}
if (reflectionServiceEnabled) {
LOGGER.debug("Registering gRPC reflection service");
builder.addService(ServerInterceptors.intercept(new ReflectionServiceV1(definitions), globalInterceptors));
builder.addService(ServerInterceptors.intercept(new ReflectionServiceV1alpha(definitions), globalInterceptors));
}
if (LOGGER.isDebugEnabled()) {
String msg = "Starting ";
if (provider != null)
msg += provider.serverInfo(configuration.host(), port, configuration);
else
msg += String.format("gRPC server on %s:%d [TLS enabled: %s]", configuration.host(), port, !usePlainText.get());
LOGGER.debug(msg);
}
return new AbstractMap.SimpleEntry<>(port, builder.build());
}
/**
 * Builds the compression interceptor if compression is configured, null otherwise.
 *
 * @param configuration gRPC server configuration
 * @return the interceptor, or {@code null} when no compression algorithm is configured
 */
private CompressionInterceptor prepareCompressionInterceptor(GrpcServerConfiguration configuration) {
    // Optional.map keeps the original "absent -> null" contract without the mutable local.
    return configuration.compression()
            .map(CompressionInterceptor::new)
            .orElse(null);
}
/**
 * Wraps a gRPC service definition with the interceptor chain: compression (if configured),
 * global interceptors, per-service interceptors, and the blocking/virtual-thread interceptor
 * when at least one method of the service requires it.
 *
 * @param blockingMethodsPerService implementation class name -> blocking method names
 * @param virtualMethodsPerService implementation class name -> runOnVirtualThread method names
 * @param compressionInterceptor may be {@code null} when compression is not configured
 * @param devMode whether the application runs in dev mode (relaxes blocking detection)
 * @return the intercepted service definition
 */
private ServerServiceDefinition serviceWithInterceptors(Vertx vertx, GrpcContainer grpcContainer,
        Map<String, List<String>> blockingMethodsPerService,
        Map<String, List<String>> virtualMethodsPerService,
        CompressionInterceptor compressionInterceptor,
        List<ServerInterceptor> globalInterceptors,
        GrpcServiceDefinition service, boolean devMode) {
    List<ServerInterceptor> interceptors = new ArrayList<>();
    if (compressionInterceptor != null) {
        interceptors.add(compressionInterceptor);
    }
    interceptors.addAll(globalInterceptors);
    interceptors.addAll(grpcContainer.getSortedPerServiceInterceptors(service.getImplementationClassName()));
    // Register the blocking interceptor if needed by at least one method of the service
    // (either blocking or runOnVirtualThread).
    // Fix: the guard previously tested only blockingMethodsPerService, so a service declaring
    // exclusively runOnVirtualThread methods (with no blocking methods registered anywhere)
    // would silently skip the interceptor. Check both maps.
    if (!blockingMethodsPerService.isEmpty() || !virtualMethodsPerService.isEmpty()) {
        List<String> list = blockingMethodsPerService.get(service.getImplementationClassName());
        List<String> virtuals = virtualMethodsPerService.get(service.getImplementationClassName());
        if (list != null || virtuals != null) {
            interceptors.add(new BlockingServerInterceptor(vertx, list, virtuals,
                    VirtualThreadsRecorder.getCurrent(), devMode));
        }
    }
    interceptors.sort(Interceptors.INTERCEPTOR_COMPARATOR);
    return ServerInterceptors.intercept(service.definition, interceptors);
}
private | GrpcServiceDefinition |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-opentelemetry/src/test/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/zipkin/ZipkinWithOpenTelemetryTracingAutoConfigurationTests.java | {
"start": 1574,
"end": 6466
} | class ____ {
// Shared runner for every test: registers the auto-configuration under test plus the
// default encoding configuration; individual tests layer senders/encoders on top of it.
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
        .withConfiguration(AutoConfigurations.of(ZipkinWithOpenTelemetryTracingAutoConfiguration.class,
                DefaultEncodingConfiguration.class));
@Test
void shouldNotSupplyBeansIfInfrastructureIsNotAvailable() {
    // Without the sender/encoding infrastructure nothing tracing-related is registered.
    ApplicationContextRunner bareRunner = new ApplicationContextRunner()
            .withUserConfiguration(ZipkinWithOpenTelemetryTracingAutoConfiguration.class);
    bareRunner.run((context) -> {
        assertThat(context).doesNotHaveBean(BytesEncoder.class)
                .doesNotHaveBean(SpanExporter.class)
                .doesNotHaveBean(ZipkinSpanExporter.class);
    });
}
@Test
void shouldSupplyBeansIfInfrastructureIsAvailable() {
    // With the Zipkin auto-configuration layered in, the exporter beans are created.
    ApplicationContextRunner runner = this.contextRunner
            .withConfiguration(AutoConfigurations.of(ZipkinAutoConfiguration.class));
    runner.run((context) -> assertThat(context).hasSingleBean(SpanExporter.class)
            .hasSingleBean(ZipkinSpanExporter.class));
}
@Test
void shouldNotSupplyBeansIfTracingIsDisabled() {
    // Global export switch off: no exporter beans despite the Zipkin auto-configuration.
    ApplicationContextRunner runner = this.contextRunner
            .withPropertyValues("management.tracing.export.enabled=false")
            .withConfiguration(AutoConfigurations.of(ZipkinAutoConfiguration.class));
    runner.run((context) -> assertThat(context).doesNotHaveBean(SpanExporter.class)
            .doesNotHaveBean(ZipkinSpanExporter.class));
}
@Test
void backsOffWithoutEncoding() {
    // No encoding configuration present: the context still starts, but nothing is contributed.
    ApplicationContextRunner bareRunner = new ApplicationContextRunner()
            .withUserConfiguration(ZipkinWithOpenTelemetryTracingAutoConfiguration.class);
    bareRunner.run((context) -> {
        assertThat(context).hasNotFailed();
        assertThat(context).doesNotHaveBean(ZipkinSpanExporter.class);
        assertThat(context).doesNotHaveBean(BytesEncoder.class);
    });
}
@Test
void shouldSupplyBeans() {
    // A sender plus a custom encoder is enough for the exporter to be created.
    this.contextRunner.withUserConfiguration(SenderConfiguration.class, CustomEncoderConfiguration.class)
            .run((context) -> assertThat(context).hasSingleBean(ZipkinSpanExporter.class)
                    .hasBean("customSpanEncoder"));
}
@Test
void shouldNotSupplyZipkinSpanExporterIfSenderIsMissing() {
    // Without a sender bean the encoder is still contributed, but not the exporter.
    this.contextRunner.run((context) -> assertThat(context).doesNotHaveBean(ZipkinSpanExporter.class)
            .hasBean("spanBytesEncoder"));
}
@Test
void shouldNotSupplyZipkinSpanExporterIfNotOnClasspath() {
    // Hiding the OTel Zipkin exporter package suppresses both the exporter and its encoder.
    ApplicationContextRunner runner = this.contextRunner
            .withClassLoader(new FilteredClassLoader("io.opentelemetry.exporter.zipkin"))
            .withUserConfiguration(SenderConfiguration.class);
    runner.run((context) -> assertThat(context).doesNotHaveBean(ZipkinSpanExporter.class)
            .doesNotHaveBean("spanBytesEncoder"));
}
@Test
void shouldBackOffIfZipkinIsNotOnClasspath() {
    // Hiding zipkin2 itself has the same effect as hiding the exporter package.
    ApplicationContextRunner runner = this.contextRunner
            .withClassLoader(new FilteredClassLoader("zipkin2.Span"))
            .withUserConfiguration(SenderConfiguration.class);
    runner.run((context) -> assertThat(context).doesNotHaveBean(ZipkinSpanExporter.class)
            .doesNotHaveBean("spanBytesEncoder"));
}
@Test
void shouldBackOffOnCustomBeans() {
    // A user-supplied exporter wins; the auto-configured one must not be added alongside it.
    this.contextRunner.withUserConfiguration(CustomConfiguration.class)
            .run((context) -> assertThat(context).hasBean("customZipkinSpanExporter")
                    .hasSingleBean(ZipkinSpanExporter.class));
}
@Test
void shouldNotSupplyZipkinSpanExporterIfGlobalTracingIsDisabled() {
    // Even with a sender available, the global kill switch suppresses the exporter.
    ApplicationContextRunner runner = this.contextRunner
            .withPropertyValues("management.tracing.export.enabled=false")
            .withUserConfiguration(SenderConfiguration.class);
    runner.run((context) -> assertThat(context).doesNotHaveBean(ZipkinSpanExporter.class));
}
@Test
void shouldNotSupplyZipkinSpanExporterIfZipkinTracingIsDisabled() {
    // The Zipkin-specific switch also suppresses the exporter on its own.
    ApplicationContextRunner runner = this.contextRunner
            .withPropertyValues("management.tracing.export.zipkin.enabled=false")
            .withUserConfiguration(SenderConfiguration.class);
    runner.run((context) -> assertThat(context).doesNotHaveBean(ZipkinSpanExporter.class));
}
@Test
void shouldUseCustomEncoderBean() {
    // The exporter must pick up the user's encoder, which defaults to JSON encoding.
    this.contextRunner.withUserConfiguration(SenderConfiguration.class, CustomEncoderConfiguration.class)
            .run((context) -> {
                assertThat(context).hasSingleBean(ZipkinSpanExporter.class).hasBean("customSpanEncoder");
                assertThat(context.getBean(ZipkinSpanExporter.class)).extracting("encoder")
                        .isInstanceOf(CustomSpanEncoder.class)
                        .extracting("encoding")
                        .isEqualTo(Encoding.JSON);
            });
}
@Test
void shouldUseCustomEncodingBean() {
    // A user-provided Encoding bean (PROTO3) flows through to the custom encoder.
    ApplicationContextRunner runner = this.contextRunner.withUserConfiguration(SenderConfiguration.class,
            CustomEncodingConfiguration.class, CustomEncoderConfiguration.class);
    runner.run((context) -> {
        assertThat(context).hasSingleBean(ZipkinSpanExporter.class).hasBean("customSpanEncoder");
        assertThat(context.getBean(ZipkinSpanExporter.class)).extracting("encoder")
                .isInstanceOf(CustomSpanEncoder.class)
                .extracting("encoding")
                .isEqualTo(Encoding.PROTO3);
    });
}
@Configuration(proxyBeanMethods = false)
private static final | ZipkinWithOpenTelemetryTracingAutoConfigurationTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.