language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/state/StateTtlConfig.java | {
"start": 14820,
"end": 14975
} | enum ____ {
FULL_STATE_SCAN_SNAPSHOT,
INCREMENTAL_CLEANUP,
ROCKSDB_COMPACTION_FILTER
}
/** Base | Strategies |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProxyCombiner.java | {
"start": 1304,
"end": 1408
} | class ____ to combine two protocol proxies.
* See {@link #combine(Class, Object...)}.
*/
public final | used |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/IdLoaderTests.java | {
"start": 1774,
"end": 13444
} | class ____ extends ESTestCase {
private final int routingHash = randomInt();
public void testSynthesizeIdSimple() throws Exception {
final boolean useSyntheticIds = randomBoolean();
var idLoader = IdLoader.createTsIdLoader(null, null, useSyntheticIds);
long startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00Z");
List<Doc> docs = List.of(
new Doc(startTime, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "xxx"))),
new Doc(startTime + 1, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "yyy"))),
new Doc(startTime + 2, List.of(new Dimension("dim1", "bbb"), new Dimension("dim2", "xxx")))
);
CheckedConsumer<IndexReader, IOException> verify = indexReader -> {
assertThat(indexReader.leaves(), hasSize(1));
LeafReader leafReader = indexReader.leaves().get(0).reader();
assertThat(leafReader.numDocs(), equalTo(3));
var leaf = idLoader.leaf(null, leafReader, new int[] { 0, 1, 2 });
// NOTE: time series data is ordered by (tsid, timestamp)
assertThat(leaf.getId(0), equalTo(expectedId(docs.get(2), routingHash, useSyntheticIds)));
assertThat(leaf.getId(1), equalTo(expectedId(docs.get(0), routingHash, useSyntheticIds)));
assertThat(leaf.getId(2), equalTo(expectedId(docs.get(1), routingHash, useSyntheticIds)));
};
prepareIndexReader(indexAndForceMerge(docs, routingHash), verify, false);
}
public void testSynthesizeIdMultipleSegments() throws Exception {
final boolean useSyntheticIds = randomBoolean();
var idLoader = IdLoader.createTsIdLoader(null, null, useSyntheticIds);
long startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00Z");
List<Doc> docs1 = List.of(
new Doc(startTime, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "xxx"))),
new Doc(startTime - 1, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "xxx"))),
new Doc(startTime - 2, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "xxx"))),
new Doc(startTime - 3, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "xxx")))
);
List<Doc> docs2 = List.of(
new Doc(startTime, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "yyy"))),
new Doc(startTime - 1, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "yyy"))),
new Doc(startTime - 2, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "yyy"))),
new Doc(startTime - 3, List.of(new Dimension("dim1", "aaa"), new Dimension("dim2", "yyy")))
);
List<Doc> docs3 = List.of(
new Doc(startTime, List.of(new Dimension("dim1", "bbb"), new Dimension("dim2", "yyy"))),
new Doc(startTime - 1, List.of(new Dimension("dim1", "bbb"), new Dimension("dim2", "yyy"))),
new Doc(startTime - 2, List.of(new Dimension("dim1", "bbb"), new Dimension("dim2", "yyy"))),
new Doc(startTime - 3, List.of(new Dimension("dim1", "bbb"), new Dimension("dim2", "yyy")))
);
CheckedConsumer<IndexWriter, IOException> buildIndex = writer -> {
for (Doc doc : docs1) {
indexDoc(writer, doc, routingHash);
}
writer.flush();
for (Doc doc : docs2) {
indexDoc(writer, doc, routingHash);
}
writer.flush();
for (Doc doc : docs3) {
indexDoc(writer, doc, routingHash);
}
writer.flush();
};
CheckedConsumer<IndexReader, IOException> verify = indexReader -> {
assertThat(indexReader.leaves(), hasSize(3));
{
LeafReader leafReader = indexReader.leaves().get(0).reader();
assertThat(leafReader.numDocs(), equalTo(docs1.size()));
var leaf = idLoader.leaf(null, leafReader, IntStream.range(0, docs1.size()).toArray());
for (int i = 0; i < docs1.size(); i++) {
assertThat(leaf.getId(i), equalTo(expectedId(docs1.get(i), routingHash, useSyntheticIds)));
}
}
{
LeafReader leafReader = indexReader.leaves().get(1).reader();
assertThat(leafReader.numDocs(), equalTo(docs2.size()));
var leaf = idLoader.leaf(null, leafReader, new int[] { 0, 3 });
assertThat(leaf.getId(0), equalTo(expectedId(docs2.get(0), routingHash, useSyntheticIds)));
assertThat(leaf.getId(3), equalTo(expectedId(docs2.get(3), routingHash, useSyntheticIds)));
}
{
LeafReader leafReader = indexReader.leaves().get(2).reader();
assertThat(leafReader.numDocs(), equalTo(docs3.size()));
var leaf = idLoader.leaf(null, leafReader, new int[] { 1, 2 });
assertThat(leaf.getId(1), equalTo(expectedId(docs3.get(1), routingHash, useSyntheticIds)));
assertThat(leaf.getId(2), equalTo(expectedId(docs3.get(2), routingHash, useSyntheticIds)));
}
{
LeafReader leafReader = indexReader.leaves().get(2).reader();
assertThat(leafReader.numDocs(), equalTo(docs3.size()));
var leaf = idLoader.leaf(null, leafReader, new int[] { 3 });
expectThrows(IllegalArgumentException.class, () -> leaf.getId(0));
}
};
prepareIndexReader(buildIndex, verify, true);
}
public void testSynthesizeIdRandom() throws Exception {
final boolean useSyntheticIds = randomBoolean();
var idLoader = IdLoader.createTsIdLoader(null, null, useSyntheticIds);
long startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00Z");
Set<String> expectedIDs = new HashSet<>();
List<Doc> randomDocs = new ArrayList<>();
int numberOfTimeSeries = randomIntBetween(8, 64);
for (int i = 0; i < numberOfTimeSeries; i++) {
int numberOfDimensions = randomIntBetween(1, 6);
List<Dimension> dimensions = new ArrayList<>(numberOfDimensions);
for (int j = 1; j <= numberOfDimensions; j++) {
String fieldName = "dim" + j;
Object value;
if (j == 5) {
value = randomLongBetween(1, 20);
} else {
value = randomAlphaOfLength(4);
}
dimensions.add(new Dimension(fieldName, value));
}
int numberOfSamples = randomIntBetween(1, 16);
for (int j = 0; j < numberOfSamples; j++) {
Doc doc = new Doc(startTime++, dimensions);
randomDocs.add(doc);
expectedIDs.add(expectedId(doc, routingHash, useSyntheticIds));
}
}
CheckedConsumer<IndexReader, IOException> verify = indexReader -> {
assertThat(indexReader.leaves(), hasSize(1));
LeafReader leafReader = indexReader.leaves().get(0).reader();
assertThat(leafReader.numDocs(), equalTo(randomDocs.size()));
var leaf = idLoader.leaf(null, leafReader, IntStream.range(0, randomDocs.size()).toArray());
for (int i = 0; i < randomDocs.size(); i++) {
String actualId = leaf.getId(i);
assertTrue("docId=" + i + " id=" + actualId, expectedIDs.remove(actualId));
}
};
prepareIndexReader(indexAndForceMerge(randomDocs, routingHash), verify, false);
assertThat(expectedIDs, empty());
}
private static CheckedConsumer<IndexWriter, IOException> indexAndForceMerge(List<Doc> docs, int routingHash) {
return writer -> {
for (Doc doc : docs) {
indexDoc(writer, doc, routingHash);
}
writer.forceMerge(1);
};
}
private void prepareIndexReader(
CheckedConsumer<IndexWriter, IOException> buildIndex,
CheckedConsumer<IndexReader, IOException> verify,
boolean noMergePolicy
) throws IOException {
try (Directory directory = newDirectory()) {
IndexWriterConfig config = LuceneTestCase.newIndexWriterConfig(random(), new MockAnalyzer(random()));
if (noMergePolicy) {
config.setMergePolicy(NoMergePolicy.INSTANCE);
config.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
}
Sort sort = new Sort(
new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false),
new SortField(TimeSeriesRoutingHashFieldMapper.NAME, SortField.Type.STRING, false),
new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true)
);
config.setIndexSort(sort);
IndexWriter indexWriter = new IndexWriter(directory, config);
buildIndex.accept(indexWriter);
indexWriter.close();
try (DirectoryReader indexReader = DirectoryReader.open(directory);) {
verify.accept(indexReader);
}
}
}
private static void indexDoc(IndexWriter iw, Doc doc, int routingHash) throws IOException {
var routingFields = new RoutingPathFields(null);
final List<IndexableField> fields = new ArrayList<>();
fields.add(new SortedNumericDocValuesField(DataStreamTimestampFieldMapper.DEFAULT_PATH, doc.timestamp));
fields.add(new LongPoint(DataStreamTimestampFieldMapper.DEFAULT_PATH, doc.timestamp));
for (Dimension dimension : doc.dimensions) {
if (dimension.value instanceof Number n) {
routingFields.addLong(dimension.field, n.longValue());
fields.add(new SortedNumericDocValuesField(dimension.field, ((Number) dimension.value).longValue()));
} else {
routingFields.addString(dimension.field, dimension.value.toString());
fields.add(new SortedSetDocValuesField(dimension.field, new BytesRef(dimension.value.toString())));
}
}
BytesRef tsid = routingFields.buildHash().toBytesRef();
fields.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, tsid));
fields.add(
new SortedDocValuesField(
TimeSeriesRoutingHashFieldMapper.NAME,
Uid.encodeId(TimeSeriesRoutingHashFieldMapper.encode(routingHash))
)
);
iw.addDocument(fields);
}
private static String expectedId(Doc doc, int routingHash, boolean useSyntheticIds) {
var routingFields = new RoutingPathFields(null);
for (Dimension dimension : doc.dimensions) {
if (dimension.value instanceof Number n) {
routingFields.addLong(dimension.field, n.longValue());
} else {
routingFields.addString(dimension.field, dimension.value.toString());
}
}
if (useSyntheticIds) {
return TsidExtractingIdFieldMapper.createSyntheticId(routingFields.buildHash().toBytesRef(), doc.timestamp, routingHash);
} else {
return TsidExtractingIdFieldMapper.createId(routingHash, routingFields.buildHash().toBytesRef(), doc.timestamp);
}
}
record Doc(long timestamp, List<Dimension> dimensions) {}
record Dimension(String field, Object value) {}
}
| IdLoaderTests |
java | google__gson | gson/src/test/java/com/google/gson/functional/ObjectTest.java | {
"start": 24596,
"end": 25282
} | class ____ {
static final String s = "initial";
}
@Test
public void testThrowingDefaultConstructor() {
// TODO: Adjust this once Gson throws more specific exception type
var e =
assertThrows(
RuntimeException.class, () -> gson.fromJson("{}", ClassWithThrowingConstructor.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Failed to invoke constructor"
+ " 'com.google.gson.functional.ObjectTest$ClassWithThrowingConstructor()' with"
+ " no args");
assertThat(e).hasCauseThat().isSameInstanceAs(ClassWithThrowingConstructor.thrownException);
}
static | ClassWithStaticFinalField |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/GlideException.java | {
"start": 530,
"end": 7442
} | class ____ extends Exception {
private static final long serialVersionUID = 1L;
private static final StackTraceElement[] EMPTY_ELEMENTS = new StackTraceElement[0];
private final List<Throwable> causes;
private Key key;
private DataSource dataSource;
private Class<?> dataClass;
private String detailMessage;
@Nullable private Exception exception;
public GlideException(String message) {
this(message, Collections.<Throwable>emptyList());
}
public GlideException(String detailMessage, Throwable cause) {
this(detailMessage, Collections.singletonList(cause));
}
public GlideException(String detailMessage, List<Throwable> causes) {
this.detailMessage = detailMessage;
setStackTrace(EMPTY_ELEMENTS);
this.causes = causes;
}
void setLoggingDetails(Key key, DataSource dataSource) {
setLoggingDetails(key, dataSource, null);
}
void setLoggingDetails(Key key, DataSource dataSource, Class<?> dataClass) {
this.key = key;
this.dataSource = dataSource;
this.dataClass = dataClass;
}
/**
* Sets a stack trace that includes where the request originated.
*
* <p>This is an experimental API that may be removed in the future.
*/
public void setOrigin(@Nullable Exception exception) {
this.exception = exception;
}
/**
* Returns an {@link Exception} with a stack trace that includes where the request originated (if
* previously set via {@link #setOrigin(Exception)})
*
* <p>This is an experimental API that may be removed in the future.
*/
@Nullable
public Exception getOrigin() {
return exception;
}
// No need to synchronize when doing nothing whatsoever.
@SuppressWarnings("UnsynchronizedOverridesSynchronized")
@Override
public Throwable fillInStackTrace() {
// Avoid an expensive allocation by doing nothing here. Causes should contain all relevant
// stack traces.
return this;
}
/**
* Returns a list of causes that are immediate children of this exception.
*
* <p>Causes may or may not be {@link GlideException GlideExceptions}. Causes may also not be root
* causes, and in turn my have been caused by other failures.
*
* @see #getRootCauses()
*/
public List<Throwable> getCauses() {
return causes;
}
/**
* Returns the list of root causes that are the leaf nodes of all children of this exception.
*
* <p>Use this method to do things like look for http exceptions that indicate the load may have
* failed due to an error that can be retried. Keep in mind that because Glide may attempt to load
* a given model using multiple different pathways, there may be multiple related or unrelated
* reasons for a load to fail.
*/
public List<Throwable> getRootCauses() {
List<Throwable> rootCauses = new ArrayList<>();
addRootCauses(this, rootCauses);
return rootCauses;
}
/**
* Logs all root causes using the given tag.
*
* <p>Each root cause is logged separately to avoid throttling. {@link #printStackTrace()} will
* provide a more succinct overview of why the exception occurred, although it does not include
* complete stack traces.
*/
public void logRootCauses(String tag) {
List<Throwable> causes = getRootCauses();
for (int i = 0, size = causes.size(); i < size; i++) {
Log.i(tag, "Root cause (" + (i + 1) + " of " + size + ")", causes.get(i));
}
}
private void addRootCauses(Throwable throwable, List<Throwable> rootCauses) {
if (throwable instanceof GlideException) {
GlideException glideException = (GlideException) throwable;
for (Throwable t : glideException.getCauses()) {
addRootCauses(t, rootCauses);
}
} else if (throwable != null) {
rootCauses.add(throwable);
}
}
@Override
public void printStackTrace() {
printStackTrace(System.err);
}
@Override
public void printStackTrace(PrintStream err) {
printStackTrace((Appendable) err);
}
@Override
public void printStackTrace(PrintWriter err) {
printStackTrace((Appendable) err);
}
private void printStackTrace(Appendable appendable) {
appendExceptionMessage(this, appendable);
appendCauses(getCauses(), new IndentedAppendable(appendable));
}
// PMD doesn't seem to notice that we're allocating the builder with the suggested size.
@SuppressWarnings("PMD.InsufficientStringBufferDeclaration")
@Override
public String getMessage() {
StringBuilder result =
new StringBuilder(71)
.append(detailMessage)
.append(dataClass != null ? ", " + dataClass : "")
.append(dataSource != null ? ", " + dataSource : "")
.append(key != null ? ", " + key : "");
List<Throwable> rootCauses = getRootCauses();
if (rootCauses.isEmpty()) {
return result.toString();
} else if (rootCauses.size() == 1) {
result.append("\nThere was 1 root cause:");
} else {
result.append("\nThere were ").append(rootCauses.size()).append(" root causes:");
}
for (Throwable cause : rootCauses) {
result
.append('\n')
.append(cause.getClass().getName())
.append('(')
.append(cause.getMessage())
.append(')');
}
result.append("\n call GlideException#logRootCauses(String) for more detail");
return result.toString();
}
// Appendable throws, PrintWriter, PrintStream, and IndentedAppendable do not, so this should
// never happen.
@SuppressWarnings("PMD.PreserveStackTrace")
private static void appendExceptionMessage(Throwable t, Appendable appendable) {
try {
appendable.append(t.getClass().toString()).append(": ").append(t.getMessage()).append('\n');
} catch (IOException e1) {
throw new RuntimeException(t);
}
}
// Appendable throws, PrintWriter, PrintStream, and IndentedAppendable do not, so this should
// never happen.
@SuppressWarnings("PMD.PreserveStackTrace")
private static void appendCauses(List<Throwable> causes, Appendable appendable) {
try {
appendCausesWrapped(causes, appendable);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("ThrowableResultOfMethodCallIgnored")
private static void appendCausesWrapped(List<Throwable> causes, Appendable appendable)
throws IOException {
int size = causes.size();
for (int i = 0; i < size; i++) {
appendable
.append("Cause (")
.append(String.valueOf(i + 1))
.append(" of ")
.append(String.valueOf(size))
.append("): ");
Throwable cause = causes.get(i);
if (cause instanceof GlideException) {
GlideException glideCause = (GlideException) cause;
glideCause.printStackTrace(appendable);
} else {
appendExceptionMessage(cause, appendable);
}
}
}
private static final | GlideException |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/NullableOptional.java | {
"start": 1760,
"end": 3190
} | class ____ extends BugChecker
implements MethodTreeMatcher, VariableTreeMatcher {
private static final TypePredicate IS_OPTIONAL_TYPE =
TypePredicates.isExactTypeAny(
ImmutableSet.of(
java.util.Optional.class.getCanonicalName(),
com.google.common.base.Optional.class.getCanonicalName()));
@Override
public Description matchMethod(MethodTree tree, VisitorState state) {
if (hasNullableAnnotation(tree.getModifiers())
&& isOptional(ASTHelpers.getType(tree.getReturnType()), state)) {
return describeMatch(tree);
}
return Description.NO_MATCH;
}
@Override
public Description matchVariable(VariableTree tree, VisitorState state) {
if (hasNullableAnnotation(tree.getModifiers()) && isOptional(ASTHelpers.getType(tree), state)) {
return describeMatch(tree);
}
return Description.NO_MATCH;
}
/** Check if the input ModifiersTree has any kind of "Nullable" annotation. */
private static boolean hasNullableAnnotation(ModifiersTree modifiersTree) {
return ASTHelpers.getAnnotationWithSimpleName(modifiersTree.getAnnotations(), "Nullable")
!= null;
}
/**
* Check if the input Type is either {@link java.util.Optional} or{@link
* com.google.common.base.Optional}.
*/
private static boolean isOptional(Type type, VisitorState state) {
return IS_OPTIONAL_TYPE.apply(type, state);
}
}
| NullableOptional |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/naturalid/Device.java | {
"start": 508,
"end": 1564
} | class ____ implements Serializable {
@Id
@GeneratedValue
private Integer id;
@Audited
@ManyToOne
@JoinColumn(name = "customer_id", foreignKey = @ForeignKey(name = "fk_dev_cust_id"))
private Customer customer;
Device() {
}
public Device(Integer id, Customer customer) {
this.id = id;
this.customer = customer;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
@Override
public int hashCode() {
int result;
result = ( id != null ? id.hashCode() : 0 );
return result;
}
@Override
public boolean equals(Object obj) {
if ( obj == this ) {
return true;
}
if ( !( obj instanceof Device ) ) {
return false;
}
Device that = (Device) obj;
if ( id != null ? !id.equals( that.id ) : that.id != null ) {
return false;
}
return true;
}
@Override
public String toString() {
return "Device{" +
"id=" + id +
'}';
}
}
| Device |
java | apache__flink | flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/statement/FetchResultsTokenPathParameter.java | {
"start": 1013,
"end": 1561
} | class ____ extends MessagePathParameter<Long> {
public static final String KEY = "token";
public FetchResultsTokenPathParameter() {
super(KEY);
}
@Override
protected Long convertFromString(String token) {
return Long.valueOf(token);
}
@Override
protected String convertToString(Long token) {
return String.valueOf(token);
}
@Override
public String getDescription() {
return "The token that identifies which batch of data to fetch.";
}
}
| FetchResultsTokenPathParameter |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/RepeatedPermissionsAllowedTest.java | {
"start": 3301,
"end": 3567
} | class ____ {
@PermissionsAllowed(value = "write")
@PermissionsAllowed(value = "read")
@POST
public String sayHello(JsonObject entity) {
return "ignored";
}
}
@Path("/hello-interface")
public | HelloResource |
java | google__dagger | javatests/dagger/functional/membersinject/MembersInjectTest.java | {
"start": 3884,
"end": 4303
} | interface ____ {
NonLocalMembersComponent create(@BindsInstance String value);
}
}
@Test
public void testNonLocalMembersInjection() {
MembersInjector<A> membersInjector = DaggerMembersInjectTest_NonLocalMembersComponent.factory()
.create("test").getAMembersInjector();
A testA = new A();
membersInjector.injectMembers(testA);
assertThat(testA.value).isEqualTo("test");
}
}
| Factory |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/rr-external-artifacts/app/src/main/java/org/acme/GreetingListSortAttribute.java | {
"start": 62,
"end": 157
} | enum ____ implements SortAttribute{
FORMALITY_FACTOR,
LANGUAGE;
}
| GreetingListSortAttribute |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/bind/EscapedErrorsTests.java | {
"start": 1078,
"end": 5266
} | class ____ {
@Test
void testEscapedErrors() {
TestBean tb = new TestBean();
tb.setName("empty &");
Errors errors = new EscapedErrors(new BindException(tb, "tb"));
errors.rejectValue("name", "NAME_EMPTY &", null, "message: &");
errors.rejectValue("age", "AGE_NOT_SET <tag>", null, "message: <tag>");
errors.rejectValue("age", "AGE_NOT_32 <tag>", null, "message: <tag>");
errors.reject("GENERAL_ERROR \" '", null, "message: \" '");
assertThat(errors.hasErrors()).as("Correct errors flag").isTrue();
assertThat(errors.getErrorCount()).as("Correct number of errors").isEqualTo(4);
assertThat(errors.getObjectName()).as("Correct object name").isEqualTo("tb");
assertThat(errors.hasGlobalErrors()).as("Correct global errors flag").isTrue();
assertThat(errors.getGlobalErrorCount()).as("Correct number of global errors").isOne();
ObjectError globalError = errors.getGlobalError();
String defaultMessage = globalError.getDefaultMessage();
assertThat(defaultMessage).as("Global error message escaped").isEqualTo("message: " '");
assertThat(globalError.getCode()).as("Global error code not escaped").isEqualTo("GENERAL_ERROR \" '");
ObjectError globalErrorInList = errors.getGlobalErrors().get(0);
assertThat(defaultMessage).as("Same global error in list").isEqualTo(globalErrorInList.getDefaultMessage());
ObjectError globalErrorInAllList = errors.getAllErrors().get(3);
assertThat(defaultMessage).as("Same global error in list").isEqualTo(globalErrorInAllList.getDefaultMessage());
assertThat(errors.hasFieldErrors()).as("Correct field errors flag").isTrue();
assertThat(errors.getFieldErrorCount()).as("Correct number of field errors").isEqualTo(3);
assertThat(errors.getFieldErrors()).as("Correct number of field errors in list").hasSize(3);
FieldError fieldError = errors.getFieldError();
assertThat(fieldError.getCode()).as("Field error code not escaped").isEqualTo("NAME_EMPTY &");
assertThat(errors.getFieldValue("name")).as("Field value escaped").isEqualTo("empty &");
FieldError fieldErrorInList = errors.getFieldErrors().get(0);
assertThat(fieldError.getDefaultMessage()).as("Same field error in list")
.isEqualTo(fieldErrorInList.getDefaultMessage());
assertThat(errors.hasFieldErrors("name")).as("Correct name errors flag").isTrue();
assertThat(errors.getFieldErrorCount("name")).as("Correct number of name errors").isOne();
assertThat(errors.getFieldErrors("name")).as("Correct number of name errors in list").hasSize(1);
FieldError nameError = errors.getFieldError("name");
assertThat(nameError.getDefaultMessage()).as("Name error message escaped").isEqualTo("message: &");
assertThat(nameError.getCode()).as("Name error code not escaped").isEqualTo("NAME_EMPTY &");
assertThat(errors.getFieldValue("name")).as("Name value escaped").isEqualTo("empty &");
FieldError nameErrorInList = errors.getFieldErrors("name").get(0);
assertThat(nameError.getDefaultMessage()).as("Same name error in list")
.isEqualTo(nameErrorInList.getDefaultMessage());
assertThat(errors.hasFieldErrors("age")).as("Correct age errors flag").isTrue();
assertThat(errors.getFieldErrorCount("age")).as("Correct number of age errors").isEqualTo(2);
assertThat(errors.getFieldErrors("age")).as("Correct number of age errors in list").hasSize(2);
FieldError ageError = errors.getFieldError("age");
assertThat(ageError.getDefaultMessage()).as("Age error message escaped").isEqualTo("message: <tag>");
assertThat(ageError.getCode()).as("Age error code not escaped").isEqualTo("AGE_NOT_SET <tag>");
assertThat((Integer.valueOf(0))).as("Age value not escaped").isEqualTo(errors.getFieldValue("age"));
FieldError ageErrorInList = errors.getFieldErrors("age").get(0);
assertThat(ageError.getDefaultMessage()).as("Same name error in list")
.isEqualTo(ageErrorInList.getDefaultMessage());
FieldError ageError2 = errors.getFieldErrors("age").get(1);
assertThat(ageError2.getDefaultMessage()).as("Age error 2 message escaped").isEqualTo("message: <tag>");
assertThat(ageError2.getCode()).as("Age error 2 code not escaped").isEqualTo("AGE_NOT_32 <tag>");
}
}
| EscapedErrorsTests |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/VectorSearchITCase.java | {
"start": 1274,
"end": 1710
} | class ____ extends VectorSearchITCaseBase {
@Override
protected TableEnvironment getTableEnvironment() {
EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
return StreamTableEnvironment.create(
StreamExecutionEnvironment.getExecutionEnvironment(), settings);
}
@Override
protected boolean isAsync() {
return false;
}
}
| VectorSearchITCase |
java | quarkusio__quarkus | extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/buildtime/BuildTimeActionBuildItem.java | {
"start": 676,
"end": 3853
} | class ____ extends AbstractDevUIBuildItem {
private final List<DeploymentJsonRpcMethod> deploymentActions = new ArrayList<>();
private final List<DeploymentJsonRpcMethod> deploymentSubscriptions = new ArrayList<>();
private final List<RecordedJsonRpcMethod> recordedActions = new ArrayList<>();
private final List<RecordedJsonRpcMethod> recordedSubscriptions = new ArrayList<>();
public BuildTimeActionBuildItem() {
super();
}
public BuildTimeActionBuildItem(String customIdentifier) {
super(customIdentifier);
}
public List<DeploymentJsonRpcMethod> getDeploymentActions() {
return this.deploymentActions;
}
public List<RecordedJsonRpcMethod> getRecordedActions() {
return this.recordedActions;
}
public List<DeploymentJsonRpcMethod> getDeploymentSubscriptions() {
return deploymentSubscriptions;
}
public List<RecordedJsonRpcMethod> getRecordedSubscriptions() {
return recordedSubscriptions;
}
public ActionBuilder actionBuilder() {
return new ActionBuilder();
}
public SubscriptionBuilder subscriptionBuilder() {
return new SubscriptionBuilder();
}
@Deprecated
public <T> void addAction(String methodName,
Function<Map<String, String>, T> action) {
this.addAction(new DeploymentJsonRpcMethod(methodName, null, Usage.onlyDevUI(), true, action));
}
@Deprecated
public <T> void addAssistantAction(String methodName,
BiFunction<Object, Map<String, String>, T> action) {
this.addAction(new DeploymentJsonRpcMethod(methodName, null, Usage.onlyDevUI(), true, action));
}
@Deprecated
public <T> void addAction(String methodName,
RuntimeValue runtimeValue) {
this.addAction(new RecordedJsonRpcMethod(methodName, null, Usage.onlyDevUI(), true, runtimeValue));
}
@Deprecated
public <T> void addSubscription(String methodName,
Function<Map<String, String>, T> action) {
this.addSubscription(new DeploymentJsonRpcMethod(methodName, null, Usage.onlyDevUI(), true, action));
}
@Deprecated
public <T> void addSubscription(String methodName,
RuntimeValue runtimeValue) {
this.addSubscription(new RecordedJsonRpcMethod(methodName, null, Usage.onlyDevUI(), true, runtimeValue));
}
private BuildTimeActionBuildItem addAction(DeploymentJsonRpcMethod deploymentJsonRpcMethod) {
this.deploymentActions.add(deploymentJsonRpcMethod);
return this;
}
private BuildTimeActionBuildItem addAction(RecordedJsonRpcMethod recordedJsonRpcMethod) {
this.recordedActions.add(recordedJsonRpcMethod);
return this;
}
private BuildTimeActionBuildItem addSubscription(DeploymentJsonRpcMethod deploymentJsonRpcMethod) {
this.deploymentSubscriptions.add(deploymentJsonRpcMethod);
return this;
}
private BuildTimeActionBuildItem addSubscription(RecordedJsonRpcMethod recordedJsonRpcMethod) {
this.recordedSubscriptions.add(recordedJsonRpcMethod);
return this;
}
public final | BuildTimeActionBuildItem |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTests.java | {
"start": 1282,
"end": 9975
} | class ____ extends ESTestCase {
public void testResolveToAgg() {
AggregationPath path = AggregationPath.parse("the_avg");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms");
assertThat(value[0], equalTo(agg));
}
public void testResolveToAggValue() {
AggregationPath path = AggregationPath.parse("the_avg.value");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms");
assertThat(value[0], equalTo(2.0));
}
public void testResolveToNothing() {
AggregationPath path = AggregationPath.parse("foo.value");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
InvalidAggregationPathException e = expectThrows(
InvalidAggregationPathException.class,
() -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")
);
assertThat(e.getMessage(), equalTo("Cannot find an aggregation named [foo] in [the_long_terms]"));
}
public void testResolveToUnknown() {
AggregationPath path = AggregationPath.parse("the_avg.unknown");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")
);
assertThat(e.getMessage(), equalTo("path not supported for [the_avg]: [unknown]"));
}
public void testResolveToBucketCount() {
AggregationPath path = AggregationPath.parse("_bucket_count");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
Object value = resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms");
assertThat(value, equalTo(1));
}
public void testResolveToCount() {
AggregationPath path = AggregationPath.parse("_count");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms");
assertThat(value[0], equalTo(1L));
}
public void testResolveToKey() {
AggregationPath path = AggregationPath.parse("_key");
List<LongTerms.Bucket> buckets = new ArrayList<>();
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(agg));
LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW);
buckets.add(bucket);
Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms");
assertThat(value[0], equalTo(19L));
}
    /**
     * Resolving {@code string_terms['foo']>the_avg.value} should first select
     * the inner terms bucket whose key is {@code foo} and then drill into its
     * {@code the_avg} metric (sum 2 / count 1 = 2.0).
     */
    public void testResolveToSpecificBucket() {
        AggregationPath path = AggregationPath.parse("string_terms['foo']>the_avg.value");
        List<LongTerms.Bucket> buckets = new ArrayList<>();
        // Metric nested under the inner string_terms aggregation: avg = 2 / 1.
        InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
        InternalAggregations internalStringAggs = InternalAggregations.from(Collections.singletonList(agg));
        // Single inner bucket keyed "foo" (UTF-8 bytes) holding the avg metric.
        List<StringTerms.Bucket> stringBuckets = Collections.singletonList(
            new StringTerms.Bucket(
                new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length),
                1,
                internalStringAggs,
                false,
                0,
                DocValueFormat.RAW
            )
        );
        // Inner string_terms aggregation that owns the "foo" bucket.
        // NOTE(review): the positional arguments below are matched to the
        // StringTerms constructor by position only — keep the order in sync
        // with the constructor signature if it ever changes.
        InternalTerms<?, ?> termsAgg = new StringTerms(
            "string_terms",
            BucketOrder.count(false),
            BucketOrder.count(false),
            1,
            0,
            Collections.emptyMap(),
            DocValueFormat.RAW,
            1,
            false,
            0,
            stringBuckets,
            0L
        );
        InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg));
        // Outer long-terms bucket (key 19) carrying the string_terms sub-agg.
        LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW);
        buckets.add(bucket);
        Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms");
        assertThat(value[0], equalTo(2.0));
    }
    /**
     * Asking for a keyed bucket that does not exist
     * ({@code string_terms['bar']} while only {@code 'foo'} is present) must
     * fail with an {@link InvalidAggregationPathException} naming the key.
     */
    public void testResolveToMissingSpecificBucket() {
        AggregationPath path = AggregationPath.parse("string_terms['bar']>the_avg.value");
        List<LongTerms.Bucket> buckets = new ArrayList<>();
        // Metric nested under the inner string_terms aggregation: avg = 2 / 1.
        InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap());
        InternalAggregations internalStringAggs = InternalAggregations.from(Collections.singletonList(agg));
        // The only inner bucket is keyed "foo" — the path asks for "bar".
        List<StringTerms.Bucket> stringBuckets = Collections.singletonList(
            new StringTerms.Bucket(
                new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length),
                1,
                internalStringAggs,
                false,
                0,
                DocValueFormat.RAW
            )
        );
        // Inner string_terms aggregation that owns the "foo" bucket.
        // NOTE(review): the positional arguments below are matched to the
        // StringTerms constructor by position only — keep the order in sync
        // with the constructor signature if it ever changes.
        InternalTerms<?, ?> termsAgg = new StringTerms(
            "string_terms",
            BucketOrder.count(false),
            BucketOrder.count(false),
            1,
            0,
            Collections.emptyMap(),
            DocValueFormat.RAW,
            1,
            false,
            0,
            stringBuckets,
            0L
        );
        InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg));
        LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW);
        buckets.add(bucket);
        InvalidAggregationPathException e = expectThrows(
            InvalidAggregationPathException.class,
            () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")
        );
        // The expected text (including the "an key" grammar) mirrors the
        // production error message verbatim; do not "fix" it here without
        // changing the producing code as well.
        assertThat(e.getMessage(), equalTo("Cannot find an key ['bar'] in [string_terms]"));
    }
}
| InternalMultiBucketAggregationTests |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/utils/Shell.java | {
"start": 5443,
"end": 6110
} | class ____ extends IOException {
int exitCode;
public ExitCodeException(int exitCode, String message) {
super(message);
this.exitCode = exitCode;
}
public int getExitCode() {
return exitCode;
}
}
/**
* A simple shell command executor.
*
* <code>ShellCommandExecutor</code>should be used in cases where the output
* of the command needs no explicit parsing and where the command, working
* directory and the environment remains unchanged. The output of the command
* is stored as-is and is expected to be small.
*/
public static | ExitCodeException |
java | quarkusio__quarkus | integration-tests/security-webauthn/src/main/java/io/quarkus/it/security/webauthn/AdminResource.java | {
"start": 240,
"end": 403
} | class ____ {
@GET
@RolesAllowed("admin")
@Produces(MediaType.TEXT_PLAIN)
public String adminResource() {
return "admin";
}
}
| AdminResource |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/criteria/internal/expression/SearchedCaseExpressionTest.java | {
"start": 5296,
"end": 5575
} | class ____ {
@Id
private Long id;
@Column
@Enumerated(EnumType.STRING)
private EventType type;
protected Event() {
}
public EventType getType() {
return type;
}
public Event type(EventType type) {
this.type = type;
return this;
}
}
public | Event |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java | {
"start": 982,
"end": 1941
} | class ____ extends ComputeTestCase {
public void testValuesAggregator() {
BlockFactory blockFactory = blockFactory();
DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory, "test");
List<BiFunction<List<Integer>, DriverContext, GroupingAggregatorFunction>> functions = List.of(
ValuesBooleanGroupingAggregatorFunction::create,
ValuesIntGroupingAggregatorFunction::create,
ValuesLongGroupingAggregatorFunction::create,
ValuesBytesRefGroupingAggregatorFunction::create,
DimensionValuesByteRefGroupingAggregatorFunction::new
);
for (var fn : functions) {
try (GroupingAggregatorFunction aggregator = fn.apply(List.of(randomNonNegativeInt()), driverContext)) {
assertTrue(TimeSeriesAggregationOperator.isValuesAggregator(aggregator));
}
}
}
}
| TimeSeriesAggregationOperatorTests |
java | alibaba__nacos | plugin/auth/src/main/java/com/alibaba/nacos/plugin/auth/spi/server/AuthPluginService.java | {
"start": 1111,
"end": 3257
} | interface ____ {
/**
* Define which identity information needed from request. e.q: username, password, accessToken.
*
* @return identity names
*/
Collection<String> identityNames();
/**
* Judgement whether this plugin enable auth for this action and type.
*
* @param action action of request, see {@link ActionTypes}
* @param type type of request, see {@link com.alibaba.nacos.plugin.auth.constant.SignType}
* @return @return {@code true} if enable auth, otherwise {@code false}
*/
boolean enableAuth(ActionTypes action, String type);
/**
* To validate whether the identity context from request is legal or illegal.
*
* @param identityContext where we can find the user information
* @param resource resource about this user information
* @return {@link AuthResult} of validate result
* @throws AccessException if authentication is failed
*/
AuthResult validateIdentity(IdentityContext identityContext, Resource resource) throws AccessException;
/**
* Validate the identity whether has the resource authority.
*
* @param identityContext where we can find the user information.
* @param permission permission to auth.
* @return {@link AuthResult} of validate result
* @throws AccessException if authentication is failed
*/
AuthResult validateAuthority(IdentityContext identityContext, Permission permission) throws AccessException;
/**
* AuthPluginService Name which for conveniently find AuthPluginService instance.
*
* @return AuthServiceName mark a AuthPluginService instance.
*/
String getAuthServiceName();
/**
* Is the plugin enable login.
*
* @return {@code true} if plugin need login, otherwise {@code false}
* @since 2.2.2
*/
default boolean isLoginEnabled() {
return false;
}
/**
* Whether need administrator .
*
* @return if need the administrator role.
*/
default boolean isAdminRequest() {
return true;
}
}
| AuthPluginService |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/fuseable/HasUpstreamObservableSource.java | {
"start": 930,
"end": 1200
} | interface ____<@NonNull T> {
/**
* Returns the upstream source of this Observable.
* <p>Allows discovering the chain of observables.
* @return the source ObservableSource
*/
@NonNull
ObservableSource<T> source();
}
| HasUpstreamObservableSource |
java | bumptech__glide | annotation/compiler/test/src/test/resources/GlideExtensionWithOptionTest/GlideRequest.java | {
"start": 1656,
"end": 16656
} | class ____<TranscodeType> extends RequestBuilder<TranscodeType> implements Cloneable {
GlideRequest(@NonNull Class<TranscodeType> transcodeClass, @NonNull RequestBuilder<?> other) {
super(transcodeClass, other);
}
GlideRequest(@NonNull Glide glide, @NonNull RequestManager requestManager,
@NonNull Class<TranscodeType> transcodeClass, @NonNull Context context) {
super(glide, requestManager ,transcodeClass, context);
}
@Override
@CheckResult
@NonNull
protected GlideRequest<File> getDownloadOnlyRequest() {
return new GlideRequest<>(File.class, this).apply(DOWNLOAD_ONLY_OPTIONS);
}
/**
* @see GlideOptions#sizeMultiplier(float)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> sizeMultiplier(@FloatRange(from = 0.0, to = 1.0) float value) {
return (GlideRequest<TranscodeType>) super.sizeMultiplier(value);
}
/**
* @see GlideOptions#useUnlimitedSourceGeneratorsPool(boolean)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> useUnlimitedSourceGeneratorsPool(boolean flag) {
return (GlideRequest<TranscodeType>) super.useUnlimitedSourceGeneratorsPool(flag);
}
/**
* @see GlideOptions#useAnimationPool(boolean)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> useAnimationPool(boolean flag) {
return (GlideRequest<TranscodeType>) super.useAnimationPool(flag);
}
/**
* @see GlideOptions#onlyRetrieveFromCache(boolean)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> onlyRetrieveFromCache(boolean flag) {
return (GlideRequest<TranscodeType>) super.onlyRetrieveFromCache(flag);
}
/**
* @see GlideOptions#diskCacheStrategy(DiskCacheStrategy)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> diskCacheStrategy(@NonNull DiskCacheStrategy strategy) {
return (GlideRequest<TranscodeType>) super.diskCacheStrategy(strategy);
}
/**
* @see GlideOptions#priority(Priority)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> priority(@NonNull Priority priority) {
return (GlideRequest<TranscodeType>) super.priority(priority);
}
/**
* @see GlideOptions#placeholder(Drawable)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> placeholder(@Nullable Drawable drawable) {
return (GlideRequest<TranscodeType>) super.placeholder(drawable);
}
/**
* @see GlideOptions#placeholder(int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> placeholder(@DrawableRes int id) {
return (GlideRequest<TranscodeType>) super.placeholder(id);
}
/**
* @see GlideOptions#fallback(Drawable)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> fallback(@Nullable Drawable drawable) {
return (GlideRequest<TranscodeType>) super.fallback(drawable);
}
/**
* @see GlideOptions#fallback(int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> fallback(@DrawableRes int id) {
return (GlideRequest<TranscodeType>) super.fallback(id);
}
/**
* @see GlideOptions#error(Drawable)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> error(@Nullable Drawable drawable) {
return (GlideRequest<TranscodeType>) super.error(drawable);
}
/**
* @see GlideOptions#error(int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> error(@DrawableRes int id) {
return (GlideRequest<TranscodeType>) super.error(id);
}
/**
* @see GlideOptions#theme(Resources.Theme)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> theme(@Nullable Resources.Theme theme) {
return (GlideRequest<TranscodeType>) super.theme(theme);
}
/**
* @see GlideOptions#skipMemoryCache(boolean)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> skipMemoryCache(boolean skip) {
return (GlideRequest<TranscodeType>) super.skipMemoryCache(skip);
}
/**
* @see GlideOptions#override(int, int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> override(int width, int height) {
return (GlideRequest<TranscodeType>) super.override(width, height);
}
/**
* @see GlideOptions#override(int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> override(int size) {
return (GlideRequest<TranscodeType>) super.override(size);
}
/**
* @see GlideOptions#signature(Key)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> signature(@NonNull Key key) {
return (GlideRequest<TranscodeType>) super.signature(key);
}
/**
* @see GlideOptions#set(Option<Y>, Y)
*/
@NonNull
@CheckResult
public <Y> GlideRequest<TranscodeType> set(@NonNull Option<Y> option, @NonNull Y y) {
return (GlideRequest<TranscodeType>) super.set(option, y);
}
/**
* @see GlideOptions#decode(Class<?>)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> decode(@NonNull Class<?> clazz) {
return (GlideRequest<TranscodeType>) super.decode(clazz);
}
/**
* @see GlideOptions#encodeFormat(Bitmap.CompressFormat)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> encodeFormat(@NonNull Bitmap.CompressFormat format) {
return (GlideRequest<TranscodeType>) super.encodeFormat(format);
}
/**
* @see GlideOptions#encodeQuality(int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> encodeQuality(@IntRange(from = 0, to = 100) int value) {
return (GlideRequest<TranscodeType>) super.encodeQuality(value);
}
/**
* @see GlideOptions#frame(long)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> frame(@IntRange(from = 0) long value) {
return (GlideRequest<TranscodeType>) super.frame(value);
}
/**
* @see GlideOptions#format(DecodeFormat)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> format(@NonNull DecodeFormat format) {
return (GlideRequest<TranscodeType>) super.format(format);
}
/**
* @see GlideOptions#disallowHardwareConfig()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> disallowHardwareConfig() {
return (GlideRequest<TranscodeType>) super.disallowHardwareConfig();
}
/**
* @see GlideOptions#downsample(DownsampleStrategy)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> downsample(@NonNull DownsampleStrategy strategy) {
return (GlideRequest<TranscodeType>) super.downsample(strategy);
}
/**
* @see GlideOptions#timeout(int)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> timeout(@IntRange(from = 0) int value) {
return (GlideRequest<TranscodeType>) super.timeout(value);
}
/**
* @see GlideOptions#optionalCenterCrop()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> optionalCenterCrop() {
return (GlideRequest<TranscodeType>) super.optionalCenterCrop();
}
/**
* @see GlideOptions#centerCrop()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> centerCrop() {
return (GlideRequest<TranscodeType>) super.centerCrop();
}
/**
* @see GlideOptions#optionalFitCenter()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> optionalFitCenter() {
return (GlideRequest<TranscodeType>) super.optionalFitCenter();
}
/**
* @see GlideOptions#fitCenter()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> fitCenter() {
return (GlideRequest<TranscodeType>) super.fitCenter();
}
/**
* @see GlideOptions#optionalCenterInside()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> optionalCenterInside() {
return (GlideRequest<TranscodeType>) super.optionalCenterInside();
}
/**
* @see GlideOptions#centerInside()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> centerInside() {
return (GlideRequest<TranscodeType>) super.centerInside();
}
/**
* @see GlideOptions#optionalCircleCrop()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> optionalCircleCrop() {
return (GlideRequest<TranscodeType>) super.optionalCircleCrop();
}
/**
* @see GlideOptions#circleCrop()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> circleCrop() {
return (GlideRequest<TranscodeType>) super.circleCrop();
}
/**
* @see GlideOptions#transform(Transformation<Bitmap>)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> transform(@NonNull Transformation<Bitmap> transformation) {
return (GlideRequest<TranscodeType>) super.transform(transformation);
}
/**
* @see GlideOptions#transform(Transformation<Bitmap>[])
*/
@NonNull
@CheckResult
@SuppressWarnings({
"unchecked",
"varargs"
})
public GlideRequest<TranscodeType> transform(@NonNull Transformation<Bitmap>... transformations) {
return (GlideRequest<TranscodeType>) super.transform(transformations);
}
/**
* @see GlideOptions#transforms(Transformation<Bitmap>[])
*/
@Deprecated
@NonNull
@CheckResult
@SuppressWarnings({
"unchecked",
"varargs"
})
public GlideRequest<TranscodeType> transforms(
@NonNull Transformation<Bitmap>... transformations) {
return (GlideRequest<TranscodeType>) super.transforms(transformations);
}
/**
* @see GlideOptions#optionalTransform(Transformation<Bitmap>)
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> optionalTransform(
@NonNull Transformation<Bitmap> transformation) {
return (GlideRequest<TranscodeType>) super.optionalTransform(transformation);
}
/**
* @see GlideOptions#optionalTransform(Class<Y>, Transformation<Y>)
*/
@NonNull
@CheckResult
public <Y> GlideRequest<TranscodeType> optionalTransform(@NonNull Class<Y> clazz,
@NonNull Transformation<Y> transformation) {
return (GlideRequest<TranscodeType>) super.optionalTransform(clazz, transformation);
}
/**
* @see GlideOptions#transform(Class<Y>, Transformation<Y>)
*/
@NonNull
@CheckResult
public <Y> GlideRequest<TranscodeType> transform(@NonNull Class<Y> clazz,
@NonNull Transformation<Y> transformation) {
return (GlideRequest<TranscodeType>) super.transform(clazz, transformation);
}
/**
* @see GlideOptions#dontTransform()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> dontTransform() {
return (GlideRequest<TranscodeType>) super.dontTransform();
}
/**
* @see GlideOptions#dontAnimate()
*/
@NonNull
@CheckResult
public GlideRequest<TranscodeType> dontAnimate() {
return (GlideRequest<TranscodeType>) super.dontAnimate();
}
/**
* @see GlideOptions#lock()
*/
@NonNull
public GlideRequest<TranscodeType> lock() {
return (GlideRequest<TranscodeType>) super.lock();
}
/**
* @see GlideOptions#autoClone()
*/
@NonNull
public GlideRequest<TranscodeType> autoClone() {
return (GlideRequest<TranscodeType>) super.autoClone();
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> apply(@NonNull BaseRequestOptions<?> options) {
return (GlideRequest<TranscodeType>) super.apply(options);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> transition(
@NonNull TransitionOptions<?, ? super TranscodeType> options) {
return (GlideRequest<TranscodeType>) super.transition(options);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> listener(@Nullable RequestListener<TranscodeType> listener) {
return (GlideRequest<TranscodeType>) super.listener(listener);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> addListener(
@Nullable RequestListener<TranscodeType> listener) {
return (GlideRequest<TranscodeType>) super.addListener(listener);
}
@Override
@NonNull
public GlideRequest<TranscodeType> error(@Nullable RequestBuilder<TranscodeType> builder) {
return (GlideRequest<TranscodeType>) super.error(builder);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> error(Object o) {
return (GlideRequest<TranscodeType>) super.error(o);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> thumbnail(@Nullable RequestBuilder<TranscodeType> builder) {
return (GlideRequest<TranscodeType>) super.thumbnail(builder);
}
@Override
@NonNull
@CheckResult
@SafeVarargs
@SuppressWarnings("varargs")
public final GlideRequest<TranscodeType> thumbnail(
@Nullable RequestBuilder<TranscodeType>... builders) {
return (GlideRequest<TranscodeType>) super.thumbnail(builders);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> thumbnail(@Nullable List<RequestBuilder<TranscodeType>> list) {
return (GlideRequest<TranscodeType>) super.thumbnail(list);
}
@Override
@Deprecated
@NonNull
@CheckResult
public GlideRequest<TranscodeType> thumbnail(float sizeMultiplier) {
return (GlideRequest<TranscodeType>) super.thumbnail(sizeMultiplier);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable Object o) {
return (GlideRequest<TranscodeType>) super.load(o);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable Bitmap bitmap) {
return (GlideRequest<TranscodeType>) super.load(bitmap);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable Drawable drawable) {
return (GlideRequest<TranscodeType>) super.load(drawable);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable String string) {
return (GlideRequest<TranscodeType>) super.load(string);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable Uri uri) {
return (GlideRequest<TranscodeType>) super.load(uri);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable File file) {
return (GlideRequest<TranscodeType>) super.load(file);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@RawRes @DrawableRes @Nullable Integer id) {
return (GlideRequest<TranscodeType>) super.load(id);
}
@Override
@Deprecated
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable URL url) {
return (GlideRequest<TranscodeType>) super.load(url);
}
@Override
@NonNull
@CheckResult
public GlideRequest<TranscodeType> load(@Nullable byte[] bytes) {
return (GlideRequest<TranscodeType>) super.load(bytes);
}
@Override
@CheckResult
public GlideRequest<TranscodeType> clone() {
return (GlideRequest<TranscodeType>) super.clone();
}
/**
* @see ExtensionWithOption#squareThumb(BaseRequestOptions)
*/
@SuppressWarnings("unchecked")
@CheckResult
@NonNull
public GlideRequest<TranscodeType> squareThumb() {
return (GlideRequest<TranscodeType>) ExtensionWithOption.squareThumb(this);
}
}
| GlideRequest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/id/PooledHiLoSequenceIdentifierTest.java | {
"start": 2282,
"end": 3560
} | class ____ {
@Id
@GenericGenerator(name = "sampleGenerator", strategy = "enhanced-sequence",
parameters = {
@org.hibernate.annotations.Parameter(name = "optimizer", value = "pooled"),
@org.hibernate.annotations.Parameter(name = "initial_value", value = "1"),
@org.hibernate.annotations.Parameter(name = "increment_size", value = "2")
}
)
@GeneratedValue(strategy = GenerationType.TABLE, generator = "sampleGenerator")
private Long id;
private String name;
}
private void insertNewRow(Session session) {
final SessionImplementor si = (SessionImplementor) session;
final SessionFactoryImplementor sfi = si.getFactory();
session.doWork(
connection -> {
PreparedStatement statement = null;
try {
statement = connection.prepareStatement( "INSERT INTO sequenceIdentifier VALUES (?,?)" );
statement.setObject(
1,
((BeforeExecutionGenerator) sfi.getMappingMetamodel().getEntityDescriptor( SequenceIdentifier.class).getGenerator())
.generate( si, null, null, EventType.INSERT )
);
statement.setString( 2,"name" );
statement.executeUpdate();
}
finally {
if ( statement != null ) {
statement.close();
}
}
}
);
}
}
| SequenceIdentifier |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestInMemoryLevelDBAliasMapClient.java | {
"start": 9886,
"end": 14165
} | class ____ implements Runnable {
private final Block block;
private final BlockAliasMap.Writer<FileRegion> writer;
private final ProvidedStorageLocation providedStorageLocation;
private int delay;
WriteThread(Block block, ProvidedStorageLocation providedStorageLocation,
BlockAliasMap.Writer<FileRegion> writer, int delay) {
this.block = block;
this.writer = writer;
this.providedStorageLocation = providedStorageLocation;
this.delay = delay;
}
@Override
public void run() {
try {
Thread.sleep(delay);
writer.store(new FileRegion(block, providedStorageLocation));
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
public FileRegion generateRandomFileRegion(int seed) {
Block block = new Block(seed, seed + 1, seed + 2);
Path path = new Path("koekoek");
byte[] nonce = new byte[0];
ProvidedStorageLocation providedStorageLocation =
new ProvidedStorageLocation(path, seed + 3, seed + 4, nonce);
return new FileRegion(block, providedStorageLocation);
}
@Test
public void multipleReads() throws IOException {
levelDBAliasMapServer.setConf(conf);
levelDBAliasMapServer.start();
inMemoryLevelDBAliasMapClient.setConf(conf);
Random r = new Random();
List<FileRegion> expectedFileRegions = r.ints(0, 200)
.limit(50)
.boxed()
.map(i -> generateRandomFileRegion(i))
.collect(Collectors.toList());
BlockAliasMap.Reader<FileRegion> reader =
inMemoryLevelDBAliasMapClient.getReader(null, BPID);
BlockAliasMap.Writer<FileRegion> writer =
inMemoryLevelDBAliasMapClient.getWriter(null, BPID);
ExecutorService executor = Executors.newCachedThreadPool();
List<ReadThread> readThreads = expectedFileRegions
.stream()
.map(fileRegion -> new ReadThread(fileRegion.getBlock(),
reader,
4000))
.collect(Collectors.toList());
List<? extends Future<?>> readFutures =
readThreads.stream()
.map(readThread -> executor.submit(readThread))
.collect(Collectors.toList());
List<? extends Future<?>> writeFutures = expectedFileRegions
.stream()
.map(fileRegion -> new WriteThread(fileRegion.getBlock(),
fileRegion.getProvidedStorageLocation(),
writer,
1000))
.map(writeThread -> executor.submit(writeThread))
.collect(Collectors.toList());
readFutures.stream()
.map(readFuture -> {
try {
return readFuture.get();
} catch (InterruptedException e) {
throw new RuntimeException(e);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
})
.collect(Collectors.toList());
List<FileRegion> actualFileRegions = readThreads.stream()
.map(readThread -> readThread.getFileRegion().get())
.collect(Collectors.toList());
assertThat(actualFileRegions).containsExactlyInAnyOrder(
expectedFileRegions.toArray(new FileRegion[0]));
}
@Test
public void testServerBindHost() throws Exception {
conf.set(DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY, "0.0.0.0");
writeRead();
}
@Test
public void testNonExistentBlock() throws Exception {
inMemoryLevelDBAliasMapClient.setConf(conf);
levelDBAliasMapServer.setConf(conf);
levelDBAliasMapServer.start();
Block block1 = new Block(100, 43, 44);
ProvidedStorageLocation providedStorageLocation1 = null;
BlockAliasMap.Writer<FileRegion> writer1 =
inMemoryLevelDBAliasMapClient.getWriter(null, BPID);
try {
writer1.store(new FileRegion(block1, providedStorageLocation1));
fail("Should fail on writing a region with null ProvidedLocation");
} catch (IOException | IllegalArgumentException e) {
assertTrue(e.getMessage().contains("not be null"));
}
BlockAliasMap.Reader<FileRegion> reader =
inMemoryLevelDBAliasMapClient.getReader(null, BPID);
LambdaTestUtils.assertOptionalUnset("Expected empty BlockAlias",
reader.resolve(block1));
}
} | WriteThread |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/predicate/Junction.java | {
"start": 385,
"end": 1571
} | enum ____ {
/**
* An AND
*/
CONJUNCTION,
/**
* An OR
*/
DISJUNCTION
}
private final Nature nature;
private final JdbcMappingContainer expressionType;
private final List<Predicate> predicates;
public Junction() {
this( Nature.CONJUNCTION );
}
public Junction(Nature nature) {
this( nature, null );
}
public Junction(Nature nature, JdbcMappingContainer expressionType) {
this.nature = nature;
this.expressionType = expressionType;
this.predicates = new ArrayList<>();
}
public Junction(
Nature nature,
List<Predicate> predicates,
JdbcMappingContainer expressionType) {
this.nature = nature;
this.expressionType = expressionType;
this.predicates = predicates;
}
public void add(Predicate predicate) {
predicates.add( predicate );
}
public Nature getNature() {
return nature;
}
public List<Predicate> getPredicates() {
return predicates;
}
@Override
public boolean isEmpty() {
return predicates.isEmpty();
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitJunction( this );
}
@Override
public JdbcMappingContainer getExpressionType() {
return expressionType;
}
}
| Nature |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/scenario/ConnectionEventBusMonitoringUtil.java | {
"start": 7499,
"end": 9390
} | class ____ {
private final boolean wasDisconnected;
private final boolean wasDeactivated;
private final boolean eventBusCleanup;
private final boolean nettyCleanup;
private final boolean connectionHandedOff;
private final String initialChannelId;
private final String currentChannelId;
public ConnectionAnalysisResult(boolean wasDisconnected, boolean wasDeactivated, boolean eventBusCleanup,
boolean nettyCleanup, boolean connectionHandedOff, String initialChannelId, String currentChannelId) {
this.wasDisconnected = wasDisconnected;
this.wasDeactivated = wasDeactivated;
this.eventBusCleanup = eventBusCleanup;
this.nettyCleanup = nettyCleanup;
this.connectionHandedOff = connectionHandedOff;
this.initialChannelId = initialChannelId;
this.currentChannelId = currentChannelId;
}
/**
* Primary indicator: connection is fully cleaned up based on EventBus monitoring.
*/
public boolean isFullyCleanedUpViaEventBus() {
return eventBusCleanup && nettyCleanup && connectionHandedOff;
}
public boolean wasDisconnected() {
return wasDisconnected;
}
public boolean wasDeactivated() {
return wasDeactivated;
}
public boolean isEventBusCleanup() {
return eventBusCleanup;
}
public boolean isNettyCleanup() {
return nettyCleanup;
}
public boolean isConnectionHandedOff() {
return connectionHandedOff;
}
public String getInitialChannelId() {
return initialChannelId;
}
public String getCurrentChannelId() {
return currentChannelId;
}
}
}
| ConnectionAnalysisResult |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/id/enhanced/StandardNamingStrategy.java | {
"start": 2624,
"end": 5603
} | class ____ implements ImplicitDatabaseObjectNamingStrategy {
public static final String STRATEGY_NAME = "standard";
@Override
public QualifiedName determineSequenceName(
Identifier catalogName,
Identifier schemaName,
Map<?, ?> configValues,
ServiceRegistry serviceRegistry) {
final String rootTableName = getString( TABLE, configValues );
final String implicitName = implicitSequenceName( rootTableName, configValues );
return qualifiedSequenceName( catalogName, schemaName, serviceRegistry, implicitName );
}
private static String implicitSequenceName(String rootTableName, Map<?, ?> configValues) {
final String explicitSuffix = getString( CONFIG_SEQUENCE_PER_ENTITY_SUFFIX, configValues );
final String base = getString( IMPLICIT_NAME_BASE, configValues, rootTableName );
if ( isNotEmpty( explicitSuffix ) ) {
// an "implicit name suffix" was specified
if ( isNotEmpty( base ) ) {
return isQuoted( base )
? "`" + unQuote( base ) + explicitSuffix + "`"
: base + explicitSuffix;
}
}
final String annotationGeneratorName = getString( GENERATOR_NAME, configValues );
if ( isNotEmpty( annotationGeneratorName ) ) {
return annotationGeneratorName;
}
else if ( isNotEmpty( base ) ) {
return isQuoted( base )
? "`" + unQuote( base ) + DEF_SEQUENCE_SUFFIX + "`"
: base + DEF_SEQUENCE_SUFFIX;
}
else {
throw new MappingException( "Unable to determine implicit sequence name; target table - " + rootTableName );
}
}
@Override
public QualifiedName determineTableName(
Identifier catalogName,
Identifier schemaName,
Map<?, ?> configValues,
ServiceRegistry serviceRegistry) {
final String implicitName = implicitTableName( configValues );
return qualifiedTableName( catalogName, schemaName, serviceRegistry, implicitName );
}
private static String implicitTableName(Map<?, ?> configValues) {
final String generatorName = getString( GENERATOR_NAME, configValues );
return isNotEmpty( generatorName ) ? generatorName : DEF_TABLE;
}
private static QualifiedName qualifiedSequenceName(Identifier catalogName, Identifier schemaName, ServiceRegistry serviceRegistry, String implicitName) {
return implicitName.contains( "." )
? QualifiedNameParser.INSTANCE.parse( implicitName )
: new QualifiedSequenceName(
catalogName,
schemaName,
serviceRegistry.requireService( JdbcEnvironment.class )
.getIdentifierHelper()
.toIdentifier( implicitName )
);
}
private static QualifiedName qualifiedTableName(Identifier catalogName, Identifier schemaName, ServiceRegistry serviceRegistry, String implicitName) {
return implicitName.contains( "." )
? QualifiedNameParser.INSTANCE.parse( implicitName )
: new QualifiedTableName(
catalogName,
schemaName,
serviceRegistry.requireService( JdbcEnvironment.class )
.getIdentifierHelper()
.toIdentifier( implicitName )
);
}
}
| StandardNamingStrategy |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InvokerGenerator.java | {
"start": 37073,
"end": 40924
} | class ____ {
final MethodInfo method;
private final Assignability assignability;
TransformerMethod(MethodInfo method, Assignability assignability) {
this.method = method;
this.assignability = assignability;
}
boolean matches(InvocationTransformer transformer, Type expectedType) {
if (transformer.isInputTransformer()) {
// for input transformer (target instance, argument):
// - we can't check what comes into the transformer
// - we can check what comes out of the transformer, because that's what the invokable method consumes
// (and the transformer must produce a subtype)
boolean returnTypeOk = isAnyType(method.returnType()) || isSubtype(method.returnType(), expectedType);
if (Modifier.isStatic(method.flags())) {
return method.parametersCount() == 1 && returnTypeOk
|| method.parametersCount() == 2 && returnTypeOk && isConsumerOfRunnable(method.parameterType(1));
} else {
return method.parametersCount() == 0 && returnTypeOk;
}
} else if (transformer.isOutputTransformer()) {
// for output transformer (return value, exception):
// - we can check what comes into the transformer, because that's what the invokable method produces
// (and the transformer must consume a supertype)
// - we can't check what comes out of the transformer
if (Modifier.isStatic(method.flags())) {
return method.parametersCount() == 1
&& (isAnyType(method.parameterType(0)) || isSupertype(method.parameterType(0), expectedType));
} else {
return method.parametersCount() == 0
&& isSupertype(ClassType.create(method.declaringClass().name()), expectedType);
}
} else {
throw new IllegalArgumentException(transformer.toString());
}
}
// if `matches()` returns `false`, there's no point in calling this method
boolean usesCleanupTasks() {
return Modifier.isStatic(method.flags())
&& method.parametersCount() == 2
&& isConsumerOfRunnable(method.parameterType(1));
}
private boolean isConsumerOfRunnable(Type type) {
return type.kind() == Type.Kind.PARAMETERIZED_TYPE
&& type.name().equals(DotName.createSimple(Consumer.class))
&& type.asParameterizedType().arguments().size() == 1
&& type.asParameterizedType().arguments().get(0).kind() == Type.Kind.CLASS
&& type.asParameterizedType().arguments().get(0).name().equals(DotName.createSimple(Runnable.class));
}
private boolean isSubtype(Type a, Type b) {
return assignability.isSubtype(a, b);
}
private boolean isSupertype(Type a, Type b) {
return assignability.isSupertype(a, b);
}
@Override
public String toString() {
return method.toString() + " declared on " + method.declaringClass();
}
}
// ---
static boolean isAnyType(Type t) {
if (ClassType.OBJECT_TYPE.equals(t)) {
return true;
}
if (t.kind() == Type.Kind.TYPE_VARIABLE) {
TypeVariable typeVar = t.asTypeVariable();
return typeVar.bounds().isEmpty() || typeVar.hasImplicitObjectBound() || isAnyType(typeVar.bounds().get(0));
}
return false;
}
// this is mostly a prototype, doesn't follow any specification
static | TransformerMethod |
java | google__auto | value/src/main/java/com/google/auto/value/processor/BuilderSpec.java | {
"start": 19390,
"end": 21953
} | class ____ {
private final ExecutableElement setter;
private final String access;
private final String name;
private final String parameterTypeString;
private final boolean primitiveParameter;
private final String nullableAnnotation;
private final Copier copier;
PropertySetter(ExecutableElement setter, AnnotatedTypeMirror parameterType, Copier copier) {
this.setter = setter;
this.copier = copier;
this.access = SimpleMethod.access(setter);
this.name = setter.getSimpleName().toString();
primitiveParameter = parameterType.getKind().isPrimitive();
this.parameterTypeString = parameterTypeString(setter, parameterType);
VariableElement parameterElement = Iterables.getOnlyElement(setter.getParameters());
Optional<String> maybeNullable =
nullableAnnotationFor(parameterElement, parameterType.getType());
this.nullableAnnotation = maybeNullable.orElse("");
}
ExecutableElement getSetter() {
return setter;
}
private static String parameterTypeString(
ExecutableElement setter, AnnotatedTypeMirror parameterType) {
if (setter.isVarArgs()) {
TypeMirror componentType = MoreTypes.asArray(parameterType.getType()).getComponentType();
// This is a bit ugly. It's OK to annotate just the component type, because if it is
// say `@Nullable String` then we will end up with `@Nullable String...`. Unlike the
// normal array case, we can't have the situation where the array itself is annotated;
// you can write `String @Nullable []` to mean that, but you can't write
// `String @Nullable ...`.
return TypeEncoder.encodeWithAnnotations(componentType) + "...";
} else {
return TypeEncoder.encodeWithAnnotations(parameterType);
}
}
public String getAccess() {
return access;
}
public String getName() {
return name;
}
public String getParameterType() {
return parameterTypeString;
}
public boolean getPrimitiveParameter() {
return primitiveParameter;
}
public String getNullableAnnotation() {
return nullableAnnotation;
}
public String copy(Property property) {
String copy = copier.copy.apply(property.toString());
if (property.isNullable() && !copier.acceptsNull) {
copy = String.format("(%s == null ? null : %s)", property, copy);
}
return copy;
}
}
/**
* Returns a representation of the given {@code @AutoValue.Builder} | PropertySetter |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java | {
"start": 17991,
"end": 19701
} | class ____ extends AbstractArrayWrapper implements LongArray {
private final LongArray in;
LongArrayWrapper(LongArray in, boolean clearOnResize) {
super(clearOnResize);
this.in = in;
}
@Override
protected BigArray getDelegate() {
return in;
}
@Override
protected void randomizeContent(long from, long to) {
fill(from, to, random.nextLong());
}
@Override
public long get(long index) {
return in.get(index);
}
@Override
public long getAndSet(long index, long value) {
return in.getAndSet(index, value);
}
@Override
public void set(long index, long value) {
in.set(index, value);
}
@Override
public long increment(long index, long inc) {
return in.increment(index, inc);
}
@Override
public void fill(long fromIndex, long toIndex, long value) {
in.fill(fromIndex, toIndex, value);
}
@Override
public void set(long index, byte[] buf, int offset, int len) {
in.set(index, buf, offset, len);
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.singleton(Accountables.namedAccountable("delegate", in));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
in.writeTo(out);
}
@Override
public void fillWith(StreamInput streamInput) throws IOException {
in.fillWith(streamInput);
}
}
private | LongArrayWrapper |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scripting/support/StandardScriptEvaluator.java | {
"start": 1975,
"end": 6440
} | class ____ to use for script engine detection
*/
public StandardScriptEvaluator(ClassLoader classLoader) {
this.scriptEngineManager = new ScriptEngineManager(classLoader);
}
/**
* Construct a new {@code StandardScriptEvaluator} for the given JSR-223
* {@link ScriptEngineManager} to obtain script engines from.
* @param scriptEngineManager the ScriptEngineManager (or subclass thereof) to use
* @since 4.2.2
*/
public StandardScriptEvaluator(ScriptEngineManager scriptEngineManager) {
this.scriptEngineManager = scriptEngineManager;
}
/**
* Set the name of the language meant for evaluating the scripts (for example, "Groovy").
* <p>This is effectively an alias for {@link #setEngineName "engineName"},
* potentially (but not yet) providing common abbreviations for certain languages
* beyond what the JSR-223 script engine factory exposes.
* @see #setEngineName
*/
public void setLanguage(String language) {
this.engineName = language;
}
/**
* Set the name of the script engine for evaluating the scripts (for example, "Groovy"),
* as exposed by the JSR-223 script engine factory.
* @since 4.2.2
* @see #setLanguage
*/
public void setEngineName(String engineName) {
this.engineName = engineName;
}
/**
* Set the globally scoped bindings on the underlying script engine manager,
* shared by all scripts, as an alternative to script argument bindings.
* @since 4.2.2
* @see #evaluate(ScriptSource, Map)
* @see javax.script.ScriptEngineManager#setBindings(Bindings)
* @see javax.script.SimpleBindings
*/
public void setGlobalBindings(Map<String, Object> globalBindings) {
Bindings bindings = StandardScriptUtils.getBindings(globalBindings);
this.globalBindings = bindings;
ScriptEngineManager scriptEngineManager = this.scriptEngineManager;
if (scriptEngineManager != null) {
scriptEngineManager.setBindings(bindings);
}
}
@Override
public void setBeanClassLoader(ClassLoader classLoader) {
ScriptEngineManager scriptEngineManager = this.scriptEngineManager;
if (scriptEngineManager == null) {
scriptEngineManager = new ScriptEngineManager(classLoader);
this.scriptEngineManager = scriptEngineManager;
Bindings bindings = this.globalBindings;
if (bindings != null) {
scriptEngineManager.setBindings(bindings);
}
}
}
@Override
public @Nullable Object evaluate(ScriptSource script) {
return evaluate(script, null);
}
@Override
public @Nullable Object evaluate(ScriptSource script, @Nullable Map<String, Object> argumentBindings) {
ScriptEngine engine = getScriptEngine(script);
try {
if (CollectionUtils.isEmpty(argumentBindings)) {
return engine.eval(script.getScriptAsString());
}
else {
Bindings bindings = StandardScriptUtils.getBindings(argumentBindings);
return engine.eval(script.getScriptAsString(), bindings);
}
}
catch (IOException ex) {
throw new ScriptCompilationException(script, "Cannot access script for ScriptEngine", ex);
}
catch (ScriptException ex) {
throw new ScriptCompilationException(script, new StandardScriptEvalException(ex));
}
}
/**
* Obtain the JSR-223 ScriptEngine to use for the given script.
* @param script the script to evaluate
* @return the ScriptEngine (never {@code null})
*/
protected ScriptEngine getScriptEngine(ScriptSource script) {
ScriptEngineManager scriptEngineManager = this.scriptEngineManager;
if (scriptEngineManager == null) {
scriptEngineManager = new ScriptEngineManager();
this.scriptEngineManager = scriptEngineManager;
}
if (StringUtils.hasText(this.engineName)) {
return StandardScriptUtils.retrieveEngineByName(scriptEngineManager, this.engineName);
}
else if (script instanceof ResourceScriptSource resourceScriptSource) {
Resource resource = resourceScriptSource.getResource();
String extension = StringUtils.getFilenameExtension(resource.getFilename());
if (extension == null) {
throw new IllegalStateException(
"No script language defined, and no file extension defined for resource: " + resource);
}
ScriptEngine engine = scriptEngineManager.getEngineByExtension(extension);
if (engine == null) {
throw new IllegalStateException("No matching engine found for file extension '" + extension + "'");
}
return engine;
}
else {
throw new IllegalStateException(
"No script language defined, and no resource associated with script: " + script);
}
}
}
| loader |
java | apache__camel | components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpTcpClientProducerConnectionErrorTest.java | {
"start": 1891,
"end": 10825
} | class ____ extends CamelTestSupport {
@RegisterExtension
public MllpServerResource mllpServer = new MllpServerResource("localhost", AvailablePortFinder.getNextAvailable());
@EndpointInject("direct://source")
ProducerTemplate source;
@EndpointInject("mock://target")
MockEndpoint target;
@EndpointInject("mock://complete")
MockEndpoint complete;
@EndpointInject("mock://write-ex")
MockEndpoint writeEx;
@EndpointInject("mock://connect-ex")
MockEndpoint connectEx;
@EndpointInject("mock://acknowledgement-ex")
MockEndpoint acknowledgementEx;
@Override
protected CamelContext createCamelContext() throws Exception {
DefaultCamelContext context = (DefaultCamelContext) super.createCamelContext();
context.setUseMDCLogging(true);
context.getCamelContextExtension().setName(this.getClass().getSimpleName());
return context;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
String routeId = "mllp-sender";
public void configure() {
onCompletion()
.to(complete);
onException(ConnectException.class)
.handled(true)
.to(connectEx)
.log(LoggingLevel.ERROR, routeId, "Connect Error")
.stop();
onException(MllpWriteException.class)
.handled(true)
.to(writeEx)
.log(LoggingLevel.ERROR, routeId, "Write Error")
.stop();
onException(MllpAcknowledgementException.class)
.handled(true)
.to(acknowledgementEx)
.log(LoggingLevel.ERROR, routeId, "Acknowledgement Error")
.stop();
from(source.getDefaultEndpoint()).routeId(routeId)
.log(LoggingLevel.INFO, routeId, "Sending Message")
.toF("mllp://%s:%d", mllpServer.getListenHost(), mllpServer.getListenPort())
.log(LoggingLevel.INFO, routeId, "Received Acknowledgement")
.to(target);
}
};
}
/**
* The component should reconnect, so the route shouldn't see any errors.
*
* @throws Exception
*/
@Test
public void testConnectionClosedBeforeSendingHL7Message() throws Exception {
target.expectedMessageCount(2);
complete.expectedMessageCount(2);
connectEx.expectedMessageCount(0);
writeEx.expectedMessageCount(0);
acknowledgementEx.expectedMessageCount(0);
NotifyBuilder oneDone = new NotifyBuilder(context).whenCompleted(1).create();
NotifyBuilder twoDone = new NotifyBuilder(context).whenCompleted(2).create();
// Need to send one message to get the connection established
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(oneDone.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
mllpServer.closeClientConnections();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(twoDone.matches(5, TimeUnit.SECONDS), "Should have completed two exchanges");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
}
/**
* The component should reconnect, so the route shouldn't see any errors.
*
* @throws Exception
*/
@Test()
public void testConnectionResetBeforeSendingHL7Message() throws Exception {
target.expectedMessageCount(2);
complete.expectedMessageCount(2);
connectEx.expectedMessageCount(0);
writeEx.expectedMessageCount(0);
acknowledgementEx.expectedMessageCount(0);
NotifyBuilder oneDone = new NotifyBuilder(context).whenCompleted(1).create();
NotifyBuilder twoDone = new NotifyBuilder(context).whenCompleted(2).create();
// Need to send one message to get the connection established
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(oneDone.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
mllpServer.resetClientConnections();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(twoDone.matches(5, TimeUnit.SECONDS), "Should have completed two exchanges");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
}
@Test()
public void testConnectionClosedBeforeReadingAcknowledgement() throws Exception {
target.expectedMessageCount(0);
complete.expectedMessageCount(1);
connectEx.expectedMessageCount(0);
writeEx.expectedMessageCount(0);
acknowledgementEx.expectedMessageCount(1);
mllpServer.setCloseSocketBeforeAcknowledgementModulus(1);
NotifyBuilder done = new NotifyBuilder(context).whenCompleted(1).create();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(done.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
}
@Test()
public void testConnectionResetBeforeReadingAcknowledgement() throws Exception {
target.expectedMessageCount(0);
complete.expectedMessageCount(1);
connectEx.expectedMessageCount(0);
writeEx.expectedMessageCount(0);
acknowledgementEx.expectedMessageCount(1);
mllpServer.setResetSocketBeforeAcknowledgementModulus(1);
NotifyBuilder done = new NotifyBuilder(context).whenCompleted(1).create();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(done.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
}
@Test()
public void testServerShutdownBeforeSendingHL7Message() throws Exception {
target.expectedMessageCount(1);
complete.expectedMessageCount(2);
connectEx.expectedMessageCount(0);
NotifyBuilder done = new NotifyBuilder(context).whenCompleted(2).create();
// Need to send one message to get the connection established
source.sendBody(Hl7TestMessageGenerator.generateMessage());
mllpServer.shutdown();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(done.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
// Depending on the timing, either a write or a receive exception will be thrown
assertEquals(1, writeEx.getExchanges().size() + acknowledgementEx.getExchanges().size(),
"Either a write or a receive exception should have been be thrown");
}
@Test()
public void testConnectionCloseAndServerShutdownBeforeSendingHL7Message() throws Exception {
target.expectedMessageCount(1);
complete.expectedMessageCount(2);
connectEx.expectedMessageCount(0);
NotifyBuilder done = new NotifyBuilder(context).whenCompleted(2).create();
// Need to send one message to get the connection established
source.sendBody(Hl7TestMessageGenerator.generateMessage());
mllpServer.closeClientConnections();
mllpServer.shutdown();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(done.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
// Depending on the timing, either a write or a receive exception will be thrown
assertEquals(1, writeEx.getExchanges().size() + acknowledgementEx.getExchanges().size(),
"Either a write or a receive exception should have been be thrown");
}
@Test()
public void testConnectionResetAndServerShutdownBeforeSendingHL7Message() throws Exception {
target.expectedMessageCount(1);
complete.expectedMessageCount(2);
connectEx.expectedMessageCount(0);
writeEx.expectedMessageCount(1);
acknowledgementEx.expectedMessageCount(0);
NotifyBuilder done = new NotifyBuilder(context).whenCompleted(2).create();
// Need to send one message to get the connection established
source.sendBody(Hl7TestMessageGenerator.generateMessage());
mllpServer.resetClientConnections();
mllpServer.shutdown();
source.sendBody(Hl7TestMessageGenerator.generateMessage());
assertTrue(done.matches(5, TimeUnit.SECONDS), "Should have completed an exchange");
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
}
}
| MllpTcpClientProducerConnectionErrorTest |
java | apache__camel | components/camel-openstack/src/test/java/org/apache/camel/component/openstack/nova/FlavorProducerTest.java | {
"start": 2089,
"end": 7379
} | class ____ extends NovaProducerTestSupport {
@Mock
private Flavor testOSFlavor;
@Mock
private FlavorService flavorService;
@Captor
private ArgumentCaptor<Flavor> flavorCaptor;
@Captor
private ArgumentCaptor<String> flavorIdCaptor;
private Flavor dummyFlavor;
@BeforeEach
public void setUp() {
when(computeService.flavors()).thenReturn(flavorService);
producer = new FlavorsProducer(endpoint, client);
when(flavorService.create(any())).thenReturn(testOSFlavor);
when(flavorService.get(anyString())).thenReturn(testOSFlavor);
List<org.openstack4j.model.compute.Flavor> getAllList = new ArrayList<>();
getAllList.add(testOSFlavor);
getAllList.add(testOSFlavor);
doReturn(getAllList).when(flavorService).list();
dummyFlavor = createTestFlavor();
when(testOSFlavor.getId()).thenReturn(UUID.randomUUID().toString());
when(testOSFlavor.getName()).thenReturn(dummyFlavor.getName());
when(testOSFlavor.getRam()).thenReturn(dummyFlavor.getRam());
when(testOSFlavor.getVcpus()).thenReturn(dummyFlavor.getVcpus());
when(testOSFlavor.getDisk()).thenReturn(dummyFlavor.getDisk());
}
@Test
public void createFlavor() throws Exception {
when(endpoint.getOperation()).thenReturn(OpenstackConstants.CREATE);
final String expectedFlavorID = UUID.randomUUID().toString();
when(testOSFlavor.getId()).thenReturn(expectedFlavorID);
//send dummyFlavor to create
msg.setBody(dummyFlavor);
producer.process(exchange);
verify(flavorService).create(flavorCaptor.capture());
assertEquals(dummyFlavor, flavorCaptor.getValue());
final Flavor createdFlavor = msg.getBody(Flavor.class);
assertEqualsFlavors(dummyFlavor, createdFlavor);
assertNotNull(createdFlavor.getId());
}
@Test
public void createFlavorWithHeaders() throws Exception {
Map<String, Object> headers = new HashMap<>();
headers.put(OpenstackConstants.OPERATION, OpenstackConstants.CREATE);
headers.put(OpenstackConstants.NAME, dummyFlavor.getName());
headers.put(NovaConstants.VCPU, dummyFlavor.getVcpus());
headers.put(NovaConstants.DISK, dummyFlavor.getDisk());
headers.put(NovaConstants.SWAP, dummyFlavor.getSwap());
headers.put(NovaConstants.RAM, dummyFlavor.getRam());
msg.setHeaders(headers);
producer.process(exchange);
verify(flavorService).create(flavorCaptor.capture());
assertEqualsFlavors(dummyFlavor, flavorCaptor.getValue());
final Flavor created = msg.getBody(Flavor.class);
assertNotNull(created.getId());
assertEqualsFlavors(dummyFlavor, created);
}
@Test
public void getTest() throws Exception {
msg.setHeader(OpenstackConstants.OPERATION, OpenstackConstants.GET);
msg.setHeader(OpenstackConstants.ID, "anything - client is mocked");
//should return dummyFlavor
producer.process(exchange);
final Flavor result = msg.getBody(Flavor.class);
assertEqualsFlavors(dummyFlavor, result);
assertNotNull(result.getId());
}
@Test
public void getAllTest() throws Exception {
when(endpoint.getOperation()).thenReturn(OpenstackConstants.GET_ALL);
producer.process(exchange);
List<Flavor> result = msg.getBody(List.class);
assertEquals(2, result.size());
for (Flavor f : result) {
assertEqualsFlavors(dummyFlavor, f);
assertNotNull(f.getId());
}
}
@Test
public void deleteSuccess() throws Exception {
when(flavorService.delete(anyString())).thenReturn(ActionResponse.actionSuccess());
when(endpoint.getOperation()).thenReturn(OpenstackConstants.DELETE);
String id = "myID";
msg.setHeader(OpenstackConstants.ID, id);
producer.process(exchange);
verify(flavorService).delete(flavorIdCaptor.capture());
assertEquals(id, flavorIdCaptor.getValue());
assertNull(msg.getBody());
}
@Test
public void deleteFailure() throws Exception {
final String failReason = "unknown";
when(flavorService.delete(anyString())).thenReturn(ActionResponse.actionFailed(failReason, 401));
when(endpoint.getOperation()).thenReturn(OpenstackConstants.DELETE);
String id = "myID";
msg.setHeader(OpenstackConstants.ID, id);
producer.process(exchange);
verify(flavorService).delete(flavorIdCaptor.capture());
assertEquals(id, flavorIdCaptor.getValue());
}
private Flavor createTestFlavor() {
FlavorBuilder builder = Builders.flavor()
.name("dummy flavor")
.ram(3)
.vcpus(2)
.disk(5)
.swap(2);
return builder.build();
}
private void assertEqualsFlavors(Flavor old, Flavor createdFlavor) {
assertEquals(old.getName(), createdFlavor.getName());
assertEquals(old.getRam(), createdFlavor.getRam());
assertEquals(old.getVcpus(), createdFlavor.getVcpus());
assertEquals(old.getDisk(), createdFlavor.getDisk());
}
}
| FlavorProducerTest |
java | quarkusio__quarkus | extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/ConfigUrlMissingDefaultDatasourceDynamicInjectionTest.java | {
"start": 536,
"end": 2458
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
// The URL won't be missing if dev services are enabled
.overrideConfigKey("quarkus.devservices.enabled", "false");
@Inject
InjectableInstance<Pool> pool;
@Inject
InjectableInstance<io.vertx.mutiny.sqlclient.Pool> mutinyPool;
@Inject
InjectableInstance<MSSQLPool> vendorPool;
@Inject
InjectableInstance<io.vertx.mutiny.mssqlclient.MSSQLPool> mutinyVendorPool;
@Test
public void pool() {
doTest(pool, pool1 -> pool1.getConnection().toCompletionStage().toCompletableFuture().join());
}
@Test
public void mutinyPool() {
doTest(mutinyPool, pool1 -> pool1.getConnection().subscribe().asCompletionStage().join());
}
@Test
public void vendorPool() {
doTest(vendorPool, MSSQLPool -> MSSQLPool.getConnection().toCompletionStage().toCompletableFuture().join());
}
@Test
public void mutinyVendorPool() {
doTest(mutinyVendorPool, MSSQLPool -> MSSQLPool.getConnection().subscribe().asCompletionStage().join());
}
private <T> void doTest(InjectableInstance<T> instance, Consumer<T> action) {
var pool = instance.get();
assertThat(pool).isNotNull();
assertThatThrownBy(() -> action.accept(pool))
.isInstanceOf(InactiveBeanException.class)
.hasMessageContainingAll("Datasource '<default>' was deactivated automatically because its URL is not set.",
"To avoid this exception while keeping the bean inactive", // Message from Arc with generic hints
"To activate the datasource, set configuration property 'quarkus.datasource.reactive.url'",
"Refer to https://quarkus.io/guides/datasource for guidance.");
}
}
| ConfigUrlMissingDefaultDatasourceDynamicInjectionTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ExtendingJUnitAssertTest.java | {
"start": 1926,
"end": 2442
} | class ____<T extends String> extends Assert {
public void test() {
assertEquals(5, 5);
assertNull(2);
assertNotNull(3);
}
}
""")
.addOutputLines(
"in/Foo.java",
"""
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import org.junit.Assert;
| Foo |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/main/java/org/springframework/boot/webflux/autoconfigure/WebFluxAutoConfiguration.java | {
"start": 7246,
"end": 7579
} | class ____ {
@Bean
@ConditionalOnMissingBean(HiddenHttpMethodFilter.class)
@ConditionalOnBooleanProperty("spring.webflux.hiddenmethod.filter.enabled")
OrderedHiddenHttpMethodFilter hiddenHttpMethodFilter() {
return new OrderedHiddenHttpMethodFilter();
}
@Configuration(proxyBeanMethods = false)
static | WebFluxAutoConfiguration |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java | {
"start": 1607,
"end": 22071
} | class ____ extends AbstractQueryBuilder<RangeQueryBuilder> implements MultiTermQueryBuilder {
public static final String NAME = "range";
public static final boolean DEFAULT_INCLUDE_UPPER = true;
public static final boolean DEFAULT_INCLUDE_LOWER = true;
public static final ParseField LTE_FIELD = new ParseField("lte");
public static final ParseField GTE_FIELD = new ParseField("gte");
public static final ParseField GT_FIELD = new ParseField("gt");
public static final ParseField LT_FIELD = new ParseField("lt");
private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone");
private static final ParseField FORMAT_FIELD = new ParseField("format");
private static final ParseField RELATION_FIELD = new ParseField("relation");
private final String fieldName;
private Object from;
private Object to;
private ZoneId timeZone;
private boolean includeLower = DEFAULT_INCLUDE_LOWER;
private boolean includeUpper = DEFAULT_INCLUDE_UPPER;
private String format;
private ShapeRelation relation;
/**
* A Query that matches documents within an range of terms.
*
* @param fieldName The field name
*/
public RangeQueryBuilder(String fieldName) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name is null or empty");
}
this.fieldName = fieldName;
}
/**
* Read from a stream.
*/
public RangeQueryBuilder(StreamInput in) throws IOException {
super(in);
fieldName = in.readString();
from = in.readGenericValue();
to = in.readGenericValue();
includeLower = in.readBoolean();
includeUpper = in.readBoolean();
timeZone = in.readOptionalZoneId();
format = in.readOptionalString();
String relationString = in.readOptionalString();
if (relationString != null) {
relation = ShapeRelation.getRelationByName(relationString);
if (relation != null && isRelationAllowed(relation) == false) {
throw new IllegalArgumentException("[range] query does not support relation [" + relationString + "]");
}
}
}
private static boolean isRelationAllowed(ShapeRelation relation) {
return relation == ShapeRelation.INTERSECTS || relation == ShapeRelation.CONTAINS || relation == ShapeRelation.WITHIN;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(this.fieldName);
out.writeGenericValue(this.from);
out.writeGenericValue(this.to);
out.writeBoolean(this.includeLower);
out.writeBoolean(this.includeUpper);
out.writeOptionalZoneId(timeZone);
out.writeOptionalString(format);
String relationString = null;
if (this.relation != null) {
relationString = this.relation.getRelationName();
}
out.writeOptionalString(relationString);
}
/**
* Get the field name for this query.
*/
@Override
public String fieldName() {
return this.fieldName;
}
/**
* The from part of the range query. Null indicates unbounded.
* In case lower bound is assigned to a string, we internally convert it to a {@link BytesRef} because
* in {@link RangeQueryBuilder} field are later parsed as {@link BytesRef} and we need internal representation
* of query to be equal regardless of whether it was created from XContent or via Java API.
*/
public RangeQueryBuilder from(Object from, boolean includeLower) {
this.from = maybeConvertToBytesRef(from);
this.includeLower = includeLower;
return this;
}
/**
* The from part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder from(Object from) {
return from(from, this.includeLower);
}
/**
* Gets the lower range value for this query.
*/
public Object from() {
return maybeConvertToString(this.from);
}
/**
* The from part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder gt(Object from) {
return from(from, false);
}
/**
* The from part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder gte(Object from) {
return from(from, true);
}
/**
* The to part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder to(Object to, boolean includeUpper) {
this.to = maybeConvertToBytesRef(to);
this.includeUpper = includeUpper;
return this;
}
/**
* The to part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder to(Object to) {
return to(to, this.includeUpper);
}
/**
* Gets the upper range value for this query.
* In case upper bound is assigned to a string, we internally convert it to a {@link BytesRef} because
* in {@link RangeQueryBuilder} field are later parsed as {@link BytesRef} and we need internal representation
* of query to be equal regardless of whether it was created from XContent or via Java API.
*/
public Object to() {
return maybeConvertToString(this.to);
}
/**
* The to part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder lt(Object to) {
return to(to, false);
}
/**
* The to part of the range query. Null indicates unbounded.
*/
public RangeQueryBuilder lte(Object to) {
return to(to, true);
}
/**
* Should the lower bound be included or not. Defaults to {@code true}.
*/
public RangeQueryBuilder includeLower(boolean includeLower) {
this.includeLower = includeLower;
return this;
}
/**
* Gets the includeLower flag for this query.
*/
public boolean includeLower() {
return this.includeLower;
}
/**
* Should the upper bound be included or not. Defaults to {@code true}.
*/
public RangeQueryBuilder includeUpper(boolean includeUpper) {
this.includeUpper = includeUpper;
return this;
}
/**
* Gets the includeUpper flag for this query.
*/
public boolean includeUpper() {
return this.includeUpper;
}
/**
* In case of date field, we can adjust the from/to fields using a timezone
*/
public RangeQueryBuilder timeZone(String timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("timezone cannot be null");
}
try {
this.timeZone = ZoneId.of(timeZone);
} catch (DateTimeException e) {
throw new IllegalArgumentException(e);
}
return this;
}
/**
* In case of date field, gets the from/to fields timezone adjustment
*/
public String timeZone() {
return this.timeZone == null ? null : this.timeZone.getId();
}
ZoneId getDateTimeZone() { // for testing
return timeZone;
}
/**
* In case of format field, we can parse the from/to fields using this time format
*/
public RangeQueryBuilder format(String format) {
if (format == null) {
throw new IllegalArgumentException("format cannot be null");
}
// this just ensure that the pattern is actually valid, no need to keep it here
DateFormatter.forPattern(format);
this.format = format;
return this;
}
/**
* Gets the format field to parse the from/to fields
*/
public String format() {
return format;
}
DateMathParser getForceDateParser() { // pkg private for testing
if (Strings.hasText(format)) {
return DateFormatter.forPattern(this.format).toDateMathParser();
}
return null;
}
public ShapeRelation relation() {
return this.relation;
}
public RangeQueryBuilder relation(String relation) {
if (relation == null) {
throw new IllegalArgumentException("relation cannot be null");
}
this.relation = ShapeRelation.getRelationByName(relation);
if (this.relation == null) {
throw new IllegalArgumentException(relation + " is not a valid relation");
}
if (isRelationAllowed(this.relation) == false) {
throw new IllegalArgumentException("[range] query does not support relation [" + relation + "]");
}
return this;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startObject(fieldName);
Object from = maybeConvertToString(this.from);
if (from != null) {
if (includeLower) {
builder.field(GTE_FIELD.getPreferredName(), from);
} else {
builder.field(GT_FIELD.getPreferredName(), from);
}
}
Object to = maybeConvertToString(this.to);
if (to != null) {
if (includeUpper) {
builder.field(LTE_FIELD.getPreferredName(), to);
} else {
builder.field(LT_FIELD.getPreferredName(), to);
}
}
if (timeZone != null) {
builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getId());
}
if (Strings.hasText(format)) {
builder.field(FORMAT_FIELD.getPreferredName(), format);
}
if (relation != null) {
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
}
printBoostAndQueryName(builder);
builder.endObject();
builder.endObject();
}
public static RangeQueryBuilder fromXContent(XContentParser parser) throws IOException {
String fieldName = null;
Object from = null;
Object to = null;
boolean includeLower = RangeQueryBuilder.DEFAULT_INCLUDE_LOWER;
boolean includeUpper = RangeQueryBuilder.DEFAULT_INCLUDE_UPPER;
String timeZone = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String format = null;
String relation = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName);
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
boost = parser.floatValue();
} else if (GT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
from = maybeConvertToBytesRef(parser.objectBytes());
includeLower = false;
} else if (GTE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
from = maybeConvertToBytesRef(parser.objectBytes());
includeLower = true;
} else if (LT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
to = maybeConvertToBytesRef(parser.objectBytes());
includeUpper = false;
} else if (LTE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
to = maybeConvertToBytesRef(parser.objectBytes());
includeUpper = true;
} else if (TIME_ZONE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
timeZone = parser.text();
} else if (FORMAT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
format = parser.text();
} else if (RELATION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
relation = parser.text();
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
queryName = parser.text();
} else {
throw new ParsingException(
parser.getTokenLocation(),
"[range] query does not support [" + currentFieldName + "]"
);
}
}
}
} else if (token.isValue()) {
throw new ParsingException(parser.getTokenLocation(), "[range] query does not support [" + currentFieldName + "]");
}
}
RangeQueryBuilder rangeQuery = new RangeQueryBuilder(fieldName);
rangeQuery.from(from);
rangeQuery.to(to);
rangeQuery.includeLower(includeLower);
rangeQuery.includeUpper(includeUpper);
if (timeZone != null) {
rangeQuery.timeZone(timeZone);
}
rangeQuery.boost(boost);
rangeQuery.queryName(queryName);
if (format != null) {
rangeQuery.format(format);
}
if (relation != null) {
rangeQuery.relation(relation);
}
return rangeQuery;
}
@Override
public String getWriteableName() {
return NAME;
}
// Overridable for testing only
protected MappedFieldType.Relation getRelation(final CoordinatorRewriteContext coordinatorRewriteContext) {
final MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(fieldName);
if (fieldType instanceof final DateFieldMapper.DateFieldType dateFieldType) {
assert fieldName.equals(fieldType.name());
IndexLongFieldRange fieldRange = coordinatorRewriteContext.getFieldRange(fieldName);
if (fieldRange.isComplete() == false || fieldRange == IndexLongFieldRange.EMPTY) {
// if not all shards for this (frozen) index have reported ranges to cluster state, OR if they
// have reported in and the range is empty (no data for that field), then return DISJOINT in order
// to rewrite the query to MatchNone
return MappedFieldType.Relation.DISJOINT;
}
if (fieldRange == IndexLongFieldRange.UNKNOWN) {
// do a full search if UNKNOWN for whatever reason (e.g., event.ingested is UNKNOWN in a
// mixed-cluster where nodes with a version before event.ingested was added to cluster state)
return MappedFieldType.Relation.INTERSECTS;
}
DateMathParser dateMathParser = getForceDateParser();
return dateFieldType.isFieldWithinQuery(
fieldRange.getMin(),
fieldRange.getMax(),
from,
to,
includeLower,
includeUpper,
timeZone,
dateMathParser,
coordinatorRewriteContext,
dateFieldType.name()
);
}
// If the field type is null or not of type DataFieldType then we have no idea whether this range query will match during
// coordinating rewrite. So we should return that it intersects, either the data node query rewrite or by actually running
// the query we know whether this range query actually matches.
return MappedFieldType.Relation.INTERSECTS;
}
protected MappedFieldType.Relation getRelation(final SearchExecutionContext searchExecutionContext) throws IOException {
final MappedFieldType fieldType = searchExecutionContext.getFieldType(fieldName);
if (fieldType == null) {
return MappedFieldType.Relation.DISJOINT;
}
if (searchExecutionContext.getIndexReader() == null) {
// No reader, this may happen e.g. for percolator queries.
return MappedFieldType.Relation.INTERSECTS;
}
DateMathParser dateMathParser = getForceDateParser();
return fieldType.isFieldWithinQuery(
searchExecutionContext.getIndexReader(),
from,
to,
includeLower,
includeUpper,
timeZone,
dateMathParser,
searchExecutionContext
);
}
@Override
protected QueryBuilder doCoordinatorRewrite(final CoordinatorRewriteContext coordinatorRewriteContext) {
return toQueryBuilder(getRelation(coordinatorRewriteContext));
}
@Override
protected QueryBuilder doSearchRewrite(final SearchExecutionContext searchExecutionContext) throws IOException {
return toQueryBuilder(getRelation(searchExecutionContext));
}
private AbstractQueryBuilder<? extends AbstractQueryBuilder<?>> toQueryBuilder(MappedFieldType.Relation relation) {
switch (relation) {
case DISJOINT -> {
return new MatchNoneQueryBuilder("The \"" + getName() + "\" query was rewritten to a \"match_none\" query.");
}
case WITHIN -> {
if (from != null || to != null || format != null || timeZone != null) {
RangeQueryBuilder newRangeQuery = new RangeQueryBuilder(fieldName);
newRangeQuery.from(null);
newRangeQuery.to(null);
newRangeQuery.format = null;
newRangeQuery.timeZone = null;
return newRangeQuery;
} else {
return this;
}
}
case INTERSECTS -> {
return this;
}
default -> throw new AssertionError();
}
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
if (from == null && to == null) {
/*
* Open bounds on both side, we can rewrite to an exists query
* if the {@link FieldNamesFieldMapper} is enabled.
*/
if (context.isFieldMapped(FieldNamesFieldMapper.NAME) == false) {
return new MatchNoDocsQuery("No mappings yet");
}
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context
.getFieldType(FieldNamesFieldMapper.NAME);
// Exists query would fail if the fieldNames field is disabled.
if (fieldNamesFieldType.isEnabled()) {
return ExistsQueryBuilder.newFilter(context, fieldName, false);
}
}
MappedFieldType mapper = context.getFieldType(this.fieldName);
if (mapper == null) {
throw new IllegalStateException("Rewrite first");
}
DateMathParser forcedDateParser = getForceDateParser();
return mapper.rangeQuery(from, to, includeLower, includeUpper, relation, timeZone, forcedDateParser, context);
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, from, to, timeZone, includeLower, includeUpper, format);
}
@Override
protected boolean doEquals(RangeQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(from, other.from)
&& Objects.equals(to, other.to)
&& Objects.equals(timeZone, other.timeZone)
&& Objects.equals(includeLower, other.includeLower)
&& Objects.equals(includeUpper, other.includeUpper)
&& Objects.equals(format, other.format);
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
| RangeQueryBuilder |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing/src/test/java/org/springframework/boot/micrometer/tracing/autoconfigure/NoopTracerAutoConfigurationTests.java | {
"start": 1280,
"end": 2365
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(NoopTracerAutoConfiguration.class));
@Test
void shouldSupplyNoopTracer() {
this.contextRunner.run((context) -> {
assertThat(context).hasSingleBean(Tracer.class);
Tracer tracer = context.getBean(Tracer.class);
assertThat(tracer).isEqualTo(Tracer.NOOP);
});
}
@Test
void shouldBackOffOnCustomTracer() {
this.contextRunner.withUserConfiguration(CustomTracerConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(Tracer.class);
assertThat(context).hasBean("customTracer");
Tracer tracer = context.getBean(Tracer.class);
assertThat(tracer).isNotEqualTo(Tracer.NOOP);
});
}
@Test
void shouldBackOffIfMicrometerTracingIsMissing() {
this.contextRunner.withClassLoader(new FilteredClassLoader("io.micrometer.tracing"))
.run((context) -> assertThat(context).doesNotHaveBean(Tracer.class));
}
@Configuration(proxyBeanMethods = false)
private static final | NoopTracerAutoConfigurationTests |
java | quarkusio__quarkus | independent-projects/tools/registry-client/src/test/java/io/quarkus/registry/config/DevToolsConfigSerializationTest.java | {
"start": 598,
"end": 17264
} | class ____ {
static Path baseDir = Paths.get(System.getProperty("user.dir")).toAbsolutePath()
.resolve("src/test/resources/devtools-config");
static Path writeDir = Paths.get(System.getProperty("user.dir")).toAbsolutePath()
.resolve("target/test-serialization");
@Test
public void testReadWriteDefaultEmptyConfig() throws Exception {
Path output = writeDir.resolve("registry-default-only.yaml");
RegistriesConfig.Mutable config;
RegistriesConfig actual;
String contents;
config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.defaultConfig());
config.persist(output); // MutableRegistriesConfig
contents = Files.readString(output);
assertThat(contents).isEqualTo("---\n");
actual = RegistriesConfigMapperHelper.deserialize(output, RegistriesConfigImpl.class);
assertThat(actual).isNull();
actual = RegistriesConfigLocator.load(output);
assertThat(actual).isEqualTo(config.build());
// Emit debug parameter, but no registries
config = RegistriesConfig.builder()
.setDebug(true)
.setRegistry(RegistryConfig.defaultConfig());
config.persist(output); // MutableRegistriesConfig
contents = Files.readString(output);
assertThat(contents).isEqualTo("---\ndebug: true\n");
actual = RegistriesConfig.fromFile(output);
assertThat(actual).isEqualTo(config.build());
actual = RegistriesConfigLocator.load(output);
assertThat(actual).isEqualTo(config.build());
}
@Test
public void testIdOnly() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry("registry.quarkus.io")
.setRegistry("registry.other.org")
.build();
final String configName = "registry-id-only.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testGlobalDebugEnabled() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setDebug(true)
.setRegistry("registry.quarkus.io")
.build();
final String configName = "registry-id-only-debug.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryUpdatePolicy() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setUpdatePolicy("always")
.build())
.build();
final String configName = "registry-update-policy.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryDisabled() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setEnabled(false)
.build())
.build();
// no registries are enabled, the default registry should be added by default
final String configName = "registry-disabled.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryDescriptor() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setDescriptor(RegistryDescriptorConfig.builder()
.setArtifact(ArtifactCoords.fromString("org.acme:acme-quarkus-registry-descriptor::json:2.0"))
.build())
.build())
.build();
final String configName = "registry-descriptor.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryPlatformsArtifact() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setPlatforms(RegistryPlatformsConfig.builder()
.setArtifact(ArtifactCoords.fromString("org.acme:acme-quarkus-platforms::json:2.0"))
.build())
.build())
.build();
final String configName = "registry-platforms-artifact.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryPlatformsDisabled() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setPlatforms(RegistryPlatformsConfig.builder()
.setDisabled(true)
.build())
.build())
.build();
final String configName = "registry-platforms-disabled.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryPlatformsExtensionCatalogIncluded() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setPlatforms(RegistryPlatformsConfig.builder()
.setArtifact(ArtifactCoords.fromString("org.acme:acme-quarkus-platforms::json:2.0"))
.setExtensionCatalogsIncluded(true)
.build())
.build())
.build();
final String configName = "registry-platforms-extension-catalog-included.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryNonPlatformExtensionsArtifact() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setNonPlatformExtensions(RegistryNonPlatformExtensionsConfig.builder()
.setArtifact(
ArtifactCoords.fromString("org.acme:acme-quarkus-non-platform-extensions::json:2.0"))
.build())
.build())
.build();
final String configName = "registry-non-platform-extensions-artifact.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryNonPlatformExtensionsDisabled() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setNonPlatformExtensions(RegistryNonPlatformExtensionsConfig.builder()
.setDisabled(true)
.build())
.build())
.build();
final String configName = "registry-non-platform-extensions-disabled.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryMavenRepoUrl() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setMaven(RegistryMavenConfig.builder()
.setRepository(RegistryMavenRepoConfig.builder()
.setUrl("https://repo.acme.org/maven")
.build())
.build())
.build())
.build();
final String configName = "registry-maven-repo-url.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryMavenRepoUrlAndId() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setMaven(RegistryMavenConfig.builder()
.setRepository(RegistryMavenRepoConfig.builder()
.setUrl("https://repo.acme.org/maven")
.setId("acme-repo")
.build())
.build())
.build())
.build();
final String configName = "registry-maven-repo-url-id.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryRecognizedQuarkusVersions() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setQuarkusVersions(RegistryQuarkusVersionsConfig.builder()
.setRecognizedVersionsExpression("*-acme-*")
.build())
.build())
.build();
final String configName = "registry-recognized-quarkus-versions.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryQuarkusVersionsExclusiveProvider() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setQuarkusVersions(RegistryQuarkusVersionsConfig.builder()
.setRecognizedVersionsExpression("*-acme-*")
.setExclusiveProvider(true)
.build())
.build())
.build();
final String configName = "registry-quarkus-versions-exclusive-provider.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryAnySimpleProperty() throws Exception {
final RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setExtra("client-factory-artifact", "org.acme:acme-registry-client-factory::jar:2.0")
.build())
.build();
final String configName = "registry-any-simple-property.yaml";
assertDeserializedMatches(configName, config);
assertSerializedMatches(config, configName);
}
@Test
public void testRegistryAnyCustomObject() throws Exception {
RegistriesConfig config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setExtra("custom", new Custom("value"))
.build())
.build();
final String configName = "registry-any-custom-object.yaml";
assertSerializedMatches(config, configName);
config = RegistriesConfig.builder()
.setRegistry(RegistryConfig.builder()
.setId("registry.acme.org")
.setExtra("custom", Collections.singletonMap("prop", "value"))
.build())
.build();
assertDeserializedMatches(configName, config);
}
@Test
void testRegistryClientJsonConfig() throws IOException {
String configName = "registry-client-config.json";
Path expectedFile = baseDir.resolve(configName);
Path actualFile = writeDir.resolve(configName);
// Don't build any of these bits... let persist do it..
RegistryConfig expected = RegistryConfig.builder()
.setId("registry.acme.org")
.setDescriptor(RegistryDescriptorConfig.builder()
.setArtifact(
ArtifactCoords
.fromString("registry.quarkus.test:quarkus-registry-descriptor::json:1.0-SNAPSHOT")))
.setPlatforms(RegistryPlatformsConfig.builder()
.setArtifact(ArtifactCoords.fromString("registry.quarkus.test:quarkus-platforms::json:1.0-SNAPSHOT"))
.setExtensionCatalogsIncluded(true))
.setNonPlatformExtensions(RegistryNonPlatformExtensionsConfig.builder()
.setDisabled(true)
.setArtifact(ArtifactCoords
.fromString("registry.quarkus.test:quarkus-non-platform-extensions::json:1.0-SNAPSHOT")));
expected.persist(actualFile);
String expectedContents = Files.readString(expectedFile);
String actualContents = Files.readString(actualFile);
assertThat(actualContents).isEqualTo(expectedContents);
}
@Test
void testReadJsonRegistryDescriptor() throws IOException {
String configName = "registry-descriptor-1.0-SNAPSHOT.json";
Path expectedFile = baseDir.resolve(configName);
Path actualFile = writeDir.resolve(configName);
RegistryConfig expected = RegistryConfig.builder()
.setId("registry.foo.org")
.setDescriptor(RegistryDescriptorConfig.builder()
.setArtifact(
ArtifactCoords
.fromString("org.foo.registry:quarkus-registry-descriptor::json:1.0-SNAPSHOT")))
.setPlatforms(RegistryPlatformsConfig.builder()
.setArtifact(
ArtifactCoords
.fromString("org.foo.registry:quarkus-registry-platforms::json:1.0-SNAPSHOT")))
.build();
expected.persist(actualFile);
String expectedContents = Files.readString(expectedFile);
String actualContents = Files.readString(actualFile);
assertThat(actualContents).isEqualTo(expectedContents);
RegistryConfig.Mutable actual = RegistryConfig.mutableFromFile(actualFile);
actual.setId("registry.foo.org");
assertThat(actual.build()).isEqualTo(expected);
}
private static void assertSerializedMatches(RegistriesConfig config, String configName) throws Exception {
final StringWriter buf = new StringWriter();
try (BufferedWriter writer = new BufferedWriter(buf)) {
RegistriesConfigMapperHelper.toYaml(config, writer);
}
final List<String> lines = new ArrayList<>();
try (BufferedReader reader = new BufferedReader(new StringReader(buf.getBuffer().toString()))) {
String line = reader.readLine();
while (line != null) {
lines.add(line);
line = reader.readLine();
}
}
List<String> expected = Files.readAllLines(baseDir.resolve(configName));
assertThat(lines).isEqualTo(expected);
}
private static void assertDeserializedMatches(String configName, RegistriesConfig expected) throws Exception {
RegistriesConfig actual = RegistriesConfig.fromFile(baseDir.resolve(configName));
assertThat(actual).isEqualTo(expected);
}
public static | DevToolsConfigSerializationTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/collections/InstanceIdentityMap.java | {
"start": 10318,
"end": 10548
} | class ____ extends AbstractCollection<V> {
@Override
public @NonNull Iterator<V> iterator() {
return new ValueIterator();
}
@Override
public int size() {
return InstanceIdentityMap.this.size();
}
}
private | Values |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/partitioning/Partitioning.java | {
"start": 1423,
"end": 1554
} | interface ____ {
/**
* Returns the number of partitions that the data is split across.
*/
int numPartitions();
}
| Partitioning |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IdentifierNameTest.java | {
"start": 11319,
"end": 11521
} | interface ____ {
// BUG: Diagnostic contains:
void foo(int a_b);
}
""")
.addSourceLines(
"Test.java",
"""
| Base |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-osgi/src/test/java/org/assertj/tests/core/osgi/soft/CustomSoftAssertionTest.java | {
"start": 3158,
"end": 3411
} | class ____ extends AbstractSoftAssertions {
@SuppressWarnings("unchecked")
public <K, V> TestProxyableMapAssert<K, V> assertThat(Map<K, V> actual) {
return proxy(TestProxyableMapAssert.class, Map.class, actual);
}
}
}
| TestSoftAssertions |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/SpringVersion.java | {
"start": 1210,
"end": 1601
} | class ____ {
private SpringVersion() {
}
/**
* Return the full version string of the present Spring codebase,
* or {@code null} if it cannot be determined.
* @see Package#getImplementationVersion()
*/
public static @Nullable String getVersion() {
Package pkg = SpringVersion.class.getPackage();
return (pkg != null ? pkg.getImplementationVersion() : null);
}
}
| SpringVersion |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/engine/EngineSearcherTotalHitsMatcher.java | {
"start": 746,
"end": 2053
} | class ____ extends TypeSafeMatcher<Engine.Searcher> {
private final Query query;
private final int totalHits;
private int count;
public EngineSearcherTotalHitsMatcher(Query query, int totalHits) {
this.query = query;
this.totalHits = totalHits;
}
@Override
public boolean matchesSafely(Engine.Searcher searcher) {
try {
this.count = searcher.count(query);
return count == totalHits;
} catch (IOException e) {
return false;
}
}
@Override
protected void describeMismatchSafely(Engine.Searcher item, Description mismatchDescription) {
mismatchDescription.appendText("was ").appendValue(count);
}
@Override
public void describeTo(Description description) {
description.appendText("total hits of size ").appendValue(totalHits).appendText(" with query ").appendValue(query);
}
public static Matcher<Engine.Searcher> engineSearcherTotalHits(Query query, int totalHits) {
return new EngineSearcherTotalHitsMatcher(query, totalHits);
}
public static Matcher<Engine.Searcher> engineSearcherTotalHits(int totalHits) {
return new EngineSearcherTotalHitsMatcher(Queries.newMatchAllQuery(), totalHits);
}
}
| EngineSearcherTotalHitsMatcher |
java | apache__flink | flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/catalog/FunctionCatalogTest.java | {
"start": 32321,
"end": 32630
} | class ____ extends ScalarFunction {
public String eval() {
return null;
}
@Override
public boolean equals(Object o) {
return o != null && o.getClass() == this.getClass();
}
}
/** Invalid testing function. */
public static | TestFunction4 |
java | playframework__playframework | core/play/src/main/java/play/mvc/BodyParser.java | {
"start": 27939,
"end": 28520
} | class ____<A, B> implements BodyParser<A> {
private final play.api.mvc.BodyParser<B> delegate;
private final Function<B, A> transform;
public DelegatingBodyParser(play.api.mvc.BodyParser<B> delegate, Function<B, A> transform) {
this.delegate = delegate;
this.transform = transform;
}
@Override
public Accumulator<ByteString, F.Either<Result, A>> apply(Http.RequestHeader request) {
return BodyParsers.delegate(delegate, transform, request);
}
}
/** A body parser that completes the underlying one. */
abstract | DelegatingBodyParser |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/OffsetsForLeaderEpochUtils.java | {
"start": 2021,
"end": 6234
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(OffsetsForLeaderEpochUtils.class);
private OffsetsForLeaderEpochUtils() {}
static AbstractRequest.Builder<OffsetsForLeaderEpochRequest> prepareRequest(
Map<TopicPartition, SubscriptionState.FetchPosition> requestData) {
OffsetForLeaderTopicCollection topics = new OffsetForLeaderTopicCollection(requestData.size());
requestData.forEach((topicPartition, fetchPosition) ->
fetchPosition.offsetEpoch.ifPresent(fetchEpoch -> {
OffsetForLeaderTopic topic = topics.find(topicPartition.topic());
if (topic == null) {
topic = new OffsetForLeaderTopic().setTopic(topicPartition.topic());
topics.add(topic);
}
topic.partitions().add(new OffsetForLeaderPartition()
.setPartition(topicPartition.partition())
.setLeaderEpoch(fetchEpoch)
.setCurrentLeaderEpoch(fetchPosition.currentLeader.epoch
.orElse(RecordBatch.NO_PARTITION_LEADER_EPOCH))
);
})
);
return OffsetsForLeaderEpochRequest.Builder.forConsumer(topics);
}
public static OffsetForEpochResult handleResponse(
Map<TopicPartition, SubscriptionState.FetchPosition> requestData,
OffsetsForLeaderEpochResponse response) {
Set<TopicPartition> partitionsToRetry = new HashSet<>(requestData.keySet());
Set<String> unauthorizedTopics = new HashSet<>();
Map<TopicPartition, EpochEndOffset> endOffsets = new HashMap<>();
for (OffsetForLeaderTopicResult topic : response.data().topics()) {
for (EpochEndOffset partition : topic.partitions()) {
TopicPartition topicPartition = new TopicPartition(topic.topic(), partition.partition());
if (!requestData.containsKey(topicPartition)) {
LOG.warn("Received unrequested topic or partition {} from response, ignoring.", topicPartition);
continue;
}
Errors error = Errors.forCode(partition.errorCode());
switch (error) {
case NONE:
LOG.debug("Handling OffsetsForLeaderEpoch response for {}. Got offset {} for epoch {}.",
topicPartition, partition.endOffset(), partition.leaderEpoch());
endOffsets.put(topicPartition, partition);
partitionsToRetry.remove(topicPartition);
break;
case NOT_LEADER_OR_FOLLOWER:
case REPLICA_NOT_AVAILABLE:
case KAFKA_STORAGE_ERROR:
case OFFSET_NOT_AVAILABLE:
case LEADER_NOT_AVAILABLE:
case FENCED_LEADER_EPOCH:
case UNKNOWN_LEADER_EPOCH:
LOG.debug("Attempt to fetch offsets for partition {} failed due to {}, retrying.",
topicPartition, error);
break;
case UNKNOWN_TOPIC_OR_PARTITION:
LOG.warn("Received unknown topic or partition error in OffsetsForLeaderEpoch request for partition {}.",
topicPartition);
break;
case TOPIC_AUTHORIZATION_FAILED:
unauthorizedTopics.add(topicPartition.topic());
partitionsToRetry.remove(topicPartition);
break;
default:
LOG.warn("Attempt to fetch offsets for partition {} failed due to: {}, retrying.",
topicPartition, error.message());
}
}
}
if (!unauthorizedTopics.isEmpty())
throw new TopicAuthorizationException(unauthorizedTopics);
return new OffsetForEpochResult(endOffsets, partitionsToRetry);
}
static | OffsetsForLeaderEpochUtils |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/OracleSqlArrayTest.java | {
"start": 4228,
"end": 4477
} | class ____ {
@Id @GeneratedValue Long id;
@Array(length = 33)
@Column(length = 25)
@JdbcTypeCode(SqlTypes.ARRAY)
BigInteger[] bigIntegers;
@Array(length = 2)
@JdbcTypeCode(SqlTypes.ARRAY)
ActivityKind[] activityKinds;
}
}
| Container |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/codec/json/CustomizedJacksonJsonDecoderTests.java | {
"start": 3147,
"end": 3429
} | class ____ extends JacksonJsonDecoder {
@Override
protected ObjectReader customizeReader(
ObjectReader reader, ResolvableType elementType, Map<String, Object> hints) {
return reader.with(EnumFeature.READ_ENUMS_USING_TO_STRING);
}
}
}
| JacksonJsonDecoderWithCustomization |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsSelectTest19.java | {
"start": 909,
"end": 2688
} | class ____ extends TestCase {
public void test_select() throws Exception {
byte[] bytes = new byte[8];
putLong(bytes, 0, 1095288847322L);
// 1095288847322
String sql = "select wm_concat(distinct ',', name) from mytable";
assertEquals("SELECT wm_concat(DISTINCT ',', name)\n" +
"FROM mytable", SQLUtils.formatOdps(sql));
assertEquals("select wm_concat(DISTINCT ',', name)\n" +
"from mytable", SQLUtils.formatOdps(sql, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.ODPS);
SQLStatement stmt = statementList.get(0);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.ODPS);
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getColumns().contains(new Column("abc", "name")));
}
static void putLong(byte[] b, int off, long val) {
b[off + 7] = (byte) (val);
b[off + 6] = (byte) (val >>> 8);
b[off + 5] = (byte) (val >>> 16);
b[off + 4] = (byte) (val >>> 24);
b[off + 3] = (byte) (val >>> 32);
b[off + 2] = (byte) (val >>> 40);
b[off + 1] = (byte) (val >>> 48);
b[off] = (byte) (val >>> 56);
}
}
| OdpsSelectTest19 |
java | grpc__grpc-java | stub/src/main/java/io/grpc/stub/ServerCalls.java | {
"start": 3428,
"end": 3678
} | interface ____<ReqT, RespT> extends StreamingRequestMethod<ReqT, RespT> {
@Override StreamObserver<ReqT> invoke(StreamObserver<RespT> responseObserver);
}
/**
* Adaptor to a bidirectional streaming method.
*/
public | ClientStreamingMethod |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/multipleresultsetswithassociation/MultipleResultSetTest.java | {
"start": 1329,
"end": 4349
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/multipleresultsetswithassociation/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
// populate in-memory database
// Could not get the table creation, procedure creation, and data population to work from the same script.
// Once it was in three scripts, all seemed well.
try (SqlSession session = sqlSessionFactory.openSession(); Connection conn = session.getConnection()) {
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/multipleresultsetswithassociation/CreateDB1.sql")) {
runReaderScript(conn, reader);
}
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/multipleresultsetswithassociation/CreateDB2.sql")) {
runReaderScript(conn, reader);
}
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/multipleresultsetswithassociation/CreateDB3.sql")) {
runReaderScript(conn, reader);
}
}
}
private static void runReaderScript(Connection conn, Reader reader) {
ScriptRunner runner = new ScriptRunner(conn);
runner.setLogWriter(null);
runner.setSendFullScript(true);
runner.setAutoCommit(true);
runner.setStopOnError(false);
runner.runScript(reader);
}
@Test
void shouldGetOrderDetailsEachHavingAnOrderHeader() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<OrderDetail> orderDetails = mapper.getOrderDetailsWithHeaders();
// There are six order detail records in the database
// As long as the data does not change this should be successful
Assertions.assertEquals(6, orderDetails.size());
// Each order detail should have a corresponding OrderHeader
// Only 2 of 6 orderDetails have orderHeaders
for (OrderDetail orderDetail : orderDetails) {
Assertions.assertNotNull(orderDetail.getOrderHeader());
}
}
}
@Test
void shouldGetOrderDetailsEachHavingAnOrderHeaderAnnotationBased() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<OrderDetail> orderDetails = mapper.getOrderDetailsWithHeadersAnnotationBased();
// There are six order detail records in the database
// As long as the data does not change this should be successful
Assertions.assertEquals(6, orderDetails.size());
// Each order detail should have a corresponding OrderHeader
// Only 2 of 6 orderDetails have orderHeaders
for (OrderDetail orderDetail : orderDetails) {
Assertions.assertNotNull(orderDetail.getOrderHeader());
}
}
}
}
| MultipleResultSetTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/jdbc/dialect/spi/DialectResolutionInfo.java | {
"start": 666,
"end": 3095
} | interface ____ extends DatabaseVersion {
/**
* Obtain access to the database name, as returned from {@link java.sql.DatabaseMetaData#getDatabaseProductName()}
* for the target database
*
* @return The database name
*
* @see java.sql.DatabaseMetaData#getDatabaseProductName()
*/
String getDatabaseName();
/**
* Obtain access to the database version, as returned from {@link java.sql.DatabaseMetaData#getDatabaseProductVersion()}
* for the target database
*
* @return The database version
*
* @see java.sql.DatabaseMetaData#getDatabaseProductVersion()
*/
String getDatabaseVersion();
/**
* Obtain access to the name of the JDBC driver, as returned from {@link java.sql.DatabaseMetaData#getDriverName()}
* for the target database
*
* @return The JDBC driver name
*
* @see java.sql.DatabaseMetaData#getDriverName()
*/
String getDriverName();
/**
* Obtain access to the major version of the JDBC driver, as returned from
* {@link java.sql.DatabaseMetaData#getDriverMajorVersion()} ()} for the target database.
*
* @return The JDBC driver major version, or {@value #NO_VERSION} to indicate "no version information"
*
* @see java.sql.DatabaseMetaData#getDriverMajorVersion()
*/
int getDriverMajorVersion();
/**
* Obtain access to the minor version of the JDBC driver, as returned from
* {@link java.sql.DatabaseMetaData#getDriverMinorVersion()} for the target database.
*
* @return The JDBC driver minor version, or {@value #NO_VERSION} to indicate "no version information"
*
* @see java.sql.DatabaseMetaData#getDriverMinorVersion()
*/
int getDriverMinorVersion();
/**
* Obtain access to the SQL keywords of the JDBC driver, as returned from
* {@link java.sql.DatabaseMetaData#getSQLKeywords()} for the target database.
*
* @return The JDBC driver keywords
*
* @see java.sql.DatabaseMetaData#getSQLKeywords()
*/
String getSQLKeywords();
/**
* Obtain access to the {@link DatabaseMetaData} if it is available.
*
* @return The {@link DatabaseMetaData} or <code>null</code> if not available
*/
default DatabaseMetaData getDatabaseMetadata() {
return null;
}
/**
* Obtain access to the complete {@link ConfigurationService#getSettings() map of config settings}.
*
* @return The immutable map of config settings.
*/
default Map<String, Object> getConfigurationValues() {
return emptyMap();
}
}
| DialectResolutionInfo |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/SqmPathSource.java | {
"start": 866,
"end": 4346
} | interface ____<J>
extends SqmExpressible<J>, Bindable<J>, SqmExpressibleAccessor<J>, PathSource<J> {
/**
* The type of {@linkplain SqmPath path} this source creates.
*/
@Override
SqmDomainType<J> getPathType();
/**
* Find a {@link SqmPathSource} by name relative to this source.
*
* @param name the name of the path source to find
* @return null if the subPathSource is not found
* @throws IllegalStateException to indicate that this source cannot be de-referenced
*/
@Nullable SqmPathSource<?> findSubPathSource(String name);
/**
* Find a {@link SqmPathSource} by name relative to this source. If {@code includeSubtypes} is set
* to {@code true} and this path source is polymorphic, also try finding subtype attributes.
*
* @param name the name of the path source to find
* @param includeSubtypes flag indicating whether to consider subtype attributes
* @return null if the subPathSource is not found
* @throws IllegalStateException to indicate that this source cannot be de-referenced
*/
default @Nullable SqmPathSource<?> findSubPathSource(String name, boolean includeSubtypes) {
return findSubPathSource( name );
}
/**
* Find a {@link SqmPathSource} by name relative to this source.
*
* @param name the name of the path source to find
* @throws IllegalStateException to indicate that this source cannot be de-referenced
* @throws IllegalArgumentException if the subPathSource is not found
*/
default SqmPathSource<?> getSubPathSource(String name) {
final SqmPathSource<?> subPathSource = findSubPathSource( name );
if ( subPathSource == null ) {
throw new PathElementException(
String.format(
Locale.ROOT,
"Could not resolve attribute '%s' of '%s'",
name,
getExpressible().getTypeName()
)
);
}
return subPathSource;
}
/**
* Find a {@link SqmPathSource} by name relative to this source. If {@code subtypes} is set
* to {@code true} and this path source is polymorphic, also try finding subtype attributes.
*
* @param name the name of the path source to find
* @param subtypes flag indicating whether to consider subtype attributes
* @throws IllegalStateException to indicate that this source cannot be de-referenced
* @throws IllegalArgumentException if the subPathSource is not found
*/
default SqmPathSource<?> getSubPathSource(String name, boolean subtypes) {
final SqmPathSource<?> subPathSource = findSubPathSource( name, subtypes );
if ( subPathSource == null ) {
throw new PathElementException(
String.format(
Locale.ROOT,
"Could not resolve attribute '%s' of '%s'",
name,
getExpressible().getTypeName()
)
);
}
return subPathSource;
}
/**
* Returns the intermediate {@link SqmPathSource} for a path source
* previously acquired via {@link #findSubPathSource(String)}.
*/
default @Nullable SqmPathSource<?> getIntermediatePathSource(SqmPathSource<?> pathSource) {
return null;
}
/**
* Create an SQM path for this source relative to the given left hand side
*/
SqmPath<J> createSqmPath(SqmPath<?> lhs, @Nullable SqmPathSource<?> intermediatePathSource);
@Override
default @NonNull SqmBindableType<J> getExpressible() {
return getPathType();
}
@Override
default @Nullable SqmDomainType<J> getSqmType() {
return getPathType();
}
/**
* Indicates if this path source is generically typed
*/
default boolean isGeneric() {
return false;
}
}
| SqmPathSource |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/AbstractConfiguredSecurityBuilder.java | {
"start": 8106,
"end": 8249
} | class ____ or an empty
* List if not found. Note that object hierarchies are not considered.
* @param clazz the {@link SecurityConfigurer} | name |
java | resilience4j__resilience4j | resilience4j-retry/src/main/java/io/github/resilience4j/retry/Retry.java | {
"start": 19099,
"end": 20302
} | interface ____ {
/**
* Returns the number of successful calls without a retry attempt.
*
* @return the number of successful calls without a retry attempt
*/
long getNumberOfSuccessfulCallsWithoutRetryAttempt();
/**
* Returns the number of failed calls without a retry attempt.
*
* @return the number of failed calls without a retry attempt
*/
long getNumberOfFailedCallsWithoutRetryAttempt();
/**
* Returns the number of successful calls after a retry attempt.
*
* @return the number of successful calls after a retry attempt
*/
long getNumberOfSuccessfulCallsWithRetryAttempt();
/**
* Returns the number of failed calls after all retry attempts.
*
* @return the number of failed calls after all retry attempts
*/
long getNumberOfFailedCallsWithRetryAttempt();
/**
* Returns the number of total calls after all retry attempts.
*
* @return the number of total calls after all retry attempts
*/
long getNumberOfTotalCalls();
}
| Metrics |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/DestroyMethodInferenceTests.java | {
"start": 6712,
"end": 6834
} | class ____ {
boolean closed = false;
public void shutdown() {
closed = true;
}
}
static | WithLocalShutdownMethod |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeImpl.java | {
"start": 50578,
"end": 54666
} | class ____ implements
MultipleArcTransition<RMNodeImpl, RMNodeEvent, NodeState> {
@Override
public NodeState transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeStatusEvent statusEvent = (RMNodeStatusEvent) event;
rmNode.setOpportunisticContainersStatus(
statusEvent.getOpportunisticContainersStatus());
NodeHealthStatus remoteNodeHealthStatus = updateRMNodeFromStatusEvents(
rmNode, statusEvent);
rmNode.updateClusterUtilizationMetrics();
NodeState initialState = rmNode.getState();
boolean isNodeDecommissioning =
initialState.equals(NodeState.DECOMMISSIONING);
if (isNodeDecommissioning) {
List<ApplicationId> keepAliveApps = statusEvent.getKeepAliveAppIds();
// hasScheduledAMContainers solves the following race condition -
// 1. launch AM container on a node with 0 containers.
// 2. gracefully decommission this node.
// 3. Node heartbeats to RM. In StatusUpdateWhenHealthyTransition,
// rmNode.runningApplications will be empty as it is updated after
// call to RMNodeImpl.deactivateNode. This will cause the node to be
// deactivated even though container is running on it and hence kill
// all containers running on it.
// In order to avoid such race conditions the ground truth is retrieved
// from the scheduler before deactivating a DECOMMISSIONING node.
// Only AM containers are considered as AM container reattempts can
// cause application failures if max attempts is set to 1.
if (rmNode.runningApplications.isEmpty() &&
(keepAliveApps == null || keepAliveApps.isEmpty()) &&
!hasScheduledAMContainers(rmNode)) {
LOG.info("No containers running on " + rmNode.nodeId + ". "
+ "Attempting to deactivate decommissioning node.");
RMNodeImpl.deactivateNode(rmNode, NodeState.DECOMMISSIONED);
return NodeState.DECOMMISSIONED;
}
}
if (!remoteNodeHealthStatus.getIsNodeHealthy()) {
LOG.info("Node " + rmNode.nodeId +
" reported UNHEALTHY with details: " +
remoteNodeHealthStatus.getHealthReport());
// if a node in decommissioning receives an unhealthy report,
// it will stay in decommissioning.
if (isNodeDecommissioning) {
return NodeState.DECOMMISSIONING;
} else {
reportNodeUnusable(rmNode, NodeState.UNHEALTHY);
return NodeState.UNHEALTHY;
}
}
rmNode.handleContainerStatus(statusEvent.getContainers());
rmNode.handleReportedIncreasedContainers(
statusEvent.getNMReportedIncreasedContainers());
List<LogAggregationReport> logAggregationReportsForApps =
statusEvent.getLogAggregationReportsForApps();
if (logAggregationReportsForApps != null
&& !logAggregationReportsForApps.isEmpty()) {
rmNode.handleLogAggregationStatus(logAggregationReportsForApps);
}
if(rmNode.nextHeartBeat) {
rmNode.nextHeartBeat = false;
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeUpdateSchedulerEvent(rmNode));
}
// Update DTRenewer in secure mode to keep these apps alive. Today this is
// needed for log-aggregation to finish long after the apps are gone.
if (UserGroupInformation.isSecurityEnabled()) {
rmNode.context.getDelegationTokenRenewer().updateKeepAliveApplications(
statusEvent.getKeepAliveAppIds());
}
return initialState;
}
/**
* Checks if the scheduler has scheduled any AMs on the given node.
* @return true if node has any AM scheduled on it.
*/
private boolean hasScheduledAMContainers(RMNodeImpl rmNode) {
return rmNode.context.getScheduler()
.getSchedulerNode(rmNode.getNodeID())
.getCopiedListOfRunningContainers()
.stream().anyMatch(RMContainer::isAMContainer);
}
}
public static | StatusUpdateWhenHealthyTransition |
java | quarkusio__quarkus | integration-tests/jsonb/src/main/java/io/quarkus/it/jsonb/ModelWithSerializerDeserializerOnFieldResource.java | {
"start": 331,
"end": 1332
} | class ____ {
private final Jsonb jsonb;
public ModelWithSerializerDeserializerOnFieldResource(Jsonb jsonb) {
this.jsonb = jsonb;
}
@POST
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.APPLICATION_JSON)
public String post(String body) throws IOException {
ModelWithSerializerAndDeserializerOnField input = jsonb.fromJson(body,
ModelWithSerializerAndDeserializerOnField.class);
return input.getName() + "/" + input.getInner().getSomeValue();
}
@GET
@Path("/{name}/{someValue}")
@Produces(MediaType.APPLICATION_JSON)
public String get(@PathParam("name") String name, @PathParam("someValue") String someValue) throws IOException {
ModelWithSerializerAndDeserializerOnField input = new ModelWithSerializerAndDeserializerOnField(name,
new ModelWithSerializerAndDeserializerOnField.Inner(someValue));
return jsonb.toJson(input);
}
}
| ModelWithSerializerDeserializerOnFieldResource |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/globals/TemplateGlobalInvalidNameTest.java | {
"start": 452,
"end": 1763
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(Globals.class)
.addAsResource(new StringAsset(
"Hello {user.name}!"),
"templates/hello.txt"))
.assertException(t -> {
Throwable e = t;
IllegalArgumentException iae = null;
while (e != null) {
if (e instanceof IllegalArgumentException) {
iae = (IllegalArgumentException) e;
break;
}
e = e.getCause();
}
assertNotNull(iae);
assertTrue(
iae.getMessage().contains("Invalid global variable name found: -name!"),
iae.getMessage());
assertTrue(
iae.getMessage().contains(
"supplied by io.quarkus.qute.deployment.globals.TemplateGlobalInvalidNameTest$Globals.user"),
iae.getMessage());
});
@Test
public void test() {
fail();
}
public static | TemplateGlobalInvalidNameTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/abstract_/SoftAssertionsErrorsCollectedTest.java | {
"start": 1087,
"end": 1897
} | class ____ {
private final SoftAssertions softly = new SoftAssertions();
@Test
void return_empty_list_of_errors() {
// GIVEN
Object objectToTest = null;
// WHEN
softly.assertThat(objectToTest).isNull();
// THEN
assertThat(softly.errorsCollected()).isEmpty();
assertThat(softly.errorsCollected()).isEqualTo(softly.assertionErrorsCollected());
}
@Test
void returns_nonempty_list_of_errors() {
// GIVEN
Object objectToTest = null;
// WHEN
softly.assertThat(objectToTest).isNotNull(); // This should allow something to be collected
// THEN
assertThat(softly.errorsCollected()).hasAtLeastOneElementOfType(AssertionError.class)
.isEqualTo(softly.assertionErrorsCollected());
}
}
| SoftAssertionsErrorsCollectedTest |
java | apache__camel | components/camel-exec/src/test/java/org/apache/camel/component/exec/internal/ExecutableJavaProgram.java | {
"start": 960,
"end": 1157
} | class ____ be executed. The behavior of the program is controlled by the arguments that the
* {@link #main(String[])} receives. Valid arguments are the public static fields of the class.
*/
public | to |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java | {
"start": 2400,
"end": 20249
} | class ____ extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
if (nodeOrdinal % 2 == 0) {
return Settings.builder().put(nonIngestNode()).put(super.nodeSettings(nodeOrdinal, otherSettings)).build();
}
return super.nodeSettings(nodeOrdinal, otherSettings);
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(ExtendedIngestTestPlugin.class);
}
public void testSimulate() throws Exception {
putJsonPipeline(
"_id",
(builder, params) -> builder.field("description", "my_pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.endObject()
.endObject()
.endArray()
);
GetPipelineResponse getResponse = getPipelines("_id");
assertThat(getResponse.isFound(), is(true));
assertThat(getResponse.pipelines().size(), equalTo(1));
assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id"));
BytesReference bytes = BytesReference.bytes(
jsonBuilder().startObject()
.startArray("docs")
.startObject()
.field("_index", "index")
.field("_type", "type")
.field("_id", "id")
.startObject("_source")
.field("foo", "bar")
.field("fail", false)
.endObject()
.endObject()
.endArray()
.endObject()
);
SimulatePipelineResponse response;
if (randomBoolean()) {
response = clusterAdmin().prepareSimulatePipeline(bytes, XContentType.JSON).setId("_id").get();
} else {
SimulatePipelineRequest request = jsonSimulatePipelineRequest(bytes);
request.setId("_id");
response = clusterAdmin().simulatePipeline(request).get();
}
assertThat(response.isVerbose(), equalTo(false));
assertThat(response.getPipelineId(), equalTo("_id"));
assertThat(response.getResults().size(), equalTo(1));
assertThat(response.getResults().get(0), instanceOf(SimulateDocumentBaseResult.class));
SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) response.getResults().get(0);
Map<String, Object> source = new HashMap<>();
source.put("foo", "bar");
source.put("fail", false);
source.put("processed", true);
IngestDocument ingestDocument = new IngestDocument("index", "id", Versions.MATCH_ANY, null, null, source);
assertThat(simulateDocumentBaseResult.getIngestDocument().getSource(), equalTo(ingestDocument.getSource()));
assertThat(simulateDocumentBaseResult.getIngestDocument().getMetadata().getMap(), equalTo(ingestDocument.getMetadata().getMap()));
assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
// cleanup
deletePipeline("_id");
}
public void testBulkWithIngestFailures() throws Exception {
createIndex("index");
putJsonPipeline(
"_id",
(builder, params) -> builder.field("description", "my_pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.endObject()
.endObject()
.endArray()
);
int numRequests = scaledRandomIntBetween(32, 128);
BulkRequest bulkRequest = new BulkRequest();
for (int i = 0; i < numRequests; i++) {
IndexRequest indexRequest = new IndexRequest("index").id(Integer.toString(i)).setPipeline("_id");
indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field", "value", "fail", i % 2 == 0);
bulkRequest.add(indexRequest);
}
BulkResponse response = client().bulk(bulkRequest).actionGet();
assertThat(response.getItems().length, equalTo(bulkRequest.requests().size()));
for (int i = 0; i < bulkRequest.requests().size(); i++) {
BulkItemResponse itemResponse = response.getItems()[i];
if (i % 2 == 0) {
BulkItemResponse.Failure failure = itemResponse.getFailure();
ElasticsearchException compoundProcessorException = (ElasticsearchException) failure.getCause();
assertThat(compoundProcessorException.getRootCause().getMessage(), equalTo("test processor failed"));
} else {
IndexResponse indexResponse = itemResponse.getResponse();
assertThat(
"Expected a successful response but found failure [" + itemResponse.getFailure() + "].",
itemResponse.isFailed(),
is(false)
);
assertThat(indexResponse, notNullValue());
assertThat(indexResponse.getId(), equalTo(Integer.toString(i)));
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}
}
// cleanup
deletePipeline("_id");
}
public void testBulkWithUpsert() throws Exception {
createIndex("index");
putJsonPipeline(
"_id",
(builder, params) -> builder.field("description", "my_pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.endObject()
.endObject()
.endArray()
);
BulkRequest bulkRequest = new BulkRequest();
IndexRequest indexRequest = new IndexRequest("index").id("1").setPipeline("_id");
indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "val1");
bulkRequest.add(indexRequest);
UpdateRequest updateRequest = new UpdateRequest("index", "2");
updateRequest.doc("{}", Requests.INDEX_CONTENT_TYPE);
updateRequest.upsert("{\"field1\":\"upserted_val\"}", XContentType.JSON).upsertRequest().setPipeline("_id");
bulkRequest.add(updateRequest);
BulkResponse response = client().bulk(bulkRequest).actionGet();
assertThat(response.getItems().length, equalTo(bulkRequest.requests().size()));
Map<String, Object> inserted = client().prepareGet("index", "1").get().getSourceAsMap();
assertThat(inserted.get("field1"), equalTo("val1"));
assertThat(inserted.get("processed"), equalTo(true));
Map<String, Object> upserted = client().prepareGet("index", "2").get().getSourceAsMap();
assertThat(upserted.get("field1"), equalTo("upserted_val"));
assertThat(upserted.get("processed"), equalTo(true));
}
public void test() throws Exception {
putJsonPipeline(
"_id",
(builder, params) -> builder.field("description", "my_pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.endObject()
.endObject()
.endArray()
);
GetPipelineResponse getResponse = getPipelines("_id");
assertThat(getResponse.isFound(), is(true));
assertThat(getResponse.pipelines().size(), equalTo(1));
assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id"));
prepareIndex("test").setId("1").setPipeline("_id").setSource("field", "value", "fail", false).get();
Map<String, Object> doc = client().prepareGet("test", "1").get().getSourceAsMap();
assertThat(doc.get("field"), equalTo("value"));
assertThat(doc.get("processed"), equalTo(true));
client().prepareBulk().add(prepareIndex("test").setId("2").setSource("field", "value2", "fail", false).setPipeline("_id")).get();
doc = client().prepareGet("test", "2").get().getSourceAsMap();
assertThat(doc.get("field"), equalTo("value2"));
assertThat(doc.get("processed"), equalTo(true));
deletePipeline("_id");
getResponse = getPipelines("_id");
assertThat(getResponse.isFound(), is(false));
assertThat(getResponse.pipelines().size(), equalTo(0));
}
public void testPutWithPipelineFactoryError() throws Exception {
BytesReference source = BytesReference.bytes(
jsonBuilder().startObject()
.field("description", "my_pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.field("unused", ":sad_face:")
.endObject()
.endObject()
.endArray()
.endObject()
);
PutPipelineRequest putPipelineRequest = putJsonPipelineRequest("_id2", source);
Exception e = expectThrows(
ElasticsearchParseException.class,
client().execute(PutPipelineTransportAction.TYPE, putPipelineRequest)
);
assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
GetPipelineResponse response = getPipelines("_id2");
assertFalse(response.isFound());
}
public void testWithDedicatedMaster() throws Exception {
String masterOnlyNode = internalCluster().startMasterOnlyNode();
putJsonPipeline(
"_id",
(builder, params) -> builder.field("description", "my_pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.endObject()
.endObject()
.endArray()
);
BulkItemResponse item = client(masterOnlyNode).prepareBulk()
.add(prepareIndex("test").setSource("field", "value2", "drop", true).setPipeline("_id"))
.get()
.getItems()[0];
assertFalse(item.isFailed());
assertEquals("auto-generated", item.getResponse().getId());
}
public void testPipelineOriginHeader() throws Exception {
putJsonPipeline("1", (source, params) -> {
source.startArray("processors");
source.startObject();
{
source.startObject("pipeline");
source.field("name", "2");
source.endObject();
}
source.endObject();
return source.endArray();
});
putJsonPipeline("2", (source, params) -> {
source.startArray("processors");
source.startObject();
{
source.startObject("pipeline");
source.field("name", "3");
source.endObject();
}
source.endObject();
return source.endArray();
});
putJsonPipeline("3", (source, params) -> {
source.startArray("processors");
source.startObject();
{
source.startObject("fail");
source.endObject();
}
source.endObject();
return source.endArray();
});
Exception e = expectThrows(Exception.class, () -> {
IndexRequest indexRequest = new IndexRequest("test");
indexRequest.source("{}", XContentType.JSON);
indexRequest.setPipeline("1");
client().index(indexRequest).get();
});
IngestProcessorException ingestException = (IngestProcessorException) e.getCause();
assertThat(ingestException.getBodyHeader("processor_type"), equalTo(List.of("fail")));
assertThat(ingestException.getBodyHeader("pipeline_origin"), equalTo(List.of("3", "2", "1")));
}
public void testPipelineProcessorOnFailure() throws Exception {
putJsonPipeline("1", (source, params) -> {
{
source.startArray("processors");
source.startObject();
{
source.startObject("pipeline");
source.field("name", "2");
source.endObject();
}
source.endObject();
source.endArray();
}
{
source.startArray("on_failure");
source.startObject();
{
source.startObject("onfailure_processor");
source.endObject();
}
source.endObject();
source.endArray();
}
return source;
});
putJsonPipeline("2", (source, params) -> {
source.startArray("processors");
source.startObject();
{
source.startObject("pipeline");
source.field("name", "3");
source.endObject();
}
source.endObject();
return source.endArray();
});
putJsonPipeline("3", (source, params) -> {
source.startArray("processors");
source.startObject();
{
source.startObject("fail");
source.endObject();
}
source.endObject();
return source.endArray();
});
prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).setPipeline("1").get();
Map<String, Object> inserted = client().prepareGet("test", "1").get().getSourceAsMap();
assertThat(inserted.get("readme"), equalTo("pipeline with id [3] is a bad pipeline"));
}
public void testBulkRequestWithInvalidJsonAndPipeline() throws Exception {
// Test that when a document with invalid JSON is in a bulk request with a pipeline,
// the invalid document fails gracefully without causing the entire bulk request to fail.
// This tests the fix for https://github.com/elastic/elasticsearch/issues/138445
createIndex("test_index");
putJsonPipeline(
"test-pipeline",
(builder, params) -> builder.field("description", "test pipeline")
.startArray("processors")
.startObject()
.startObject("test")
.endObject()
.endObject()
.endArray()
);
// Create a bulk request with valid and invalid documents
BulkRequest bulkRequest = new BulkRequest();
// Valid document
IndexRequest validRequest = new IndexRequest("test_index").id("valid_doc");
validRequest.source("{\"valid\":\"test\"}", XContentType.JSON);
validRequest.setPipeline("test-pipeline");
bulkRequest.add(validRequest);
// Invalid document with missing closing brace
IndexRequest invalidRequest = new IndexRequest("test_index").id("invalid_doc");
invalidRequest.source("{\"invalid\":\"json\"", XContentType.JSON);
invalidRequest.setPipeline("test-pipeline");
bulkRequest.add(invalidRequest);
// Invalid document with duplicate fields
IndexRequest invalidRequest2 = new IndexRequest("test_index").id("invalid_doc2");
invalidRequest2.source("{\"invalid\":\"json\", \"invalid\":\"json\"}", XContentType.JSON);
invalidRequest2.setPipeline("test-pipeline");
bulkRequest.add(invalidRequest2);
// Another valid document
IndexRequest validRequest2 = new IndexRequest("test_index").id("valid_doc2");
validRequest2.source("{\"valid\":\"test2\"}", XContentType.JSON);
validRequest2.setPipeline("test-pipeline");
bulkRequest.add(validRequest2);
BulkResponse response = client().bulk(bulkRequest).actionGet();
// The bulk request should succeed
assertThat(response.hasFailures(), is(true));
assertThat(response.getItems().length, equalTo(4));
// First document should succeed
BulkItemResponse item0 = response.getItems()[0];
assertThat(item0.isFailed(), is(false));
assertThat(item0.getResponse().getId(), equalTo("valid_doc"));
assertThat(item0.getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED));
// Second document should fail
BulkItemResponse item1 = response.getItems()[1];
assertThat(item1.isFailed(), is(true));
assertThat(item1.getFailure().getStatus(), equalTo(org.elasticsearch.rest.RestStatus.BAD_REQUEST));
assertThat(item1.getFailure().getCause(), instanceOf(IllegalArgumentException.class));
// Third document should fail
BulkItemResponse item2 = response.getItems()[2];
assertThat(item2.isFailed(), is(true));
assertThat(item2.getFailure().getStatus(), equalTo(org.elasticsearch.rest.RestStatus.BAD_REQUEST));
assertThat(item2.getFailure().getCause(), instanceOf(IllegalArgumentException.class));
// Fourth document should succeed
BulkItemResponse item3 = response.getItems()[3];
assertThat(item3.isFailed(), is(false));
assertThat(item3.getResponse().getId(), equalTo("valid_doc2"));
assertThat(item3.getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED));
// Verify that the valid documents were indexed
assertThat(client().prepareGet("test_index", "valid_doc").get().isExists(), is(true));
assertThat(client().prepareGet("test_index", "valid_doc2").get().isExists(), is(true));
// Verify that the invalid documents were not indexed
assertThat(client().prepareGet("test_index", "invalid_doc").get().isExists(), is(false));
assertThat(client().prepareGet("test_index", "invalid_doc2").get().isExists(), is(false));
// cleanup
deletePipeline("test-pipeline");
}
public static | IngestClientIT |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/web/socket/MockServerContainerContextCustomizerFactory.java | {
"start": 1288,
"end": 1464
} | class ____ one of
* its enclosing classes is annotated or meta-annotated with
* {@link WebAppConfiguration @WebAppConfiguration}.
*
* @author Sam Brannen
* @since 4.3.1
*/
| or |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/RescaleCheckpointManuallyITCase.java | {
"start": 4024,
"end": 14700
} | class ____ extends TestLogger {
private static final int NUM_TASK_MANAGERS = 2;
private static final int SLOTS_PER_TASK_MANAGER = 2;
private static MiniClusterWithClientResource cluster;
@Rule public final SharedObjects sharedObjects = SharedObjects.create();
@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
@Parameterized.Parameter(0)
public String statebackendType;
@Parameterized.Parameter(1)
public boolean enableAsyncState;
@Parameterized.Parameters(name = "statebackend type ={0}, enableAsyncState={1}")
public static Collection<Object[]> parameter() {
return Arrays.asList(
new Object[][] {
{"forst", true}, {"forst", false}, {"rocksdb", true}, {"rocksdb", false}
});
}
@Before
public void setup() throws Exception {
Configuration config = new Configuration();
config.set(StateBackendOptions.STATE_BACKEND, statebackendType);
config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);
cluster =
new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(config)
.setNumberTaskManagers(NUM_TASK_MANAGERS)
.setNumberSlotsPerTaskManager(SLOTS_PER_TASK_MANAGER)
.build());
cluster.before();
}
@After
public void shutDownExistingCluster() {
if (cluster != null) {
cluster.after();
cluster = null;
}
}
@Test
public void testCheckpointRescalingInKeyedState() throws Exception {
testCheckpointRescalingKeyedState(false);
}
@Test
public void testCheckpointRescalingOutKeyedState() throws Exception {
testCheckpointRescalingKeyedState(true);
}
/**
* Tests that a job with purely keyed state can be restarted from a checkpoint with a different
* parallelism.
*/
public void testCheckpointRescalingKeyedState(boolean scaleOut) throws Exception {
final int numberKeys = 42;
final int numberElements = 1000;
final int numberElements2 = 500;
final int parallelism = scaleOut ? 3 : 4;
final int parallelism2 = scaleOut ? 4 : 3;
final int maxParallelism = 13;
MiniCluster miniCluster = cluster.getMiniCluster();
String checkpointPath =
runJobAndGetCheckpoint(
numberKeys, numberElements, parallelism, maxParallelism, miniCluster);
assertNotNull(checkpointPath);
restoreAndAssert(
parallelism2,
maxParallelism,
numberKeys,
numberElements2,
numberElements + numberElements2,
miniCluster,
checkpointPath);
}
private String runJobAndGetCheckpoint(
int numberKeys,
int numberElements,
int parallelism,
int maxParallelism,
MiniCluster miniCluster)
throws Exception {
JobID jobID = null;
try {
JobGraph jobGraph =
createJobGraphWithKeyedState(
parallelism,
maxParallelism,
numberKeys,
numberElements,
numberElements,
true,
100,
miniCluster);
jobID = jobGraph.getJobID();
miniCluster.submitJob(jobGraph).get();
miniCluster.requestJobResult(jobID).get();
return getLatestCompletedCheckpointPath(jobID, miniCluster)
.orElseThrow(
() ->
new IllegalStateException(
"Cannot get completed checkpoint, job failed before completing checkpoint"));
} finally {
if (jobID != null) {
CollectionSink.clearElementsSet(jobID);
}
}
}
private void restoreAndAssert(
int restoreParallelism,
int maxParallelism,
int numberKeys,
int numberElements,
int numberElementsExpect,
MiniCluster miniCluster,
String restorePath)
throws Exception {
JobID jobID = null;
try {
JobGraph scaledJobGraph =
createJobGraphWithKeyedState(
restoreParallelism,
maxParallelism,
numberKeys,
numberElements,
numberElementsExpect,
false,
100,
miniCluster);
jobID = scaledJobGraph.getJobID();
scaledJobGraph.setSavepointRestoreSettings(forPath(restorePath));
miniCluster.submitJob(scaledJobGraph).get();
miniCluster.requestJobResult(jobID).get();
Set<Tuple2<Integer, Integer>> actualResult = CollectionSink.getElementsSet(jobID);
Set<Tuple2<Integer, Integer>> expectedResult = new HashSet<>();
for (int key = 0; key < numberKeys; key++) {
int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
expectedResult.add(
Tuple2.of(
KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(
maxParallelism, restoreParallelism, keyGroupIndex),
key * numberElementsExpect));
}
assertEquals(expectedResult, actualResult);
} finally {
if (jobID != null) {
CollectionSink.clearElementsSet(jobID);
}
}
}
private JobGraph createJobGraphWithKeyedState(
int parallelism,
int maxParallelism,
int numberKeys,
int numberElements,
int numberElementsExpect,
boolean failAfterEmission,
int checkpointingInterval,
MiniCluster miniCluster)
throws IOException {
final Configuration configuration = new Configuration();
configuration.set(
CheckpointingOptions.CHECKPOINTS_DIRECTORY,
temporaryFolder.newFolder().toURI().toString());
configuration.set(
CheckpointingOptions.EXTERNALIZED_CHECKPOINT_RETENTION,
ExternalizedCheckpointRetention.RETAIN_ON_CANCELLATION);
// Force Aligned Checkpoints. This is necessary to prevent test environment randomization
// from overriding it. The elements may not all be sent to sink when unaligned checkpoints
// enabled(refer to FLINK-26882 for more details).
configuration.set(CheckpointingOptions.ENABLE_UNALIGNED, false);
configuration.set(RestartStrategyOptions.RESTART_STRATEGY, "none");
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
env.setParallelism(parallelism);
if (0 < maxParallelism) {
env.getConfig().setMaxParallelism(maxParallelism);
}
env.enableCheckpointing(checkpointingInterval);
env.getConfig().setUseSnapshotCompression(true);
SharedReference<JobID> jobID = sharedObjects.add(new JobID());
SharedReference<MiniCluster> miniClusterRef = sharedObjects.add(miniCluster);
KeyedStream<Integer, Integer> input =
env.addSource(
new NotifyingDefiniteKeySource(
numberKeys, numberElements, failAfterEmission) {
String lastCheckpointPath = null;
/**
* This wait method waits at least two checkpoint finished to
* make sure the latest checkpoint contains all the source data.
*/
@Override
public boolean waitCheckpointCompleted() throws Exception {
Optional<String> mostRecentCompletedCheckpointPath =
getLatestCompletedCheckpointPath(
jobID.get(), miniClusterRef.get());
if (mostRecentCompletedCheckpointPath.isPresent()) {
if (lastCheckpointPath == null) {
lastCheckpointPath =
mostRecentCompletedCheckpointPath.get();
} else if (!lastCheckpointPath.equals(
mostRecentCompletedCheckpointPath.get())) {
return true;
}
}
return false;
}
})
.keyBy(
new KeySelector<Integer, Integer>() {
private static final long serialVersionUID = 1L;
@Override
public Integer getKey(Integer value) {
return value;
}
});
if (enableAsyncState) {
input.enableAsyncState();
DataStream<Tuple2<Integer, Integer>> result =
input.flatMap(new AsyncSubtaskIndexFlatMapper(numberElementsExpect));
result.sinkTo(new CollectionSink<>());
} else {
DataStream<Tuple2<Integer, Integer>> result =
input.flatMap(new SubtaskIndexFlatMapper(numberElementsExpect));
result.sinkTo(new CollectionSink<>());
}
return env.getStreamGraph().getJobGraph(env.getClass().getClassLoader(), jobID.get());
}
private static | RescaleCheckpointManuallyITCase |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/junitrule/MutableStrictJUnitRuleTest.java | {
"start": 536,
"end": 1167
} | class ____ {
JUnitCore runner = new JUnitCore();
@Test
public void rule_can_be_changed_to_strict() throws Throwable {
// when
Result result = runner.run(LenientByDefault.class);
// then
JUnitResultAssert.assertThat(result).succeeds(1).fails(1, RuntimeException.class);
}
@Test
public void rule_can_be_changed_to_lenient() throws Throwable {
// when
Result result = runner.run(StrictByDefault.class);
// then
JUnitResultAssert.assertThat(result).succeeds(1).fails(1, RuntimeException.class);
}
public static | MutableStrictJUnitRuleTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/type/PrimitiveWrapperHelper.java | {
"start": 2704,
"end": 3065
} | class ____ implements PrimitiveWrapperDescriptor<Float> {
public static final FloatDescriptor INSTANCE = new FloatDescriptor();
private FloatDescriptor() {
}
@Override
public Class<Float> getPrimitiveClass() {
return float.class;
}
@Override
public Class<Float> getWrapperClass() {
return Float.class;
}
}
public static | FloatDescriptor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/ImmutableAttributeMappingsMap.java | {
"start": 399,
"end": 1897
} | class ____ implements AttributeMappingsMap {
//Intentionally avoid a Map<String,AttributeMapping> as
//that would imply having a typecheck for AttributeMapping
//on each read.
//Since the array doesn't require a type check on reads,
//we store the index into the array and retrieve from there
//instead - an extra indirection but avoids type check
//and related cache pollution issues.
private final HashMap<String,Integer> mapStore;
private final AttributeMapping[] orderedValues;
public ImmutableAttributeMappingsMap(final LinkedHashMap<String,AttributeMapping> sortedSource) {
final int size = sortedSource.size();
orderedValues = new AttributeMapping[size];
mapStore = new HashMap<>( size );
int idx = 0;
//populate both parallel representations
for ( var entry : sortedSource.entrySet() ) {
orderedValues[idx] = entry.getValue();
mapStore.put( entry.getKey(), Integer.valueOf( idx ) );
idx++;
}
}
@Override
public void forEachValue(final Consumer<? super AttributeMapping> action) {
for ( var attributeMapping : orderedValues ) {
action.accept( attributeMapping );
}
}
public int size() {
return orderedValues.length;
}
@Override
public AttributeMapping get(final String name) {
final Integer integer = mapStore.get( name );
return integer == null ? null : orderedValues[integer];
}
@Override
public Iterable<AttributeMapping> valueIterator() {
return new AttributeMappingIterable();
}
private final | ImmutableAttributeMappingsMap |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/RequestMappingHandlerMappingTests.java | {
"start": 23967,
"end": 24022
} | interface ____ {
}
@Controller
static | ExtraHttpExchange |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 17984,
"end": 18344
} | class ____ not abstract and does not implement the unrecognized methods.
@Test
public void testAbstractVoid() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"import com.google.auto.value.AutoValue;",
"@AutoValue",
"public abstract | is |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/observers/async/AsyncObserverTest.java | {
"start": 1015,
"end": 2246
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(StringProducer.class, StringObserver.class,
ThreadNameProvider.class);
@Test
public void testAsyncObservers() throws InterruptedException, ExecutionException, TimeoutException {
ArcContainer container = Arc.container();
StringProducer producer = container.instance(StringProducer.class).get();
StringObserver observer = container.instance(StringObserver.class).get();
String currentThread = Thread.currentThread().getName();
producer.produce("ping");
List<String> events = observer.getEvents();
assertEquals(1, events.size());
assertTrue(events.get(0).startsWith("sync::ping"));
assertTrue(events.get(0).endsWith(currentThread));
events.clear();
CompletionStage<String> completionStage = producer.produceAsync("pong");
assertEquals("pong", completionStage.toCompletableFuture().get(10, TimeUnit.SECONDS));
assertEquals(1, events.size());
assertTrue(events.get(0).startsWith("async::pong"));
assertFalse(events.get(0).endsWith(currentThread));
}
@Singleton
static | AsyncObserverTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/AbfsApacheHttpExpect100Exception.java | {
"start": 1180,
"end": 1382
} | class ____ extends HttpResponseException {
public AbfsApacheHttpExpect100Exception(final HttpResponse httpResponse) {
super(EXPECT_100_JDK_ERROR, httpResponse);
}
}
| AbfsApacheHttpExpect100Exception |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/RecordWriter.java | {
"start": 1322,
"end": 1767
} | interface ____<K, V> {
/**
* Writes a key/value pair.
*
* @param key the key to write.
* @param value the value to write.
* @throws IOException
*/
void write(K key, V value) throws IOException;
/**
* Close this <code>RecordWriter</code> to future operations.
*
* @param reporter facility to report progress.
* @throws IOException
*/
void close(Reporter reporter) throws IOException;
}
| RecordWriter |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/namedparam/NamedParameterJdbcOperations.java | {
"start": 17488,
"end": 30181
} | interface ____ appropriate
* when you don't have a domain model. Otherwise, consider using
* one of the queryForObject() methods.
* <p>The query is expected to be a single row query; the result row will be
* mapped to a Map (one entry for each column, using the column name as the key).
* @param sql the SQL query to execute
* @param paramMap map of parameters to bind to the query
* (leaving it to the PreparedStatement to guess the corresponding SQL type)
* @return the result Map (one entry for each column, using the column name as the key)
* @throws org.springframework.dao.IncorrectResultSizeDataAccessException
* if the query does not return exactly one row
* @throws DataAccessException if the query fails
* @see org.springframework.jdbc.core.JdbcTemplate#queryForMap(String)
* @see org.springframework.jdbc.core.ColumnMapRowMapper
*/
Map<String, @Nullable Object> queryForMap(String sql, Map<String, ?> paramMap) throws DataAccessException;
/**
* Query given SQL to create a prepared statement from SQL and a
* list of arguments to bind to the query, expecting a result list.
* <p>The results will be mapped to a List (one entry for each row) of
* result objects, each of them matching the specified element type.
* @param sql the SQL query to execute
* @param paramSource container of arguments to bind to the query
* @param elementType the required type of element in the result list
* (for example, {@code Integer.class})
* @return a List of objects that match the specified element type
* @throws DataAccessException if the query fails
* @see org.springframework.jdbc.core.JdbcTemplate#queryForList(String, Class)
* @see org.springframework.jdbc.core.SingleColumnRowMapper
*/
<T> List<@Nullable T> queryForList(String sql, SqlParameterSource paramSource, Class<T> elementType)
throws DataAccessException;
/**
* Query given SQL to create a prepared statement from SQL and a
* list of arguments to bind to the query, expecting a result list.
* <p>The results will be mapped to a List (one entry for each row) of
* result objects, each of them matching the specified element type.
* @param sql the SQL query to execute
* @param paramMap map of parameters to bind to the query
* (leaving it to the PreparedStatement to guess the corresponding SQL type)
* @param elementType the required type of element in the result list
* (for example, {@code Integer.class})
* @return a List of objects that match the specified element type
* @throws DataAccessException if the query fails
* @see org.springframework.jdbc.core.JdbcTemplate#queryForList(String, Class)
* @see org.springframework.jdbc.core.SingleColumnRowMapper
*/
<T> List<@Nullable T> queryForList(String sql, Map<String, ?> paramMap, Class<T> elementType)
throws DataAccessException;
/**
* Query given SQL to create a prepared statement from SQL and a
* list of arguments to bind to the query, expecting a result list.
* <p>The results will be mapped to a List (one entry for each row) of
* Maps (one entry for each column, using the column name as the key).
* Each element in the list will be of the form returned by this interface's
* {@code queryForMap} methods.
* @param sql the SQL query to execute
* @param paramSource container of arguments to bind to the query
* @return a List that contains a Map per row
* @throws DataAccessException if the query fails
* @see org.springframework.jdbc.core.JdbcTemplate#queryForList(String)
*/
List<Map<String, @Nullable Object>> queryForList(String sql, SqlParameterSource paramSource) throws DataAccessException;
/**
* Query given SQL to create a prepared statement from SQL and a
* list of arguments to bind to the query, expecting a result list.
* <p>The results will be mapped to a List (one entry for each row) of
* Maps (one entry for each column, using the column name as the key).
* Each element in the list will be of the form returned by this interface's
* {@code queryForMap} methods.
* @param sql the SQL query to execute
* @param paramMap map of parameters to bind to the query
* (leaving it to the PreparedStatement to guess the corresponding SQL type)
* @return a List that contains a Map per row
* @throws DataAccessException if the query fails
* @see org.springframework.jdbc.core.JdbcTemplate#queryForList(String)
*/
List<Map<String, @Nullable Object>> queryForList(String sql, Map<String, ?> paramMap) throws DataAccessException;
/**
* Query given SQL to create a prepared statement from SQL and a
* list of arguments to bind to the query, expecting an SqlRowSet.
* <p>The results will be mapped to an SqlRowSet which holds the data in a
* disconnected fashion. This wrapper will translate any SQLExceptions thrown.
* <p>Note that, for the default implementation, JDBC RowSet support needs to
* be available at runtime: by default, a standard JDBC {@code CachedRowSet}
* is used.
* @param sql the SQL query to execute
* @param paramSource container of arguments to bind to the query
* @return an SqlRowSet representation (possibly a wrapper around a
* {@code javax.sql.rowset.CachedRowSet})
* @throws DataAccessException if there is any problem executing the query
* @see org.springframework.jdbc.core.JdbcTemplate#queryForRowSet(String)
* @see org.springframework.jdbc.core.SqlRowSetResultSetExtractor
* @see javax.sql.rowset.CachedRowSet
*/
SqlRowSet queryForRowSet(String sql, SqlParameterSource paramSource) throws DataAccessException;
/**
* Query given SQL to create a prepared statement from SQL and a
* list of arguments to bind to the query, expecting an SqlRowSet.
* <p>The results will be mapped to an SqlRowSet which holds the data in a
* disconnected fashion. This wrapper will translate any SQLExceptions thrown.
* <p>Note that, for the default implementation, JDBC RowSet support needs to
* be available at runtime: by default, a standard JDBC {@code CachedRowSet}
* is used.
* @param sql the SQL query to execute
* @param paramMap map of parameters to bind to the query
* (leaving it to the PreparedStatement to guess the corresponding SQL type)
* @return an SqlRowSet representation (possibly a wrapper around a
* {@code javax.sql.rowset.CachedRowSet})
* @throws DataAccessException if there is any problem executing the query
* @see org.springframework.jdbc.core.JdbcTemplate#queryForRowSet(String)
* @see org.springframework.jdbc.core.SqlRowSetResultSetExtractor
* @see javax.sql.rowset.CachedRowSet
*/
SqlRowSet queryForRowSet(String sql, Map<String, ?> paramMap) throws DataAccessException;
/**
* Issue an update via a prepared statement, binding the given arguments.
* @param sql the SQL containing named parameters
* @param paramSource container of arguments and SQL types to bind to the query
* @return the number of rows affected
* @throws DataAccessException if there is any problem issuing the update
*/
int update(String sql, SqlParameterSource paramSource) throws DataAccessException;
/**
* Issue an update via a prepared statement, binding the given arguments.
* @param sql the SQL containing named parameters
* @param paramMap map of parameters to bind to the query
* (leaving it to the PreparedStatement to guess the corresponding SQL type)
* @return the number of rows affected
* @throws DataAccessException if there is any problem issuing the update
*/
int update(String sql, Map<String, ?> paramMap) throws DataAccessException;
/**
* Issue an update via a prepared statement, binding the given arguments,
* returning generated keys.
* <p>This method requires support for generated keys in the JDBC driver.
* @param sql the SQL containing named parameters
* @param paramSource container of arguments and SQL types to bind to the query
* @param generatedKeyHolder a {@link KeyHolder} that will hold the generated keys
* @return the number of rows affected
* @throws DataAccessException if there is any problem issuing the update
* @see MapSqlParameterSource
* @see org.springframework.jdbc.support.GeneratedKeyHolder
* @see java.sql.DatabaseMetaData#supportsGetGeneratedKeys()
*/
int update(String sql, SqlParameterSource paramSource, KeyHolder generatedKeyHolder)
throws DataAccessException;
/**
* Issue an update via a prepared statement, binding the given arguments,
* returning generated keys.
* <p>This method requires support for generated keys in the JDBC driver.
* @param sql the SQL containing named parameters
* @param paramSource container of arguments and SQL types to bind to the query
* @param generatedKeyHolder a {@link KeyHolder} that will hold the generated keys
* @param keyColumnNames names of the columns that will have keys generated for them
* @return the number of rows affected
* @throws DataAccessException if there is any problem issuing the update
* @see MapSqlParameterSource
* @see org.springframework.jdbc.support.GeneratedKeyHolder
* @see java.sql.DatabaseMetaData#supportsGetGeneratedKeys()
*/
int update(String sql, SqlParameterSource paramSource, KeyHolder generatedKeyHolder, String[] keyColumnNames)
throws DataAccessException;
/**
* Execute a batch using the supplied SQL statement with the batch of supplied arguments.
* @param sql the SQL statement to execute
* @param batchArgs the array of {@link SqlParameterSource} containing the batch of
* arguments for the query
* @return an array containing the numbers of rows affected by each update in the batch
* (may also contain special JDBC-defined negative values for affected rows such as
* {@link java.sql.Statement#SUCCESS_NO_INFO}/{@link java.sql.Statement#EXECUTE_FAILED})
* @throws DataAccessException if there is any problem issuing the update
*/
int[] batchUpdate(String sql, SqlParameterSource[] batchArgs);
/**
* Executes a batch using the supplied SQL statement with the batch of supplied arguments.
* @param sql the SQL statement to execute
* @param batchValues the array of Maps containing the batch of arguments for the query
* @return an array containing the numbers of rows affected by each update in the batch
* (may also contain special JDBC-defined negative values for affected rows such as
* {@link java.sql.Statement#SUCCESS_NO_INFO}/{@link java.sql.Statement#EXECUTE_FAILED})
* @throws DataAccessException if there is any problem issuing the update
*/
int[] batchUpdate(String sql, Map<String, ?>[] batchValues);
/**
* Execute a batch using the supplied SQL statement with the batch of supplied
* arguments, returning generated keys.
* <p>This method requires support for generated keys in the JDBC driver.
* @param sql the SQL statement to execute
* @param batchArgs the array of {@link SqlParameterSource} containing the batch of
* arguments for the query
* @param generatedKeyHolder a {@link KeyHolder} that will hold the generated keys
* @return an array containing the numbers of rows affected by each update in the batch
* (may also contain special JDBC-defined negative values for affected rows such as
* {@link java.sql.Statement#SUCCESS_NO_INFO}/{@link java.sql.Statement#EXECUTE_FAILED})
* @throws DataAccessException if there is any problem issuing the update
* @since 6.1
* @see org.springframework.jdbc.support.GeneratedKeyHolder
* @see java.sql.DatabaseMetaData#supportsGetGeneratedKeys()
*/
int[] batchUpdate(String sql, SqlParameterSource[] batchArgs, KeyHolder generatedKeyHolder);
/**
* Execute a batch using the supplied SQL statement with the batch of supplied arguments,
* returning generated keys.
* <p>This method requires support for generated keys in the JDBC driver.
* @param sql the SQL statement to execute
* @param batchArgs the array of {@link SqlParameterSource} containing the batch of
* arguments for the query
* @param generatedKeyHolder a {@link KeyHolder} that will hold the generated keys
* @param keyColumnNames names of the columns that will have keys generated for them
* @return an array containing the numbers of rows affected by each update in the batch
* (may also contain special JDBC-defined negative values for affected rows such as
* {@link java.sql.Statement#SUCCESS_NO_INFO}/{@link java.sql.Statement#EXECUTE_FAILED})
* @throws DataAccessException if there is any problem issuing the update
* @since 6.1
* @see org.springframework.jdbc.support.GeneratedKeyHolder
* @see java.sql.DatabaseMetaData#supportsGetGeneratedKeys()
*/
int[] batchUpdate(String sql, SqlParameterSource[] batchArgs, KeyHolder generatedKeyHolder,
String[] keyColumnNames);
}
| are |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexResponse.java | {
"start": 2571,
"end": 5743
} | class ____ implements Writeable, ToXContentFragment {
private final Index index;
private final @Nullable Exception exception;
private final @Nullable ShardResult[] shards;
public IndexResult(final Index index) {
this(index, null, null);
}
public IndexResult(final Index index, final Exception failure) {
this(index, Objects.requireNonNull(failure), null);
}
public IndexResult(final Index index, final ShardResult[] shards) {
this(index, null, Objects.requireNonNull(shards));
}
private IndexResult(final Index index, @Nullable final Exception exception, @Nullable final ShardResult[] shards) {
this.index = Objects.requireNonNull(index);
this.exception = exception;
this.shards = shards;
}
IndexResult(final StreamInput in) throws IOException {
this.index = new Index(in);
this.exception = in.readException();
this.shards = in.readOptionalArray(ShardResult::new, ShardResult[]::new);
}
@Override
public void writeTo(final StreamOutput out) throws IOException {
index.writeTo(out);
out.writeException(exception);
out.writeOptionalArray(shards);
}
public Index getIndex() {
return index;
}
public Exception getException() {
return exception;
}
public ShardResult[] getShards() {
return shards;
}
public boolean hasFailures() {
if (exception != null) {
return true;
}
if (shards != null) {
for (ShardResult shard : shards) {
if (shard.hasFailures()) {
return true;
}
}
}
return false;
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject(index.getName());
{
if (hasFailures()) {
builder.field("closed", false);
if (exception != null) {
builder.startObject("exception");
ElasticsearchException.generateFailureXContent(builder, params, exception, true);
builder.endObject();
} else {
builder.startObject("failedShards");
for (ShardResult shard : shards) {
if (shard.hasFailures()) {
shard.toXContent(builder, params);
}
}
builder.endObject();
}
} else {
builder.field("closed", true);
}
}
return builder.endObject();
}
@Override
public String toString() {
return Strings.toString(this);
}
}
public static | IndexResult |
java | apache__kafka | streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/PurgeRepartitionTopicIntegrationTest.java | {
"start": 3770,
"end": 5030
} | class ____ implements TestCondition {
@Override
public final boolean conditionMet() {
try {
final Set<String> topics = adminClient.listTopics().names().get();
if (!topics.contains(REPARTITION_TOPIC)) {
return false;
}
} catch (final Exception e) {
return false;
}
try {
final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, REPARTITION_TOPIC);
final Config config = adminClient
.describeConfigs(Collections.singleton(resource))
.values()
.get(resource)
.get();
return config.get(TopicConfig.CLEANUP_POLICY_CONFIG).value().equals(TopicConfig.CLEANUP_POLICY_DELETE)
&& config.get(TopicConfig.SEGMENT_MS_CONFIG).value().equals(PURGE_INTERVAL_MS.toString())
&& config.get(LogConfig.INTERNAL_SEGMENT_BYTES_CONFIG).value().equals(PURGE_SEGMENT_BYTES.toString());
} catch (final Exception e) {
return false;
}
}
}
private | RepartitionTopicCreatedWithExpectedConfigs |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/registration/ClientRegistrations.java | {
"start": 14073,
"end": 14164
} | interface ____<S, T, E extends Throwable> {
T apply(S src) throws E;
}
}
| ThrowingFunction |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/ImplicitCompositeKeyJoinTest.java | {
"start": 1284,
"end": 3647
} | class ____ {
private static final Logger LOGGER = Logger.getLogger( ImplicitCompositeKeyJoinTest.class );
@Test
public void testCorrectColumnSizeValues(ServiceRegistryScope registryScope, DomainModelScope modelScope) {
boolean createTableEmployeeFound = false;
final List<String> commands = new SchemaCreatorImpl( registryScope.getRegistry() )
.generateCreationCommands( modelScope.getDomainModel(), false );
for ( String command : commands ) {
LOGGER.info( command );
if ( command.toLowerCase().matches( "^create( (column|row))? table employee.+" ) ) {
final String[] columnsDefinition = getColumnsDefinition( command );
for ( String columnsDefinition1 : columnsDefinition ) {
checkColumnSize( columnsDefinition1 );
}
createTableEmployeeFound = true;
}
}
assertTrue( createTableEmployeeFound,
"Expected create table command for Employee entity not found" );
}
private String[] getColumnsDefinition(String command) {
String substring = command.toLowerCase().replaceAll( "create( (column|row))? table employee ", "" );
substring = substring.substring( 0, substring.toLowerCase().indexOf( "primary key" ) );
return substring.split( "\\," );
}
private void checkColumnSize(String s) {
if ( s.toLowerCase().contains( "manager_age" ) ) {
if ( !s.contains( "15" ) ) {
fail( expectedMessage( "manager_age", 15, s ) );
}
}
else if ( s.toLowerCase().contains( "manager_birthday" ) ) {
if ( !s.contains( "255" ) ) {
fail( expectedMessage( "manager_birthday", 255, s ) );
}
}
else if ( s.toLowerCase().contains( "manager_name" ) ) {
if ( !s.contains( "20" ) ) {
fail( expectedMessage( "manager_name", 20, s ) );
}
}
else if ( s.toLowerCase().contains( "age" ) ) {
if ( !s.contains( "15" ) ) {
fail( expectedMessage( "age", 15, s ) );
}
}
else if ( s.toLowerCase().contains( "birthday" ) ) {
if ( !s.contains( "255" ) ) {
fail( expectedMessage( "birthday", 255, s ) );
}
}
else if ( s.toLowerCase().contains( "name" ) ) {
if ( !s.contains( "20" ) ) {
fail( expectedMessage( "name", 20, s ) );
}
}
}
private String expectedMessage(String column_name, int size, String actual) {
return "Expected " + column_name + " " + size + " but was " + actual;
}
@Entity
@Table(name = "Employee")
public | ImplicitCompositeKeyJoinTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java | {
"start": 22425,
"end": 22578
} | interface ____ extends Broadcaster {
void subscribe();
void unsubscribe();
void setChannel(Channel<?> channel);
}
public static | EventBroadcaster |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/ingest/IngestPipelineMetric.java | {
"start": 639,
"end": 2103
} | class ____ extends IngestMetric {
/**
* The amount of bytes ingested by a pipeline.
*/
private final CounterMetric bytesIngested = new CounterMetric();
/**
* The amount of bytes produced by a pipeline.
*/
private final CounterMetric bytesProduced = new CounterMetric();
void add(IngestPipelineMetric metrics) {
super.add(metrics);
bytesIngested.inc(metrics.bytesIngested.count());
bytesProduced.inc(metrics.bytesProduced.count());
}
/**
* Call this prior to the ingest action.
* @param bytesIngested The number of bytes ingested by the pipeline.
*/
void preIngestBytes(long bytesIngested) {
this.bytesIngested.inc(bytesIngested);
}
/**
* Call this after performing the ingest action.
* @param bytesProduced The number of bytes resulting from running a request in the pipeline.
*/
void postIngestBytes(long bytesProduced) {
this.bytesProduced.inc(bytesProduced);
}
/**
* Creates a serializable representation for these metrics.
*/
IngestStats.ByteStats createByteStats() {
long bytesIngested = this.bytesIngested.count();
long bytesProduced = this.bytesProduced.count();
if (bytesIngested == 0L && bytesProduced == 0L) {
return IngestStats.ByteStats.IDENTITY;
}
return new IngestStats.ByteStats(bytesIngested, bytesProduced);
}
}
| IngestPipelineMetric |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1400/Issue1494.java | {
"start": 799,
"end": 926
} | class ____ extends A {
private String name;
public String getName() {
return name;
}
}
}
| B |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/CovariantOverrideTest.java | {
"start": 553,
"end": 1558
} | interface
____ callMe();
}
@Test
public void returnFoo1() {
ReturnsObject mock = mock(ReturnsObject.class);
when(mock.callMe()).thenReturn("foo");
assertEquals("foo", mock.callMe()); // Passes
}
@Test
public void returnFoo2() {
ReturnsString mock = mock(ReturnsString.class);
when(mock.callMe()).thenReturn("foo");
assertEquals("foo", mock.callMe()); // Passes
}
@Test
public void returnFoo3() {
ReturnsObject mock = mock(ReturnsString.class);
when(mock.callMe()).thenReturn("foo");
assertEquals("foo", mock.callMe()); // Passes
}
@Test
public void returnFoo4() {
ReturnsString mock = mock(ReturnsString.class);
mock.callMe(); // covariant override not generated
ReturnsObject mock2 = mock; // Switch to base type to call covariant override
verify(mock2).callMe(); // Fails: java.lang.AssertionError: expected:<foo> but was:<null>
}
}
| String |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/InputContactMessageContent.java | {
"start": 1704,
"end": 3488
} | class ____ {
private String phoneNumber;
private String firstName;
private String lastName;
private String vcard;
private Builder() {
}
public Builder phoneNumber(String phoneNumber) {
this.phoneNumber = phoneNumber;
return this;
}
public Builder firstName(String firstName) {
this.firstName = firstName;
return this;
}
public Builder lastName(String lastName) {
this.lastName = lastName;
return this;
}
public Builder vcard(String vcard) {
this.vcard = vcard;
return this;
}
public InputContactMessageContent build() {
InputContactMessageContent inputContactMessageContent = new InputContactMessageContent();
inputContactMessageContent.setPhoneNumber(phoneNumber);
inputContactMessageContent.setFirstName(firstName);
inputContactMessageContent.setLastName(lastName);
inputContactMessageContent.setVcard(vcard);
return inputContactMessageContent;
}
}
public String getPhoneNumber() {
return phoneNumber;
}
public void setPhoneNumber(String phoneNumber) {
this.phoneNumber = phoneNumber;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getVcard() {
return vcard;
}
public void setVcard(String vcard) {
this.vcard = vcard;
}
}
| Builder |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/rest/TestResourceWithTwoConstructors.java | {
"start": 140,
"end": 478
} | class ____ {
Someservice service;
public TestResourceWithTwoConstructors() {
}
@Inject
public TestResourceWithTwoConstructors(Someservice service) {
this.service = service;
}
@GET
@Path("/service")
public String service() {
return service.name();
}
}
| TestResourceWithTwoConstructors |
java | redisson__redisson | redisson/src/main/java/org/redisson/codec/ReferenceCodecProvider.java | {
"start": 1450,
"end": 1746
} | class ____ has the REntity annotation.
* @param config Redisson config object
*
* @return the cached codec instance.
*/
<T extends Codec> T getCodec(REntity anno, Class<?> cls, Config config);
/**
* Get a codec instance by a RObjectField annotation and the | that |
java | elastic__elasticsearch | x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java | {
"start": 28908,
"end": 30743
} | class ____ extends ClusterStateWaitStep implements NamedWriteable {
public static final String NAME = "observable_cluster_state_action";
public ObservableClusterStateWaitStep(StepKey current, StepKey next) {
super(current, next);
}
@Override
public boolean isRetryable() {
return false;
}
public ObservableClusterStateWaitStep(StreamInput in) throws IOException {
this(new StepKey(in.readString(), in.readString(), in.readString()), readOptionalNextStepKey(in));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(getKey().phase());
out.writeString(getKey().action());
out.writeString(getKey().name());
boolean hasNextStep = getNextStepKey() != null;
out.writeBoolean(hasNextStep);
if (hasNextStep) {
out.writeString(getNextStepKey().phase());
out.writeString(getNextStepKey().action());
out.writeString(getNextStepKey().name());
}
}
private static StepKey readOptionalNextStepKey(StreamInput in) throws IOException {
if (in.readBoolean()) {
return new StepKey(in.readString(), in.readString(), in.readString());
}
return null;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Result isConditionMet(Index index, ProjectState currentState) {
boolean complete = currentState.metadata().index("test").getSettings().getAsBoolean("index.lifecycle.test.complete", false);
return new Result(complete, null);
}
}
public static | ObservableClusterStateWaitStep |
java | bumptech__glide | samples/svg/src/main/java/com/bumptech/glide/samples/svg/SvgDrawableTranscoder.java | {
"start": 564,
"end": 974
} | class ____ implements ResourceTranscoder<SVG, PictureDrawable> {
@Nullable
@Override
public Resource<PictureDrawable> transcode(
@NonNull Resource<SVG> toTranscode, @NonNull Options options) {
SVG svg = toTranscode.get();
Picture picture = svg.renderToPicture();
PictureDrawable drawable = new PictureDrawable(picture);
return new SimpleResource<>(drawable);
}
}
| SvgDrawableTranscoder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/SessionAssociationMarkers.java | {
"start": 573,
"end": 2379
} | class ____ {
private static final SessionAssociationMarkers NON_ASSOCIATED = new SessionAssociationMarkers();
final boolean allowLoadOutsideTransaction;
final String sessionFactoryUuid;
transient SharedSessionContractImplementor session; //TODO by resetting this one field on Session#close we might be able to avoid iterating on all managed instances to de-associate them? Only if we guarantee all managed types use this.
public SessionAssociationMarkers(final SharedSessionContractImplementor session) {
this.session = session;
allowLoadOutsideTransaction =
session.getFactory().getSessionFactoryOptions()
.isInitializeLazyStateOutsideTransactionsEnabled();
sessionFactoryUuid = allowLoadOutsideTransaction ? session.getFactory().getUuid() : null;
}
/**
* Constructor for the singleton representing non-associated
* state.
*/
private SessionAssociationMarkers() {
session = null;
sessionFactoryUuid = null;
allowLoadOutsideTransaction = false;
}
/**
* Copying constructor for when we're allowed to load outside of transactions
* and need to transparently reassociated to the SessionFactory having the
* specified UUID.
*
* @param uuid The UUID of the SessionFactory
*/
private SessionAssociationMarkers(String uuid) {
session = null;
sessionFactoryUuid = uuid;
allowLoadOutsideTransaction = true;
}
public SessionAssociationMarkers deAssociatedCopy() {
return allowLoadOutsideTransaction
? new SessionAssociationMarkers( sessionFactoryUuid )
: NON_ASSOCIATED;
}
/**
* Careful as this mutates the state of this instance, which is possibly
* used by multiple managed entities.
* Removes the reference to the session; useful on Session close.
*/
public void sessionClosed() {
session = null;
}
}
| SessionAssociationMarkers |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/json-shade/java/org/springframework/boot/cli/json/JSONTokener.java | {
"start": 684,
"end": 910
} | class ____ written without inspecting the non-free org.json source code.
/**
* Parses a JSON (<a href="https://www.ietf.org/rfc/rfc4627.txt">RFC 4627</a>) encoded
* string into the corresponding object. Most clients of this | was |
java | apache__camel | components/camel-stax/src/main/java/org/apache/camel/component/stax/StAXBuilder.java | {
"start": 1699,
"end": 2146
} | class ____ has JAXB annotations to bind POJO.
* @param isNamespaceAware sets the namespace awareness of the xml reader
*/
public static <T> Expression stax(Class<T> clazz, boolean isNamespaceAware) {
return new StAXJAXBIteratorExpression<>(clazz, isNamespaceAware);
}
/**
* Creates a {@link org.apache.camel.component.stax.StAXJAXBIteratorExpression}.
*
* @param clazzName the FQN name of the | which |
java | apache__maven | impl/maven-xml/src/main/java/org/apache/maven/internal/xml/XmlNodeImpl.java | {
"start": 1355,
"end": 10060
} | class ____ implements Serializable, XmlNode {
@Nonnull
protected final String prefix;
@Nonnull
protected final String namespaceUri;
@Nonnull
protected final String name;
protected final String value;
@Nonnull
protected final Map<String, String> attributes;
@Nonnull
protected final List<XmlNode> children;
protected final Object location;
public XmlNodeImpl(String name) {
this(name, null, null, null, null);
}
public XmlNodeImpl(String name, String value) {
this(name, value, null, null, null);
}
public XmlNodeImpl(XmlNode from, String name) {
this(name, from.getValue(), from.getAttributes(), from.getChildren(), from.getInputLocation());
}
public XmlNodeImpl(
String name, String value, Map<String, String> attributes, List<XmlNode> children, Object location) {
this("", "", name, value, attributes, children, location);
}
public XmlNodeImpl(
String prefix,
String namespaceUri,
String name,
String value,
Map<String, String> attributes,
List<XmlNode> children,
Object location) {
this.prefix = prefix == null ? "" : prefix;
this.namespaceUri = namespaceUri == null ? "" : namespaceUri;
this.name = Objects.requireNonNull(name);
this.value = value;
this.attributes = ImmutableCollections.copy(attributes);
this.children = ImmutableCollections.copy(children);
this.location = location;
}
@SuppressWarnings("removal")
@Override
public XmlNode merge(XmlNode source, Boolean childMergeOverride) {
return XmlService.merge(this, source, childMergeOverride);
}
// ----------------------------------------------------------------------
// Name handling
// ----------------------------------------------------------------------
@Override
@Nonnull
@Deprecated(since = "4.0.0", forRemoval = true)
public String getPrefix() {
return prefix;
}
@Override
@Nonnull
public String prefix() {
return getPrefix();
}
@Override
@Nonnull
@Deprecated(since = "4.0.0", forRemoval = true)
public String getNamespaceUri() {
return namespaceUri;
}
@Override
@Nonnull
public String namespaceUri() {
return getNamespaceUri();
}
@Override
@Nonnull
@Deprecated(since = "4.0.0", forRemoval = true)
public String getName() {
return name;
}
@Override
@Nonnull
public String name() {
return getName();
}
// ----------------------------------------------------------------------
// Value handling
// ----------------------------------------------------------------------
@Override
@Deprecated(since = "4.0.0", forRemoval = true)
public String getValue() {
return value;
}
@Override
public String value() {
return getValue();
}
// ----------------------------------------------------------------------
// Attribute handling
// ----------------------------------------------------------------------
@Override
@Nonnull
@Deprecated(since = "4.0.0", forRemoval = true)
public Map<String, String> getAttributes() {
return attributes;
}
@Override
@Nonnull
public Map<String, String> attributes() {
return getAttributes();
}
@Override
@Deprecated(since = "4.0.0", forRemoval = true)
public String getAttribute(@Nonnull String name) {
return attributes.get(name);
}
@Override
public String attribute(@Nonnull String name) {
return getAttribute(name);
}
// ----------------------------------------------------------------------
// Child handling
// ----------------------------------------------------------------------
@Deprecated(since = "4.0.0", forRemoval = true)
@Override
public XmlNode getChild(String name) {
if (name != null) {
ListIterator<XmlNode> it = children.listIterator(children.size());
while (it.hasPrevious()) {
XmlNode child = it.previous();
if (name.equals(child.getName())) {
return child;
}
}
}
return null;
}
@Override
public XmlNode child(String name) {
return getChild(name);
}
@Override
@Nonnull
@Deprecated(since = "4.0.0", forRemoval = true)
public List<XmlNode> getChildren() {
return children;
}
@Override
@Nonnull
public List<XmlNode> children() {
return getChildren();
}
@Deprecated(since = "4.0.0", forRemoval = true)
public int getChildCount() {
return children.size();
}
// ----------------------------------------------------------------------
// Input location handling
// ----------------------------------------------------------------------
/**
* @since 3.2.0
* @return input location
*/
@Deprecated(since = "4.0.0", forRemoval = true)
@Override
public Object getInputLocation() {
return location;
}
@Override
public Object inputLocation() {
return getInputLocation();
}
// ----------------------------------------------------------------------
// Helpers
// ----------------------------------------------------------------------
@SuppressWarnings("checkstyle:MethodLength")
public static XmlNode merge(XmlNode dominant, XmlNode recessive, Boolean childMergeOverride) {
return XmlService.merge(dominant, recessive, childMergeOverride);
}
/**
* Merge two DOMs, with one having dominance in the case of collision. Merge mechanisms (vs. override for nodes, or
* vs. append for children) is determined by attributes of the dominant root node.
*
* @see XmlService#CHILDREN_COMBINATION_MODE_ATTRIBUTE
* @see XmlService#SELF_COMBINATION_MODE_ATTRIBUTE
* @param dominant The dominant DOM into which the recessive value/attributes/children will be merged
* @param recessive The recessive DOM, which will be merged into the dominant DOM
* @return merged DOM
*
* @deprecated use {@link XmlService#merge(XmlNode, XmlNode, Boolean)} instead
*/
@Deprecated(since = "4.0.0", forRemoval = true)
public static XmlNode merge(XmlNode dominant, XmlNode recessive) {
return XmlService.merge(dominant, recessive);
}
// ----------------------------------------------------------------------
// Standard object handling
// ----------------------------------------------------------------------
@Override
public boolean equals(Object o) {
return this == o
|| o instanceof XmlNode that
&& Objects.equals(this.name, that.name())
&& Objects.equals(this.value, that.value())
&& Objects.equals(this.attributes, that.attributes())
&& Objects.equals(this.children, that.children());
}
@Override
public int hashCode() {
return Objects.hash(name, value, attributes, children);
}
@Override
public String toString() {
try {
StringWriter writer = new StringWriter();
XmlService.write(this, writer);
return writer.toString();
} catch (IOException e) {
return toStringObject();
}
}
public String toStringObject() {
StringBuilder sb = new StringBuilder();
sb.append("XmlNode[");
boolean w = false;
w = addToStringField(sb, prefix, o -> !o.isEmpty(), "prefix", w);
w = addToStringField(sb, namespaceUri, o -> !o.isEmpty(), "namespaceUri", w);
w = addToStringField(sb, name, o -> !o.isEmpty(), "name", w);
w = addToStringField(sb, value, o -> !o.isEmpty(), "value", w);
w = addToStringField(sb, attributes, o -> !o.isEmpty(), "attributes", w);
w = addToStringField(sb, children, o -> !o.isEmpty(), "children", w);
w = addToStringField(sb, location, Objects::nonNull, "location", w);
sb.append("]");
return sb.toString();
}
private static <T> boolean addToStringField(StringBuilder sb, T o, Function<T, Boolean> p, String n, boolean w) {
if (!p.apply(o)) {
if (w) {
sb.append(", ");
} else {
w = true;
}
sb.append(n).append("='").append(o).append('\'');
}
return w;
}
}
| XmlNodeImpl |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/Probe.java | {
"start": 1086,
"end": 2830
} | class ____ implements MonitorKeys {
private String name;
protected Probe() {
}
/**
* Create a probe of a specific name
*
* @param name probe name
*/
public Probe(String name) {
this.name = name;
}
protected void setName(String name) {
this.name = name;
}
public String getName() {
return name;
}
@Override
public String toString() {
return getName();
}
public static String getProperty(Map<String, String> props, String name,
String defaultValue) throws IOException {
String value = props.get(name);
if (StringUtils.isEmpty(value)) {
if (defaultValue == null) {
throw new IOException(name + " not specified");
}
return defaultValue;
}
return value;
}
public static int getPropertyInt(Map<String, String> props, String name,
Integer defaultValue) throws IOException {
String value = props.get(name);
if (StringUtils.isEmpty(value)) {
if (defaultValue == null) {
throw new IOException(name + " not specified");
}
return defaultValue;
}
return Integer.parseInt(value);
}
public static boolean getPropertyBool(Map<String, String> props, String name,
boolean defaultValue) {
String value = props.get(name);
if (StringUtils.isEmpty(value)) {
return defaultValue;
}
return Boolean.parseBoolean(value);
}
/**
* perform any prelaunch initialization
*/
public void init() throws IOException {
}
/**
* Ping the endpoint. All exceptions must be caught and included in the
* (failure) status.
*
* @param instance instance to ping
* @return the status
*/
public abstract ProbeStatus ping(ComponentInstance instance);
}
| Probe |
java | apache__kafka | metadata/src/test/java/org/apache/kafka/image/publisher/BrokerRegistrationTrackerTest.java | {
"start": 1983,
"end": 6411
} | class ____ {
AtomicInteger numCalls = new AtomicInteger(0);
BrokerRegistrationTracker tracker = new BrokerRegistrationTracker(1, () -> numCalls.incrementAndGet());
MetadataImage image = MetadataImage.EMPTY;
void onMetadataUpdate(MetadataDelta delta) {
MetadataProvenance provenance = new MetadataProvenance(0, 0, 0, true);
image = delta.apply(provenance);
LogDeltaManifest manifest = new LogDeltaManifest.Builder().
provenance(provenance).
leaderAndEpoch(LeaderAndEpoch.UNKNOWN).
numBatches(1).
elapsedNs(1).
numBytes(1).
build();
tracker.onMetadataUpdate(delta, image, manifest);
}
MetadataDelta newDelta() {
return new MetadataDelta.Builder().
setImage(image).
build();
}
}
@Test
public void testTrackerName() {
BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext();
assertEquals("BrokerRegistrationTracker(id=1)", ctx.tracker.name());
}
@Test
public void testMetadataVersionUpdateWithoutRegistrationDoesNothing() {
BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext();
MetadataDelta delta = ctx.newDelta();
delta.replay(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(MetadataVersion.IBP_3_7_IV2.featureLevel()));
ctx.onMetadataUpdate(delta);
assertEquals(0, ctx.numCalls.get());
}
@Test
public void testBrokerUpdateWithoutNewMvDoesNothing() {
BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext();
MetadataDelta delta = ctx.newDelta();
delta.replay(new RegisterBrokerRecord().
setBrokerId(1).
setIncarnationId(INCARNATION_ID).
setLogDirs(List.of(A, B, C)));
delta.replay(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(MetadataVersion.MINIMUM_VERSION.featureLevel()));
ctx.onMetadataUpdate(delta);
assertEquals(0, ctx.numCalls.get());
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void testBrokerUpdateWithNewMv(boolean jbodMv) {
BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext();
MetadataDelta delta = ctx.newDelta();
delta.replay(new RegisterBrokerRecord().
setBrokerId(1).
setIncarnationId(INCARNATION_ID).
setLogDirs(List.of()));
delta.replay(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(jbodMv ? MetadataVersion.IBP_3_7_IV2.featureLevel() :
MetadataVersion.IBP_3_7_IV1.featureLevel()));
ctx.onMetadataUpdate(delta);
if (jbodMv) {
assertEquals(1, ctx.numCalls.get());
} else {
assertEquals(0, ctx.numCalls.get());
}
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void testBrokerUpdateWithNewMvWithTwoDeltas(boolean jbodMv) {
BrokerRegistrationTrackerTestContext ctx = new BrokerRegistrationTrackerTestContext();
MetadataDelta delta = ctx.newDelta();
delta.replay(new RegisterBrokerRecord().
setBrokerId(1).
setIncarnationId(INCARNATION_ID).
setLogDirs(List.of()));
delta.replay(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(MetadataVersion.IBP_3_7_IV1.featureLevel()));
ctx.onMetadataUpdate(delta);
// No calls are made because MetadataVersion is older than IBP_3_7_IV2 initially
assertEquals(0, ctx.numCalls.get());
delta = ctx.newDelta();
delta.replay(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(jbodMv ? MetadataVersion.IBP_3_7_IV2.featureLevel() :
MetadataVersion.IBP_3_7_IV1.featureLevel()));
ctx.onMetadataUpdate(delta);
if (jbodMv) {
assertEquals(1, ctx.numCalls.get());
} else {
assertEquals(0, ctx.numCalls.get());
}
}
}
| BrokerRegistrationTrackerTestContext |
java | elastic__elasticsearch | libs/dissect/src/main/java/org/elasticsearch/dissect/DissectParser.java | {
"start": 9328,
"end": 17355
} | class ____ the list of keys+delimiters are found (dissectPairs), which allows the use of that ordered
* list to know which delimiter to use for the search. The delimiters is progressed once the current delimiter is matched.
*
* There are two special cases that requires additional parsing beyond the standard naive algorithm. Consecutive delimiters should
* results in a empty matches unless the {@code ->} is provided. For example given the dissect pattern of
* {@code %{a},%{b},%{c},%{d}} and input string of {@code foo,,,} the match should be successful with empty values for b,c and d.
* However, if the key modifier {@code ->}, is present it will simply skip over any delimiters just to the right of the key
* without assigning any values. For example {@code %{a->},{%b}} will match the input string of {@code foo,,,,,,bar} with a=foo and
* b=bar.
*
*/
DissectMatch dissectMatch = new DissectMatch(appendSeparator, maxMatches, maxResults, appendCount, referenceCount);
Iterator<DissectPair> it = matchPairs.iterator();
// ensure leading delimiter matches
if (inputString != null
&& inputString.length() > leadingDelimiter.length()
&& leadingDelimiter.equals(inputString.substring(0, leadingDelimiter.length()))) {
byte[] input = inputString.getBytes(StandardCharsets.UTF_8);
// grab the first key/delimiter pair
DissectPair dissectPair = it.next();
DissectKey key = dissectPair.key();
byte[] delimiter = dissectPair.delimiter().getBytes(StandardCharsets.UTF_8);
// start dissection after the first delimiter
int i = leadingDelimiter.getBytes(StandardCharsets.UTF_8).length;
int valueStart = i;
int lookAheadMatches;
// start walking the input string byte by byte, look ahead for matches where needed
// if a match is found jump forward to the end of the match
while (i < input.length) {
lookAheadMatches = 0;
// potential match between delimiter and input string
if (delimiter.length > 0 && input[i] == delimiter[0]) {
// look ahead to see if the entire delimiter matches the input string
for (int j = 0; j < delimiter.length; j++) {
if (i + j < input.length && input[i + j] == delimiter[j]) {
lookAheadMatches++;
}
}
// found a full delimiter match
if (lookAheadMatches == delimiter.length) {
// record the key/value tuple
byte[] value = Arrays.copyOfRange(input, valueStart, i);
dissectMatch.add(key, new String(value, StandardCharsets.UTF_8));
// jump to the end of the match
i += lookAheadMatches;
// look for consecutive delimiters (e.g. a,,,,d,e)
while (i < input.length) {
lookAheadMatches = 0;
for (int j = 0; j < delimiter.length; j++) {
if (i + j < input.length && input[i + j] == delimiter[j]) {
lookAheadMatches++;
}
}
// found consecutive delimiters
if (lookAheadMatches == delimiter.length) {
// jump to the end of the match
i += lookAheadMatches;
if (key.skipRightPadding() == false) {
// progress the keys/delimiter if possible
if (it.hasNext() == false) {
break; // the while loop
}
dissectPair = it.next();
key = dissectPair.key();
// add the key with an empty value for the empty delimiter
dissectMatch.add(key, "");
}
} else {
break; // the while loop
}
}
// progress the keys/delimiter if possible
if (it.hasNext() == false) {
break; // the for loop
}
dissectPair = it.next();
key = dissectPair.key();
delimiter = dissectPair.delimiter().getBytes(StandardCharsets.UTF_8);
// i is always one byte after the last found delimiter, aka the start of the next value
valueStart = i;
} else {
i++;
}
} else {
i++;
}
}
// the last key, grab the rest of the input (unless consecutive delimiters already grabbed the last key)
// and there is no trailing delimiter
if (dissectMatch.fullyMatched() == false && delimiter.length == 0) {
byte[] value = Arrays.copyOfRange(input, valueStart, input.length);
String valueString = new String(value, StandardCharsets.UTF_8);
dissectMatch.add(key, valueString);
}
}
Map<String, String> results = dissectMatch.getResults();
return dissectMatch.isValid(results) ? results : null;
}
/**
* Entry point to dissect a string into its parts.
*
* @param inputString The string to dissect
* @return the key/value Map of the results
* @throws DissectException if unable to dissect a pair into its parts.
*/
public Map<String, String> forceParse(String inputString) {
Map<String, String> results = parse(inputString);
if (results == null) {
throw new DissectException.FindMatch(pattern, inputString);
}
return results;
}
/**
* Returns the output keys produced by the instance (excluding named skip keys),
* e.g. for the pattern <code>"%{a} %{b} %{?c}"</code> the result is <code>[a, b]</code>.
* <p>
* The result is an ordered set, where the entries are in the same order as they appear in the pattern.
* <p>
* The reference keys are returned with the name they have in the pattern, e.g. for <code>"%{*x} %{&x}"</code>
* the result is <code>[x]</code>.
*
* @return the output keys produced by the instance.
*/
public Set<String> outputKeys() {
Set<String> result = new LinkedHashSet<>(matchPairs.size());
for (DissectPair matchPair : matchPairs) {
if (matchPair.key.getModifier() != DissectKey.Modifier.NAMED_SKIP) {
result.add(matchPair.key.getName());
}
}
return result;
}
/**
 * Returns the reference keys present in the pattern,
 * e.g. for the pattern <code>"%{a} %{b} %{*c} %{&c} %{*d} %{&d}"</code> it returns <code>[c, d]</code>.
 * <p>
 * The result is an ordered set, where the entries are in the same order as they appear in the pattern.
 *
 * @return the reference keys included in the pattern.
 */
public Set<String> referenceKeys() {
    // LinkedHashSet preserves pattern order while de-duplicating repeated names
    final Set<String> keys = new LinkedHashSet<>(matchPairs.size());
    matchPairs.stream()
        .map(matchPair -> matchPair.key)
        .filter(key -> key.getModifier() == DissectKey.Modifier.FIELD_NAME)
        .forEach(key -> keys.add(key.getName()));
    return keys;
}
/**
* A tuple | construction |
java | apache__camel | components/camel-spring-parent/camel-spring-ai/camel-spring-ai-chat/src/test/java/org/apache/camel/component/springai/chat/SpringAiChatSystemMessageIT.java | {
"start": 1244,
"end": 3720
} | class ____ extends OllamaTestSupport {
@Test
public void testSystemMessageInfluencesResponse() {
var exchange = template().request("direct:chat", e -> {
e.getIn().setBody("What should I do today?");
e.getIn().setHeader(SpringAiChatConstants.SYSTEM_MESSAGE,
"You are a fitness coach. Always recommend physical activities.");
});
String response = exchange.getMessage().getBody(String.class);
assertThat(response).isNotNull();
assertThat(response.toLowerCase()).containsAnyOf("exercise", "workout", "run", "walk", "activity", "physical");
}
@Test
public void testSystemMessageWithSpecificFormat() {
var exchange = template().request("direct:chat", e -> {
e.getIn().setBody("What is 5 + 3?");
e.getIn().setHeader(SpringAiChatConstants.SYSTEM_MESSAGE,
"You are a math teacher. Always explain your answer step by step.");
});
String response = exchange.getMessage().getBody(String.class);
assertThat(response).isNotNull();
assertThat(response).contains("8");
}
@Test
public void testSystemMessageAsExpert() {
var exchange = template().request("direct:chat", e -> {
e.getIn().setBody("Tell me about integration patterns in one sentence.");
e.getIn().setHeader(SpringAiChatConstants.SYSTEM_MESSAGE,
"You are an expert in Enterprise Integration Patterns and Apache Camel.");
});
String response = exchange.getMessage().getBody(String.class);
assertThat(response).isNotNull();
assertThat(response.toLowerCase()).containsAnyOf("integration", "pattern", "system", "message");
}
@Test
public void testWithoutSystemMessage() {
String response = template().requestBody("direct:chat",
"Say hello in one word.", String.class);
assertThat(response).isNotNull();
assertThat(response.toLowerCase()).containsAnyOf("hello", "hi");
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
bindChatModel(this.getCamelContext());
from("direct:chat")
.to("spring-ai-chat:test?chatModel=#chatModel");
}
};
}
}
| SpringAiChatSystemMessageIT |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/internal/ImmutableFetchList.java | {
"start": 1833,
"end": 2471
} | class ____ implements Iterator<Fetch> {
private int idx;
public FetchIterator() {
assert ImmutableFetchList.this.fetches != null;
this.idx = 0;
while (ImmutableFetchList.this.fetches[idx] == null) {
idx++;
}
}
@Override
public boolean hasNext() {
return idx < ImmutableFetchList.this.fetches.length;
}
@Override
public Fetch next() {
final Fetch fetch = ImmutableFetchList.this.fetches[idx++];
while ( idx < ImmutableFetchList.this.fetches.length ) {
if ( ImmutableFetchList.this.fetches[idx] != null ) {
break;
}
idx++;
}
return fetch;
}
}
public static | FetchIterator |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceContextTests.java | {
"start": 456,
"end": 1159
} | class ____ extends AbstractWireSerializingTestCase<InferenceContext> {
@Override
protected Writeable.Reader<InferenceContext> instanceReader() {
return InferenceContext::new;
}
@Override
protected InferenceContext createTestInstance() {
return new InferenceContext(randomAlphaOfLength(10));
}
@Override
protected InferenceContext mutateInstance(InferenceContext instance) throws IOException {
String originalProductUseCase = instance.productUseCase();
String newProductUseCase = randomValueOtherThan(originalProductUseCase, () -> randomAlphaOfLength(10));
return new InferenceContext(newProductUseCase);
}
}
| InferenceContextTests |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/MultipleInputITCase.java | {
"start": 6315,
"end": 7318
} | class ____ extends AbstractStreamOperatorV2<Long>
implements MultipleInputStreamOperator<Long> {
private ValueState<Long> sumState;
public KeyedSumMultipleInputOperator(StreamOperatorParameters<Long> parameters) {
super(parameters, 3);
}
@Override
public void initializeState(StateInitializationContext context) throws Exception {
super.initializeState(context);
sumState =
context.getKeyedStateStore()
.getState(
new ValueStateDescriptor<>(
"sum-state", LongSerializer.INSTANCE));
}
@Override
public List<Input> getInputs() {
return Arrays.asList(
new KeyedSumInput(this, 1),
new KeyedSumInput(this, 2),
new KeyedSumInput(this, 3));
}
private | KeyedSumMultipleInputOperator |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/cluster/CamelClusterService.java | {
"start": 2836,
"end": 3393
} | interface ____ the underlying concrete CamelClusterService.
* @return an instance of the underlying concrete CamelClusterService as the required type.
*/
default <T extends CamelClusterService> T unwrap(Class<T> clazz) {
if (CamelClusterService.class.isAssignableFrom(clazz)) {
return clazz.cast(this);
}
throw new IllegalArgumentException(
"Unable to unwrap this CamelClusterService type (" + getClass() + ") to the required type (" + clazz + ")");
}
@FunctionalInterface
| of |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.