language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/testkit/junit/jupiter/params/converter/Hex.java
|
{
"start": 1330,
"end": 1641
}
|
class ____ extends TypedArgumentConverter<String, byte[]> {
protected HexArgumentConverter() {
super(String.class, byte[].class);
}
@Override
protected byte[] convert(String source) throws ArgumentConversionException {
return Digests.fromHex(source);
}
}
}
|
HexArgumentConverter
|
java
|
elastic__elasticsearch
|
test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java
|
{
"start": 725,
"end": 4043
}
|
class ____ extends AbstractClientYamlTestFragmentParserTestCase {
public void testParseTeardownSection() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
- do:
delete:
index: foo
type: doc
id: 1
ignore: 404
- do:
delete2:
index: foo
type: doc
id: 1
ignore: 404
""");
TeardownSection section = TeardownSection.parse(parser);
assertThat(section, notNullValue());
assertThat(section.getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(section.getDoSections().size(), equalTo(2));
assertThat(((DoSection) section.getDoSections().get(0)).getApiCallSection().getApi(), equalTo("delete"));
assertThat(((DoSection) section.getDoSections().get(1)).getApiCallSection().getApi(), equalTo("delete2"));
}
public void testParseWithSkip() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
- skip:
cluster_features: "some_feature"
reason: "there is a reason"
- do:
delete:
index: foo
type: doc
id: 1
ignore: 404
- do:
delete2:
index: foo
type: doc
id: 1
ignore: 404
""");
TeardownSection section = TeardownSection.parse(parser);
assertThat(section, notNullValue());
assertThat(section.getPrerequisiteSection().isEmpty(), equalTo(false));
assertThat(section.getPrerequisiteSection().skipReason, equalTo("there is a reason"));
assertThat(section.getDoSections().size(), equalTo(2));
assertThat(((DoSection) section.getDoSections().get(0)).getApiCallSection().getApi(), equalTo("delete"));
assertThat(((DoSection) section.getDoSections().get(1)).getApiCallSection().getApi(), equalTo("delete2"));
}
public void testParseWithRequires() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
- requires:
cluster_features: "some_feature"
reason: "there is a reason"
- do:
delete:
index: foo
type: doc
id: 1
ignore: 404
- do:
delete2:
index: foo
type: doc
id: 1
ignore: 404
""");
TeardownSection section = TeardownSection.parse(parser);
assertThat(section, notNullValue());
assertThat(section.getPrerequisiteSection().isEmpty(), equalTo(false));
assertThat(section.getPrerequisiteSection().requireReason, equalTo("there is a reason"));
assertThat(section.getDoSections().size(), equalTo(2));
assertThat(((DoSection) section.getDoSections().get(0)).getApiCallSection().getApi(), equalTo("delete"));
assertThat(((DoSection) section.getDoSections().get(1)).getApiCallSection().getApi(), equalTo("delete2"));
}
}
|
TeardownSectionTests
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/internal/CharArrays.java
|
{
"start": 1128,
"end": 18422
}
|
class ____ {
private static final CharArrays INSTANCE = new CharArrays();
/**
* Returns the singleton instance of this class.
*
* @return the singleton instance of this class.
*/
public static CharArrays instance() {
return INSTANCE;
}
private Arrays arrays = Arrays.instance();
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Failures failures = Failures.instance();
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
CharArrays() {
this(StandardComparisonStrategy.instance());
}
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
public Comparator<?> getComparator() {
return arrays.getComparator();
}
public CharArrays(ComparisonStrategy comparisonStrategy) {
setArrays(new Arrays(comparisonStrategy));
}
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
public void setArrays(Arrays arrays) {
this.arrays = arrays;
}
/**
* Asserts that the given array is {@code null} or empty.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws AssertionError if the given array is not {@code null} *and* contains one or more elements.
*/
public void assertNullOrEmpty(AssertionInfo info, char[] actual) {
arrays.assertNullOrEmpty(info, failures, actual);
}
/**
* Asserts that the given array is empty.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array is not empty.
*/
public void assertEmpty(AssertionInfo info, char[] actual) {
arrays.assertEmpty(info, failures, actual);
}
/**
* Asserts that the given array is not empty.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array is empty.
*/
public void assertNotEmpty(AssertionInfo info, char[] actual) {
arrays.assertNotEmpty(info, failures, actual);
}
/**
* Asserts that the number of elements in the given array is equal to the expected one.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param expectedSize the expected size of {@code actual}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the number of elements in the given array is different than the expected one.
*/
public void assertHasSize(AssertionInfo info, char[] actual, int expectedSize) {
arrays.assertHasSize(info, actual, expectedSize);
}
/**
* Asserts that the number of elements in the given array is greater than the given boundary.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param boundary the given value to compare the size of {@code actual} to.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the number of elements in the given array is not greater than the boundary.
*/
public void assertHasSizeGreaterThan(AssertionInfo info, char[] actual, int boundary) {
arrays.assertHasSizeGreaterThan(info, actual, boundary);
}
/**
* Asserts that the number of elements in the given array is greater than or equal to the given boundary.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param boundary the given value to compare the size of {@code actual} to.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the number of elements in the given array is not greater than or equal to the boundary.
*/
public void assertHasSizeGreaterThanOrEqualTo(AssertionInfo info, char[] actual, int boundary) {
arrays.assertHasSizeGreaterThanOrEqualTo(info, actual, boundary);
}
/**
* Asserts that the number of elements in the given array is less than the given boundary.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param boundary the given value to compare the size of {@code actual} to.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the number of elements in the given array is not less than the boundary.
*/
public void assertHasSizeLessThan(AssertionInfo info, char[] actual, int boundary) {
arrays.assertHasSizeLessThan(info, actual, boundary);
}
/**
* Asserts that the number of elements in the given array is less than or equal to the given boundary.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param boundary the given value to compare the size of {@code actual} to.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the number of elements in the given array is not less than or equal to the boundary.
*/
public void assertHasSizeLessThanOrEqualTo(AssertionInfo info, char[] actual, int boundary) {
arrays.assertHasSizeLessThanOrEqualTo(info, actual, boundary);
}
/**
* Asserts that the number of elements in the given array is between the given lower and higher boundary (inclusive).
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param lowerBoundary the lower boundary compared to which actual size should be greater than or equal to.
* @param higherBoundary the higher boundary compared to which actual size should be less than or equal to.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the number of elements in the given array is not between the boundaries.
*/
public void assertHasSizeBetween(AssertionInfo info, char[] actual, int lowerBoundary, int higherBoundary) {
arrays.assertHasSizeBetween(info, actual, lowerBoundary, higherBoundary);
}
/**
* Assert that the actual array has the same size as the other {@code Iterable}.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param other the group to compare
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the other group is {@code null}.
* @throws AssertionError if the actual group does not have the same size.
*/
public void assertHasSameSizeAs(AssertionInfo info, char[] actual, Iterable<?> other) {
arrays.assertHasSameSizeAs(info, actual, other);
}
/**
* Assert that the actual array has the same size as the other array.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param other the group to compare
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the other group is {@code null}.
* @throws AssertionError if the actual group does not have the same size.
*/
public void assertHasSameSizeAs(AssertionInfo info, char[] actual, Object[] other) {
arrays.assertHasSameSizeAs(info, actual, other);
}
/**
* Asserts that the given array contains the given values, in any order.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param values the values that are expected to be in the given array.
* @throws NullPointerException if the array of values is {@code null}.
* @throws IllegalArgumentException if the array of values is empty.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array does not contain the given values.
*/
public void assertContains(AssertionInfo info, char[] actual, char[] values) {
arrays.assertContains(info, failures, actual, values);
}
/**
* Verifies that the given array contains the given value at the given index.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param value the value to look for.
* @param index the index where the value should be stored in the given array.
* @throws AssertionError if the given array is {@code null} or empty.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws IndexOutOfBoundsException if the value of the given {@code Index} is equal to or greater than the size of
* the given array.
* @throws AssertionError if the given array does not contain the given value at the given index.
*/
public void assertContains(AssertionInfo info, char[] actual, char value, Index index) {
arrays.assertContains(info, failures, actual, value, index);
}
/**
* Verifies that the given array does not contain the given value at the given index.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param value the value to look for.
* @param index the index where the value should be stored in the given array.
* @throws AssertionError if the given array is {@code null}.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws AssertionError if the given array contains the given value at the given index.
*/
public void assertDoesNotContain(AssertionInfo info, char[] actual, char value, Index index) {
arrays.assertDoesNotContain(info, failures, actual, value, index);
}
/**
* Asserts that the given array contains only the given values and nothing else, in any order.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param values the values that are expected to be in the given array.
* @throws NullPointerException if the array of values is {@code null}.
* @throws IllegalArgumentException if the array of values is empty.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array does not contain the given values or if the given array contains values
* that are not in the given array.
*/
public void assertContainsOnly(AssertionInfo info, char[] actual, char[] values) {
arrays.assertContainsOnly(info, failures, actual, values);
}
public void assertContainsExactly(AssertionInfo info, char[] actual, char[] values) {
arrays.assertContainsExactly(info, failures, actual, values);
}
public void assertContainsExactlyInAnyOrder(AssertionInfo info, char[] actual, char[] values) {
arrays.assertContainsExactlyInAnyOrder(info, failures, actual, values);
}
/**
* Asserts that the given array contains only once the given values.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param values the values that are expected to be in the given array.
* @throws NullPointerException if the array of values is {@code null}.
* @throws IllegalArgumentException if the array of values is empty.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array does not contain the given values or if the given array contains more
* than once values.
*/
public void assertContainsOnlyOnce(AssertionInfo info, char[] actual, char[] values) {
arrays.assertContainsOnlyOnce(info, failures, actual, values);
}
/**
* Verifies that the given array contains the given sequence of values, without any other values between them.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param sequence the sequence of values to look for.
* @throws AssertionError if the given array is {@code null}.
* @throws NullPointerException if the given sequence is {@code null}.
* @throws IllegalArgumentException if the given sequence is empty.
* @throws AssertionError if the given array does not contain the given sequence of values.
*/
public void assertContainsSequence(AssertionInfo info, char[] actual, char[] sequence) {
arrays.assertContainsSequence(info, failures, actual, sequence);
}
/**
* Verifies that the given array contains the given subsequence of values (possibly with other values between them).
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param subsequence the subsequence of values to look for.
* @throws AssertionError if the given array is {@code null}.
* @throws NullPointerException if the given subsequence is {@code null}.
* @throws IllegalArgumentException if the given subsequence is empty.
* @throws AssertionError if the given array does not contain the given subsequence of values.
*/
public void assertContainsSubsequence(AssertionInfo info, char[] actual, char[] subsequence) {
arrays.assertContainsSubsequence(info, failures, actual, subsequence);
}
/**
* Asserts that the given array does not contain the given values.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param values the values that are expected not to be in the given array.
* @throws NullPointerException if the array of values is {@code null}.
* @throws IllegalArgumentException if the array of values is empty.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array contains any of given values.
*/
public void assertDoesNotContain(AssertionInfo info, char[] actual, char[] values) {
arrays.assertDoesNotContain(info, failures, actual, values);
}
/**
* Asserts that the given array does not have duplicate values.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws NullPointerException if the array of values is {@code null}.
* @throws IllegalArgumentException if the array of values is empty.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array contains duplicate values.
*/
public void assertDoesNotHaveDuplicates(AssertionInfo info, char[] actual) {
arrays.assertDoesNotHaveDuplicates(info, failures, actual);
}
/**
* Verifies that the given array starts with the given sequence of values, without any other values between them.
* Similar to <code>{@link #assertContainsSequence(AssertionInfo, char[], char[])}</code>, but it also verifies that
* the first element in the sequence is also the first element of the given array.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param sequence the sequence of values to look for.
* @throws NullPointerException if the given argument is {@code null}.
* @throws IllegalArgumentException if the given argument is an empty array.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array does not start with the given sequence of values.
*/
public void assertStartsWith(AssertionInfo info, char[] actual, char[] sequence) {
arrays.assertStartsWith(info, failures, actual, sequence);
}
/**
* Verifies that the given array ends with the given sequence of values, without any other values between them.
* Similar to <code>{@link #assertContainsSequence(AssertionInfo, char[], char[])}</code>, but it also verifies that
* the last element in the sequence is also the last element of the given array.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param sequence the sequence of values to look for.
* @throws NullPointerException if the given argument is {@code null}.
* @throws IllegalArgumentException if the given argument is an empty array.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array does not end with the given sequence of values.
*/
public void assertEndsWith(AssertionInfo info, char[] actual, char[] sequence) {
arrays.assertEndsWith(info, failures, actual, sequence);
}
/**
* Concrete implementation of {@link ArraySortedAssert#isSorted()}.
*
* @param info contains information about the assertion.
* @param actual the given array.
*/
public void assertIsSorted(AssertionInfo info, char[] actual) {
arrays.assertIsSorted(info, failures, actual);
}
/**
* Concrete implementation of {@link ArraySortedAssert#isSortedAccordingTo(Comparator)}.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param comparator the {@link Comparator} used to compare array elements
*/
public void assertIsSortedAccordingToComparator(AssertionInfo info, char[] actual,
Comparator<? super Character> comparator) {
Arrays.assertIsSortedAccordingToComparator(info, failures, actual, comparator);
}
public void assertContainsAnyOf(AssertionInfo info, char[] actual, char[] values) {
arrays.assertContainsAnyOf(info, failures, actual, values);
}
}
|
CharArrays
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
|
{
"start": 119853,
"end": 122122
}
|
class ____ extends FSEditLogOp {
CacheDirectiveInfo directive;
public AddCacheDirectiveInfoOp() {
super(OP_ADD_CACHE_DIRECTIVE);
}
static AddCacheDirectiveInfoOp getInstance(OpInstanceCache cache) {
return cache.get(OP_ADD_CACHE_DIRECTIVE);
}
@Override
void resetSubFields() {
directive = null;
}
public AddCacheDirectiveInfoOp setDirective(
CacheDirectiveInfo directive) {
this.directive = directive;
assert(directive.getId() != null);
assert(directive.getPath() != null);
assert(directive.getReplication() != null);
assert(directive.getPool() != null);
assert(directive.getExpiration() != null);
return this;
}
@Override
void readFields(DataInputStream in, int logVersion) throws IOException {
directive = FSImageSerialization.readCacheDirectiveInfo(in);
readRpcIds(in, logVersion);
}
@Override
public void writeFields(DataOutputStream out) throws IOException {
FSImageSerialization.writeCacheDirectiveInfo(out, directive);
writeRpcIds(rpcClientId, rpcCallId, out);
}
@Override
protected void toXml(ContentHandler contentHandler) throws SAXException {
FSImageSerialization.writeCacheDirectiveInfo(contentHandler, directive);
appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
}
@Override
void fromXml(Stanza st) throws InvalidXmlException {
directive = FSImageSerialization.readCacheDirectiveInfo(st);
readRpcIdsFromXml(st);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("AddCacheDirectiveInfo [")
.append("id=" + directive.getId() + ",")
.append("path=" + directive.getPath().toUri().getPath() + ",")
.append("replication=" + directive.getReplication() + ",")
.append("pool=" + directive.getPool() + ",")
.append("expiration=" + directive.getExpiration().getMillis());
appendRpcIdsToString(builder, rpcClientId, rpcCallId);
builder.append("]");
return builder.toString();
}
}
/**
* {@literal @AtMostOnce} for
* {@link ClientProtocol#modifyCacheDirective}
*/
static
|
AddCacheDirectiveInfoOp
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/multipart/LargerThanDefaultFormAttributeMultipartFormInputTest.java
|
{
"start": 990,
"end": 2372
}
|
class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setMaxFormAttributeSize(120000)
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(Resource.class, Data.class);
}
});
private final File FILE = new File("./src/test/resources/larger-than-default-form-attribute.txt");
@Test
public void test() throws IOException {
String fileContents = new String(Files.readAllBytes(FILE.toPath()), StandardCharsets.UTF_8);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 10; ++i) {
sb.append(fileContents);
}
fileContents = sb.toString();
Assertions.assertTrue(fileContents.length() > HttpServerOptions.DEFAULT_MAX_FORM_ATTRIBUTE_SIZE);
given()
.multiPart("text", fileContents)
.accept("text/plain")
.when()
.post("/test")
.then()
.statusCode(200)
.contentType(ContentType.TEXT)
.body(equalTo(fileContents));
}
@Path("/test")
public static
|
LargerThanDefaultFormAttributeMultipartFormInputTest
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLShowCreateViewStatement.java
|
{
"start": 906,
"end": 1565
}
|
class ____ extends SQLStatementImpl implements SQLShowStatement, SQLReplaceable {
private SQLName name;
public void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, name);
}
visitor.endVisit(this);
}
public SQLName getName() {
return name;
}
public void setName(SQLName functionName) {
this.name = functionName;
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
if (name == expr) {
setName((SQLName) target);
return true;
}
return false;
}
}
|
SQLShowCreateViewStatement
|
java
|
apache__flink
|
flink-formats/flink-avro/src/test/java/org/apache/flink/formats/avro/utils/AvroTestUtils.java
|
{
"start": 2348,
"end": 20309
}
|
class ____ {
/** Tests all Avro data types as well as nested types for a specific record. */
public static Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row>
getSpecificTestData() {
final Address addr =
Address.newBuilder()
.setNum(42)
.setStreet("Main Street 42")
.setCity("Test City")
.setState("Test State")
.setZip("12345")
.build();
final Row rowAddr = new Row(5);
rowAddr.setField(0, 42);
rowAddr.setField(1, "Main Street 42");
rowAddr.setField(2, "Test City");
rowAddr.setField(3, "Test State");
rowAddr.setField(4, "12345");
final User user =
User.newBuilder()
.setName("Charlie")
.setFavoriteNumber(null)
.setFavoriteColor("blue")
.setTypeLongTest(1337L)
.setTypeDoubleTest(1.337d)
.setTypeNullTest(null)
.setTypeBoolTest(false)
.setTypeArrayString(Arrays.asList("hello", "world"))
.setTypeArrayBoolean(Arrays.asList(true, true, false))
.setTypeNullableArray(null)
.setTypeEnum(Colors.RED)
.setTypeMap(Collections.singletonMap("test", 12L))
.setTypeFixed(
new Fixed16(
new byte[] {
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
}))
.setTypeUnion(12.0)
.setTypeNested(addr)
.setTypeBytes(ByteBuffer.allocate(10))
.setTypeDate(LocalDate.parse("2014-03-01"))
.setTypeTimeMillis(LocalTime.parse("12:12:12"))
.setTypeTimeMicros(
LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS))
.setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z"))
.setTypeTimestampMicros(
Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS))
// byte array must contain the two's-complement representation of the
// unscaled integer value in big-endian byte order
.setTypeDecimalBytes(
ByteBuffer.wrap(
BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()))
// array of length n can store at most
// Math.floor(Math.log10(Math.pow(2, 8 * n - 1) - 1))
// base-10 digits of precision
.setTypeDecimalFixed(
new Fixed2(
BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()))
.build();
final Row rowUser = new Row(23);
rowUser.setField(0, "Charlie");
rowUser.setField(1, null);
rowUser.setField(2, "blue");
rowUser.setField(3, 1337L);
rowUser.setField(4, 1.337d);
rowUser.setField(5, null);
rowUser.setField(6, false);
rowUser.setField(7, new String[] {"hello", "world"});
rowUser.setField(8, new Boolean[] {true, true, false});
rowUser.setField(9, null);
rowUser.setField(10, "RED");
rowUser.setField(11, Collections.singletonMap("test", 12L));
rowUser.setField(12, new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16});
rowUser.setField(13, 12.0);
rowUser.setField(14, rowAddr);
rowUser.setField(15, new byte[10]);
rowUser.setField(16, Date.valueOf("2014-03-01"));
rowUser.setField(17, Time.valueOf("12:12:12"));
rowUser.setField(
18, Time.valueOf(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS)));
rowUser.setField(19, Timestamp.valueOf("2014-03-01 12:12:12.321"));
rowUser.setField(
20, Timestamp.from(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS)));
rowUser.setField(21, BigDecimal.valueOf(2000, 2));
rowUser.setField(22, BigDecimal.valueOf(2000, 2));
final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> t = new Tuple3<>();
t.f0 = User.class;
t.f1 = user;
t.f2 = rowUser;
return t;
}
/** Tests almost all Avro data types as well as nested types for a generic record. */
public static Tuple3<GenericRecord, Row, Schema> getGenericTestData() {
final String schemaString =
"{\"type\":\"record\",\"name\":\"GenericUser\",\"namespace\":\"org.apache.flink.formats.avro.generated\","
+ "\"fields\": [{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":[\"int\",\"null\"]},"
+ "{\"name\":\"favorite_color\",\"type\":[\"string\",\"null\"]},{\"name\":\"type_long_test\",\"type\":[\"long\",\"null\"]}"
+ ",{\"name\":\"type_double_test\",\"type\":\"double\"},{\"name\":\"type_null_test\",\"type\":[\"null\"]},"
+ "{\"name\":\"type_bool_test\",\"type\":[\"boolean\"]},{\"name\":\"type_array_string\",\"type\":"
+ "{\"type\":\"array\",\"items\":\"string\"}},{\"name\":\"type_array_boolean\",\"type\":{\"type\":\"array\","
+ "\"items\":\"boolean\"}},{\"name\":\"type_nullable_array\",\"type\":[\"null\",{\"type\":\"array\","
+ "\"items\":\"string\"}],\"default\":null},{\"name\":\"type_enum\",\"type\":{\"type\":\"enum\","
+ "\"name\":\"Colors\",\"symbols\":[\"RED\",\"GREEN\",\"BLUE\"]}},{\"name\":\"type_map\",\"type\":{\"type\":\"map\","
+ "\"values\":\"long\"}},{\"name\":\"type_fixed\",\"type\":[\"null\",{\"type\":\"fixed\",\"name\":\"Fixed16\","
+ "\"size\":16}],\"size\":16},{\"name\":\"type_union\",\"type\":[\"null\",\"boolean\",\"long\",\"double\"]},"
+ "{\"name\":\"type_nested\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"Address\",\"fields\":[{\"name\":\"num\","
+ "\"type\":\"int\"},{\"name\":\"street\",\"type\":\"string\"},{\"name\":\"city\",\"type\":\"string\"},"
+ "{\"name\":\"state\",\"type\":\"string\"},{\"name\":\"zip\",\"type\":\"string\"}]}]},{\"name\":\"type_bytes\","
+ "\"type\":\"bytes\"},{\"name\":\"type_date\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},"
+ "{\"name\":\"type_time_millis\",\"type\":{\"type\":\"int\",\"logicalType\":\"time-millis\"}},{\"name\":\"type_time_micros\","
+ "\"type\":{\"type\":\"long\",\"logicalType\":\"time-micros\"}},{\"name\":\"type_timestamp_millis\",\"type\":{\"type\":\"long\","
+ "\"logicalType\":\"timestamp-millis\"}},{\"name\":\"type_timestamp_micros\",\"type\":{\"type\":\"long\","
+ "\"logicalType\":\"timestamp-micros\"}},{\"name\":\"type_decimal_bytes\",\"type\":{\"type\":\"bytes\","
+ "\"logicalType\":\"decimal\",\"precision\":4,\"scale\":2}},{\"name\":\"type_decimal_fixed\",\"type\":{\"type\":\"fixed\","
+ "\"name\":\"Fixed2\",\"size\":2,\"logicalType\":\"decimal\",\"precision\":4,\"scale\":2}}]}";
final Schema schema = new Schema.Parser().parse(schemaString);
GenericRecord addr =
new GenericData.Record(schema.getField("type_nested").schema().getTypes().get(1));
addr.put("num", 42);
addr.put("street", "Main Street 42");
addr.put("city", "Test City");
addr.put("state", "Test State");
addr.put("zip", "12345");
final Row rowAddr = new Row(5);
rowAddr.setField(0, 42);
rowAddr.setField(1, "Main Street 42");
rowAddr.setField(2, "Test City");
rowAddr.setField(3, "Test State");
rowAddr.setField(4, "12345");
final GenericRecord user = new GenericData.Record(schema);
user.put("name", "Charlie");
user.put("favorite_number", null);
user.put("favorite_color", "blue");
user.put("type_long_test", 1337L);
user.put("type_double_test", 1.337d);
user.put("type_null_test", null);
user.put("type_bool_test", false);
user.put("type_array_string", Arrays.asList("hello", "world"));
user.put("type_array_boolean", Arrays.asList(true, true, false));
user.put("type_nullable_array", null);
user.put(
"type_enum",
new GenericData.EnumSymbol(schema.getField("type_enum").schema(), "RED"));
user.put("type_map", Collections.singletonMap("test", 12L));
user.put(
"type_fixed",
new Fixed16(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}));
user.put("type_union", 12.0);
user.put("type_nested", addr);
user.put("type_bytes", ByteBuffer.allocate(10));
user.put("type_date", LocalDate.parse("2014-03-01"));
user.put("type_time_millis", LocalTime.parse("12:12:12"));
user.put("type_time_micros", LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS));
user.put("type_timestamp_millis", Instant.parse("2014-03-01T12:12:12.321Z"));
user.put(
"type_timestamp_micros", Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS));
user.put(
"type_decimal_bytes",
ByteBuffer.wrap(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()));
user.put(
"type_decimal_fixed",
new GenericData.Fixed(
schema.getField("type_decimal_fixed").schema(),
BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()));
final Row rowUser = new Row(23);
rowUser.setField(0, "Charlie");
rowUser.setField(1, null);
rowUser.setField(2, "blue");
rowUser.setField(3, 1337L);
rowUser.setField(4, 1.337d);
rowUser.setField(5, null);
rowUser.setField(6, false);
rowUser.setField(7, new String[] {"hello", "world"});
rowUser.setField(8, new Boolean[] {true, true, false});
rowUser.setField(9, null);
rowUser.setField(10, "RED");
rowUser.setField(11, Collections.singletonMap("test", 12L));
rowUser.setField(12, new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16});
rowUser.setField(13, 12.0);
rowUser.setField(14, rowAddr);
rowUser.setField(15, new byte[10]);
rowUser.setField(16, Date.valueOf("2014-03-01"));
rowUser.setField(17, Time.valueOf("12:12:12"));
rowUser.setField(
18, Time.valueOf(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS)));
rowUser.setField(19, Timestamp.valueOf("2014-03-01 12:12:12.321"));
rowUser.setField(
20, Timestamp.from(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS)));
rowUser.setField(21, BigDecimal.valueOf(2000, 2));
rowUser.setField(22, BigDecimal.valueOf(2000, 2));
final Tuple3<GenericRecord, Row, Schema> t = new Tuple3<>();
t.f0 = user;
t.f1 = rowUser;
t.f2 = schema;
return t;
}
public static Tuple4<Class<? extends SpecificRecord>, SpecificRecord, GenericRecord, Row>
getTimestampTestData() {
final String schemaString =
"{\"type\":\"record\",\"name\":\"GenericTimestamps\",\"namespace\":\"org.apache.flink.formats.avro.generated\","
+ "\"fields\": [{\"name\":\"type_timestamp_millis\",\"type\":{\"type\":\"long\","
+ "\"logicalType\":\"timestamp-millis\"}},{\"name\":\"type_timestamp_micros\",\"type\":{\"type\":\"long\","
+ "\"logicalType\":\"timestamp-micros\"}},{\"name\": \"type_local_timestamp_millis\", \"type\": {\"type\": \"long\", \"logicalType\": \"local-timestamp-millis\"}},"
+ "{\"name\": \"type_local_timestamp_micros\", \"type\": {\"type\": \"long\", \"logicalType\": \"local-timestamp-micros\"}}]}";
final Schema schema = new Schema.Parser().parse(schemaString);
final GenericRecord timestampRecord = new GenericData.Record(schema);
timestampRecord.put("type_timestamp_millis", Instant.parse("2014-03-01T12:12:12.321Z"));
timestampRecord.put(
"type_timestamp_micros", Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS));
timestampRecord.put(
"type_local_timestamp_millis", LocalDateTime.parse("2014-03-01T12:12:12.321"));
timestampRecord.put(
"type_local_timestamp_micros", LocalDateTime.parse("1970-01-01T00:00:00.123456"));
final Timestamps timestamps =
Timestamps.newBuilder()
.setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z"))
.setTypeTimestampMicros(
Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS))
.setTypeLocalTimestampMillis(LocalDateTime.parse("2014-03-01T12:12:12.321"))
.setTypeLocalTimestampMicros(
LocalDateTime.parse("1970-01-01T00:00:00.123456"))
.build();
final Row timestampRow = new Row(4);
timestampRow.setField(0, Timestamp.valueOf("2014-03-01 12:12:12.321"));
timestampRow.setField(
1, Timestamp.from(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS)));
timestampRow.setField(2, Timestamp.valueOf(LocalDateTime.parse("2014-03-01T12:12:12.321")));
timestampRow.setField(
3, Timestamp.valueOf(LocalDateTime.parse("1970-01-01T00:00:00.123456")));
final Tuple4<Class<? extends SpecificRecord>, SpecificRecord, GenericRecord, Row> t =
new Tuple4<>();
t.f0 = Timestamps.class;
t.f1 = timestamps;
t.f2 = timestampRecord;
t.f3 = timestampRow;
return t;
}
/**
* Craft a large Avro Schema which contains more than 0xFFFF characters.
*
* <p>0xFFFF is the magical number that once a java string length is above it, then the
* serialization scheme changes
*/
public static Schema getLargeSchema() {
SchemaBuilder.FieldAssembler<Schema> fields =
SchemaBuilder.record("LargeAvroSchema")
.namespace(AvroSerializerLargeGenericRecordTest.class.getName())
.fields();
for (int i = 0; i < 10000; ++i) {
fields = fields.optionalString("field" + i);
}
Schema schema = fields.endRecord();
assert schema.toString().length() > 0xFFFF;
return schema;
}
/** Craft a small Avro Schema which contains less than 0xFFFF characters. */
public static Schema getSmallSchema() {
return new org.apache.avro.Schema.Parser()
.parse(
"{\"type\":\"record\",\"name\":\"Dummy\",\"namespace\":\"dummy\",\"fields\": "
+ "[{\"name\":\"afield\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}");
}
/**
* Writes given record using specified schema.
*
* @param record record to serialize
* @param schema schema to use for serialization
* @return serialized record
*/
public static byte[] writeRecord(GenericRecord record, Schema schema) throws IOException {
return writeRecord(record, schema, AvroEncoding.BINARY);
}
/**
* Writes given record using specified schema.
*
* @param record record to serialize
* @param schema schema to use for serialization
* @param encoding serialization approach to use
* @return serialized record
*/
public static byte[] writeRecord(GenericRecord record, Schema schema, AvroEncoding encoding)
throws IOException {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Encoder encoder = createEncoder(encoding, schema, stream);
new GenericDatumWriter<>(schema).write(record, encoder);
encoder.flush();
return stream.toByteArray();
}
/**
* Writes given specific record.
*
* @param record record to serialize
* @return serialized record
*/
public static <T extends SpecificRecord> byte[] writeRecord(T record, AvroEncoding encoding)
throws IOException {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Encoder encoder = createEncoder(encoding, record.getSchema(), stream);
@SuppressWarnings("unchecked")
SpecificDatumWriter<T> writer = new SpecificDatumWriter<>((Class<T>) record.getClass());
writer.write(record, encoder);
encoder.flush();
return stream.toByteArray();
}
/** Creates an Avro encoder using the requested serialization approach. */
public static Encoder createEncoder(
AvroEncoding encoding, Schema schema, OutputStream outputStream) throws IOException {
if (encoding == AvroEncoding.JSON) {
return EncoderFactory.get().jsonEncoder(schema, outputStream);
} else {
return EncoderFactory.get().binaryEncoder(outputStream, null);
}
}
}
|
AvroTestUtils
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/longarray/AtomicLongArrayAssert_containsAnyOf_Test.java
|
{
"start": 878,
"end": 1225
}
|
class ____ extends AtomicLongArrayAssertBaseTest {
@Override
protected AtomicLongArrayAssert invoke_api_method() {
return assertions.containsAnyOf(1, 2);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContainsAnyOf(info(), internalArray(), arrayOf(1, 2));
}
}
|
AtomicLongArrayAssert_containsAnyOf_Test
|
java
|
apache__camel
|
components/camel-telemetry/src/test/java/org/apache/camel/telemetry/decorators/AbstractMessagingSpanDecoratorTest.java
|
{
"start": 1205,
"end": 3547
}
|
class ____ {
@Test
public void testOperationName() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("jms://MyQueue?hello=world");
SpanDecorator decorator = new AbstractMessagingSpanDecorator() {
@Override
public String getComponent() {
return null;
}
@Override
public String getComponentClassName() {
return null;
}
};
assertEquals("MyQueue", decorator.getOperationName(null, endpoint));
}
@Test
public void testPreMessageBusDestination() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Exchange exchange = Mockito.mock(Exchange.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("jms://MyQueue?hello=world");
SpanDecorator decorator = new AbstractMessagingSpanDecorator() {
@Override
public String getComponent() {
return null;
}
@Override
public String getComponentClassName() {
return null;
}
};
MockSpanAdapter span = new MockSpanAdapter();
decorator.beforeTracingEvent(span, exchange, endpoint);
assertEquals("MyQueue", span.tags().get(TagConstants.MESSAGE_BUS_DESTINATION));
}
@Test
public void testPreMessageId() {
String messageId = "abcd";
Endpoint endpoint = Mockito.mock(Endpoint.class);
Exchange exchange = Mockito.mock(Exchange.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("test");
SpanDecorator decorator = new AbstractMessagingSpanDecorator() {
@Override
public String getComponent() {
return null;
}
@Override
public String getComponentClassName() {
return null;
}
@Override
public String getMessageId(Exchange exchange) {
return messageId;
}
};
MockSpanAdapter span = new MockSpanAdapter();
decorator.beforeTracingEvent(span, exchange, endpoint);
assertEquals(messageId, span.tags().get(TagConstants.MESSAGE_ID));
}
}
|
AbstractMessagingSpanDecoratorTest
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/failures/postconstruct/MyClassB.java
|
{
"start": 900,
"end": 1310
}
|
class ____ {
boolean setupComplete = false;
boolean injectedFirst = false;
@Inject
protected MyClassA another;
private MyClassA propA;
@Inject
public void setPropA(MyClassA propA) {
this.propA = propA;
}
public MyClassA getPropA() {
return propA;
}
@PostConstruct
public void setup() {
throw new RuntimeException("bad");
}
}
|
MyClassB
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/sort/BinaryExternalSorter.java
|
{
"start": 42633,
"end": 46665
}
|
class ____ extends ThreadBase {
private final int maxFanIn;
private final BinaryExternalMerger merger;
private MergingThread(
ExceptionHandler<IOException> exceptionHandler,
CircularQueues queues,
int maxNumFileHandles,
BinaryExternalMerger merger) {
super(exceptionHandler, "SortMerger merging thread", queues);
this.maxFanIn = maxNumFileHandles;
this.merger = merger;
}
@Override
public void go() throws IOException {
final List<ChannelWithMeta> spillChannelIDs = new ArrayList<>();
List<ChannelWithMeta> finalMergeChannelIDs = new ArrayList<>();
ChannelWithMeta channelID;
while (isRunning()) {
try {
channelID = this.queues.merge.take();
} catch (InterruptedException iex) {
if (isRunning()) {
LOG.error(
"Merging thread was interrupted (without being shut down) "
+ "while grabbing a channel with meta. Retrying...");
continue;
} else {
return;
}
}
if (!isRunning()) {
return;
}
if (channelID == FINAL_MERGE_MARKER) {
finalMergeChannelIDs.addAll(spillChannelIDs);
spillChannelIDs.clear();
// sort file channels by block numbers, to ensure a better merging performance
finalMergeChannelIDs.sort(
Comparator.comparingInt(ChannelWithMeta::getBlockCount));
break;
}
spillChannelIDs.add(channelID);
// if async merge is disabled, we will only do the final merge
// otherwise we wait for `maxFanIn` number of channels to begin a merge
if (!asyncMergeEnabled || spillChannelIDs.size() < maxFanIn) {
continue;
}
// perform a intermediate merge
finalMergeChannelIDs.addAll(merger.mergeChannelList(spillChannelIDs));
spillChannelIDs.clear();
}
// check if we have spilled some data at all
if (finalMergeChannelIDs.isEmpty()) {
if (iterator == null) {
// only set the iterator if it's not set
// by the in memory merge stage of spilling thread.
setResultIterator(EmptyMutableObjectIterator.get());
}
} else {
// merge channels until sufficient file handles are available
while (isRunning() && finalMergeChannelIDs.size() > this.maxFanIn) {
finalMergeChannelIDs = merger.mergeChannelList(finalMergeChannelIDs);
}
// Beginning final merge.
// no need to call `getReadMemoryFromHeap` again,
// because `finalMergeChannelIDs` must become smaller
List<FileIOChannel> openChannels = new ArrayList<>();
BinaryMergeIterator<BinaryRowData> iterator =
merger.getMergingIterator(finalMergeChannelIDs, openChannels);
channelManager.addOpenChannels(openChannels);
setResultIterator(iterator);
}
// Merging thread done.
}
}
public long getUsedMemoryInBytes() {
long usedSizeInBytes = 0;
for (BinaryInMemorySortBuffer sortBuffer : sortBuffers) {
usedSizeInBytes += sortBuffer.getOccupancy();
}
return usedSizeInBytes;
}
public long getNumSpillFiles() {
return numSpillFiles;
}
public long getSpillInBytes() {
return spillInBytes;
}
}
|
MergingThread
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/jsonp/JSONPParseTest3.java
|
{
"start": 296,
"end": 1082
}
|
class ____ extends TestCase {
public void test_f() throws Exception {
String text = "parent.callback ({'id':1, 'name':'ido)nans'},1,2 ); /**/ ";
JSONPObject jsonpObject = (JSONPObject) JSON.parseObject(text, JSONPObject.class);
assertEquals("parent.callback", jsonpObject.getFunction());
assertEquals(3, jsonpObject.getParameters().size());
JSONObject param = (JSONObject) jsonpObject.getParameters().get(0);
assertEquals(1, param.get("id"));
assertEquals("ido)nans", param.get("name"));
String json = JSON.toJSONString(jsonpObject, SerializerFeature.BrowserSecure, SerializerFeature.MapSortField);
assertEquals("/**/parent.callback({\"id\":1,\"name\":\"ido\\u0029nans\"},1,2)", json);
}
}
|
JSONPParseTest3
|
java
|
google__guice
|
core/test/com/google/inject/internal/UniqueAnnotationsTest.java
|
{
"start": 767,
"end": 1204
}
|
class ____ extends TestCase {
@UniqueAnnotations.Internal(31)
public Void unused;
public void testEqualsHashCodeToString() {
Annotation actual = UniqueAnnotations.create(31);
Annotation expected = getClass().getFields()[0].getAnnotations()[0];
assertEquals(expected.toString(), actual.toString());
assertEquals(expected.hashCode(), actual.hashCode());
assertEquals(expected, actual);
}
}
|
UniqueAnnotationsTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/function/TriConsumerTest.java
|
{
"start": 1088,
"end": 2900
}
|
class ____ extends AbstractLangTest {
@Test
void testAccept() throws Throwable {
final AtomicReference<Character> ref1 = new AtomicReference<>();
final AtomicReference<Short> ref2 = new AtomicReference<>();
final AtomicReference<String> ref3 = new AtomicReference<>();
final TriConsumer<AtomicReference<Character>, AtomicReference<Short>, AtomicReference<String>> tri = (t, u,
v) -> {
ref1.set(Character.valueOf('a'));
ref2.set(Short.valueOf((short) 1));
ref3.set("z");
};
tri.accept(ref1, ref2, ref3);
assertEquals(Character.valueOf('a'), ref1.get());
assertEquals(Short.valueOf((short) 1), ref2.get());
assertEquals("z", ref3.get());
}
@Test
void testAndThen() throws Throwable {
final AtomicReference<Character> ref1 = new AtomicReference<>();
final AtomicReference<Short> ref2 = new AtomicReference<>();
final AtomicReference<String> ref3 = new AtomicReference<>();
final TriConsumer<AtomicReference<Character>, AtomicReference<Short>, AtomicReference<String>> tri = (t, u,
v) -> {
ref1.set(Character.valueOf('a'));
ref2.set(Short.valueOf((short) 1));
ref3.set("z");
};
final TriConsumer<AtomicReference<Character>, AtomicReference<Short>, AtomicReference<String>> triAfter = (t, u,
v) -> {
ref1.set(Character.valueOf('b'));
ref2.set(Short.valueOf((short) 2));
ref3.set("zz");
};
tri.andThen(triAfter).accept(ref1, ref2, ref3);
assertEquals(Character.valueOf('b'), ref1.get());
assertEquals(Short.valueOf((short) 2), ref2.get());
assertEquals("zz", ref3.get());
}
}
|
TriConsumerTest
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/windowing/sessionwindows/SessionEventGeneratorImpl.java
|
{
"start": 9736,
"end": 10028
}
|
class ____ implements EventGenerator<K, E> {
@Override
public K getKey() {
return configuration.getSessionConfiguration().getKey();
}
}
/** Internal generator delegate for producing session events that are timely. */
private
|
AbstractEventGenerator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/AnalyzerTests.java
|
{
"start": 2079,
"end": 12890
}
|
class ____ extends ESTestCase {
private static final String INDEX_NAME = "test";
private IndexResolution index = loadIndexResolution("mapping-default.json");
private static Map<String, EsField> loadEqlMapping(String name) {
return TypesTests.loadMapping(name);
}
public static IndexResolution loadIndexResolution(String name) {
return IndexResolution.valid(new EsIndex(INDEX_NAME, loadEqlMapping(name)));
}
public void testOptionalFieldOnTheLeft() {
Equals check = equalsCondition("process where ?foo == 123");
checkMissingOptional(check.left());
assertTrue(check.right() instanceof Literal);
assertEquals(123, ((Literal) check.right()).value());
}
public void testOptionalFieldOnTheRight() {
Equals check = equalsCondition("process where 123 == ?bar");
checkMissingOptional(check.right());
assertTrue(check.left() instanceof Literal);
assertEquals(123, ((Literal) check.left()).value());
}
public void testOptionalFieldsInsideFunction() {
Equals check = equalsCondition("process where concat(?foo, \" \", ?bar) == \"test\"");
assertEquals("test", ((Literal) check.right()).value());
assertTrue(check.left() instanceof Concat);
Concat concat = (Concat) check.left();
List<Expression> arguments = new ArrayList<>(3);
checkMissingOptional(concat.arguments().get(0));
assertEquals(new Literal(Source.EMPTY, " ", DataTypes.KEYWORD), concat.arguments().get(1));
checkMissingOptional(concat.arguments().get(2));
}
public void testOptionalFieldExistsInMapping() {
Equals check = equalsCondition("process where ?pid == 123");
assertTrue(check.left() instanceof FieldAttribute);
assertEquals("pid", ((FieldAttribute) check.left()).name());
assertTrue(check.right() instanceof Literal);
assertEquals(123, ((Literal) check.right()).value());
}
public void testOptionalFieldsAsSequenceKey() {
String eql = """
sequence by ?x
[any where ?x == 123] by ?pid
[any where true] by pid
[any where ?y != null] by ?z
until [any where string(?t) == \"null\"] by ?w
""";
LogicalPlan plan = accept(index, eql);
assertTrue(plan instanceof Head);
Head head = (Head) plan;
assertTrue(head.child() instanceof OrderBy);
OrderBy orderBy = (OrderBy) head.child();
assertTrue(orderBy.child() instanceof Sequence);
Sequence s = (Sequence) orderBy.child();
List<KeyedFilter> queries = s.queries();
assertEquals(3, queries.size());
// any where ?x == 123 by ?x, ?pid
KeyedFilter q = queries.get(0);
assertEquals(2, q.keys().size());
List<? extends NamedExpression> keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(OptionalResolvedAttribute.class, keys.get(1).getClass());
OptionalMissingAttribute optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
FieldAttribute field = (FieldAttribute) keys.get(1);
assertEquals("pid", field.name());
assertTrue(q.child() instanceof Filter);
Filter filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Equals);
Equals equals = (Equals) filter.condition();
checkMissingOptional(equals.left());
assertEquals(123, ((Literal) equals.right()).value());
// any where true by ?x, pid
q = queries.get(1);
assertEquals(2, q.keys().size());
keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(FieldAttribute.class, keys.get(1).getClass());
optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
field = (FieldAttribute) keys.get(1);
assertEquals("pid", field.name());
assertTrue(q.child() instanceof Filter);
filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Literal);
Literal l = (Literal) filter.condition();
assertEquals(Literal.TRUE, l);
// any where ?y != null by ?x, ?z
q = queries.get(2);
assertEquals(2, q.keys().size());
keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(OptionalMissingAttribute.class, keys.get(1).getClass());
optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
optional = (OptionalMissingAttribute) keys.get(1);
assertEquals(true, optional.resolved());
assertEquals("z", optional.name());
assertTrue(q.child() instanceof Filter);
filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Not);
Not not = (Not) filter.condition();
equals = (Equals) not.field();
checkMissingOptional(equals.left());
checkMissingOptional(equals.right());
// until [any where string(?t) == \"null\"] by ?w
q = s.until();
keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(OptionalMissingAttribute.class, keys.get(1).getClass());
optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
optional = (OptionalMissingAttribute) keys.get(1);
assertEquals(true, optional.resolved());
assertEquals("w", optional.name());
assertTrue(q.child() instanceof Filter);
filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Equals);
equals = (Equals) filter.condition();
assertTrue(equals.right() instanceof Literal);
assertEquals("null", ((Literal) equals.right()).value());
assertEquals(DataTypes.KEYWORD, ((Literal) equals.right()).dataType());
assertTrue(equals.left() instanceof ToString);
checkMissingOptional(((ToString) equals.left()).value());
}
public void testOptionalFieldsAsSampleKey() {
String eql = """
sample by ?x
[any where ?x == 123] by ?pid
[any where true] by pid
[any where ?y != null] by ?z
""";
LogicalPlan plan = accept(index, eql);
assertTrue(plan instanceof LimitWithOffset);
plan = ((LimitWithOffset) plan).child();
Sample sample = (Sample) plan;
assertEquals(3, sample.children().size());
List<KeyedFilter> queries = sample.queries();
assertEquals(3, queries.size());
// any where ?x == 123 by ?x, ?pid
KeyedFilter q = queries.get(0);
assertEquals(2, q.keys().size());
List<? extends NamedExpression> keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(OptionalResolvedAttribute.class, keys.get(1).getClass());
OptionalMissingAttribute optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
FieldAttribute field = (FieldAttribute) keys.get(1);
assertEquals("pid", field.name());
assertTrue(q.child() instanceof Filter);
Filter filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Equals);
Equals equals = (Equals) filter.condition();
checkMissingOptional(equals.left());
assertEquals(123, ((Literal) equals.right()).value());
// any where true by ?x, pid
q = queries.get(1);
assertEquals(2, q.keys().size());
keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(FieldAttribute.class, keys.get(1).getClass());
optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
field = (FieldAttribute) keys.get(1);
assertEquals("pid", field.name());
assertTrue(q.child() instanceof Filter);
filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Literal);
Literal l = (Literal) filter.condition();
assertEquals(Literal.TRUE, l);
// any where ?y != null by ?x, ?z
q = queries.get(2);
assertEquals(2, q.keys().size());
keys = q.keys();
assertEquals(OptionalMissingAttribute.class, keys.get(0).getClass());
assertEquals(OptionalMissingAttribute.class, keys.get(1).getClass());
optional = (OptionalMissingAttribute) keys.get(0);
assertEquals(true, optional.resolved());
assertEquals("x", optional.name());
optional = (OptionalMissingAttribute) keys.get(1);
assertEquals(true, optional.resolved());
assertEquals("z", optional.name());
assertTrue(q.child() instanceof Filter);
filter = (Filter) q.child();
assertTrue(filter.condition() instanceof Not);
Not not = (Not) filter.condition();
equals = (Equals) not.field();
checkMissingOptional(equals.left());
checkMissingOptional(equals.right());
}
private LogicalPlan accept(IndexResolution resolution, String eql) {
PreAnalyzer preAnalyzer = new PreAnalyzer();
Analyzer analyzer = analyzer();
EqlParser parser = new EqlParser();
LogicalPlan plan = parser.createStatement(eql);
return analyzer.analyze(preAnalyzer.preAnalyze(plan, resolution));
}
private LogicalPlan accept(String eql) {
return accept(index, eql);
}
private Equals equalsCondition(String query) {
LogicalPlan plan = accept(query);
assertTrue(plan instanceof Head);
Head head = (Head) plan;
assertTrue(head.child() instanceof OrderBy);
OrderBy orderBy = (OrderBy) head.child();
assertTrue(orderBy.child() instanceof Filter);
Filter filter = (Filter) orderBy.child();
assertTrue(filter.condition() instanceof And);
And condition = (And) filter.condition();
assertTrue(condition.right() instanceof Equals);
return (Equals) condition.right();
}
private void checkMissingOptional(Expression e) {
assertEquals(DataTypes.NULL, e.dataType());
assertTrue(e.foldable());
assertNull(e.fold());
}
}
|
AnalyzerTests
|
java
|
spring-projects__spring-security
|
kerberos/kerberos-test/src/main/java/org/springframework/security/kerberos/test/KerberosSecurityTestcase.java
|
{
"start": 1322,
"end": 2292
}
|
class ____ {
private MiniKdc kdc;
private File workDir;
private Properties conf;
@BeforeEach
public void startMiniKdc() throws Exception {
createTestDir();
createMiniKdcConf();
this.kdc = new MiniKdc(this.conf, this.workDir);
this.kdc.start();
}
/**
* Create a working directory, it should be the build directory. Under this directory
* an ApacheDS working directory will be created, this directory will be deleted when
* the MiniKdc stops.
*/
public void createTestDir() {
this.workDir = new File(System.getProperty("test.dir", "target"));
}
/**
* Create a Kdc configuration
*/
public void createMiniKdcConf() {
this.conf = MiniKdc.createConf();
}
@AfterEach
public void stopMiniKdc() {
if (this.kdc != null) {
this.kdc.stop();
}
}
public MiniKdc getKdc() {
return this.kdc;
}
public File getWorkDir() {
return this.workDir;
}
public Properties getConf() {
return this.conf;
}
}
|
KerberosSecurityTestcase
|
java
|
elastic__elasticsearch
|
test/external-modules/latency-simulating-directory/src/main/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingRepositoryPlugin.java
|
{
"start": 1234,
"end": 2588
}
|
class ____ extends Plugin implements RepositoryPlugin {
public static final String TYPE = "latency-simulating";
@Override
public Map<String, Repository.Factory> getRepositories(
Environment env,
NamedXContentRegistry namedXContentRegistry,
ClusterService clusterService,
BigArrays bigArrays,
RecoverySettings recoverySettings,
RepositoriesMetrics repositoriesMetrics,
SnapshotMetrics snapshotMetrics
) {
return Map.of(
TYPE,
(projectId, metadata) -> new LatencySimulatingBlobStoreRepository(
projectId,
metadata,
env,
namedXContentRegistry,
clusterService,
bigArrays,
recoverySettings,
buildSimulator(metadata.settings())
)
);
}
private static Runnable buildSimulator(Settings settings) {
long sleepyTime = settings.getAsLong("latency", 0L);
if (sleepyTime == 0L) {
return () -> {};
}
return () -> {
try {
Thread.sleep(sleepyTime);
} catch (InterruptedException e) {
throw new AssertionError("BlobRepository read interrupted!");
}
};
}
}
|
LatencySimulatingRepositoryPlugin
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsKMSUtil.java
|
{
"start": 2270,
"end": 2409
}
|
class ____ key provider related methods in hdfs client package.
*
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public final
|
for
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/sortedset/ZRangeArgs.java
|
{
"start": 171,
"end": 1499
}
|
class ____ implements RedisCommandExtraArguments {
private boolean rev;
private long offset = -1;
private int count;
/**
* The REV argument reverses the ordering, so elements are ordered from highest to lowest score, and score ties are
* resolved by reverse lexicographical ordering.
*
* @return the current {@code ZRangeArgs}
**/
public ZRangeArgs rev() {
this.rev = true;
return this;
}
/**
* The LIMIT argument can be used to obtain a sub-range from the matching elements.
* A negative {@code count} returns all elements from the {@code offset}.
*
* @param offset the offset value
* @param count the count value
* @return the current {@code ZRangeArgs}
**/
public ZRangeArgs limit(long offset, int count) {
this.offset = offset;
this.count = count;
return this;
}
@Override
public List<Object> toArgs() {
List<Object> list = new ArrayList<>();
if (rev) {
list.add("REV");
}
if (count != 0 && offset != -1) {
list.add("LIMIT");
list.add(Long.toString(offset));
list.add(Long.toString(count));
}
return list;
}
public boolean isReverse() {
return rev;
}
}
|
ZRangeArgs
|
java
|
grpc__grpc-java
|
api/src/main/java/io/grpc/LoadBalancer.java
|
{
"start": 35634,
"end": 37300
}
|
class ____<T> {
private final String debugString;
private final T defaultValue;
private Key(String debugString, T defaultValue) {
this.debugString = debugString;
this.defaultValue = defaultValue;
}
/**
* Factory method for creating instances of {@link Key}. The default value of the key is
* {@code null}.
*
* @param debugString a debug string that describes this key.
* @param <T> Key type
* @return Key object
*/
public static <T> Key<T> create(String debugString) {
Preconditions.checkNotNull(debugString, "debugString");
return new Key<>(debugString, /*defaultValue=*/ null);
}
/**
* Factory method for creating instances of {@link Key}.
*
* @param debugString a debug string that describes this key.
* @param defaultValue default value to return when value for key not set
* @param <T> Key type
* @return Key object
*/
public static <T> Key<T> createWithDefault(String debugString, T defaultValue) {
Preconditions.checkNotNull(debugString, "debugString");
return new Key<>(debugString, defaultValue);
}
/**
* Returns the user supplied default value for this key.
*/
public T getDefault() {
return defaultValue;
}
@Override
public String toString() {
return debugString;
}
}
}
/**
* Provides essentials for LoadBalancer implementations.
*
* @since 1.2.0
*/
@ThreadSafe
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/1771")
public abstract static
|
Key
|
java
|
apache__camel
|
components/camel-pqc/src/generated/java/org/apache/camel/component/pqc/dataformat/PQCDataFormatConfigurer.java
|
{
"start": 735,
"end": 4667
}
|
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("BufferSize", int.class);
map.put("KeyEncapsulationAlgorithm", java.lang.String.class);
map.put("KeyGenerator", javax.crypto.KeyGenerator.class);
map.put("KeyPair", java.security.KeyPair.class);
map.put("Provider", java.lang.String.class);
map.put("SymmetricKeyAlgorithm", java.lang.String.class);
map.put("SymmetricKeyLength", int.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
PQCDataFormat target = (PQCDataFormat) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "buffersize":
case "bufferSize": target.setBufferSize(property(camelContext, int.class, value)); return true;
case "keyencapsulationalgorithm":
case "keyEncapsulationAlgorithm": target.setKeyEncapsulationAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
case "keygenerator":
case "keyGenerator": target.setKeyGenerator(property(camelContext, javax.crypto.KeyGenerator.class, value)); return true;
case "keypair":
case "keyPair": target.setKeyPair(property(camelContext, java.security.KeyPair.class, value)); return true;
case "provider": target.setProvider(property(camelContext, java.lang.String.class, value)); return true;
case "symmetrickeyalgorithm":
case "symmetricKeyAlgorithm": target.setSymmetricKeyAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
case "symmetrickeylength":
case "symmetricKeyLength": target.setSymmetricKeyLength(property(camelContext, int.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "buffersize":
case "bufferSize": return int.class;
case "keyencapsulationalgorithm":
case "keyEncapsulationAlgorithm": return java.lang.String.class;
case "keygenerator":
case "keyGenerator": return javax.crypto.KeyGenerator.class;
case "keypair":
case "keyPair": return java.security.KeyPair.class;
case "provider": return java.lang.String.class;
case "symmetrickeyalgorithm":
case "symmetricKeyAlgorithm": return java.lang.String.class;
case "symmetrickeylength":
case "symmetricKeyLength": return int.class;
default: return null;
}
}
    /**
     * Reads the current value of the named option from the given
     * {@code PQCDataFormat}, or returns {@code null} for an unknown name.
     * Note that {@code null} is ambiguous: it may also mean the option is
     * known but currently unset.
     */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        PQCDataFormat target = (PQCDataFormat) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "buffersize":
        case "bufferSize": return target.getBufferSize();
        case "keyencapsulationalgorithm":
        case "keyEncapsulationAlgorithm": return target.getKeyEncapsulationAlgorithm();
        case "keygenerator":
        case "keyGenerator": return target.getKeyGenerator();
        case "keypair":
        case "keyPair": return target.getKeyPair();
        case "provider": return target.getProvider();
        case "symmetrickeyalgorithm":
        case "symmetricKeyAlgorithm": return target.getSymmetricKeyAlgorithm();
        case "symmetrickeylength":
        case "symmetricKeyLength": return target.getSymmetricKeyLength();
        default: return null;
        }
    }
}
|
PQCDataFormatConfigurer
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobSubmitHandlerTest.java
|
{
"start": 3078,
"end": 14770
}
|
/**
 * Tests for the REST job-submission handler, parameterized over SSL being
 * disabled or enabled with each available SSL provider.
 */
class ____ {
    @Parameters(name = "SSL enabled: {0}")
    public static Iterable<Tuple2<Boolean, String>> data() {
        ArrayList<Tuple2<Boolean, String>> parameters = new ArrayList<>(3);
        parameters.add(Tuple2.of(false, "no SSL"));
        for (String sslProvider : SSLUtilsTest.AVAILABLE_SSL_PROVIDERS) {
            parameters.add(Tuple2.of(true, sslProvider));
        }
        return parameters;
    }
    @TempDir private java.nio.file.Path temporaryFolder;
    private final Configuration configuration;
    private BlobServer blobServer;
    public JobSubmitHandlerTest(Tuple2<Boolean, String> withSsl) {
        this.configuration =
                withSsl.f0
                        ? SSLUtilsTest.createInternalSslConfigWithKeyAndTrustStores(withSsl.f1)
                        : new Configuration();
    }
    @BeforeEach
    void setup() throws IOException {
        Configuration config = new Configuration(configuration);
        blobServer =
                new BlobServer(
                        config, TempDirUtils.newFolder(temporaryFolder), new VoidBlobStore());
        blobServer.start();
    }
    @AfterEach
    void teardown() throws IOException {
        if (blobServer != null) {
            blobServer.close();
        }
    }
    /** A request referencing a job graph file that was never uploaded must yield BAD_REQUEST. */
    @TestTemplate
    void testSerializationFailureHandling() throws Exception {
        final Path jobGraphFile = TempDirUtils.newFile(temporaryFolder).toPath();
        DispatcherGateway mockGateway =
                TestingDispatcherGateway.newBuilder()
                        .setSubmitFunction(
                                jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                        .build();
        JobSubmitHandler handler =
                new JobSubmitHandler(
                        () -> CompletableFuture.completedFuture(mockGateway),
                        RpcUtils.INF_TIMEOUT,
                        Collections.emptyMap(),
                        Executors.directExecutor(),
                        configuration);
        JobSubmitRequestBody request =
                new JobSubmitRequestBody(
                        jobGraphFile.toString(), Collections.emptyList(), Collections.emptyList());
        assertThatThrownBy(
                        () ->
                                handler.handleRequest(
                                        HandlerRequest.create(
                                                request, EmptyMessageParameters.getInstance()),
                                        mockGateway))
                .isInstanceOf(RestHandlerException.class)
                .satisfies(
                        e ->
                                assertThat(((RestHandlerException) e).getHttpResponseStatus())
                                        .isEqualTo(HttpResponseStatus.BAD_REQUEST));
    }
    /** A well-formed request with a serialized job graph upload completes successfully. */
    @TestTemplate
    void testSuccessfulJobSubmission() throws Exception {
        final Path jobGraphFile = TempDirUtils.newFile(temporaryFolder).toPath();
        try (ObjectOutputStream objectOut =
                new ObjectOutputStream(Files.newOutputStream(jobGraphFile))) {
            objectOut.writeObject(JobGraphTestUtils.emptyJobGraph());
        }
        TestingDispatcherGateway.Builder builder = TestingDispatcherGateway.newBuilder();
        builder.setBlobServerPort(blobServer.getPort())
                .setSubmitFunction(jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                .setHostname("localhost");
        DispatcherGateway mockGateway = builder.build();
        JobSubmitHandler handler =
                new JobSubmitHandler(
                        () -> CompletableFuture.completedFuture(mockGateway),
                        RpcUtils.INF_TIMEOUT,
                        Collections.emptyMap(),
                        Executors.directExecutor(),
                        configuration);
        JobSubmitRequestBody request =
                new JobSubmitRequestBody(
                        jobGraphFile.getFileName().toString(),
                        Collections.emptyList(),
                        Collections.emptyList());
        handler.handleRequest(
                        HandlerRequest.create(
                                request,
                                EmptyMessageParameters.getInstance(),
                                Collections.singleton(jobGraphFile.toFile())),
                        mockGateway)
                .get();
    }
    /**
     * Uploading more files than the request body declares must be rejected with a
     * {@link RestHandlerException} mentioning the count mismatch.
     */
    @TestTemplate
    void testRejectionOnCountMismatch() throws Exception {
        final Path jobGraphFile = TempDirUtils.newFile(temporaryFolder).toPath();
        try (ObjectOutputStream objectOut =
                new ObjectOutputStream(Files.newOutputStream(jobGraphFile))) {
            objectOut.writeObject(JobGraphTestUtils.emptyJobGraph());
        }
        final Path countExceedingFile = TempDirUtils.newFile(temporaryFolder).toPath();
        TestingDispatcherGateway.Builder builder = TestingDispatcherGateway.newBuilder();
        builder.setBlobServerPort(blobServer.getPort())
                .setSubmitFunction(jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
                .setHostname("localhost");
        DispatcherGateway mockGateway = builder.build();
        JobSubmitHandler handler =
                new JobSubmitHandler(
                        () -> CompletableFuture.completedFuture(mockGateway),
                        RpcUtils.INF_TIMEOUT,
                        Collections.emptyMap(),
                        Executors.directExecutor(),
                        configuration);
        JobSubmitRequestBody request =
                new JobSubmitRequestBody(
                        jobGraphFile.getFileName().toString(),
                        Collections.emptyList(),
                        Collections.emptyList());
        // The previous version swallowed the exception and discarded the result of
        // findThrowable, so the test passed even when no exception was thrown or the
        // wrong exception surfaced. Now the request must fail, and the failure must
        // contain a RestHandlerException complaining about the file count.
        assertThatThrownBy(
                        () ->
                                handler.handleRequest(
                                                HandlerRequest.create(
                                                        request,
                                                        EmptyMessageParameters.getInstance(),
                                                        Arrays.asList(
                                                                jobGraphFile.toFile(),
                                                                countExceedingFile.toFile())),
                                                mockGateway)
                                        .get())
                .satisfies(
                        e ->
                                assertThat(
                                                ExceptionUtils.findThrowable(
                                                        e,
                                                        candidate ->
                                                                candidate
                                                                                instanceof
                                                                                RestHandlerException
                                                                        && candidate.getMessage()
                                                                                .contains("count")))
                                        .isPresent());
    }
    /** Uploaded jar and artifact files must be turned into blob keys on the submitted plan. */
    @TestTemplate
    void testFileHandling() throws Exception {
        final String dcEntryName = "entry";
        CompletableFuture<ExecutionPlan> submittedExecutionPlanFuture = new CompletableFuture<>();
        DispatcherGateway dispatcherGateway =
                TestingDispatcherGateway.newBuilder()
                        .setBlobServerPort(blobServer.getPort())
                        .setSubmitFunction(
                                submittedExecutionPlan -> {
                                    submittedExecutionPlanFuture.complete(submittedExecutionPlan);
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                })
                        .build();
        JobSubmitHandler handler =
                new JobSubmitHandler(
                        () -> CompletableFuture.completedFuture(dispatcherGateway),
                        RpcUtils.INF_TIMEOUT,
                        Collections.emptyMap(),
                        Executors.directExecutor(),
                        configuration);
        final Path jobGraphFile = TempDirUtils.newFile(temporaryFolder).toPath();
        final Path jarFile = TempDirUtils.newFile(temporaryFolder).toPath();
        final Path artifactFile = TempDirUtils.newFile(temporaryFolder).toPath();
        final JobGraph jobGraph = JobGraphTestUtils.emptyJobGraph();
        // the entry that should be updated
        jobGraph.addUserArtifact(
                dcEntryName, new DistributedCache.DistributedCacheEntry("random", false));
        try (ObjectOutputStream objectOut =
                new ObjectOutputStream(Files.newOutputStream(jobGraphFile))) {
            objectOut.writeObject(jobGraph);
        }
        JobSubmitRequestBody request =
                new JobSubmitRequestBody(
                        jobGraphFile.getFileName().toString(),
                        Collections.singletonList(jarFile.getFileName().toString()),
                        Collections.singleton(
                                new JobSubmitRequestBody.DistributedCacheFile(
                                        dcEntryName, artifactFile.getFileName().toString())));
        handler.handleRequest(
                        HandlerRequest.create(
                                request,
                                EmptyMessageParameters.getInstance(),
                                Arrays.asList(
                                        jobGraphFile.toFile(),
                                        jarFile.toFile(),
                                        artifactFile.toFile())),
                        dispatcherGateway)
                .get();
        assertThat(submittedExecutionPlanFuture)
                .as("No ExecutionPlan was submitted.")
                .isCompleted();
        final ExecutionPlan submittedExecutionPlan = submittedExecutionPlanFuture.get();
        assertThat(submittedExecutionPlan.getUserJarBlobKeys()).hasSize(1);
        assertThat(submittedExecutionPlan.getUserArtifacts()).hasSize(1);
        assertThat(submittedExecutionPlan.getUserArtifacts().get(dcEntryName).blobKey).isNotNull();
    }
    /** A dispatcher-side submission failure must propagate with the dispatcher's error message. */
    @TestTemplate
    void testFailedJobSubmission() throws Exception {
        final String errorMessage = "test";
        DispatcherGateway mockGateway =
                TestingDispatcherGateway.newBuilder()
                        .setSubmitFunction(
                                jobgraph ->
                                        FutureUtils.completedExceptionally(
                                                new Exception(errorMessage)))
                        .build();
        JobSubmitHandler handler =
                new JobSubmitHandler(
                        () -> CompletableFuture.completedFuture(mockGateway),
                        RpcUtils.INF_TIMEOUT,
                        Collections.emptyMap(),
                        Executors.directExecutor(),
                        configuration);
        final Path jobGraphFile = TempDirUtils.newFile(temporaryFolder).toPath();
        JobGraph jobGraph = JobGraphTestUtils.emptyJobGraph();
        try (ObjectOutputStream objectOut =
                new ObjectOutputStream(Files.newOutputStream(jobGraphFile))) {
            objectOut.writeObject(jobGraph);
        }
        JobSubmitRequestBody request =
                new JobSubmitRequestBody(
                        jobGraphFile.getFileName().toString(),
                        Collections.emptyList(),
                        Collections.emptyList());
        assertThatFuture(
                        handler.handleRequest(
                                HandlerRequest.create(
                                        request,
                                        EmptyMessageParameters.getInstance(),
                                        Collections.singletonList(jobGraphFile.toFile())),
                                mockGateway))
                .eventuallyFailsWith(Exception.class)
                .withMessageContaining(errorMessage);
    }
}
|
JobSubmitHandlerTest
|
java
|
quarkusio__quarkus
|
integration-tests/websockets/src/main/java/io/quarkus/websockets/ChatMessageDecoder.java
|
{
"start": 187,
"end": 597
}
|
/**
 * WebSocket text decoder that deserializes incoming JSON frames into
 * {@code ChatMessageDTO} instances using JSON-B.
 */
class ____ implements Decoder.Text<ChatMessageDTO> {

    // Jsonb engines are thread-safe and expensive to build, so share a single
    // instance instead of creating one per decoder (the previous per-instance
    // engine was also never closed).
    private static final Jsonb JSONB = JsonbBuilder.create();

    @Override
    public ChatMessageDTO decode(String string) throws DecodeException {
        return JSONB.fromJson(string, ChatMessageDTO.class);
    }

    @Override
    public boolean willDecode(String string) {
        // This endpoint only ever receives chat messages, so accept every frame.
        return true;
    }
}
|
ChatMessageDecoder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/SqlTypes.java
|
{
"start": 20063,
"end": 30455
}
|
enum ____ is declared in DDL using {@code create type ... as enum}
* or {@code create type ... as domain}.
*
* @see org.hibernate.dialect.type.PostgreSQLEnumJdbcType
* @see org.hibernate.dialect.type.OracleEnumJdbcType
*
* @since 6.3
*/
public static final int NAMED_ENUM = 6001;
    /**
     * A type code representing a SQL {@code ENUM} type for databases like
     * {@link org.hibernate.dialect.MySQLDialect MySQL} where {@code ENUM}
     * types do not have names. Enum values are ordered by ordinal.
     *
     * @see OrdinalEnumJdbcType
     *
     * @since 6.5
     */
    public static final int ORDINAL_ENUM = 6002;
    /**
     * A type code representing a SQL {@code ENUM} type for databases like
     * {@link org.hibernate.dialect.PostgreSQLDialect PostgreSQL} where
     * {@code ENUM} types must have names. Enum values are ordered by ordinal.
     *
     * @see org.hibernate.dialect.type.PostgreSQLEnumJdbcType
     *
     * @since 6.5
     */
    public static final int NAMED_ORDINAL_ENUM = 6003;
    /**
     * A type code representing a {@code vector} type for databases
     * like {@link org.hibernate.dialect.PostgreSQLDialect PostgreSQL},
     * {@link org.hibernate.dialect.OracleDialect Oracle 23ai} and {@link org.hibernate.dialect.MariaDBDialect MariaDB}.
     * A vector essentially is a {@code float[]} with a fixed length.
     *
     * @since 6.4
     */
    public static final int VECTOR = 10_000;
    /**
     * A type code representing a single-byte integer vector type for Oracle 23ai database.
     */
    public static final int VECTOR_INT8 = 10_001;
    /**
     * A type code representing a single-precision floating-point vector type for Oracle 23ai database.
     */
    public static final int VECTOR_FLOAT32 = 10_002;
    /**
     * A type code representing a double-precision floating-point vector type for Oracle 23ai database.
     */
    public static final int VECTOR_FLOAT64 = 10_003;
    /**
     * A type code representing a bit precision vector type for databases
     * like {@link org.hibernate.dialect.PostgreSQLDialect PostgreSQL} and
     * {@link org.hibernate.dialect.OracleDialect Oracle 23ai}.
     * @since 7.2
     */
    public static final int VECTOR_BINARY = 10_004;
    /**
     * A type code representing a half-precision floating-point vector type for databases
     * like {@link org.hibernate.dialect.PostgreSQLDialect PostgreSQL}.
     * @since 7.2
     */
    public static final int VECTOR_FLOAT16 = 10_005;
    /**
     * A type code representing a sparse single-byte integer vector type for Oracle 23ai database.
     * @since 7.2
     */
    public static final int SPARSE_VECTOR_INT8 = 10_006;
    /**
     * A type code representing a sparse single-precision floating-point vector type for Oracle 23ai database.
     * @since 7.2
     */
    public static final int SPARSE_VECTOR_FLOAT32 = 10_007;
    /**
     * A type code representing a sparse double-precision floating-point vector type for Oracle 23ai database.
     * @since 7.2
     */
    public static final int SPARSE_VECTOR_FLOAT64 = 10_008;
    // Not instantiable: this class is a namespace for type code constants
    // and static predicates only.
    private SqlTypes() {
    }
/**
* Does the given JDBC type code represent some sort of
* numeric type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isNumericType(int typeCode) {
switch (typeCode) {
case Types.BIT:
case Types.SMALLINT:
case Types.TINYINT:
case Types.INTEGER:
case Types.BIGINT:
case Types.DOUBLE:
case Types.REAL:
case Types.FLOAT:
case Types.NUMERIC:
case Types.DECIMAL:
return true;
default:
return false;
}
}
/**
* Is this a type with a length, that is, is it
* some kind of character string or binary string?
*
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isStringType(int typeCode) {
switch (typeCode) {
case Types.CHAR:
case Types.VARCHAR:
case Types.LONGVARCHAR:
case Types.NCHAR:
case Types.NVARCHAR:
case Types.LONGNVARCHAR:
case Types.BINARY:
case Types.VARBINARY:
case Types.LONGVARBINARY:
return true;
default:
return false;
}
}
/**
* Does the given JDBC type code represent some sort of
* character string type?
*
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isCharacterOrClobType(int typeCode) {
switch (typeCode) {
case Types.CHAR:
case Types.VARCHAR:
case Types.LONGVARCHAR:
case Types.NCHAR:
case Types.NVARCHAR:
case Types.LONGNVARCHAR:
case Types.CLOB:
case Types.NCLOB:
return true;
default:
return false;
}
}
/**
* Does the given JDBC type code represent some sort of
* character string type?
*
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isCharacterType(int typeCode) {
switch (typeCode) {
case Types.CHAR:
case Types.VARCHAR:
case Types.LONGVARCHAR:
case Types.NCHAR:
case Types.NVARCHAR:
case Types.LONGNVARCHAR:
return true;
default:
return false;
}
}
/**
* Does the given JDBC type code represent some sort of
* variable-length character string type?
*
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isVarcharType(int typeCode) {
switch (typeCode) {
case Types.VARCHAR:
case Types.LONGVARCHAR:
case Types.NVARCHAR:
case Types.LONGNVARCHAR:
return true;
default:
return false;
}
}
/**
* Does the given JDBC type code represent some sort of
* variable-length binary string type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isVarbinaryType(int typeCode) {
switch (typeCode) {
case Types.VARBINARY:
case Types.LONGVARBINARY:
return true;
default:
return false;
}
}
/**
* Does the given JDBC type code represent some sort of
* variable-length binary string or BLOB type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isBinaryType(int typeCode) {
switch ( typeCode ) {
case Types.BINARY:
case Types.VARBINARY:
case Types.LONGVARBINARY:
case Types.BLOB:
return true;
default:
return false;
}
}
/**
* Does the given typecode represent one of the two SQL decimal types?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isNumericOrDecimal(int typeCode) {
switch ( typeCode ) {
case NUMERIC:
case DECIMAL:
return true;
default:
return false;
}
}
/**
* Does the given typecode represent a SQL floating point type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isFloatOrRealOrDouble(int typeCode) {
switch ( typeCode ) {
case FLOAT:
case REAL:
case DOUBLE:
return true;
default:
return false;
}
}
/**
* Does the given typecode represent a SQL integer type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isIntegral(int typeCode) {
switch ( typeCode ) {
case INTEGER:
case BIGINT:
case SMALLINT:
case TINYINT:
return true;
default:
return false;
}
}
@Internal
public static boolean isSmallOrTinyInt(int typeCode) {
switch ( typeCode ) {
case SMALLINT:
case TINYINT:
return true;
default:
return false;
}
}
/**
* Does the given typecode represent a SQL date, time, or timestamp type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean isTemporalType(int typeCode) {
switch ( typeCode ) {
case DATE:
case TIME:
case TIME_WITH_TIMEZONE:
case TIME_UTC:
case TIMESTAMP:
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
case INSTANT:
return true;
default:
return false;
}
}
    /**
     * Does the given typecode represent a SQL {@code interval} type?
     *
     * @param typeCode a JDBC type code from {@link Types}
     */
    public static boolean isIntervalType(int typeCode) {
        return typeCode == INTERVAL_SECOND;
    }
    /**
     * Does the given typecode represent a {@code duration} type?
     *
     * @param typeCode a JDBC type code from {@link Types}
     */
    public static boolean isDurationType(int typeCode) {
        return typeCode == DURATION;
    }
/**
* Does the given typecode represent a SQL date or timestamp type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean hasDatePart(int typeCode) {
switch ( typeCode ) {
case DATE:
case TIMESTAMP:
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return true;
default:
return false;
}
}
/**
* Does the given typecode represent a SQL time or timestamp type?
* @param typeCode a JDBC type code from {@link Types}
*/
public static boolean hasTimePart(int typeCode) {
switch ( typeCode ) {
case TIME:
case TIME_WITH_TIMEZONE:
case TIME_UTC:
case TIMESTAMP:
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return true;
default:
return false;
}
}
/**
* Does the typecode represent a spatial (Geometry or Geography) type.
*
* @param typeCode - a JDBC type code
*/
public static boolean isSpatialType(int typeCode) {
switch ( typeCode ) {
case GEOMETRY:
case POINT:
case GEOGRAPHY:
return true;
default:
return false;
}
}
public static boolean isEnumType(int typeCode) {
switch ( typeCode ) {
case ENUM:
case NAMED_ENUM:
return true;
default:
return false;
}
}
/**
* Does the typecode represent a JSON type.
*
* @param typeCode - a JDBC type code
* @since 7.0
*/
public static boolean isJsonType(int typeCode) {
switch ( typeCode ) {
case JSON:
case JSON_ARRAY:
return true;
default:
return false;
}
}
/**
* Does the typecode represent a JSON type or a type that can be implicitly cast to JSON.
*
* @param typeCode - a JDBC type code
* @since 7.0
*/
public static boolean isImplicitJsonType(int typeCode) {
switch ( typeCode ) {
case JSON:
case JSON_ARRAY:
return true;
default:
return isCharacterOrClobType( typeCode );
}
}
/**
* Does the typecode represent a XML type.
*
* @param typeCode - a JDBC type code
* @since 7.0
*/
public static boolean isXmlType(int typeCode) {
switch ( typeCode ) {
case SQLXML:
case XML_ARRAY:
return true;
default:
return false;
}
}
/**
* Does the typecode represent an XML type or a type that can be implicitly cast to XML.
*
* @param typeCode - a JDBC type code
* @since 7.0
*/
public static boolean isImplicitXmlType(int typeCode) {
switch ( typeCode ) {
case SQLXML:
case XML_ARRAY:
return true;
default:
return isCharacterOrClobType( typeCode );
}
}
}
|
type
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/FastCodesHeader.java
|
{
"start": 991,
"end": 1841
}
|
/**
 * Mixin for command custom headers that can serialize themselves directly
 * to/from the wire representation, avoiding reflection-based codecs.
 */
interface ____ {

    /**
     * Fetches {@code field} from the decoded field map. A missing field is
     * logged but tolerated ({@code null} is returned) to stay compatible with
     * {@code RemotingCommand.decodeCommandCustomHeader}, which also does not throw.
     */
    default String getAndCheckNotNull(HashMap<String, String> fields, String field) {
        String value = fields.get(field);
        if (value != null) {
            return value;
        }
        String headerClass = this.getClass().getSimpleName();
        RemotingCommand.log.error("the custom field {}.{} is null", headerClass, field);
        // no exception throws, keep compatible with RemotingCommand.decodeCommandCustomHeader
        return null;
    }

    /** Writes a key/value pair to the buffer, skipping the pair entirely when the value is null. */
    default void writeIfNotNull(ByteBuf out, String key, Object value) {
        if (value == null) {
            return;
        }
        RocketMQSerializable.writeStr(out, true, key);
        RocketMQSerializable.writeStr(out, false, value.toString());
    }

    /** Serializes this header's fields into the buffer. */
    void encode(ByteBuf out);

    /** Populates this header from the decoded field map. */
    void decode(HashMap<String, String> fields) throws RemotingCommandException;
}
|
FastCodesHeader
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/test/java/org/apache/hadoop/yarn/server/router/webapp/JavaProcess.java
|
{
"start": 933,
"end": 974
}
|
class ____ start a new process.
*/
public
|
to
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java
|
{
"start": 4163,
"end": 23232
}
|
class ____ extends AbstractJobPersistentTasksExecutor<OpenJobAction.JobParams> {
private static final Logger logger = LogManager.getLogger(OpenJobPersistentTasksExecutor.class);
public static String[] indicesOfInterest(String resultsIndex) {
if (resultsIndex == null) {
return new String[] { AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.indexName(), MlConfigIndex.indexName() };
}
return new String[] {
AnomalyDetectorsIndex.jobStateIndexPattern(),
resultsIndex,
MlMetaIndex.indexName(),
MlConfigIndex.indexName() };
}
    private final AutodetectProcessManager autodetectProcessManager;
    private final DatafeedConfigProvider datafeedConfigProvider;
    private final Client client;
    private final JobResultsProvider jobResultsProvider;
    private final AnomalyDetectionAuditor auditor;
    private final XPackLicenseState licenseState;
    // Latest observed cluster state; kept current by the listener registered in the constructor.
    private volatile ClusterState clusterState;
    /**
     * Creates the persistent-task executor responsible for opening anomaly
     * detection jobs. Also registers a cluster-state listener so that
     * {@link #clusterState} tracks the latest state.
     */
    public OpenJobPersistentTasksExecutor(
        Settings settings,
        ClusterService clusterService,
        AutodetectProcessManager autodetectProcessManager,
        DatafeedConfigProvider datafeedConfigProvider,
        MlMemoryTracker memoryTracker,
        Client client,
        IndexNameExpressionResolver expressionResolver,
        XPackLicenseState licenseState,
        AnomalyDetectionAuditor auditor
    ) {
        super(MlTasks.JOB_TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME, settings, clusterService, memoryTracker, expressionResolver);
        this.autodetectProcessManager = Objects.requireNonNull(autodetectProcessManager);
        this.datafeedConfigProvider = Objects.requireNonNull(datafeedConfigProvider);
        this.client = Objects.requireNonNull(client);
        this.jobResultsProvider = new JobResultsProvider(client, settings, expressionResolver);
        this.auditor = auditor;
        this.licenseState = licenseState;
        clusterService.addListener(event -> clusterState = event.state());
    }
    /**
     * Chooses the node the job task should be assigned to, or an unassigned
     * {@code Assignment} with an explanation when no node is suitable.
     */
    @Override
    protected Assignment doGetAssignment(
        OpenJobAction.JobParams params,
        Collection<DiscoveryNode> candidateNodes,
        ClusterState clusterState,
        @Nullable ProjectId projectId
    ) {
        Job job = params.getJob();
        // If the task parameters do not have a job field then the job
        // was first opened on a pre v6.6 node and has not been migrated
        // out of cluster state - this should be impossible in version 8
        assert job != null;
        boolean isMemoryTrackerRecentlyRefreshed = memoryTracker.isRecentlyRefreshed();
        Optional<Assignment> optionalAssignment = getPotentialAssignment(params, clusterState, isMemoryTrackerRecentlyRefreshed);
        // NOTE: this will return here if isMemoryTrackerRecentlyRefreshed is false, we don't allow assignment with stale memory
        if (optionalAssignment.isPresent()) {
            return optionalAssignment.get();
        }
        // With allowLazyOpen the job may wait indefinitely for capacity, hence Integer.MAX_VALUE lazy nodes.
        JobNodeSelector jobNodeSelector = new JobNodeSelector(
            clusterState,
            candidateNodes,
            params.getJobId(),
            MlTasks.JOB_TASK_NAME,
            memoryTracker,
            job.allowLazyOpen() ? Integer.MAX_VALUE : maxLazyMLNodes,
            node -> nodeFilter(node, job)
        );
        Assignment assignment = jobNodeSelector.selectNode(
            maxOpenJobs,
            maxConcurrentJobAllocations,
            maxMachineMemoryPercent,
            maxNodeMemory,
            useAutoMemoryPercentage
        );
        // Emits an audit message when the job could not be assigned for memory reasons.
        auditRequireMemoryIfNecessary(params.getJobId(), auditor, assignment, jobNodeSelector, isMemoryTrackerRecentlyRefreshed);
        return assignment;
    }
private static boolean nodeSupportsModelSnapshotVersion(DiscoveryNode node, Job job) {
if (job.getModelSnapshotId() == null || job.getModelSnapshotMinVersion() == null) {
// There is no snapshot to restore or the min model snapshot version is 5.5.0
// which is OK as we have already checked the node is >= 5.5.0.
return true;
}
return MlConfigVersion.getMlConfigVersionForNode(node).onOrAfter(job.getModelSnapshotMinVersion());
}
    /**
     * Returns a human-readable reason why the job cannot be opened on the
     * given node, or {@code null} when the node is acceptable.
     */
    public static String nodeFilter(DiscoveryNode node, Job job) {
        String jobId = job.getId();
        // Reject nodes too old to restore the job's model snapshot.
        if (nodeSupportsModelSnapshotVersion(node, job) == false) {
            return "Not opening job ["
                + jobId
                + "] on node ["
                + JobNodeSelector.nodeNameAndVersion(node)
                + "], because the job's model snapshot requires a node with ML config version ["
                + job.getModelSnapshotMinVersion()
                + "] or higher";
        }
        // Reject nodes whose ML config version cannot run this job type.
        if (Job.getCompatibleJobTypes(MlConfigVersion.getMlConfigVersionForNode(node)).contains(job.getJobType()) == false) {
            return "Not opening job ["
                + jobId
                + "] on node ["
                + JobNodeSelector.nodeNameAndVersion(node)
                + "], because this node does not support jobs of type ["
                + job.getJobType()
                + "]";
        }
        return null;
    }
    /**
     * Basic pre-open checks on the job configuration.
     * Throws a missing-job exception when {@code job} is null, a conflict when the
     * job is blocked by another operation, and a bad-request for pre-5.5 jobs.
     */
    static void validateJobAndId(String jobId, Job job) {
        if (job == null) {
            throw ExceptionsHelper.missingJobException(jobId);
        }
        if (job.getBlocked().getReason() != Blocked.Reason.NONE) {
            throw ExceptionsHelper.conflictStatusException(
                "Cannot open job [{}] because it is executing [{}]",
                jobId,
                job.getBlocked().getReason()
            );
        }
        if (job.getJobVersion() == null) {
            throw ExceptionsHelper.badRequestException(
                "Cannot open job [{}] because jobs created prior to version 5.5 are not supported",
                jobId
            );
        }
    }
    /**
     * Validates the open-job request before the persistent task is created,
     * failing fast when the job itself is invalid or no node could take it.
     */
    @Override
    public void validate(OpenJobAction.JobParams params, ClusterState clusterState, @Nullable ProjectId projectId) {
        final Job job = params.getJob();
        final String jobId = params.getJobId();
        validateJobAndId(jobId, job);
        // If we already know that we can't find an ml node because all ml nodes are running at capacity or
        // simply because there are no ml nodes in the cluster then we fail quickly here:
        var assignment = getAssignment(params, clusterState.nodes().getAllNodes(), clusterState, projectId);
        if (assignment.equals(AWAITING_UPGRADE)) {
            throw makeCurrentlyBeingUpgradedException(logger, params.getJobId());
        }
        // AWAITING_LAZY_ASSIGNMENT is acceptable: the job will open once capacity appears.
        if (assignment.getExecutorNode() == null && assignment.equals(AWAITING_LAZY_ASSIGNMENT) == false) {
            throw makeNoSuitableNodesException(logger, params.getJobId(), assignment.getExplanation());
        }
    }
    /**
     * Entry point on the node the task was assigned to. Chains three steps:
     * 1) ensure the results index mapping is up to date, 2) verify the model
     * snapshot version is supported, 3) fail any running forecasts, then run
     * the job. Each step's failure handler fails the task unless the node is
     * shutting down.
     */
    @Override
    // Exceptions that occur while the node is dying, i.e. after the JVM has received a SIGTERM,
    // are ignored. Core services will be stopping in response to the SIGTERM and we want the
    // job to try to open again on another node, not spuriously fail on the dying node.
    protected void nodeOperation(AllocatedPersistentTask task, OpenJobAction.JobParams params, PersistentTaskState state) {
        JobTask jobTask = (JobTask) task;
        jobTask.setAutodetectProcessManager(autodetectProcessManager);
        JobTaskState jobTaskState = (JobTaskState) state;
        JobState jobState = jobTaskState == null ? null : jobTaskState.getState();
        // Step 3 (runs last): mark in-flight forecasts failed, then start the job.
        ActionListener<Boolean> checkSnapshotVersionListener = ActionListener.wrap(
            mappingsUpdate -> jobResultsProvider.setRunningForecastsToFailed(
                params.getJobId(),
                ActionListener.wrap(r -> runJob(jobTask, jobState, params), e -> {
                    if (autodetectProcessManager.isNodeDying() == false) {
                        logger.warn(() -> "[" + params.getJobId() + "] failed to set forecasts to failed", e);
                        // best-effort: still run the job even if forecasts could not be failed
                        runJob(jobTask, jobState, params);
                    }
                })
            ),
            e -> {
                if (autodetectProcessManager.isNodeDying() == false) {
                    logger.error(() -> "[" + params.getJobId() + "] Failed verifying snapshot version", e);
                    failTask(jobTask, "failed snapshot verification; cause: " + e.getMessage());
                }
            }
        );
        // Step 2: check the current model snapshot is new enough to restore.
        ActionListener<Boolean> resultsMappingUpdateHandler = ActionListener.wrap(
            mappingsUpdate -> verifyCurrentSnapshotVersion(params.getJobId(), checkSnapshotVersionListener),
            e -> {
                if (autodetectProcessManager.isNodeDying() == false) {
                    logger.error(() -> "[" + params.getJobId() + "] Failed to update results mapping", e);
                    failTask(jobTask, "failed to update results mapping; cause: " + e.getMessage());
                }
            }
        );
        // We need to update the results index as we MAY update the current forecast results, setting the running forcasts to failed
        // This writes to the results index, which might need updating
        ElasticsearchMappings.addDocMappingIfMissing(
            AnomalyDetectorsIndex.jobResultsAliasedName(params.getJobId()),
            AnomalyDetectorsIndex::wrappedResultsMapping,
            client,
            clusterState,
            PERSISTENT_TASK_MASTER_NODE_TIMEOUT,
            resultsMappingUpdateHandler,
            AnomalyDetectorsIndex.RESULTS_INDEX_MAPPINGS_VERSION
        );
    }
    // Exceptions that occur while the node is dying, i.e. after the JVM has received a SIGTERM,
    // are ignored. Core services will be stopping in response to the SIGTERM and we want the
    // job to try to open again on another node, not spuriously fail on the dying node.
    /**
     * Starts (or short-circuits) the job after all pre-open checks passed.
     * A job with an attached running datafeed is first reverted to its current
     * model snapshot to avoid gaps in the model; otherwise it opens directly.
     */
    private void runJob(JobTask jobTask, JobState jobState, OpenJobAction.JobParams params) {
        // If the node is already running its exit handlers then do nothing - shortly
        // the persistent task will get assigned to a new node and the code below will
        // run there instead.
        if (autodetectProcessManager.isNodeDying()) {
            return;
        }
        // If the job is closing, simply stop and return
        if (JobState.CLOSING.equals(jobState)) {
            // Mark as completed instead of using `stop` as stop assumes native processes have started
            logger.info("[{}] job got reassigned while stopping. Marking as completed", params.getJobId());
            jobTask.markAsCompleted();
            return;
        }
        // If the job is failed then the Persistent Task Service will
        // try to restart it on a node restart. Exiting here leaves the
        // job in the failed state and it must be force closed.
        if (JobState.FAILED.equals(jobState)) {
            return;
        }
        ActionListener<String> getRunningDatafeedListener = ActionListener.wrap(runningDatafeedId -> {
            if (runningDatafeedId != null) {
                // This job has a running datafeed attached to it.
                // In order to prevent gaps in the model we revert to the current snapshot deleting intervening results.
                RevertToCurrentSnapshotAction revertToCurrentSnapshotAction = new RevertToCurrentSnapshotAction(
                    jobTask,
                    ActionListener.wrap(response -> openJob(jobTask), e -> {
                        if (autodetectProcessManager.isNodeDying() == false) {
                            logger.error(() -> "[" + jobTask.getJobId() + "] failed to revert to current snapshot", e);
                            failTask(jobTask, "failed to revert to current snapshot");
                        }
                    })
                );
                revertToCurrentSnapshotAction.run();
            } else {
                openJob(jobTask);
            }
        }, e -> {
            if (autodetectProcessManager.isNodeDying() == false) {
                logger.error(() -> "[" + jobTask.getJobId() + "] failed to search for associated datafeed", e);
                failTask(jobTask, "failed to search for associated datafeed");
            }
        });
        getRunningDatafeed(jobTask.getJobId(), getRunningDatafeedListener);
    }
    /**
     * Moves the job task to the FAILED state (auditing the reason) and then
     * stops any datafeed attached to the job. If even the state update fails,
     * the task itself is marked failed so it does not hang.
     */
    private void failTask(JobTask jobTask, String reason) {
        String jobId = jobTask.getJobId();
        auditor.error(jobId, reason);
        JobTaskState failedState = new JobTaskState(JobState.FAILED, jobTask.getAllocationId(), reason, Instant.now());
        jobTask.updatePersistentTaskState(failedState, ActionListener.wrap(r -> {
            logger.debug("[{}] updated task state to failed", jobId);
            stopAssociatedDatafeedForFailedJob(jobId);
        }, e -> {
            logger.error(() -> "[" + jobId + "] error while setting task state to failed; marking task as failed", e);
            jobTask.markAsFailed(e);
            stopAssociatedDatafeedForFailedJob(jobId);
        }));
    }
    /**
     * Best-effort force-stop of the datafeed attached to a job that has just
     * failed, so the datafeed does not keep feeding a dead job. All errors are
     * logged/audited but never rethrown; nothing is attempted while the node
     * is shutting down.
     */
    private void stopAssociatedDatafeedForFailedJob(String jobId) {
        if (autodetectProcessManager.isNodeDying()) {
            // The node shutdown caught us at a bad time, and we cannot stop the datafeed
            return;
        }
        ActionListener<String> getRunningDatafeedListener = ActionListener.wrap(runningDatafeedId -> {
            if (runningDatafeedId == null) {
                return;
            }
            // force=true: the job is already failed, so a graceful stop is not possible
            StopDatafeedAction.Request request = new StopDatafeedAction.Request(runningDatafeedId);
            request.setForce(true);
            executeAsyncWithOrigin(
                client,
                ML_ORIGIN,
                StopDatafeedAction.INSTANCE,
                request,
                ActionListener.wrap(
                    // StopDatafeedAction will audit the stopping of the datafeed if it succeeds so we don't need to do that here
                    r -> logger.info("[{}] stopped associated datafeed [{}] after job failure", jobId, runningDatafeedId),
                    e -> {
                        if (autodetectProcessManager.isNodeDying() == false) {
                            logger.error(
                                () -> format("[%s] failed to stop associated datafeed [%s] after job failure", jobId, runningDatafeedId),
                                e
                            );
                            auditor.error(jobId, "failed to stop associated datafeed after job failure");
                        }
                    }
                )
            );
        }, e -> {
            if (autodetectProcessManager.isNodeDying() == false) {
                logger.error(() -> "[" + jobId + "] failed to search for associated datafeed", e);
            }
        });
        getRunningDatafeed(jobId, getRunningDatafeedListener);
    }
private void getRunningDatafeed(String jobId, ActionListener<String> listener) {
ActionListener<Set<String>> datafeedListener = listener.delegateFailureAndWrap((delegate, datafeeds) -> {
assert datafeeds.size() <= 1;
if (datafeeds.isEmpty()) {
delegate.onResponse(null);
return;
}
String datafeedId = datafeeds.iterator().next();
PersistentTasksCustomMetadata tasks = clusterState.getMetadata().getProject().custom(PersistentTasksCustomMetadata.TYPE);
PersistentTasksCustomMetadata.PersistentTask<?> datafeedTask = MlTasks.getDatafeedTask(datafeedId, tasks);
delegate.onResponse(datafeedTask != null ? datafeedId : null);
});
datafeedConfigProvider.findDatafeedIdsForJobIds(Collections.singleton(jobId), datafeedListener);
}
    /**
     * Verifies that the job's current model snapshot (if any) was produced by a supported
     * version. Responds {@code true} when the job has no snapshot, the snapshot cannot be
     * found, or the snapshot's minimum version is at least {@code MIN_SUPPORTED_SNAPSHOT_VERSION};
     * otherwise fails the listener with a bad-request error asking the user to revert or reset.
     */
    private void verifyCurrentSnapshotVersion(String jobId, ActionListener<Boolean> listener) {
        ActionListener<GetJobsAction.Response> jobListener = ActionListener.wrap(jobResponse -> {
            List<Job> jobPage = jobResponse.getResponse().results();
            // We requested a single concrete job so if it didn't exist we would get an error
            assert jobPage.size() == 1;
            String jobSnapshotId = jobPage.get(0).getModelSnapshotId();
            if (jobSnapshotId == null) {
                // The job has never persisted a model snapshot, so there is nothing to verify.
                listener.onResponse(true);
                return;
            }
            executeAsyncWithOrigin(
                client,
                ML_ORIGIN,
                GetModelSnapshotsAction.INSTANCE,
                new GetModelSnapshotsAction.Request(jobId, jobSnapshotId),
                ActionListener.wrap(snapshot -> {
                    if (snapshot.getPage().count() == 0) {
                        // Referenced snapshot no longer exists — treat as verified.
                        listener.onResponse(true);
                        return;
                    }
                    assert snapshot.getPage().results().size() == 1;
                    ModelSnapshot snapshotObj = snapshot.getPage().results().get(0);
                    if (snapshotObj.getMinVersion().onOrAfter(MIN_SUPPORTED_SNAPSHOT_VERSION)) {
                        listener.onResponse(true);
                        return;
                    }
                    // Snapshot is too old to load; the user must intervene.
                    listener.onFailure(
                        ExceptionsHelper.badRequestException(
                            "[{}] job model snapshot [{}] has min version before [{}], "
                                + "please revert to a newer model snapshot or reset the job",
                            jobId,
                            jobSnapshotId,
                            MIN_SUPPORTED_SNAPSHOT_VERSION.toString()
                        )
                    );
                }, snapshotFailure -> {
                    if (ExceptionsHelper.unwrapCause(snapshotFailure) instanceof ResourceNotFoundException) {
                        // Missing snapshot document is not an error for this check.
                        listener.onResponse(true);
                        return;
                    }
                    listener.onFailure(
                        ExceptionsHelper.serverError("[{}] failed finding snapshot [{}]", snapshotFailure, jobId, jobSnapshotId)
                    );
                })
            );
        }, error -> listener.onFailure(ExceptionsHelper.serverError("[{}] error getting job", error, jobId)));
        GetJobsAction.Request request = new GetJobsAction.Request(jobId).masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
        executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener);
    }
/**
* This action reverts a job to its current snapshot if one exists or resets the job.
* This action is retryable. As this action happens when a job is relocating to another node,
* it is common that this happens during rolling upgrades. During a rolling upgrade, it is
* probable that data nodes containing shards of the ML indices might not be available temporarily
* which results to failures in the revert/reset action. Thus, it is important to retry a few times
* so that the job manages to successfully recover without user intervention.
*/
private
|
OpenJobPersistentTasksExecutor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/CompositeIdFkGeneratedValueTest.java
|
{
"start": 10907,
"end": 11499
}
|
class ____ implements Serializable {
private Long nid;
private Long hid;
public PK(Long nid, Long hid) {
this.nid = nid;
this.hid = hid;
}
private PK() {
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
PK pk = (PK) o;
return Objects.equals( nid, pk.nid ) && Objects.equals( hid, pk.hid );
}
@Override
public int hashCode() {
return Objects.hash( nid, hid );
}
}
}
@Entity(name = "HeadT")
public static
|
PK
|
java
|
apache__flink
|
flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/functions/KeyedStateReaderFunction.java
|
{
"start": 2074,
"end": 3151
}
|
class ____<K, OUT> extends AbstractRichFunction {
private static final long serialVersionUID = 3873843034140417407L;
/**
* Process one key from the restored state backend.
*
* <p>This function can read partitioned state from the restored state backend and output zero
* or more elements using the {@link Collector} parameter.
*
* @param key The input value.
* @param out The collector for returning result values.
* @throws Exception This method may throw exceptions. Throwing an exception will cause the
* operation to fail and may trigger recovery.
*/
public abstract void readKey(K key, Context ctx, Collector<OUT> out) throws Exception;
/**
* Context that {@link KeyedStateReaderFunction}'s can use for getting additional data about an
* input record.
*
* <p>The context is only valid for the duration of a {@link
* KeyedStateReaderFunction#readKey(Object, Context, Collector)} call. Do not store the context
* and use afterwards!
*/
public
|
KeyedStateReaderFunction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/query/ResultDescriptor.java
|
{
"start": 440,
"end": 665
}
|
/**
 * Describes a single query result, acting as a factory for the memento form of
 * that description.
 */
interface ____ {
	/**
	 * Resolve the descriptor into a memento capable of being stored in the
	 * {@link NamedObjectRepository}
	 */
	ResultMemento resolve(ResultSetMappingResolutionContext resolutionContext);
}
|
ResultDescriptor
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ClassNameTest.java
|
{
"start": 1274,
"end": 1742
}
|
class ____ {}
""")
.doTest();
}
@Test
public void packageInfo() {
compilationHelper
.addSourceLines(
"a/package-info.java",
"""
/** Documentation for our package */
package a;
""")
.addSourceLines(
"b/Test.java",
"""
// BUG: Diagnostic contains: Test inside Test.java, instead found: Green
package b;
|
Two
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/authentication/OAuth2LoginReactiveAuthenticationManager.java
|
{
"start": 3310,
"end": 6710
}
|
class ____ implements ReactiveAuthenticationManager {
private final ReactiveAuthenticationManager authorizationCodeManager;
private final ReactiveOAuth2UserService<OAuth2UserRequest, OAuth2User> userService;
private GrantedAuthoritiesMapper authoritiesMapper = ((authorities) -> authorities);
public OAuth2LoginReactiveAuthenticationManager(
ReactiveOAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient,
ReactiveOAuth2UserService<OAuth2UserRequest, OAuth2User> userService) {
Assert.notNull(accessTokenResponseClient, "accessTokenResponseClient cannot be null");
Assert.notNull(userService, "userService cannot be null");
this.authorizationCodeManager = new OAuth2AuthorizationCodeReactiveAuthenticationManager(
accessTokenResponseClient);
this.userService = userService;
}
@Override
public Mono<Authentication> authenticate(Authentication authentication) {
return Mono.defer(() -> {
OAuth2AuthorizationCodeAuthenticationToken token = (OAuth2AuthorizationCodeAuthenticationToken) authentication;
// Section 3.1.2.1 Authentication Request -
// https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest scope
// REQUIRED. OpenID Connect requests MUST contain the "openid" scope value.
if (token.getAuthorizationExchange().getAuthorizationRequest().getScopes().contains("openid")) {
// This is an OpenID Connect Authentication Request so return null
// and let OidcAuthorizationCodeReactiveAuthenticationManager handle it
// instead once one is created
return Mono.empty();
}
return this.authorizationCodeManager.authenticate(token)
.onErrorMap(OAuth2AuthorizationException.class,
(e) -> new OAuth2AuthenticationException(e.getError(), e.getError().toString(), e))
.cast(OAuth2AuthorizationCodeAuthenticationToken.class)
.flatMap(this::onSuccess);
});
}
/**
* Sets the {@link GrantedAuthoritiesMapper} used for mapping
* {@link OAuth2User#getAuthorities()} to a new set of authorities which will be
* associated to the {@link OAuth2LoginAuthenticationToken}.
* @param authoritiesMapper the {@link GrantedAuthoritiesMapper} used for mapping the
* user's authorities
* @since 5.4
*/
public final void setAuthoritiesMapper(GrantedAuthoritiesMapper authoritiesMapper) {
Assert.notNull(authoritiesMapper, "authoritiesMapper cannot be null");
this.authoritiesMapper = authoritiesMapper;
}
private Mono<OAuth2LoginAuthenticationToken> onSuccess(OAuth2AuthorizationCodeAuthenticationToken authentication) {
OAuth2AccessToken accessToken = authentication.getAccessToken();
Map<String, Object> additionalParameters = authentication.getAdditionalParameters();
OAuth2UserRequest userRequest = new OAuth2UserRequest(authentication.getClientRegistration(), accessToken,
additionalParameters);
return this.userService.loadUser(userRequest).map((oauth2User) -> {
Collection<? extends GrantedAuthority> mappedAuthorities = this.authoritiesMapper
.mapAuthorities(oauth2User.getAuthorities());
OAuth2LoginAuthenticationToken authenticationResult = new OAuth2LoginAuthenticationToken(
authentication.getClientRegistration(), authentication.getAuthorizationExchange(), oauth2User,
mappedAuthorities, accessToken, authentication.getRefreshToken());
return authenticationResult;
});
}
}
|
OAuth2LoginReactiveAuthenticationManager
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/transaction/reactive/TransactionContext.java
|
{
"start": 1233,
"end": 3469
}
|
class ____ {
private final @Nullable TransactionContext parent;
private final Map<Object, Object> resources = new LinkedHashMap<>();
private @Nullable Set<TransactionSynchronization> synchronizations;
private volatile @Nullable String currentTransactionName;
private volatile boolean currentTransactionReadOnly;
private volatile @Nullable Integer currentTransactionIsolationLevel;
private volatile boolean actualTransactionActive;
TransactionContext() {
this(null);
}
TransactionContext(@Nullable TransactionContext parent) {
this.parent = parent;
}
public @Nullable TransactionContext getParent() {
return this.parent;
}
public Map<Object, Object> getResources() {
return this.resources;
}
public void setSynchronizations(@Nullable Set<TransactionSynchronization> synchronizations) {
this.synchronizations = synchronizations;
}
public @Nullable Set<TransactionSynchronization> getSynchronizations() {
return this.synchronizations;
}
public void setCurrentTransactionName(@Nullable String currentTransactionName) {
this.currentTransactionName = currentTransactionName;
}
public @Nullable String getCurrentTransactionName() {
return this.currentTransactionName;
}
public void setCurrentTransactionReadOnly(boolean currentTransactionReadOnly) {
this.currentTransactionReadOnly = currentTransactionReadOnly;
}
public boolean isCurrentTransactionReadOnly() {
return this.currentTransactionReadOnly;
}
public void setCurrentTransactionIsolationLevel(@Nullable Integer currentTransactionIsolationLevel) {
this.currentTransactionIsolationLevel = currentTransactionIsolationLevel;
}
public @Nullable Integer getCurrentTransactionIsolationLevel() {
return this.currentTransactionIsolationLevel;
}
public void setActualTransactionActive(boolean actualTransactionActive) {
this.actualTransactionActive = actualTransactionActive;
}
public boolean isActualTransactionActive() {
return this.actualTransactionActive;
}
public void clear() {
this.synchronizations = null;
this.currentTransactionName = null;
this.currentTransactionReadOnly = false;
this.currentTransactionIsolationLevel = null;
this.actualTransactionActive = false;
}
}
|
TransactionContext
|
java
|
netty__netty
|
codec-classes-quic/src/main/java/io/netty/handler/codec/quic/QuicheQuicSslContext.java
|
{
"start": 2264,
"end": 21556
}
|
class ____ extends QuicSslContext {
private static final InternalLogger LOGGER = InternalLoggerFactory.getInstance(QuicheQuicSslContext.class);
// Use default that is supported in java 11 and earlier and also in OpenSSL / BoringSSL.
// See https://github.com/netty/netty-tcnative/issues/567
// See https://www.java.com/en/configure_crypto.html for ordering
private static final String[] DEFAULT_NAMED_GROUPS = { "x25519", "secp256r1", "secp384r1", "secp521r1" };
private static final String[] NAMED_GROUPS;
static {
String[] namedGroups = DEFAULT_NAMED_GROUPS;
Set<String> defaultConvertedNamedGroups = new LinkedHashSet<>(namedGroups.length);
for (int i = 0; i < namedGroups.length; i++) {
defaultConvertedNamedGroups.add(GroupsConverter.toBoringSSL(namedGroups[i]));
}
// Call Quic.isAvailable() first to ensure native lib is loaded.
// See https://github.com/netty/netty-incubator-codec-quic/issues/759
if (Quic.isAvailable()) {
final long sslCtx = BoringSSL.SSLContext_new();
try {
// Let's filter out any group that is not supported from the default.
Iterator<String> defaultGroupsIter = defaultConvertedNamedGroups.iterator();
while (defaultGroupsIter.hasNext()) {
if (BoringSSL.SSLContext_set1_groups_list(sslCtx, defaultGroupsIter.next()) == 0) {
// Not supported, let's remove it. This could for example be the case if we use
// fips and the configure group is not supported when using FIPS.
// See https://github.com/netty/netty-tcnative/issues/883
defaultGroupsIter.remove();
}
}
String groups = SystemPropertyUtil.get("jdk.tls.namedGroups", null);
if (groups != null) {
String[] nGroups = groups.split(",");
Set<String> supportedNamedGroups = new LinkedHashSet<>(nGroups.length);
Set<String> supportedConvertedNamedGroups = new LinkedHashSet<>(nGroups.length);
Set<String> unsupportedNamedGroups = new LinkedHashSet<>();
for (String namedGroup : nGroups) {
String converted = GroupsConverter.toBoringSSL(namedGroup);
// Will return 0 on failure.
if (BoringSSL.SSLContext_set1_groups_list(sslCtx, converted) == 0) {
unsupportedNamedGroups.add(namedGroup);
} else {
supportedConvertedNamedGroups.add(converted);
supportedNamedGroups.add(namedGroup);
}
}
if (supportedNamedGroups.isEmpty()) {
namedGroups = defaultConvertedNamedGroups.toArray(EmptyArrays.EMPTY_STRINGS);
LOGGER.info("All configured namedGroups are not supported: {}. Use default: {}.",
Arrays.toString(unsupportedNamedGroups.toArray(EmptyArrays.EMPTY_STRINGS)),
Arrays.toString(DEFAULT_NAMED_GROUPS));
} else {
String[] groupArray = supportedNamedGroups.toArray(EmptyArrays.EMPTY_STRINGS);
if (unsupportedNamedGroups.isEmpty()) {
LOGGER.info("Using configured namedGroups -D 'jdk.tls.namedGroup': {} ",
Arrays.toString(groupArray));
} else {
LOGGER.info("Using supported configured namedGroups: {}. Unsupported namedGroups: {}. ",
Arrays.toString(groupArray),
Arrays.toString(unsupportedNamedGroups.toArray(EmptyArrays.EMPTY_STRINGS)));
}
namedGroups = supportedConvertedNamedGroups.toArray(EmptyArrays.EMPTY_STRINGS);
}
} else {
namedGroups = defaultConvertedNamedGroups.toArray(EmptyArrays.EMPTY_STRINGS);
}
} finally {
BoringSSL.SSLContext_free(sslCtx);
}
}
NAMED_GROUPS = namedGroups;
}
final ClientAuth clientAuth;
private final boolean server;
@SuppressWarnings("deprecation")
private final ApplicationProtocolNegotiator apn;
private long sessionCacheSize;
private long sessionTimeout;
private final QuicheQuicSslSessionContext sessionCtx;
private final QuicheQuicSslEngineMap engineMap = new QuicheQuicSslEngineMap();
private final QuicClientSessionCache sessionCache;
private final BoringSSLSessionTicketCallback sessionTicketCallback = new BoringSSLSessionTicketCallback();
final NativeSslContext nativeSslContext;
QuicheQuicSslContext(boolean server, long sessionTimeout, long sessionCacheSize,
ClientAuth clientAuth, @Nullable TrustManagerFactory trustManagerFactory,
@Nullable KeyManagerFactory keyManagerFactory, String password,
@Nullable Mapping<? super String, ? extends QuicSslContext> mapping,
@Nullable Boolean earlyData, @Nullable BoringSSLKeylog keylog,
String[] applicationProtocols, Map.Entry<SslContextOption<?>, Object>... ctxOptions) {
Quic.ensureAvailability();
this.server = server;
this.clientAuth = server ? checkNotNull(clientAuth, "clientAuth") : ClientAuth.NONE;
final X509TrustManager trustManager;
if (trustManagerFactory == null) {
try {
trustManagerFactory =
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init((KeyStore) null);
trustManager = chooseTrustManager(trustManagerFactory);
} catch (Exception e) {
throw new IllegalStateException(e);
}
} else {
trustManager = chooseTrustManager(trustManagerFactory);
}
final X509ExtendedKeyManager keyManager;
if (keyManagerFactory == null) {
if (server) {
throw new IllegalArgumentException("No KeyManagerFactory");
}
keyManager = null;
} else {
keyManager = chooseKeyManager(keyManagerFactory);
}
String[] groups = NAMED_GROUPS;
String[] sigalgs = EmptyArrays.EMPTY_STRINGS;
Map<String, String> serverKeyTypes = null;
Set<String> clientKeyTypes = null;
if (ctxOptions != null) {
for (Map.Entry<SslContextOption<?>, Object> ctxOpt : ctxOptions) {
SslContextOption<?> option = ctxOpt.getKey();
if (option == BoringSSLContextOption.GROUPS) {
String[] groupsArray = (String[]) ctxOpt.getValue();
Set<String> groupsSet = new LinkedHashSet<String>(groupsArray.length);
for (String group : groupsArray) {
groupsSet.add(GroupsConverter.toBoringSSL(group));
}
groups = groupsSet.toArray(EmptyArrays.EMPTY_STRINGS);
} else if (option == BoringSSLContextOption.SIGNATURE_ALGORITHMS) {
String[] sigalgsArray = (String[]) ctxOpt.getValue();
Set<String> sigalgsSet = new LinkedHashSet<String>(sigalgsArray.length);
for (String sigalg : sigalgsArray) {
sigalgsSet.add(sigalg);
}
sigalgs = sigalgsSet.toArray(EmptyArrays.EMPTY_STRINGS);
} else if (option == BoringSSLContextOption.CLIENT_KEY_TYPES) {
clientKeyTypes = (Set<String>) ctxOpt.getValue();
} else if (option == BoringSSLContextOption.SERVER_KEY_TYPES) {
serverKeyTypes = (Map<String, String>) ctxOpt.getValue();
} else {
LOGGER.debug("Skipping unsupported " + SslContextOption.class.getSimpleName()
+ ": " + ctxOpt.getKey());
}
}
}
final BoringSSLPrivateKeyMethod privateKeyMethod;
if (keyManagerFactory instanceof BoringSSLKeylessManagerFactory) {
privateKeyMethod = new BoringSSLAsyncPrivateKeyMethodAdapter(engineMap,
((BoringSSLKeylessManagerFactory) keyManagerFactory).privateKeyMethod);
} else {
privateKeyMethod = null;
}
sessionCache = server ? null : new QuicClientSessionCache();
int verifyMode = server ? boringSSLVerifyModeForServer(this.clientAuth) : BoringSSL.SSL_VERIFY_PEER;
nativeSslContext = new NativeSslContext(BoringSSL.SSLContext_new(server, applicationProtocols,
new BoringSSLHandshakeCompleteCallback(engineMap),
new BoringSSLCertificateCallback(engineMap, keyManager, password, serverKeyTypes, clientKeyTypes),
new BoringSSLCertificateVerifyCallback(engineMap, trustManager),
mapping == null ? null : new BoringSSLTlsextServernameCallback(engineMap, mapping),
keylog == null ? null : new BoringSSLKeylogCallback(engineMap, keylog),
server ? null : new BoringSSLSessionCallback(engineMap, sessionCache), privateKeyMethod,
sessionTicketCallback, verifyMode,
BoringSSL.subjectNames(trustManager.getAcceptedIssuers())));
boolean success = false;
try {
if (groups.length > 0 && BoringSSL.SSLContext_set1_groups_list(nativeSslContext.ctx, groups) == 0) {
String msg = "failed to set curves / groups list: " + Arrays.toString(groups);
String lastError = BoringSSL.ERR_last_error();
if (lastError != null) {
// We have some more details about why the operations failed, include these into the message.
msg += ". " + lastError;
}
throw new IllegalStateException(msg);
}
if (sigalgs.length > 0 && BoringSSL.SSLContext_set1_sigalgs_list(nativeSslContext.ctx, sigalgs) == 0) {
String msg = "failed to set signature algorithm list: " + Arrays.toString(sigalgs);
String lastError = BoringSSL.ERR_last_error();
if (lastError != null) {
// We have some more details about why the operations failed, include these into the message.
msg += ". " + lastError;
}
throw new IllegalStateException(msg);
}
apn = new QuicheQuicApplicationProtocolNegotiator(applicationProtocols);
if (this.sessionCache != null) {
// Cache is handled via our own implementation.
this.sessionCache.setSessionCacheSize((int) sessionCacheSize);
this.sessionCache.setSessionTimeout((int) sessionTimeout);
} else {
// Cache is handled by BoringSSL internally
BoringSSL.SSLContext_setSessionCacheSize(
nativeSslContext.address(), sessionCacheSize);
this.sessionCacheSize = sessionCacheSize;
BoringSSL.SSLContext_setSessionCacheTimeout(
nativeSslContext.address(), sessionTimeout);
this.sessionTimeout = sessionTimeout;
}
if (earlyData != null) {
BoringSSL.SSLContext_set_early_data_enabled(nativeSslContext.address(), earlyData);
}
sessionCtx = new QuicheQuicSslSessionContext(this);
success = true;
} finally {
if (!success) {
nativeSslContext.release();
}
}
}
private X509ExtendedKeyManager chooseKeyManager(KeyManagerFactory keyManagerFactory) {
for (KeyManager manager: keyManagerFactory.getKeyManagers()) {
if (manager instanceof X509ExtendedKeyManager) {
return (X509ExtendedKeyManager) manager;
}
}
throw new IllegalArgumentException("No X509ExtendedKeyManager included");
}
private static X509TrustManager chooseTrustManager(TrustManagerFactory trustManagerFactory) {
for (TrustManager manager: trustManagerFactory.getTrustManagers()) {
if (manager instanceof X509TrustManager) {
return (X509TrustManager) manager;
}
}
throw new IllegalArgumentException("No X509TrustManager included");
}
static X509Certificate @Nullable [] toX509Certificates0(@Nullable File file) throws CertificateException {
return toX509Certificates(file);
}
static PrivateKey toPrivateKey0(@Nullable File keyFile, @Nullable String keyPassword) throws Exception {
return toPrivateKey(keyFile, keyPassword);
}
static TrustManagerFactory buildTrustManagerFactory0(
X509Certificate @Nullable [] certCollection)
throws NoSuchAlgorithmException, CertificateException, KeyStoreException, IOException {
return buildTrustManagerFactory(certCollection, null, null);
}
private static int boringSSLVerifyModeForServer(ClientAuth mode) {
switch (mode) {
case NONE:
return BoringSSL.SSL_VERIFY_NONE;
case REQUIRE:
return BoringSSL.SSL_VERIFY_PEER | BoringSSL.SSL_VERIFY_FAIL_IF_NO_PEER_CERT;
case OPTIONAL:
return BoringSSL.SSL_VERIFY_PEER;
default:
throw new Error("Unexpected mode: " + mode);
}
}
@Nullable
QuicheQuicConnection createConnection(LongFunction<Long> connectionCreator, QuicheQuicSslEngine engine) {
nativeSslContext.retain();
long ssl = BoringSSL.SSL_new(nativeSslContext.address(), isServer(), engine.tlsHostName);
engineMap.put(ssl, engine);
long connection = connectionCreator.apply(ssl);
if (connection == -1) {
engineMap.remove(ssl);
// We retained before but as we don't create a QuicheQuicConnection and transfer ownership we need to
// explict call release again here.
nativeSslContext.release();
return null;
}
// The connection will call nativeSslContext.release() once it is freed.
return new QuicheQuicConnection(connection, ssl, engine, nativeSslContext);
}
/**
* Add the given engine to this context
*
* @param engine the engine to add.
* @return the pointer address of this context.
*/
long add(QuicheQuicSslEngine engine) {
nativeSslContext.retain();
engine.connection.reattach(nativeSslContext);
engineMap.put(engine.connection.ssl, engine);
return nativeSslContext.address();
}
/**
* Remove the given engine from this context.
*
* @param engine the engine to remove.
*/
void remove(QuicheQuicSslEngine engine) {
QuicheQuicSslEngine removed = engineMap.remove(engine.connection.ssl);
assert removed == null || removed == engine;
engine.removeSessionFromCacheIfInvalid();
}
@Nullable
QuicClientSessionCache getSessionCache() {
return sessionCache;
}
@Override
public boolean isClient() {
return !server;
}
@Override
public List<String> cipherSuites() {
return Arrays.asList("TLS_AES_128_GCM_SHA256", "TLS_AES_256_GCM_SHA384");
}
@Override
public long sessionCacheSize() {
if (sessionCache != null) {
return sessionCache.getSessionCacheSize();
} else {
synchronized (this) {
return sessionCacheSize;
}
}
}
@Override
public long sessionTimeout() {
if (sessionCache != null) {
return sessionCache.getSessionTimeout();
} else {
synchronized (this) {
return sessionTimeout;
}
}
}
@Override
public ApplicationProtocolNegotiator applicationProtocolNegotiator() {
return apn;
}
@Override
public QuicSslEngine newEngine(ByteBufAllocator alloc) {
return new QuicheQuicSslEngine(this, null, -1);
}
@Override
public QuicSslEngine newEngine(ByteBufAllocator alloc, String peerHost, int peerPort) {
return new QuicheQuicSslEngine(this, peerHost, peerPort);
}
@Override
public QuicSslSessionContext sessionContext() {
return sessionCtx;
}
@Override
protected SslHandler newHandler(ByteBufAllocator alloc, boolean startTls) {
throw new UnsupportedOperationException();
}
@Override
public SslHandler newHandler(ByteBufAllocator alloc, Executor delegatedTaskExecutor) {
throw new UnsupportedOperationException();
}
@Override
protected SslHandler newHandler(ByteBufAllocator alloc, boolean startTls, Executor executor) {
throw new UnsupportedOperationException();
}
@Override
protected SslHandler newHandler(ByteBufAllocator alloc, String peerHost, int peerPort, boolean startTls) {
throw new UnsupportedOperationException();
}
@Override
public SslHandler newHandler(ByteBufAllocator alloc, String peerHost, int peerPort,
Executor delegatedTaskExecutor) {
throw new UnsupportedOperationException();
}
@Override
protected SslHandler newHandler(ByteBufAllocator alloc, String peerHost, int peerPort,
boolean startTls, Executor delegatedTaskExecutor) {
throw new UnsupportedOperationException();
}
@Override
protected void finalize() throws Throwable {
try {
nativeSslContext.release();
} finally {
super.finalize();
}
}
void setSessionTimeout(int seconds) throws IllegalArgumentException {
if (sessionCache != null) {
sessionCache.setSessionTimeout(seconds);
} else {
BoringSSL.SSLContext_setSessionCacheTimeout(nativeSslContext.address(), seconds);
this.sessionTimeout = seconds;
}
}
void setSessionCacheSize(int size) throws IllegalArgumentException {
if (sessionCache != null) {
sessionCache.setSessionCacheSize(size);
} else {
BoringSSL.SSLContext_setSessionCacheSize(nativeSslContext.address(), size);
sessionCacheSize = size;
}
}
void setSessionTicketKeys(SslSessionTicketKey @Nullable [] ticketKeys) {
sessionTicketCallback.setSessionTicketKeys(ticketKeys);
BoringSSL.SSLContext_setSessionTicketKeys(
nativeSslContext.address(), ticketKeys != null && ticketKeys.length != 0);
}
@SuppressWarnings("deprecation")
private static final
|
QuicheQuicSslContext
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/sequence/SequenceInformationExtractorDerbyDatabaseImpl.java
|
{
"start": 276,
"end": 951
}
|
/**
 * Derby-specific sequence information extractor: maps the generic column names used by
 * the legacy extractor onto Derby's {@code SYS.SYSSEQUENCES} column names. Derby exposes
 * no catalog column for sequences, hence {@code sequenceCatalogColumn()} returns null.
 */
class SequenceInformationExtractorDerbyDatabaseImpl extends SequenceInformationExtractorLegacyImpl {
	/**
	 * Singleton access
	 */
	public static final SequenceInformationExtractorDerbyDatabaseImpl INSTANCE = new SequenceInformationExtractorDerbyDatabaseImpl();

	@Override
	protected String sequenceNameColumn() {
		return "sequencename";
	}

	@Override
	protected String sequenceCatalogColumn() {
		// Derby does not expose a catalog for sequences.
		return null;
	}

	@Override
	protected String sequenceStartValueColumn() {
		return "startvalue";
	}

	@Override
	protected String sequenceMinValueColumn() {
		return "minimumvalue";
	}

	@Override
	protected String sequenceMaxValueColumn() {
		return "maximumvalue";
	}
}
|
SequenceInformationExtractorDerbyDatabaseImpl
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/bootstrap/builders/AbstractReferenceBuilder.java
|
{
"start": 1022,
"end": 4122
}
|
/**
 * Base builder for reference-side configs, carrying the settings shared by all reference
 * builders (check/init/generic/injvm/lazy/reconnect/sticky/version/group). Subclasses
 * supply the concrete config type {@code T} and return themselves as {@code B} for fluent
 * chaining.
 */
class AbstractReferenceBuilder<
        T extends AbstractReferenceConfig, B extends AbstractReferenceBuilder<T, B>>
        extends AbstractInterfaceBuilder<T, B> {

    /**
     * Check if service provider exists, if not exists, it will be fast fail
     */
    protected Boolean check;

    /**
     * Whether to eagle-init
     */
    protected Boolean init;

    /**
     * Whether to use generic interface
     */
    protected String generic;

    /**
     * Whether to find reference's instance from the current JVM
     */
    protected Boolean injvm;

    /**
     * Lazy create connection
     */
    protected Boolean lazy;

    protected String reconnect;

    protected Boolean sticky;

    /**
     * The remote service version the customer side will reference
     */
    protected String version;

    /**
     * The remote service group the customer side will reference
     */
    protected String group;

    public B check(Boolean check) {
        this.check = check;
        return getThis();
    }

    public B init(Boolean init) {
        this.init = init;
        return getThis();
    }

    public B generic(String generic) {
        this.generic = generic;
        return getThis();
    }

    public B generic(Boolean generic) {
        // Stored as a String internally; null clears the setting.
        if (generic != null) {
            this.generic = generic.toString();
        } else {
            this.generic = null;
        }
        return getThis();
    }

    /**
     * @param injvm
     * @see AbstractInterfaceBuilder#scope(String)
     * @deprecated instead, use the parameter <b>scope</b> to judge if it's in jvm, scope=local
     */
    @Deprecated
    public B injvm(Boolean injvm) {
        this.injvm = injvm;
        return getThis();
    }

    public B lazy(Boolean lazy) {
        this.lazy = lazy;
        return getThis();
    }

    public B reconnect(String reconnect) {
        this.reconnect = reconnect;
        return getThis();
    }

    public B sticky(Boolean sticky) {
        this.sticky = sticky;
        return getThis();
    }

    public B version(String version) {
        this.version = version;
        return getThis();
    }

    public B group(String group) {
        this.group = group;
        return getThis();
    }

    /** Copies every non-null / non-empty builder setting onto the config instance. */
    @Override
    public void build(T instance) {
        super.build(instance);
        if (check != null) {
            instance.setCheck(check);
        }
        if (init != null) {
            instance.setInit(init);
        }
        if (!StringUtils.isEmpty(generic)) {
            instance.setGeneric(generic);
        }
        if (injvm != null) {
            instance.setInjvm(injvm);
        }
        if (lazy != null) {
            instance.setLazy(lazy);
        }
        if (!StringUtils.isEmpty(reconnect)) {
            instance.setReconnect(reconnect);
        }
        if (sticky != null) {
            instance.setSticky(sticky);
        }
        if (!StringUtils.isEmpty(version)) {
            instance.setVersion(version);
        }
        if (!StringUtils.isEmpty(group)) {
            instance.setGroup(group);
        }
    }
}
|
AbstractReferenceBuilder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java
|
{
"start": 743,
"end": 4314
}
|
class ____ extends AbstractVector implements DoubleVector {
static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantDoubleVector.class);
private final double value;
ConstantDoubleVector(double value, int positionCount, BlockFactory blockFactory) {
super(positionCount, blockFactory);
this.value = value;
}
@Override
public double getDouble(int position) {
return value;
}
@Override
public DoubleBlock asBlock() {
return new DoubleVectorBlock(this);
}
@Override
public DoubleVector filter(int... positions) {
return blockFactory().newConstantDoubleVector(value, positions.length);
}
@Override
public DoubleBlock keepMask(BooleanVector mask) {
if (getPositionCount() == 0) {
incRef();
return new DoubleVectorBlock(this);
}
if (mask.isConstant()) {
if (mask.getBoolean(0)) {
incRef();
return new DoubleVectorBlock(this);
}
return (DoubleBlock) blockFactory().newConstantNullBlock(getPositionCount());
}
try (DoubleBlock.Builder builder = blockFactory().newDoubleBlockBuilder(getPositionCount())) {
// TODO if X-ArrayBlock used BooleanVector for it's null mask then we could shuffle references here.
for (int p = 0; p < getPositionCount(); p++) {
if (mask.getBoolean(p)) {
builder.appendDouble(value);
} else {
builder.appendNull();
}
}
return builder.build();
}
}
@Override
public ReleasableIterator<DoubleBlock> lookup(IntBlock positions, ByteSizeValue targetBlockSize) {
if (positions.getPositionCount() == 0) {
return ReleasableIterator.empty();
}
IntVector positionsVector = positions.asVector();
if (positionsVector == null) {
return new DoubleLookup(asBlock(), positions, targetBlockSize);
}
int min = positionsVector.min();
if (min < 0) {
throw new IllegalArgumentException("invalid position [" + min + "]");
}
if (min > getPositionCount()) {
return ReleasableIterator.single((DoubleBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount()));
}
if (positionsVector.max() < getPositionCount()) {
return ReleasableIterator.single(positions.blockFactory().newConstantDoubleBlockWith(value, positions.getPositionCount()));
}
return new DoubleLookup(asBlock(), positions, targetBlockSize);
}
@Override
public ElementType elementType() {
    return ElementType.DOUBLE;
}
@Override
public boolean isConstant() {
    // Always true for this implementation: one stored value covers all positions.
    return true;
}
@Override
public DoubleVector deepCopy(BlockFactory blockFactory) {
    // A fresh constant vector on the target factory is a complete copy of this one.
    return blockFactory.newConstantDoubleVector(value, getPositionCount());
}
@Override
public long ramBytesUsed() {
    // Fixed shallow size; there is no per-position storage to account for.
    return RAM_BYTES_USED;
}
@Override
public boolean equals(Object obj) {
    // Equality is defined across all DoubleVector implementations, not just constant ones.
    if (obj instanceof DoubleVector that) {
        return DoubleVector.equals(this, that);
    }
    return false;
}
@Override
public int hashCode() {
    // Shared hash so equal vectors of any implementation hash alike (equals/hashCode contract).
    return DoubleVector.hash(this);
}
// Human-readable summary, e.g. "ConstantDoubleVector[positions=3, value=1.0]".
// Fix: added the missing @Override for consistency with every other override in this class
// (catches signature drift at compile time).
@Override
public String toString() {
    return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']';
}
}
|
ConstantDoubleVector
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jetty/src/main/java/org/springframework/boot/jetty/autoconfigure/servlet/JettyServletWebServerAutoConfiguration.java
|
{
"start": 3764,
"end": 4672
}
|
// Auto-configuration beans wiring WebSocket support into the Jetty servlet web server.
class ____ {
    // Bean name "websocketServletWebServerCustomizer" is part of the contract:
    // @ConditionalOnMissingBean backs off on that exact name.
    @Bean
    @ConditionalOnMissingBean(name = "websocketServletWebServerCustomizer")
    WebSocketJettyServletWebServerFactoryCustomizer websocketServletWebServerCustomizer() {
        return new WebSocketJettyServletWebServerFactoryCustomizer();
    }
    // Registers Jetty's WebSocketUpgradeFilter for every request path when not deployed
    // as a war (war deployments are excluded by @ConditionalOnNotWarDeployment).
    @Bean
    @ConditionalOnNotWarDeployment
    @Order(Ordered.LOWEST_PRECEDENCE)
    @ConditionalOnMissingBean(name = "websocketUpgradeFilterWebServerCustomizer")
    WebServerFactoryCustomizer<JettyServletWebServerFactory> websocketUpgradeFilterWebServerCustomizer() {
        return (factory) -> {
            factory.addInitializers((servletContext) -> {
                Dynamic registration = servletContext.addFilter(WebSocketUpgradeFilter.class.getName(),
                        new WebSocketUpgradeFilter());
                // Upgrade handling may span async dispatches.
                registration.setAsyncSupported(true);
                // REQUEST dispatches only; isMatchAfter=false maps this filter ahead of declared mappings.
                registration.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), false, "/*");
            });
        };
    }
}
}
|
JettyWebSocketConfiguration
|
java
|
grpc__grpc-java
|
stub/src/main/java/io/grpc/stub/ServerCalls.java
|
{
"start": 11203,
"end": 11755
}
|
class ____<ReqT, RespT>
extends ServerCallStreamObserver<RespT> {
final ServerCall<ReqT, RespT> call;
private final boolean serverStreamingOrBidi;
volatile boolean cancelled;
private boolean frozen;
private boolean autoRequestEnabled = true;
private boolean sentHeaders;
private Runnable onReadyHandler;
private Runnable onCancelHandler;
private boolean aborted = false;
private boolean completed = false;
private Runnable onCloseHandler;
// Non private to avoid synthetic
|
ServerCallStreamObserverImpl
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ChatScriptEndpointBuilderFactory.java
|
{
"start": 3386,
"end": 5892
}
|
// Advanced (rarely used) producer options for the ChatScript endpoint DSL.
interface ____
        extends
            EndpointProducerBuilder {
    // Narrows back to the basic builder view over the same endpoint configuration.
    default ChatScriptEndpointBuilder basic() {
        return (ChatScriptEndpointBuilder) this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: producer (advanced)
     *
     * @param lazyStartProducer the value to set
     * @return the dsl builder
     */
    default AdvancedChatScriptEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: producer (advanced)
     *
     * @param lazyStartProducer the value to set
     * @return the dsl builder
     */
    // String overload: value is converted to boolean at runtime (supports property placeholders).
    default AdvancedChatScriptEndpointBuilder lazyStartProducer(String lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
}
public
|
AdvancedChatScriptEndpointBuilder
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/scripting/support/StandardScriptFactory.java
|
{
"start": 1907,
"end": 9190
}
|
// JSR-223 based ScriptFactory: evaluates scripts through the javax.script API and, when
// required, adapts the evaluated object to requested Java interfaces via Invocable.
class ____ implements ScriptFactory, BeanClassLoaderAware {
    private final @Nullable String scriptEngineName;
    private final String scriptSourceLocator;
    private final Class<?> @Nullable [] scriptInterfaces;
    private @Nullable ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
    // Lazily resolved on first evaluation; volatile for safe publication across threads.
    private volatile @Nullable ScriptEngine scriptEngine;
    /**
     * Create a new StandardScriptFactory for the given script source.
     * @param scriptSourceLocator a locator that points to the source of the script.
     * Interpreted by the post-processor that actually creates the script.
     */
    public StandardScriptFactory(String scriptSourceLocator) {
        this(null, scriptSourceLocator, (Class<?>[]) null);
    }
    /**
     * Create a new StandardScriptFactory for the given script source.
     * @param scriptSourceLocator a locator that points to the source of the script.
     * Interpreted by the post-processor that actually creates the script.
     * @param scriptInterfaces the Java interfaces that the scripted object
     * is supposed to implement
     */
    public StandardScriptFactory(String scriptSourceLocator, Class<?>... scriptInterfaces) {
        this(null, scriptSourceLocator, scriptInterfaces);
    }
    /**
     * Create a new StandardScriptFactory for the given script source.
     * @param scriptEngineName the name of the JSR-223 ScriptEngine to use
     * (explicitly given instead of inferred from the script source)
     * @param scriptSourceLocator a locator that points to the source of the script.
     * Interpreted by the post-processor that actually creates the script.
     */
    public StandardScriptFactory(String scriptEngineName, String scriptSourceLocator) {
        this(scriptEngineName, scriptSourceLocator, (Class<?>[]) null);
    }
    /**
     * Create a new StandardScriptFactory for the given script source.
     * @param scriptEngineName the name of the JSR-223 ScriptEngine to use
     * (explicitly given instead of inferred from the script source)
     * @param scriptSourceLocator a locator that points to the source of the script.
     * Interpreted by the post-processor that actually creates the script.
     * @param scriptInterfaces the Java interfaces that the scripted object
     * is supposed to implement
     */
    public StandardScriptFactory(
            @Nullable String scriptEngineName, String scriptSourceLocator, Class<?> @Nullable ... scriptInterfaces) {
        Assert.hasText(scriptSourceLocator, "'scriptSourceLocator' must not be empty");
        this.scriptEngineName = scriptEngineName;
        this.scriptSourceLocator = scriptSourceLocator;
        this.scriptInterfaces = scriptInterfaces;
    }
    @Override
    public void setBeanClassLoader(ClassLoader classLoader) {
        this.beanClassLoader = classLoader;
    }
    @Override
    public String getScriptSourceLocator() {
        return this.scriptSourceLocator;
    }
    @Override
    public Class<?> @Nullable [] getScriptInterfaces() {
        return this.scriptInterfaces;
    }
    // JSR-223 scripts implement interfaces directly (or via Invocable); no config interface needed.
    @Override
    public boolean requiresConfigInterface() {
        return false;
    }
    /**
     * Load and parse the script via JSR-223's ScriptEngine.
     */
    @Override
    public @Nullable Object getScriptedObject(ScriptSource scriptSource, Class<?> @Nullable ... actualInterfaces)
            throws IOException, ScriptCompilationException {
        Object script = evaluateScript(scriptSource);
        if (!ObjectUtils.isEmpty(actualInterfaces)) {
            // Adapt only if the evaluated object/class does not already satisfy every requested interface.
            boolean adaptationRequired = false;
            for (Class<?> requestedIfc : actualInterfaces) {
                if (script instanceof Class<?> clazz ? !requestedIfc.isAssignableFrom(clazz) :
                        !requestedIfc.isInstance(script)) {
                    adaptationRequired = true;
                    break;
                }
            }
            if (adaptationRequired) {
                script = adaptToInterfaces(script, scriptSource, actualInterfaces);
            }
        }
        // If the script evaluated to a Class, instantiate it via its accessible default constructor,
        // mapping each reflective failure mode to a descriptive ScriptCompilationException.
        if (script instanceof Class<?> scriptClass) {
            try {
                return ReflectionUtils.accessibleConstructor(scriptClass).newInstance();
            }
            catch (NoSuchMethodException ex) {
                throw new ScriptCompilationException(
                        "No default constructor on script class: " + scriptClass.getName(), ex);
            }
            catch (InstantiationException ex) {
                throw new ScriptCompilationException(
                        scriptSource, "Unable to instantiate script class: " + scriptClass.getName(), ex);
            }
            catch (IllegalAccessException | InaccessibleObjectException ex) {
                throw new ScriptCompilationException(
                        scriptSource, "Could not access script constructor: " + scriptClass.getName(), ex);
            }
            catch (InvocationTargetException ex) {
                throw new ScriptCompilationException(
                        "Failed to invoke script constructor: " + scriptClass.getName(), ex.getTargetException());
            }
        }
        return script;
    }
    // Evaluates the script source, resolving and caching the engine on first use.
    // The volatile read/write may race with concurrent callers; resolving twice is harmless.
    protected Object evaluateScript(ScriptSource scriptSource) {
        try {
            ScriptEngine scriptEngine = this.scriptEngine;
            if (scriptEngine == null) {
                scriptEngine = retrieveScriptEngine(scriptSource);
                if (scriptEngine == null) {
                    throw new IllegalStateException("Could not determine script engine for " + scriptSource);
                }
                this.scriptEngine = scriptEngine;
            }
            return scriptEngine.eval(scriptSource.getScriptAsString());
        }
        catch (Exception ex) {
            throw new ScriptCompilationException(scriptSource, ex);
        }
    }
    // Resolution order: explicit engine name if configured, otherwise infer from the
    // resource's file extension; null when neither yields an engine.
    protected @Nullable ScriptEngine retrieveScriptEngine(ScriptSource scriptSource) {
        ScriptEngineManager scriptEngineManager = new ScriptEngineManager(this.beanClassLoader);
        if (this.scriptEngineName != null) {
            return StandardScriptUtils.retrieveEngineByName(scriptEngineManager, this.scriptEngineName);
        }
        if (scriptSource instanceof ResourceScriptSource resourceScriptSource) {
            String filename = resourceScriptSource.getResource().getFilename();
            if (filename != null) {
                String extension = StringUtils.getFilenameExtension(filename);
                if (extension != null) {
                    ScriptEngine engine = scriptEngineManager.getEngineByExtension(extension);
                    if (engine != null) {
                        return engine;
                    }
                }
            }
        }
        return null;
    }
    // Adapts the evaluated script object to the requested interface(s). Multiple interfaces
    // are merged into one composite interface; the engine must implement Invocable.
    protected @Nullable Object adaptToInterfaces(
            @Nullable Object script, ScriptSource scriptSource, Class<?>... actualInterfaces) {
        Class<?> adaptedIfc;
        if (actualInterfaces.length == 1) {
            adaptedIfc = actualInterfaces[0];
        }
        else {
            adaptedIfc = ClassUtils.createCompositeInterface(actualInterfaces, this.beanClassLoader);
        }
        if (adaptedIfc != null) {
            ScriptEngine scriptEngine = this.scriptEngine;
            if (!(scriptEngine instanceof Invocable invocable)) {
                throw new ScriptCompilationException(scriptSource,
                        "ScriptEngine must implement Invocable in order to adapt it to an interface: " + scriptEngine);
            }
            if (script != null) {
                script = invocable.getInterface(script, adaptedIfc);
            }
            // Fall back to engine-level (global) functions when no object-level adaptation succeeded.
            if (script == null) {
                script = invocable.getInterface(adaptedIfc);
                if (script == null) {
                    throw new ScriptCompilationException(scriptSource,
                            "Could not adapt script to interface [" + adaptedIfc.getName() + "]");
                }
            }
        }
        return script;
    }
    // Type cannot be determined up front for JSR-223 scripts; null signals "unknown".
    @Override
    public @Nullable Class<?> getScriptedObjectType(ScriptSource scriptSource)
            throws IOException, ScriptCompilationException {
        return null;
    }
    @Override
    public boolean requiresScriptedObjectRefresh(ScriptSource scriptSource) {
        return scriptSource.isModified();
    }
    @Override
    public String toString() {
        return "StandardScriptFactory: script source locator [" + this.scriptSourceLocator + "]";
    }
}
|
StandardScriptFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java
|
{
"start": 2664,
"end": 4661
}
|
class ____ extends BinarySpatialFunction
implements
EvaluatorMapper,
SpatialEvaluatorFactory.SpatialSourceSupplier,
TranslationAware,
SurrogateExpression {
// Convenience constructor for relations without grid support.
protected SpatialRelatesFunction(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) {
    super(source, left, right, leftDocValues, rightDocValues, false, false);
}
// Full constructor: supportsGrid enables grid-typed arguments in the base class.
protected SpatialRelatesFunction(
    Source source,
    Expression left,
    Expression right,
    boolean leftDocValues,
    boolean rightDocValues,
    boolean supportsGrid
) {
    super(source, left, right, leftDocValues, rightDocValues, false, supportsGrid);
}
// Stream deserialization counterpart of the two constructors above.
protected SpatialRelatesFunction(StreamInput in, boolean leftDocValues, boolean rightDocValues) throws IOException {
    super(in, leftDocValues, rightDocValues, false, false);
}
protected SpatialRelatesFunction(StreamInput in, boolean leftDocValues, boolean rightDocValues, boolean supportsGrid)
    throws IOException {
    super(in, leftDocValues, rightDocValues, false, supportsGrid);
}
// The Lucene shape relation (e.g. INTERSECTS) this function pushes down / evaluates.
public abstract ShapeRelation queryRelation();
// All spatial relations answer true/false.
@Override
public DataType dataType() {
    return DataType.BOOLEAN;
}
/**
 * Produce a map of rules defining combinations of incoming types to the evaluator factory that should be used.
 */
abstract Map<SpatialEvaluatorFactory.SpatialEvaluatorKey, SpatialEvaluatorFactory<?, ?>> evaluatorRules();
/**
 * Some spatial functions can replace themselves with alternatives that are more efficient for certain cases.
 */
@Override
public SpatialRelatesFunction surrogate() {
    // Default: no surrogate; subclasses override when a cheaper equivalent exists.
    return this;
}
@Override
public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
    // Selects the concrete evaluator from evaluatorRules() based on the argument types.
    return SpatialEvaluatorFactory.makeSpatialEvaluator(this, evaluatorRules(), toEvaluator);
}
protected static
|
SpatialRelatesFunction
|
java
|
alibaba__nacos
|
common/src/test/java/com/alibaba/nacos/common/http/param/HeaderTest.java
|
{
"start": 1070,
"end": 5180
}
|
// Unit tests for the HTTP Header parameter holder: defaults, case-insensitive keys,
// bulk population, original-response bookkeeping, charset resolution, and clearing.
class ____ {
    @Test
    void testSetContentType() {
        Header h = Header.newInstance();
        // A null content type falls back to the JSON default.
        h.setContentType(null);
        assertEquals(MediaType.APPLICATION_JSON, h.getValue(HttpHeaderConsts.CONTENT_TYPE));
        h.setContentType(MediaType.MULTIPART_FORM_DATA);
        assertEquals(MediaType.MULTIPART_FORM_DATA, h.getValue(HttpHeaderConsts.CONTENT_TYPE));
    }
    @Test
    void testHeaderKyeIgnoreCase() {
        // Lookup is case-insensitive with respect to the stored key.
        Header h = Header.newInstance();
        h.addParam("Content-Encoding", "gzip");
        assertEquals("gzip", h.getValue("content-encoding"));
    }
    @Test
    void testToList() {
        // toList() flattens to [key, value, key, value, ...]: each value follows its key.
        List<String> flattened = Header.newInstance().toList();
        assertTrue(flattened.contains(HttpHeaderConsts.CONTENT_TYPE));
        assertTrue(flattened.contains(MediaType.APPLICATION_JSON));
        assertEquals(1, flattened.indexOf(MediaType.APPLICATION_JSON) - flattened.indexOf(HttpHeaderConsts.CONTENT_TYPE));
        assertTrue(flattened.contains(HttpHeaderConsts.ACCEPT_CHARSET));
        assertTrue(flattened.contains("UTF-8"));
        assertEquals(1, flattened.indexOf("UTF-8") - flattened.indexOf(HttpHeaderConsts.ACCEPT_CHARSET));
    }
    @Test
    void testAddAllForMap() {
        Map<String, String> source = new HashMap<>();
        source.put("test1", "test2");
        source.put("test3", "test4");
        Header h = Header.newInstance();
        h.addAll(source);
        assertEquals("test2", h.getValue("test1"));
        assertEquals("test4", h.getValue("test3"));
        // Two defaults plus the two entries added above.
        assertEquals(4, h.getHeader().size());
    }
    @Test
    void testAddAllForList() {
        // Flattened key/value pairs, in key-then-value order.
        List<String> pairs = new ArrayList<>(4);
        pairs.add("test1");
        pairs.add("test2");
        pairs.add("test3");
        pairs.add("test4");
        Header h = Header.newInstance();
        h.addAll(pairs);
        assertEquals("test2", h.getValue("test1"));
        assertEquals("test4", h.getValue("test3"));
        assertEquals(4, h.getHeader().size());
    }
    @Test
    void testAddAllForListWithWrongLength() {
        // An odd number of elements cannot form key/value pairs and must be rejected.
        assertThrows(IllegalArgumentException.class, () -> {
            List<String> odd = new ArrayList<>(3);
            odd.add("test1");
            odd.add("test2");
            odd.add("test3");
            Header.newInstance().addAll(odd);
        });
    }
    @Test
    void testAddOriginalResponseHeader() {
        List<String> values = new ArrayList<>(4);
        values.add("test1");
        values.add("test2");
        values.add("test3");
        values.add("test4");
        Header h = Header.newInstance();
        h.addOriginalResponseHeader("test", values);
        // The first value doubles as the plain header value; the full list is kept separately.
        assertEquals("test1", h.getValue("test"));
        assertEquals(1, h.getOriginalResponseHeader().size());
        assertEquals(values, h.getOriginalResponseHeader().get("test"));
    }
    @Test
    void testGetCharset() {
        Header h = Header.newInstance();
        assertEquals("UTF-8", h.getCharset());
        h.addParam(HttpHeaderConsts.ACCEPT_CHARSET, null);
        // With no Accept-Charset, the charset comes from the content type (default UTF-8).
        h.setContentType(MediaType.APPLICATION_JSON);
        assertEquals("UTF-8", h.getCharset());
        h.setContentType("application/json;charset=GBK");
        assertEquals("GBK", h.getCharset());
        h.setContentType("application/json");
        assertEquals("UTF-8", h.getCharset());
        h.setContentType("");
        assertEquals("UTF-8", h.getCharset());
    }
    @Test
    void testClear() {
        Header h = Header.newInstance();
        h.addOriginalResponseHeader("test", Collections.singletonList("test"));
        assertEquals(3, h.getHeader().size());
        assertEquals(1, h.getOriginalResponseHeader().size());
        // clear() empties both the header map and the original-response bookkeeping.
        h.clear();
        assertEquals(0, h.getHeader().size());
        assertEquals(0, h.getOriginalResponseHeader().size());
        assertEquals("Header{headerToMap={}}", h.toString());
    }
}
|
HeaderTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/convert/MapConversionsTest.java
|
{
"start": 639,
"end": 730
}
|
// Minimal fixture bean exposing a single accessor with a fixed value.
class ____ {
    public int x() {
        // Constant result; tests only care that the property surfaces as x=1.
        return 1;
    }
}
static
|
Request
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/deployment/src/main/java/io/quarkus/resteasy/deployment/RestPathAnnotationProcessor.java
|
{
"start": 9531,
"end": 10100
}
|
interface
____ searchPathAnnotationOnInterfaces(index, methodInfo).isPresent();
}
return true;
}
// True when this processor has nothing to do: RESTEasy is absent, or neither the
// OpenTelemetry tracer nor Micrometer metrics support is available to consume the paths.
private boolean notRequired(Capabilities capabilities,
        Optional<MetricsCapabilityBuildItem> metricsCapability) {
    return capabilities.isMissing(Capability.RESTEASY) ||
            (capabilities.isMissing(Capability.OPENTELEMETRY_TRACER) &&
                    !(metricsCapability.isPresent()
                            && metricsCapability.get().metricsSupported(MetricsFactory.MICROMETER)));
}
}
|
return
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/google/MultimapKeySetTester.java
|
{
"start": 1798,
"end": 3169
}
|
// Testers for Multimap.keySet(): containment, null-key handling, and that removals made
// through the key-set view propagate back to the underlying multimap.
class ____<K, V> extends AbstractMultimapTester<K, V, Multimap<K, V>> {
    // Every sample key must be visible through keySet().
    public void testKeySet() {
        for (Entry<K, V> entry : getSampleElements()) {
            assertTrue(multimap().keySet().contains(entry.getKey()));
        }
    }
    @CollectionSize.Require(absent = ZERO)
    @MapFeature.Require(ALLOWS_NULL_KEYS)
    public void testKeySetContainsNullKeyPresent() {
        initMultimapWithNullKey();
        assertTrue(multimap().keySet().contains(null));
    }
    @MapFeature.Require(ALLOWS_NULL_KEY_QUERIES)
    public void testKeySetContainsNullKeyAbsent() {
        assertFalse(multimap().keySet().contains(null));
    }
    // Removing a key from the view must drop all of that key's entries from the multimap.
    @MapFeature.Require(SUPPORTS_REMOVE)
    public void testKeySetRemovePropagatesToMultimap() {
        int key0Count = multimap().get(k0()).size();
        assertEquals(key0Count > 0, multimap().keySet().remove(k0()));
        assertEquals(getNumElements() - key0Count, multimap().size());
        assertGet(k0());
    }
    // Same propagation guarantee via Iterator.remove on the key-set iterator.
    @CollectionSize.Require(absent = ZERO)
    @CollectionFeature.Require(SUPPORTS_ITERATOR_REMOVE)
    public void testKeySetIteratorRemove() {
        int key0Count = multimap().get(k0()).size();
        Iterator<K> keyItr = multimap().keySet().iterator();
        while (keyItr.hasNext()) {
            if (keyItr.next().equals(k0())) {
                keyItr.remove();
            }
        }
        assertEquals(getNumElements() - key0Count, multimap().size());
        assertGet(k0());
    }
}
|
MultimapKeySetTester
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/jdk8/ZonedDateTimeTest.java
|
{
"start": 574,
"end": 799
}
|
// Simple bean holding a ZonedDateTime; used to round-trip java.time values through the mapper.
class ____ {
    private ZonedDateTime date;
    public ZonedDateTime getDate() {
        return date;
    }
    public void setDate(ZonedDateTime date) {
        this.date = date;
    }
}
}
|
VO
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/LongFloatConversion.java
|
{
"start": 1677,
"end": 2352
}
|
// Flags implicit long -> float argument conversions (which lose precision) and suggests an
// explicit (float) cast so the narrowing is visible at the call site.
class ____ extends BugChecker implements MethodInvocationTreeMatcher {
    @Override
    public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
        for (ExpressionTree arg : tree.getArguments()) {
            // Fix: getType can return null (e.g. for erroneous trees); guard before dereferencing.
            // Also compare the TypeKind enum with == rather than equals, per convention.
            var argType = getType(arg);
            if (argType == null || argType.getKind() != TypeKind.LONG) {
                continue;
            }
            TargetType targetType = targetType(state.withPath(new TreePath(state.getPath(), arg)));
            if (targetType == null) {
                continue;
            }
            if (targetType.type().getKind() == TypeKind.FLOAT) {
                // Prefixing with "(float) " keeps behavior identical while making the narrowing explicit.
                state.reportMatch(describeMatch(arg, SuggestedFix.prefixWith(arg, "(float) ")));
            }
        }
        return NO_MATCH;
    }
}
|
LongFloatConversion
|
java
|
quarkusio__quarkus
|
integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithArgumentsTest.java
|
{
"start": 512,
"end": 2137
}
|
// Verifies that configured container arguments end up in the generated OpenShift manifest.
class ____ {
    // Runs a prod-mode build of a minimal app using the openshift-with-arguments config.
    @RegisterExtension
    static final QuarkusProdModeTest config = new QuarkusProdModeTest()
            .withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
            .setApplicationName("openshift-with-arguments")
            .setApplicationVersion("0.1-SNAPSHOT")
            .withConfigurationResource("openshift-with-arguments.properties");
    @ProdBuildResults
    private ProdModeTestResults prodModeTestResults;
    @Test
    public void assertGeneratedResources() throws IOException {
        // Deserialize the generated openshift.yml and inspect the first resource in it.
        Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
        List<HasMetadata> openshiftList = DeserializationUtil
                .deserializeAsList(kubernetesDir.resolve("openshift.yml"));
        assertThat(openshiftList.get(0)).isInstanceOfSatisfying(Deployment.class, dc -> {
            assertThat(dc.getMetadata()).satisfies(m -> {
                assertThat(m.getName()).isEqualTo("openshift-with-arguments");
            });
            assertThat(dc.getSpec()).satisfies(deploymentSpec -> {
                assertThat(deploymentSpec.getTemplate()).satisfies(t -> {
                    assertThat(t.getSpec()).satisfies(podSpec -> {
                        // Exactly one container, carrying the configured args in order.
                        assertThat(podSpec.getContainers()).singleElement().satisfies(container -> {
                            assertThat(container.getName()).isEqualTo("openshift-with-arguments");
                            assertThat(container.getArgs()).containsExactly("A", "B");
                        });
                    });
                });
            });
        });
    }
}
|
OpenshiftWithArgumentsTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/objectid/TestObjectIdDeserialization.java
|
{
"start": 5512,
"end": 17520
}
|
// Node with a wrapper-typed successor; used to exercise object-id resolution through wrappers.
class ____ {
    public int value;
    public SomeWrapper next;
    // No-arg constructor required by Jackson for deserialization.
    public SomeNode() {this(0);}
    public SomeNode(int v) {value = v;}
}
/*
/*****************************************************
/* Unit tests, external id deserialization
/*****************************************************
*/
private final ObjectMapper MAPPER = new ObjectMapper();
private final static String EXP_SIMPLE_INT_CLASS = "{\"id\":1,\"value\":13,\"next\":1}";
// Class-level int object id: the trailing "next":1 must resolve to the same instance.
@Test
public void testSimpleDeserializationClass() throws Exception
{
    // then bring back...
    Identifiable result = MAPPER.readValue(EXP_SIMPLE_INT_CLASS, Identifiable.class);
    assertEquals(13, result.value);
    assertSame(result, result.next);
}
// Should be ok NOT to have Object id, as well
@Test
public void testMissingObjectId() throws Exception
{
    Identifiable result = MAPPER.readValue(a2q("{'value':28, 'next':{'value':29}}"),
            Identifiable.class);
    assertNotNull(result);
    assertEquals(28, result.value);
    assertNotNull(result.next);
    assertEquals(29, result.next.value);
}
// UUID-generator ids: serialize a 3-node graph with shared references and verify the
// parent/child identity links survive the round trip (same instances, not copies).
@Test
public void testSimpleUUIDForClassRoundTrip() throws Exception
{
    UUIDNode root = new UUIDNode(1);
    UUIDNode child1 = new UUIDNode(2);
    UUIDNode child2 = new UUIDNode(3);
    root.first = child1;
    root.second = child2;
    child1.parent = root;
    child2.parent = root;
    child1.first = child2;
    String json = MAPPER.writeValueAsString(root);
    // and should come back the same too...
    UUIDNode result = MAPPER.readValue(json, UUIDNode.class);
    assertEquals(1, result.value);
    UUIDNode result2 = result.first;
    UUIDNode result3 = result.second;
    assertNotNull(result2);
    assertNotNull(result3);
    assertEquals(2, result2.value);
    assertEquals(3, result3.value);
    assertSame(result, result2.parent);
    assertSame(result, result3.parent);
    assertSame(result3, result2.first);
}
// Bit more complex, due to extra wrapping etc:
private final static String EXP_SIMPLE_INT_PROP = "{\"node\":{\"@id\":1,\"value\":7,\"next\":{\"node\":1}}}";
// Property-level ("@id") object id with wrapper objects around each node.
@Test
public void testSimpleDeserializationProperty() throws Exception
{
    IdWrapper result = MAPPER.readValue(EXP_SIMPLE_INT_PROP, IdWrapper.class);
    assertEquals(7, result.node.value);
    assertSame(result.node, result.node.next.node);
}
// Another test to ensure ordering is not required (i.e. can do front references)
@Test
public void testSimpleDeserWithForwardRefs() throws Exception
{
    IdWrapper result = MAPPER.readValue("{\"node\":{\"value\":7,\"next\":{\"node\":1}, \"@id\":1}}"
            ,IdWrapper.class);
    assertEquals(7, result.node.value);
    assertSame(result.node, result.node.next.node);
}
// Forward reference in a scalar property: employee 1 names manager 2 before 2 is defined.
@Test
public void testForwardReference()
    throws Exception
{
    String json = "{\"employees\":["
            + "{\"id\":1,\"name\":\"First\",\"manager\":2,\"reports\":[]},"
            + "{\"id\":2,\"name\":\"Second\",\"manager\":null,\"reports\":[1]}"
            + "]}";
    Company company = MAPPER.readValue(json, Company.class);
    assertEquals(2, company.employees.size());
    Employee firstEmployee = company.employees.get(0);
    Employee secondEmployee = company.employees.get(1);
    assertEquals(1, firstEmployee.id);
    assertEquals(2, secondEmployee.id);
    assertEquals(secondEmployee, firstEmployee.manager); // Ensure that forward reference was properly resolved.
    assertEquals(firstEmployee, secondEmployee.reports.get(0)); // And that back reference is also properly resolved.
}
// Forward reference inside a collection value ("reports":[2] before id 2 exists).
@Test
public void testForwardReferenceInCollection()
    throws Exception
{
    String json = "{\"employees\":["
            + "{\"id\":1,\"name\":\"First\",\"manager\":null,\"reports\":[2]},"
            + "{\"id\":2,\"name\":\"Second\",\"manager\":1,\"reports\":[]}"
            + "]}";
    Company company = MAPPER.readValue(json, Company.class);
    assertEquals(2, company.employees.size());
    Employee firstEmployee = company.employees.get(0);
    Employee secondEmployee = company.employees.get(1);
    assertEmployees(firstEmployee, secondEmployee);
}
// Forward reference inside a Map value; "2": 2 is a pure id reference entry.
@Test
public void testForwardReferenceInMap()
    throws Exception
{
    String json = "{\"employees\":{"
            + "\"1\":{\"id\":1,\"name\":\"First\",\"manager\":null,\"reports\":[2]},"
            + "\"2\": 2,"
            + "\"3\":{\"id\":2,\"name\":\"Second\",\"manager\":1,\"reports\":[]}"
            + "}}";
    MappedCompany company = MAPPER.readValue(json, MappedCompany.class);
    assertEquals(3, company.employees.size());
    Employee firstEmployee = company.employees.get(1);
    Employee secondEmployee = company.employees.get(3);
    assertEmployees(firstEmployee, secondEmployee);
}
// Shared assertions: ids, one-directional report link, and resolved back reference.
private void assertEmployees(Employee firstEmployee, Employee secondEmployee)
{
    assertEquals(1, firstEmployee.id);
    assertEquals(2, secondEmployee.id);
    assertEquals(1, firstEmployee.reports.size());
    assertSame(secondEmployee, firstEmployee.reports.get(0)); // Ensure that forward reference was properly resolved and in order.
    assertSame(firstEmployee, secondEmployee.manager); // And that back reference is also properly resolved.
}
// Forward reference routed through an @JsonAnySetter property.
@Test
public void testForwardReferenceAnySetterCombo() throws Exception {
    String json = "{\"@id\":1, \"foo\":2, \"bar\":{\"@id\":2, \"foo\":1}}";
    AnySetterObjectId value = MAPPER.readValue(json, AnySetterObjectId.class);
    assertSame(value.values.get("bar"), value.values.get("foo"));
}
// Ids that never get a definition must surface as UnresolvedForwardReference,
// reporting every unresolved occurrence with its id and type.
@Test
public void testUnresolvedForwardReference()
    throws Exception
{
    String json = "{\"employees\":["
            + "{\"id\":1,\"name\":\"First\",\"manager\":null,\"reports\":[3]},"
            + "{\"id\":2,\"name\":\"Second\",\"manager\":3,\"reports\":[]}"
            + "]}";
    try {
        MAPPER.readValue(json, Company.class);
        fail("Should have thrown.");
    } catch (UnresolvedForwardReference exception) {
        // Expected
        List<UnresolvedId> unresolvedIds = exception.getUnresolvedIds();
        assertEquals(2, unresolvedIds.size());
        UnresolvedId firstUnresolvedId = unresolvedIds.get(0);
        assertEquals(3, firstUnresolvedId.getId());
        assertEquals(Employee.class, firstUnresolvedId.getType());
        UnresolvedId secondUnresolvedId = unresolvedIds.get(1);
        assertEquals(firstUnresolvedId.getId(), secondUnresolvedId.getId());
        assertEquals(Employee.class, secondUnresolvedId.getType());
    }
}
// [databind#299]: Allow unresolved ids to become nulls
@Test
public void testUnresolvableAsNull() throws Exception
{
    IdWrapper w = MAPPER.readerFor(IdWrapper.class)
            .without(DeserializationFeature.FAIL_ON_UNRESOLVED_OBJECT_IDS)
            .readValue(a2q("{'node':123}"));
    assertNotNull(w);
    assertNull(w.node);
}
// Pure id references (2, 1) appear before their definitions; the deserialized list must
// still contain the elements in the order they were written.
@Test
public void testKeepCollectionOrdering() throws Exception
{
    String json = "{\"employees\":[2,1,"
            + "{\"id\":1,\"name\":\"First\",\"manager\":null,\"reports\":[2]},"
            + "{\"id\":2,\"name\":\"Second\",\"manager\":1,\"reports\":[]}"
            + "]}";
    Company company = MAPPER.readValue(json, Company.class);
    assertEquals(4, company.employees.size());
    // Deser must keep object ordering.
    Employee firstEmployee = company.employees.get(1);
    Employee secondEmployee = company.employees.get(0);
    assertSame(firstEmployee, company.employees.get(2));
    assertSame(secondEmployee, company.employees.get(3));
    assertEmployees(firstEmployee, secondEmployee);
}
// Same ordering guarantee for Map values, including id-only entries.
@Test
public void testKeepMapOrdering()
    throws Exception
{
    String json = "{\"employees\":{"
            + "\"1\":2, \"2\":1,"
            + "\"3\":{\"id\":1,\"name\":\"First\",\"manager\":null,\"reports\":[2]},"
            + "\"4\":{\"id\":2,\"name\":\"Second\",\"manager\":1,\"reports\":[]}"
            + "}}";
    MappedCompany company = MAPPER.readValue(json, MappedCompany.class);
    assertEquals(4, company.employees.size());
    Employee firstEmployee = company.employees.get(2);
    Employee secondEmployee = company.employees.get(1);
    assertEmployees(firstEmployee, secondEmployee);
    // Deser must keep object ordering. Not sure if it's really important for maps,
    // but since default map is LinkedHashMap might as well ensure it does...
    Iterator<Entry<Integer,Employee>> iterator = company.employees.entrySet().iterator();
    assertSame(secondEmployee, iterator.next().getValue());
    assertSame(firstEmployee, iterator.next().getValue());
    assertSame(firstEmployee, iterator.next().getValue());
    assertSame(secondEmployee, iterator.next().getValue());
}
/*
/*****************************************************
/* Unit tests, custom (property-based) id deserialization
/*****************************************************
 */
private final static String EXP_CUSTOM_VIA_CLASS = "{\"customId\":123,\"value\":-900,\"next\":123}";
// Custom "customId" property as the object id, declared at class level.
@Test
public void testCustomDeserializationClass() throws Exception
{
    // then bring back...
    IdentifiableCustom result = MAPPER.readValue(EXP_CUSTOM_VIA_CLASS, IdentifiableCustom.class);
    assertEquals(-900, result.value);
    assertSame(result, result.next);
}
private final static String EXP_CUSTOM_VIA_PROP = "{\"node\":{\"customId\":3,\"value\":99,\"next\":{\"node\":3}}}";
// Custom id property declared on the wrapper's property instead of the class.
@Test
public void testCustomDeserializationProperty() throws Exception
{
    // then bring back...
    IdWrapperExt result = MAPPER.readValue(EXP_CUSTOM_VIA_PROP, IdWrapperExt.class);
    assertEquals(99, result.node.value);
    assertSame(result.node, result.node.next.node);
    assertEquals(3, result.node.customId);
}
/*
/*****************************************************
/* Unit tests, custom id resolver
/*****************************************************
 */
// Custom ObjectIdResolver backed by a shared pool passed via ContextAttributes:
// every id in the payload must resolve to the pre-registered pool instance.
@Test
public void testCustomPoolResolver() throws Exception
{
    Map<Object,WithCustomResolution> pool = new HashMap<Object,WithCustomResolution>();
    pool.put(1, new WithCustomResolution(1, 1));
    pool.put(2, new WithCustomResolution(2, 2));
    pool.put(3, new WithCustomResolution(3, 3));
    pool.put(4, new WithCustomResolution(4, 4));
    pool.put(5, new WithCustomResolution(5, 5));
    ContextAttributes attrs = MAPPER.deserializationConfig().getAttributes().withSharedAttribute(POOL_KEY, pool);
    String content = "{\"data\":[1,2,3,4,5]}";
    CustomResolutionWrapper wrapper = MAPPER.readerFor(CustomResolutionWrapper.class).with(attrs).readValue(content);
    assertFalse(wrapper.data.isEmpty());
    for (WithCustomResolution ob : wrapper.data) {
        assertSame(pool.get(ob.id), ob);
    }
}
/*
/*****************************************************
/* Unit tests, missing/null Object id [databind#742]
/*****************************************************
*/
/*
private final static String EXP_SIMPLE_INT_CLASS = "{\"id\":1,\"value\":13,\"next\":1}";
@JsonIdentityInfo(generator=ObjectIdGenerators.IntSequenceGenerator.class, property="id")
static
|
SomeNode
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CatchFailTest.java
|
{
"start": 8567,
"end": 8908
}
|
class ____ {
@Test(expected = IOException.class)
public void f() {
try {
throw new IOException();
} catch (IOException expected) {
org.junit.Assert.fail();
}
}
}
""")
.doTest();
}
}
|
Foo
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/DependencyCycleValidationTest.java
|
{
"start": 26084,
"end": 27157
}
|
interface ____ {",
" Object selfReferential();",
"}");
CompilerTests.daggerCompiler(module, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
String.join(
"\n",
"Found a dependency cycle:",
" Object is injected at",
" [TestComponent] TestModule.bindToSelf(sameKey)",
" Object is injected at",
" [TestComponent] TestModule.bindToSelf(sameKey)",
" ...",
"",
"The cycle is requested via:",
" Object is requested at",
" [TestComponent] TestComponent.selfReferential()"))
.onSource(component)
.onLineContaining("
|
TestComponent
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/annotations/Options.java
|
{
"start": 1639,
"end": 4041
}
|
enum ____ {
/** <code>false</code> for select statement; <code>true</code> for insert/update/delete statement. */
DEFAULT,
/** Flushes cache regardless of the statement type. */
TRUE,
/** Does not flush cache regardless of the statement type. */
FALSE
}
/**
* Returns whether use the 2nd cache feature if assigned the cache.
*
* @return {@code true} if use; {@code false} if otherwise
*/
boolean useCache() default true;
/**
* Returns the 2nd cache flush strategy.
*
* @return the 2nd cache flush strategy
*/
FlushCachePolicy flushCache() default FlushCachePolicy.DEFAULT;
/**
* Returns the result set type.
*
* @return the result set type
*/
ResultSetType resultSetType() default ResultSetType.DEFAULT;
/**
* Return the statement type.
*
* @return the statement type
*/
StatementType statementType() default StatementType.PREPARED;
/**
* Returns the fetch size.
*
* @return the fetch size
*/
int fetchSize() default -1;
/**
* Returns the statement timeout.
*
* @return the statement timeout
*/
int timeout() default -1;
/**
* Returns whether use the generated keys feature supported by JDBC 3.0
*
* @return {@code true} if use; {@code false} if otherwise
*/
boolean useGeneratedKeys() default false;
/**
* Returns property names that holds a key value.
* <p>
* If you specify multiple property, please separate using comma(',').
*
* @return property names that separate with comma(',')
*/
String keyProperty() default "";
/**
* Returns column names that retrieves a key value.
* <p>
* If you specify multiple column, please separate using comma(',').
*
* @return column names that separate with comma(',')
*/
String keyColumn() default "";
/**
* Returns result set names.
* <p>
* If you specify multiple result set, please separate using comma(',').
*
* @return result set names that separate with comma(',')
*/
String resultSets() default "";
/**
* @return A database id that correspond this options
*
* @since 3.5.5
*/
String databaseId() default "";
/**
* The container annotation for {@link Options}.
*
* @author Kazuki Shimizu
*
* @since 3.5.5
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@
|
FlushCachePolicy
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/requests/DeleteRecordsRequest.java
|
{
"start": 1478,
"end": 3396
}
|
class ____ extends AbstractRequest.Builder<DeleteRecordsRequest> {
private final DeleteRecordsRequestData data;
public Builder(DeleteRecordsRequestData data) {
super(ApiKeys.DELETE_RECORDS);
this.data = data;
}
@Override
public DeleteRecordsRequest build(short version) {
return new DeleteRecordsRequest(data, version);
}
@Override
public String toString() {
return data.toString();
}
}
private DeleteRecordsRequest(DeleteRecordsRequestData data, short version) {
super(ApiKeys.DELETE_RECORDS, version);
this.data = data;
}
@Override
public DeleteRecordsRequestData data() {
return data;
}
@Override
public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable e) {
DeleteRecordsResponseData result = new DeleteRecordsResponseData().setThrottleTimeMs(throttleTimeMs);
short errorCode = Errors.forException(e).code();
for (DeleteRecordsTopic topic : data.topics()) {
DeleteRecordsTopicResult topicResult = new DeleteRecordsTopicResult().setName(topic.name());
result.topics().add(topicResult);
for (DeleteRecordsRequestData.DeleteRecordsPartition partition : topic.partitions()) {
topicResult.partitions().add(new DeleteRecordsResponseData.DeleteRecordsPartitionResult()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(errorCode)
.setLowWatermark(DeleteRecordsResponse.INVALID_LOW_WATERMARK));
}
}
return new DeleteRecordsResponse(result);
}
public static DeleteRecordsRequest parse(Readable readable, short version) {
return new DeleteRecordsRequest(new DeleteRecordsRequestData(readable, version), version);
}
}
|
Builder
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/IsDockerWorking.java
|
{
"start": 601,
"end": 1133
}
|
class ____ implements Strategy {
private final boolean silent;
public DockerBinaryStrategy(boolean silent) {
this.silent = silent;
}
@Override
public Result get() {
if (ContainerRuntimeUtil.detectContainerRuntime(false, silent,
ContainerRuntime.DOCKER, ContainerRuntime.PODMAN) != UNAVAILABLE) {
return Result.AVAILABLE;
} else {
return Result.UNKNOWN;
}
}
}
}
|
DockerBinaryStrategy
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/stubbing/answers/AnswersWithDelayTest.java
|
{
"start": 513,
"end": 2899
}
|
class ____ {
@Test
public void should_return_value() throws Throwable {
assertThat(
new AnswersWithDelay(1, new Returns("value"))
.answer(
new InvocationBuilder()
.method("oneArg")
.arg("A")
.toInvocation()))
.isEqualTo("value");
}
@Test
public void should_fail_when_contained_answer_should_fail() {
assertThatThrownBy(
() -> {
new AnswersWithDelay(1, new Returns("one"))
.validateFor(
new InvocationBuilder()
.method("voidMethod")
.toInvocation());
})
.isInstanceOf(MockitoException.class)
.hasMessageContainingAll(
"'voidMethod' is a *void method* and it *cannot* be stubbed with a *return value*!",
"Voids are usually stubbed with Throwables:",
" doThrow(exception).when(mock).someVoidMethod();",
"If you need to set the void method to do nothing you can use:",
" doNothing().when(mock).someVoidMethod();",
"For more information, check out the javadocs for Mockito.doNothing().");
}
@Test
public void should_succeed_when_contained_answer_should_succeed() {
new AnswersWithDelay(1, new Returns("one"))
.validateFor(new InvocationBuilder().simpleMethod().toInvocation());
}
@Test
public void should_delay() throws Throwable {
final long sleepyTime = 500L;
final AnswersWithDelay testSubject = new AnswersWithDelay(sleepyTime, new Returns("value"));
final Date before = new Date();
testSubject.answer(new InvocationBuilder().method("oneArg").arg("A").toInvocation());
final Date after = new Date();
final long timePassed = after.getTime() - before.getTime();
assertThat(timePassed).isCloseTo(sleepyTime, within(15L));
}
}
|
AnswersWithDelayTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/module/TestDuplicateRegistration.java
|
{
"start": 392,
"end": 1978
}
|
class ____ extends JacksonModule {
private final AtomicInteger counter;
private final Object id;
public MyModule(AtomicInteger c, Object id) {
super();
counter = c;
this.id = id;
}
@Override
public Object getRegistrationId() {
return id;
}
@Override
public String getModuleName() {
return "TestModule";
}
@Override
public Version version() {
return Version.unknownVersion();
}
@Override
public void setupModule(SetupContext context) {
counter.addAndGet(1);
}
}
@Test
public void testDuplicateRegistration() throws Exception
{
// by default, duplicate registration should be prevented
AtomicInteger counter = new AtomicInteger();
/*ObjectMapper mapper =*/ jsonMapperBuilder()
.addModule(new MyModule(counter, "id"))
.addModule(new MyModule(counter, "id"))
.addModule(new MyModule(counter, "id"))
.build();
assertEquals(1, counter.get());
// but may be allowed by using non-identical id
AtomicInteger counter2 = new AtomicInteger();
/*ObjectMapper mapper2 =*/ jsonMapperBuilder()
.addModule(new MyModule(counter2, "id1"))
.addModule(new MyModule(counter2, "id2"))
.addModule(new MyModule(counter2, "id3"))
.build();
assertEquals(3, counter2.get());
}
}
|
MyModule
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshResponse.java
|
{
"start": 1107,
"end": 2357
}
|
class ____ {
private int returnCode = -1;
private String message;
private String senderName;
/**
* Convenience method to create a response for successful refreshes.
* @return void response
*/
public static RefreshResponse successResponse() {
return new RefreshResponse(0, "Success");
}
// Most RefreshHandlers will use this
public RefreshResponse(int returnCode, String message) {
this.returnCode = returnCode;
this.message = message;
}
/**
* Optionally set the sender of this RefreshResponse.
* This helps clarify things when multiple handlers respond.
* @param name The name of the sender
*/
public void setSenderName(String name) {
senderName = name;
}
public String getSenderName() { return senderName; }
public int getReturnCode() { return returnCode; }
public void setReturnCode(int rc) { returnCode = rc; }
public void setMessage(String m) { message = m; }
public String getMessage() { return message; }
@Override
public String toString() {
String ret = "";
if (senderName != null) {
ret += senderName + ": ";
}
if (message != null) {
ret += message;
}
ret += " (exit " + returnCode + ")";
return ret;
}
}
|
RefreshResponse
|
java
|
google__gson
|
extras/src/test/java/com/google/gson/typeadapters/RuntimeTypeAdapterFactoryTest.java
|
{
"start": 8614,
"end": 8773
}
|
class ____ {
private final String ownerName;
BillingInstrument(String ownerName) {
this.ownerName = ownerName;
}
}
static
|
BillingInstrument
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java
|
{
"start": 43409,
"end": 46305
}
|
enum ____ {
/**
* Used for boolean values. Values can be provided as a Boolean object or as a String with values
* <code>true</code> or <code>false</code> (this is not case-sensitive), otherwise a {@link ConfigException} is
* thrown.
*/
BOOLEAN,
/**
* Used for string values. Values must be provided as a String object, otherwise a {@link ConfigException} is
* thrown.
*/
STRING,
/**
* Used for numerical values within the Java Integer range. Values must be provided as a Integer object or as
* a String being a valid Integer value, otherwise a {@link ConfigException} is thrown.
*/
INT,
/**
* Used for numerical values within the Java Short range. Values must be provided as a Short object or as
* a String being a valid Short value, otherwise a {@link ConfigException} is thrown.
*/
SHORT,
/**
* Used for numerical values within the Java Long range. Values must be provided as a Long object, as an Integer
* object or as a String being a valid Long value, otherwise a {@link ConfigException} is thrown.
*/
LONG,
/**
* Used for numerical values within the Java Double range. Values must be provided as a Number object, as a
* Double object or as a String being a valid Double value, otherwise a {@link ConfigException} is thrown.
*/
DOUBLE,
/**
* Used for list values. Values must be provided as a List object, as a String object, otherwise a
* {@link ConfigException} is thrown. When the value is provided as a String it must use commas to separate the
* different entries (for example: <code>first-entry, second-entry</code>) and an empty String maps to an empty List.
*/
LIST,
/**
* Used for values that implement a Kafka interface. Values must be provided as a Class object or as a
* String object, otherwise a {@link ConfigException} is thrown. When the value is provided as a String it must
* be the binary name of the Class.
*/
CLASS,
/**
* Used for string values containing sensitive data such as a password or key. The values of configurations with
* of this type are not included in logs and instead replaced with "[hidden]". Values must be provided as a
* String object, otherwise a {@link ConfigException} is thrown.
*/
PASSWORD;
/**
* Whether this type contains sensitive data such as a password or key.
* @return true if the type is {@link #PASSWORD}
*/
public boolean isSensitive() {
return this == PASSWORD;
}
}
/**
* The importance level for a configuration
*/
public
|
Type
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/SingleStoreDialect.java
|
{
"start": 44258,
"end": 45203
}
|
class ____<T extends Exportable> implements Exporter<T> {
@Override
public String[] getSqlCreateStrings(T exportable, Metadata metadata, SqlStringGenerationContext context) {
return ArrayHelper.EMPTY_STRING_ARRAY;
}
@Override
public String[] getSqlDropStrings(T exportable, Metadata metadata, SqlStringGenerationContext context) {
return ArrayHelper.EMPTY_STRING_ARRAY;
}
}
/**
* Because of hibernate requires that entity tables have primary key separate unique keys are restricted.
* SingleStore restrictions:
* - Primary key in SingleStore table is unique key and shard key
* - SingleStore table allows only single shard key
* - SingleStore unique keys must contain all columns of the shard key: <a href="https://docs.singlestore.com/docs/unique-key-restrictions">Unique Key restrictions</a>.
* - Shard key fields cannot be updated (or altered) so they must be fields that never change
*/
static
|
EmptyExporter
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/time/TimeContext.java
|
{
"start": 1231,
"end": 1597
}
|
interface ____ {
/**
* Timestamp of the element currently being processed.
*
* <p>In case of {@link org.apache.flink.cep.time.TimeBehaviour#ProcessingTime} this means the
* time when the event entered the cep operator.
*/
long timestamp();
/** Returns the current processing time. */
long currentProcessingTime();
}
|
TimeContext
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/support/AbstractMessageChannel.java
|
{
"start": 1266,
"end": 1382
}
|
class ____ {@link MessageChannel} implementations.
*
* @author Rossen Stoyanchev
* @since 4.0
*/
public abstract
|
for
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/operators/GenericWriteAheadSinkTest.java
|
{
"start": 7774,
"end": 8587
}
|
class ____ extends GenericWriteAheadSink<Tuple1<Integer>> {
private static final long serialVersionUID = 1L;
public List<Integer> values = new ArrayList<>();
public ListSink2() throws Exception {
super(
new FailingCommitter(),
TypeExtractor.getForObject(new Tuple1<>(1))
.createSerializer(new SerializerConfigImpl()),
"job");
}
@Override
protected boolean sendValues(
Iterable<Tuple1<Integer>> values, long checkpointId, long timestamp)
throws Exception {
for (Tuple1<Integer> value : values) {
this.values.add(value.f0);
}
return true;
}
}
private static
|
ListSink2
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/concurrent/BackgroundInitializer.java
|
{
"start": 4059,
"end": 4372
}
|
class ____<T> extends AbstractConcurrentInitializer<T, Exception> {
/**
* Builds a new instance.
*
* @param <T> The type of results supplied by this builder.
* @param <I> The type of the initializer managed by this builder.
* @since 3.14.0
*/
public static
|
BackgroundInitializer
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/ThrowablePatternConverter.java
|
{
"start": 1657,
"end": 11043
}
|
class ____ extends LogEventPatternConverter {
/**
* Returns the list of formatters used to render the suffix.
*
* @deprecated Kept for binary backward compatibility.
*/
@Deprecated
protected final List<PatternFormatter> formatters;
private final Function<LogEvent, String> effectiveLineSeparatorProvider;
protected final ThrowableFormatOptions options;
private final ThrowableRenderer renderer;
/**
* @deprecated Use {@link #ThrowablePatternConverter(String, String, String[], Configuration, ThrowablePropertyRendererFactory, ThrowableStackTraceRendererFactory)} instead.
*/
@Deprecated
protected ThrowablePatternConverter(final String name, final String style, @Nullable final String[] options) {
this(name, style, options, null, null, null);
}
/**
* @deprecated Use {@link #ThrowablePatternConverter(String, String, String[], Configuration, ThrowablePropertyRendererFactory, ThrowableStackTraceRendererFactory)} instead.
*/
@Deprecated
protected ThrowablePatternConverter(
final String name,
final String style,
@Nullable final String[] options,
@Nullable final Configuration config) {
this(name, style, options, config, null, null);
}
/**
* The canonical constructor.
*
* @param name name of the converter
* @param style CSS style for output
* @param options array of options
* @param config a configuration
* @param stackTraceRendererFactory a renderer factory
* @since 2.25.0
*/
ThrowablePatternConverter(
final String name,
final String style,
@Nullable final String[] options,
@Nullable final Configuration config,
@Nullable final ThrowablePropertyRendererFactory propertyRendererFactory,
@Nullable final ThrowableStackTraceRendererFactory stackTraceRendererFactory) {
// Process `name`, `style`, and `options`
super(name, style);
this.options = ThrowableFormatOptions.newInstance(options);
// Determine the effective line separator
final List<PatternFormatter> suffixFormatters = new ArrayList<>();
this.effectiveLineSeparatorProvider = createEffectiveLineSeparator(
this.options.getSeparator(), this.options.getSuffix(), config, suffixFormatters);
this.formatters = Collections.unmodifiableList(suffixFormatters);
// Create the effective renderer
this.renderer =
createEffectiveRenderer(options, this.options, propertyRendererFactory, stackTraceRendererFactory);
}
/**
* Creates an instance of the class.
*
* @param config a configuration
* @param options the pattern options
* @return a new instance
*/
public static ThrowablePatternConverter newInstance(
@Nullable final Configuration config, @Nullable final String[] options) {
return new ThrowablePatternConverter("Throwable", "throwable", options, config, null, null);
}
/**
* {@inheritDoc}
*/
@Override
public void format(final LogEvent event, final StringBuilder buffer) {
requireNonNull(event, "event");
requireNonNull(buffer, "buffer");
final Throwable throwable = event.getThrown();
if (throwable != null) {
final String lineSeparator = effectiveLineSeparatorProvider.apply(event);
renderer.renderThrowable(buffer, throwable, lineSeparator);
}
}
/**
* Indicates this converter handles {@link Throwable}s.
*
* @return {@code true}
*/
@Override
public boolean handlesThrowable() {
return true;
}
public ThrowableFormatOptions getOptions() {
return options;
}
/**
* Creates a lambda that returns the <em>effective</em> line separator by concatenating the formatted {@code suffix} with the {@code separator}.
* <p>
* At the beginning, there was only {@code separator} used as a terminator at the end of every rendered line.
* Its content was rendered literally without any processing.
* </p>
* <p>
* Later on, {@code suffix} was added in <a href="https://github.com/apache/logging-log4j2/pull/61">#61</a>.
* {@code suffix} is functionally identical to {@code separator} with the exception that it contains a Pattern Layout conversion pattern.
* In an ideal world, {@code separator} should have been extended to accept patterns.
* But without giving it a second of thought, just like almost any other Log4j feature, we cheerfully accepted the feature.
* </p>
* <p>
* Given two overlapping features, how do we determine the <em>effective</em> line separator?
* </p>
* <pre>{@code
* String effectiveLineSeparator(String separator, String suffix, LogEvent event) {
* String formattedSuffix = format(suffix, event);
* return isNotBlank(formattedSuffix)
* ? (' ' + formattedSuffix + lineSeparator)
* : lineSeparator;
* }
* }</pre>
*
* @param separator the user-provided {@code separator} option
* @param suffix the user-provided {@code suffix} option containing a Pattern Layout conversion pattern
* @param config the configuration to create the Pattern Layout conversion pattern parser
* @param suffixFormatters the list of pattern formatters employed to format the suffix
* @return a lambda that returns the <em>effective</em> line separator by concatenating the formatted {@code suffix} with the {@code separator}
*/
private static Function<LogEvent, String> createEffectiveLineSeparator(
final String separator,
@Nullable final String suffix,
@Nullable final Configuration config,
final List<PatternFormatter> suffixFormatters) {
requireNonNull(separator, "separator");
requireNonNull(suffixFormatters, "suffixFormatters");
if (suffix != null) {
// Suffix is allowed to be a Pattern Layout conversion pattern, hence we need to parse it
final PatternParser parser = PatternLayout.createPatternParser(config);
final List<PatternFormatter> parsedSuffixFormatters = parser.parse(suffix);
// Collect formatters excluding ones handling throwables
for (final PatternFormatter suffixFormatter : parsedSuffixFormatters) {
if (!suffixFormatter.handlesThrowable()) {
suffixFormatters.add(suffixFormatter);
}
}
// Create the lambda accepting a `LogEvent` to invoke collected formatters
return logEvent -> {
final StringBuilder buffer = new StringBuilder();
buffer.append(' ');
for (PatternFormatter suffixFormatter : suffixFormatters) {
suffixFormatter.format(logEvent, buffer);
}
final boolean blankSuffix = buffer.length() == 1;
if (blankSuffix) {
return separator;
} else {
buffer.append(separator);
return buffer.toString();
}
};
} else {
return logEvent -> separator;
}
}
private static ThrowableRenderer createEffectiveRenderer(
final String[] rawOptions,
final ThrowableFormatOptions options,
@Nullable final ThrowablePropertyRendererFactory propertyRendererFactory,
@Nullable final ThrowableStackTraceRendererFactory stackTraceRendererFactory) {
// Try to create a property renderer first
final ThrowablePropertyRendererFactory effectivePropertyRendererFactory =
propertyRendererFactory != null ? propertyRendererFactory : ThrowablePropertyRendererFactory.INSTANCE;
final ThrowableRenderer propertyRenderer = effectivePropertyRendererFactory.createPropertyRenderer(rawOptions);
if (propertyRenderer != null) {
return propertyRenderer;
}
// Create a stack trace renderer
final ThrowableStackTraceRendererFactory effectiveStackTraceRendererFactory = stackTraceRendererFactory != null
? stackTraceRendererFactory
: ThrowableStackTraceRendererFactory.INSTANCE;
return effectiveStackTraceRendererFactory.createStackTraceRenderer(options);
}
/**
* Returns the formatted suffix pattern.
*
* @param logEvent the log event to use while formatting the suffix pattern
* @return the formatted suffix
* @deprecated Planned to be removed without a replacement
*/
@Deprecated
protected String getSuffix(final LogEvent logEvent) {
requireNonNull(logEvent, "logEvent");
final String effectiveLineSeparator = effectiveLineSeparatorProvider.apply(logEvent);
if (options.getSeparator().equals(effectiveLineSeparator)) {
return "";
}
return effectiveLineSeparator.substring(
// Skip whitespace prefix:
1,
// Remove the separator:
effectiveLineSeparator.length() - options.getSeparator().length());
}
}
|
ThrowablePatternConverter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericMappedSuperclassNestedJoinTest.java
|
{
"start": 5654,
"end": 5803
}
|
class ____<T extends SimpleObject>
extends SeqOrderLinkObject<T> {
}
@Entity( name = "Selection" )
public static
|
SeqOrderLinkObjectWithUserContext
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/JUnitParameterMethodNotFoundTest.java
|
{
"start": 7833,
"end": 7956
}
|
class ____ {
public Object dataProvider() {
return new Object[] {1};
}
}
}\
""")
.doTest();
}
}
|
Inner
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/PredicateSpecificationUnitTests.java
|
{
"start": 1568,
"end": 5407
}
|
class ____ implements Serializable {
private PredicateSpecification<Object> spec;
@Mock(serializable = true) Root<Object> root;
@Mock(serializable = true) CriteriaBuilder builder;
@Mock(serializable = true) Predicate predicate;
@Mock(serializable = true) Predicate another;
@BeforeEach
void setUp() {
spec = (root, cb) -> predicate;
}
@Test // GH-3521
void allReturnsEmptyPredicate() {
PredicateSpecification<Object> specification = PredicateSpecification.unrestricted();
assertThat(specification).isNotNull();
assertThat(specification.toPredicate(root, builder)).isNull();
}
@Test // GH-3521
void allOfCombinesPredicatesInOrder() {
PredicateSpecification<Object> specification = PredicateSpecification.allOf(spec);
assertThat(specification).isNotNull();
assertThat(specification.toPredicate(root, builder)).isSameAs(predicate);
}
@Test // GH-3521
void anyOfCombinesPredicatesInOrder() {
PredicateSpecification<Object> specification = PredicateSpecification.allOf(spec);
assertThat(specification).isNotNull();
assertThat(specification.toPredicate(root, builder)).isSameAs(predicate);
}
@Test // GH-3521
void emptyAllOfReturnsEmptySpecification() {
PredicateSpecification<Object> specification = PredicateSpecification.allOf();
assertThat(specification).isNotNull();
assertThat(specification.toPredicate(root, builder)).isNull();
}
@Test // GH-3521
void emptyAnyOfReturnsEmptySpecification() {
PredicateSpecification<Object> specification = PredicateSpecification.anyOf();
assertThat(specification).isNotNull();
assertThat(specification.toPredicate(root, builder)).isNull();
}
@Test // GH-3521
void specificationsShouldBeSerializable() {
PredicateSpecification<Object> serializableSpec = new SerializableSpecification();
PredicateSpecification<Object> specification = serializableSpec.and(serializableSpec);
assertThat(specification).isNotNull();
PredicateSpecification<Object> transferredSpecification = (PredicateSpecification<Object>) deserialize(
serialize(specification));
assertThat(transferredSpecification).isNotNull();
}
@Test // GH-3521
void complexSpecificationsShouldBeSerializable() {
SerializableSpecification serializableSpec = new SerializableSpecification();
PredicateSpecification<Object> specification = PredicateSpecification
.not(serializableSpec.and(serializableSpec).or(serializableSpec));
assertThat(specification).isNotNull();
PredicateSpecification<Object> transferredSpecification = (PredicateSpecification<Object>) deserialize(
serialize(specification));
assertThat(transferredSpecification).isNotNull();
}
@Test // GH-3521
void andCombinesSpecificationsInOrder() {
Predicate firstPredicate = mock(Predicate.class);
Predicate secondPredicate = mock(Predicate.class);
PredicateSpecification<Object> first = ((root1, criteriaBuilder) -> firstPredicate);
PredicateSpecification<Object> second = ((root1, criteriaBuilder) -> secondPredicate);
first.and(second).toPredicate(root, builder);
verify(builder).and(firstPredicate, secondPredicate);
}
@Test // GH-3521
void orCombinesSpecificationsInOrder() {
Predicate firstPredicate = mock(Predicate.class);
Predicate secondPredicate = mock(Predicate.class);
PredicateSpecification<Object> first = ((root1, criteriaBuilder) -> firstPredicate);
PredicateSpecification<Object> second = ((root1, criteriaBuilder) -> secondPredicate);
first.or(second).toPredicate(root, builder);
verify(builder).or(firstPredicate, secondPredicate);
}
@Test // GH-3849, GH-4023
void notWithNullPredicate() {
PredicateSpecification<Object> notSpec = PredicateSpecification.not((r, cb) -> null);
assertThat(notSpec.toPredicate(root, builder)).isNull();
verifyNoInteractions(builder);
}
static
|
PredicateSpecificationUnitTests
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/cache/UnusedContextsIntegrationTests.java
|
{
"start": 9077,
"end": 9530
}
|
class ____ {
@Test
void test(@Value("${magicKey}") String magicKey) {
assertThat(magicKey).isEqualTo("puzzle");
}
/**
* Duplicates configuration of {@link OverridingNestedTestCase2}.
*/
@Nested
@Order(1)
@NestedTestConfiguration(OVERRIDE)
@SpringJUnitConfig(EventTracker.class)
@ContextCustomizerFactories(DisplayNameCustomizerFactory.class)
@TestPropertySource(properties = "magicKey = enigma")
|
NestedTestCase
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/session/defaults/DefaultSqlSession.java
|
{
"start": 1756,
"end": 9593
}
|
class ____ implements SqlSession {
private final Configuration configuration;
private final Executor executor;
private final boolean autoCommit;
private boolean dirty;
private List<Cursor<?>> cursorList;
public DefaultSqlSession(Configuration configuration, Executor executor, boolean autoCommit) {
this.configuration = configuration;
this.executor = executor;
this.dirty = false;
this.autoCommit = autoCommit;
}
public DefaultSqlSession(Configuration configuration, Executor executor) {
this(configuration, executor, false);
}
@Override
public <T> T selectOne(String statement) {
return this.selectOne(statement, null);
}
@Override
public <T> T selectOne(String statement, Object parameter) {
// Popular vote was to return null on 0 results and throw exception on too many.
List<T> list = this.selectList(statement, parameter);
if (list.size() == 1) {
return list.get(0);
}
if (list.size() > 1) {
throw new TooManyResultsException(
"Expected one result (or null) to be returned by selectOne(), but found: " + list.size());
} else {
return null;
}
}
@Override
public <K, V> Map<K, V> selectMap(String statement, String mapKey) {
return this.selectMap(statement, null, mapKey, RowBounds.DEFAULT);
}
@Override
public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey) {
return this.selectMap(statement, parameter, mapKey, RowBounds.DEFAULT);
}
@Override
public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey, RowBounds rowBounds) {
final List<? extends V> list = selectList(statement, parameter, rowBounds);
final DefaultMapResultHandler<K, V> mapResultHandler = new DefaultMapResultHandler<>(mapKey,
configuration.getObjectFactory(), configuration.getObjectWrapperFactory(), configuration.getReflectorFactory());
final DefaultResultContext<V> context = new DefaultResultContext<>();
for (V o : list) {
context.nextResultObject(o);
mapResultHandler.handleResult(context);
}
return mapResultHandler.getMappedResults();
}
@Override
public <T> Cursor<T> selectCursor(String statement) {
return selectCursor(statement, null);
}
@Override
public <T> Cursor<T> selectCursor(String statement, Object parameter) {
return selectCursor(statement, parameter, RowBounds.DEFAULT);
}
@Override
public <T> Cursor<T> selectCursor(String statement, Object parameter, RowBounds rowBounds) {
try {
MappedStatement ms = configuration.getMappedStatement(statement);
dirty |= ms.isDirtySelect();
Cursor<T> cursor = executor.queryCursor(ms, wrapCollection(parameter), rowBounds);
registerCursor(cursor);
return cursor;
} catch (Exception e) {
throw ExceptionFactory.wrapException("Error querying database. Cause: " + e, e);
} finally {
ErrorContext.instance().reset();
}
}
@Override
public <E> List<E> selectList(String statement) {
  // Convenience overload: no parameter object.
  return this.selectList(statement, null);
}
@Override
public <E> List<E> selectList(String statement, Object parameter) {
  // Convenience overload: no row bounds.
  return this.selectList(statement, parameter, RowBounds.DEFAULT);
}
@Override
public <E> List<E> selectList(String statement, Object parameter, RowBounds rowBounds) {
  return selectList(statement, parameter, rowBounds, Executor.NO_RESULT_HANDLER);
}
private <E> List<E> selectList(String statement, Object parameter, RowBounds rowBounds, ResultHandler handler) {
  try {
    MappedStatement ms = configuration.getMappedStatement(statement);
    // A "dirty select" marks the session as needing commit/rollback even though it is a query.
    dirty |= ms.isDirtySelect();
    return executor.query(ms, wrapCollection(parameter), rowBounds, handler);
  } catch (Exception e) {
    throw ExceptionFactory.wrapException("Error querying database. Cause: " + e, e);
  } finally {
    ErrorContext.instance().reset();
  }
}
@Override
public void select(String statement, Object parameter, ResultHandler handler) {
  select(statement, parameter, RowBounds.DEFAULT, handler);
}
@Override
public void select(String statement, ResultHandler handler) {
  select(statement, null, RowBounds.DEFAULT, handler);
}
@Override
public void select(String statement, Object parameter, RowBounds rowBounds, ResultHandler handler) {
  // Rows are pushed to the supplied handler; the list returned by selectList is ignored.
  selectList(statement, parameter, rowBounds, handler);
}
@Override
public int insert(String statement) {
  // Convenience overload: no parameter object.
  return insert(statement, null);
}
@Override
public int insert(String statement, Object parameter) {
  // Inserts reuse the generic update path; the return value is the affected row count.
  return update(statement, parameter);
}
@Override
public int update(String statement) {
  // Convenience overload: no parameter object.
  return update(statement, null);
}
@Override
public int update(String statement, Object parameter) {
  try {
    // Any write marks the session dirty so commit/rollback know work is pending.
    dirty = true;
    MappedStatement ms = configuration.getMappedStatement(statement);
    return executor.update(ms, wrapCollection(parameter));
  } catch (Exception e) {
    throw ExceptionFactory.wrapException("Error updating database. Cause: " + e, e);
  } finally {
    ErrorContext.instance().reset();
  }
}
@Override
public int delete(String statement) {
  // Deletes reuse the generic update path; the return value is the affected row count.
  return update(statement, null);
}
@Override
public int delete(String statement, Object parameter) {
  return update(statement, parameter);
}
@Override
public void commit() {
  // Non-forced commit: only flushes when the session is dirty and not in auto-commit mode.
  commit(false);
}
@Override
public void commit(boolean force) {
  try {
    executor.commit(isCommitOrRollbackRequired(force));
    // A successful commit leaves the session clean again.
    dirty = false;
  } catch (Exception e) {
    throw ExceptionFactory.wrapException("Error committing transaction. Cause: " + e, e);
  } finally {
    ErrorContext.instance().reset();
  }
}
@Override
public void rollback() {
  // Non-forced rollback: only acts when the session is dirty and not in auto-commit mode.
  rollback(false);
}
@Override
public void rollback(boolean force) {
  try {
    executor.rollback(isCommitOrRollbackRequired(force));
    // After rollback the session no longer has pending work.
    dirty = false;
  } catch (Exception e) {
    throw ExceptionFactory.wrapException("Error rolling back transaction. Cause: " + e, e);
  } finally {
    ErrorContext.instance().reset();
  }
}
@Override
public List<BatchResult> flushStatements() {
  // Forces any pending batched statements to execute and returns their results.
  try {
    return executor.flushStatements();
  } catch (Exception e) {
    throw ExceptionFactory.wrapException("Error flushing statements. Cause: " + e, e);
  } finally {
    ErrorContext.instance().reset();
  }
}
@Override
public void close() {
  try {
    // Close the executor, telling it whether an implicit rollback is still required.
    executor.close(isCommitOrRollbackRequired(false));
    // Release any cursors the caller left open before dropping session state.
    closeCursors();
    dirty = false;
  } finally {
    ErrorContext.instance().reset();
  }
}
/** Closes and forgets every cursor that was registered during this session. */
private void closeCursors() {
  // Guard clause: nothing was ever registered, so there is nothing to release.
  if (cursorList == null || cursorList.isEmpty()) {
    return;
  }
  for (Cursor<?> openCursor : cursorList) {
    openCursor.close();
  }
  cursorList.clear();
}
@Override
public Configuration getConfiguration() {
  return configuration;
}
@Override
public <T> T getMapper(Class<T> type) {
  // Mapper proxies are created by the configuration and bound to this session.
  return configuration.getMapper(type, this);
}
@Override
public Connection getConnection() {
  // Exposes the JDBC connection of the executor's current transaction.
  try {
    return executor.getTransaction().getConnection();
  } catch (SQLException e) {
    throw ExceptionFactory.wrapException("Error getting a new connection. Cause: " + e, e);
  }
}
@Override
public void clearCache() {
  // Clears only the executor's local (session-scoped) cache.
  executor.clearLocalCache();
}
private <T> void registerCursor(Cursor<T> cursor) {
  // The tracking list is created lazily: most sessions never open a cursor.
  if (cursorList == null) {
    cursorList = new ArrayList<>();
  }
  cursorList.add(cursor);
}
/** A forced request always flushes; otherwise only a dirty, non-auto-commit session does. */
private boolean isCommitOrRollbackRequired(boolean force) {
  return force || (dirty && !autoCommit);
}
private Object wrapCollection(final Object object) {
  // Collections and arrays are wrapped into a parameter map so SQL can address them by name.
  return ParamNameResolver.wrapToMapIfCollection(object, null);
}
/**
* @deprecated Since 3.5.5
*/
@Deprecated
public static
|
DefaultSqlSession
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/sync/ForStSyncMapState.java
|
{
"start": 16555,
"end": 20864
}
|
class ____ implements Map.Entry<UK, UV> {
private final RocksDB db;
/**
* The raw bytes of the key stored in RocksDB. Each user key is stored in RocksDB with the
* format #KeyGroup#Key#Namespace#UserKey.
*/
private final byte[] rawKeyBytes;
/** The raw bytes of the value stored in RocksDB. */
private byte[] rawValueBytes;
/** True if the entry has been deleted. */
private boolean deleted;
/**
* The user key and value. The deserialization is performed lazily, i.e. the key and the
* value is deserialized only when they are accessed.
*/
private UK userKey;
private UV userValue;
/** The offset of User Key offset in raw key bytes. */
private final int userKeyOffset;
private final TypeSerializer<UK> keySerializer;
private final TypeSerializer<UV> valueSerializer;
private final DataInputDeserializer dataInputView;
RocksDBMapEntry(
@Nonnull final RocksDB db,
@Nonnegative final int userKeyOffset,
@Nonnull final byte[] rawKeyBytes,
@Nonnull final byte[] rawValueBytes,
@Nonnull final TypeSerializer<UK> keySerializer,
@Nonnull final TypeSerializer<UV> valueSerializer,
@Nonnull DataInputDeserializer dataInputView) {
this.db = db;
this.userKeyOffset = userKeyOffset;
this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer;
this.rawKeyBytes = rawKeyBytes;
this.rawValueBytes = rawValueBytes;
this.deleted = false;
this.dataInputView = dataInputView;
}
public void remove() {
deleted = true;
rawValueBytes = null;
try {
db.delete(columnFamily, writeOptions, rawKeyBytes);
} catch (RocksDBException e) {
throw new FlinkRuntimeException("Error while removing data from RocksDB.", e);
}
}
@Override
public UK getKey() {
if (userKey == null) {
try {
userKey =
deserializeUserKey(
dataInputView, userKeyOffset, rawKeyBytes, keySerializer);
} catch (IOException e) {
throw new FlinkRuntimeException("Error while deserializing the user key.", e);
}
}
return userKey;
}
@Override
public UV getValue() {
if (deleted) {
return null;
} else {
if (userValue == null) {
try {
userValue =
deserializeUserValue(dataInputView, rawValueBytes, valueSerializer);
} catch (IOException e) {
throw new FlinkRuntimeException(
"Error while deserializing the user value.", e);
}
}
return userValue;
}
}
@Override
public UV setValue(UV value) {
if (deleted) {
throw new IllegalStateException("The value has already been deleted.");
}
UV oldValue = getValue();
try {
userValue = value;
rawValueBytes = serializeValueNullSensitive(value, valueSerializer);
db.put(columnFamily, writeOptions, rawKeyBytes, rawValueBytes);
} catch (IOException | RocksDBException e) {
throw new FlinkRuntimeException("Error while putting data into RocksDB.", e);
}
return oldValue;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Map.Entry)) {
return false;
}
Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;
return Objects.equals(getKey(), e.getKey()) && Objects.equals(getValue(), e.getValue());
}
}
/** An auxiliary utility to scan all entries under the given key. */
private abstract
|
RocksDBMapEntry
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/persister/entity/EntityPersister.java
|
{
"start": 40793,
"end": 47842
}
|
class ____ by this persister.
* <p>
* A request has already identified the entity name of this persister as the
* mapping for the given instance. However, we still need to account for
* possible subclassing and potentially reroute to the more appropriate
* persister.
* <p>
* For example, a request names {@code Animal} as the entity name which gets
* resolved to this persister. But the actual instance is really an instance
* of {@code Cat} which is a subclass of {@code Animal}. So, here the
* {@code Animal} persister is being asked to return the persister specific
* to {@code Cat}.
* <p>
* It's also possible that the instance is actually an {@code Animal} instance
* in the above example in which case we would return {@code this} from this
* method.
*
* @param instance The entity instance
* @param factory Reference to the SessionFactory
*
* @return The appropriate persister
*
* @throws HibernateException Indicates that instance was deemed to not be a
* subclass of the entity mapped by this persister.
*/
EntityPersister getSubclassEntityPersister(Object instance, SessionFactoryImplementor factory);
EntityRepresentationStrategy getRepresentationStrategy();
@Override
default EntityMappingType getEntityMappingType() {
  // A persister is its own mapping-type descriptor.
  return this;
}
@Override
default void addToCacheKey(
    MutableCacheKeyBuilder cacheKey,
    Object value,
    SharedSessionContractImplementor session) {
  // Entities contribute their extracted identifier to the cache key.
  getIdentifierMapping().addToCacheKey( cacheKey, getIdentifier( value, session ), session );
}
/**
 * @deprecated Use {@link #getBytecodeEnhancementMetadata()}
 */
@Deprecated(since = "7", forRemoval = true)
default BytecodeEnhancementMetadata getInstrumentationMetadata() {
  // No default behavior: modern persisters override getBytecodeEnhancementMetadata() instead.
  throw new UnsupportedOperationException();
}
default BytecodeEnhancementMetadata getBytecodeEnhancementMetadata() {
  // Delegates to the deprecated method so legacy overrides keep working; an implementor
  // must override at least one of the pair, otherwise this call throws.
  return getInstrumentationMetadata();
}
FilterAliasGenerator getFilterAliasGenerator(final String rootAlias);
default FilterAliasGenerator getFilterAliasGenerator(TableGroup rootTableGroup) {
  // Default: resolve filter aliases against this persister's table within the given table group.
  return new TableGroupFilterAliasGenerator( getTableName(), rootTableGroup );
}
/**
* The table to join to.
*/
String getTableName();
/**
* Converts an array of attribute names to a set of indexes, according to the entity metamodel
*
* @param attributeNames Array of names to be resolved
*
* @return A set of unique indexes of the attribute names found in the metamodel
*/
int[] resolveAttributeIndexes(String[] attributeNames);
/**
 * Like {@link #resolveAttributeIndexes(String[])} but also always returns mutable attributes
 *
 * @param attributeNames Array of names to be resolved
 *
 * @return A set of unique indexes of the attribute names found in the metamodel
 */
default int[] resolveDirtyAttributeIndexes(
    Object[] values,
    Object[] loadedState,
    String[] attributeNames,
    SessionImplementor session) {
  // Default implementation ignores the current/loaded snapshots and resolves by name only.
  return resolveAttributeIndexes( attributeNames );
}
boolean canUseReferenceCacheEntries();
@Incubating
boolean useShallowQueryCacheLayout();
@Incubating
boolean storeDiscriminatorInShallowQueryCacheLayout();
boolean hasFilterForLoadByKey();
/**
* @return Metadata for each unique key defined
*/
@Incubating
Iterable<UniqueKeyEntry> uniqueKeyEntries();
/**
* Get a SQL select string that performs a select based on a unique
* key determined by the given property name.
*
* @param propertyName The name of the property which maps to the
* column(s) to use in the select statement restriction.
* @return The SQL select string
*/
String getSelectByUniqueKeyString(String propertyName);
/**
 * Get a SQL select string that performs a select based on a unique
 * key determined by the given property names.
 *
 * @param propertyNames The names of the properties which maps to the
 * column(s) to use in the select statement restriction.
 * @return The SQL select string
 * @throws IllegalArgumentException if zero or more than one property name is given
 */
default String getSelectByUniqueKeyString(String[] propertyNames) {
  // Default implementation exists only for backward compatibility and supports a single property.
  if ( propertyNames.length > 1 ) {
    throw new IllegalArgumentException( "support for multiple properties not implemented" );
  }
  if ( propertyNames.length == 0 ) {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException below.
    throw new IllegalArgumentException( "at least one property name is required" );
  }
  return getSelectByUniqueKeyString( propertyNames[0] );
}
String getSelectByUniqueKeyString(String[] propertyNames, String[] columnNames);
/**
* The names of the primary key columns in the root table.
*
* @return The primary key column names.
*/
String[] getRootTableKeyColumnNames();
/**
* Get the database-specific SQL command to retrieve the last
* generated IDENTITY value.
*
* @return The SQL command string
*/
String getIdentitySelectString();
/**
* Get the names of columns used to persist the identifier
*/
String[] getIdentifierColumnNames();
/**
* Get the result set aliases used for the identifier columns, given a suffix
*/
String[] getIdentifierAliases(String suffix);
/**
* Locks are always applied to the "root table".
*
* @return The root table name
*/
String getRootTableName();
/**
* Get the names of columns on the root table used to persist the identifier.
*
* @return The root table identifier column names.
*/
String[] getRootTableIdentifierColumnNames();
/**
* For versioned entities, get the name of the column (again, expected on the
* root table) used to store the version values.
*
* @return The version column name.
*/
String getVersionColumnName();
/**
* Get the result set aliases used for the property columns, given a suffix (properties of this class, only).
*/
String[] getPropertyAliases(String suffix, int i);
/**
* Get the result set aliases used for the identifier columns, given a suffix
*/
String getDiscriminatorAlias(String suffix);
boolean hasMultipleTables();
String[] getTableNames();
/**
* @deprecated Only ever used from places where we really want to use<ul>
* <li>{@link SelectStatement} (select generator)</li>
* <li>{@link InsertSelectStatement}</li>
* <li>{@link org.hibernate.sql.ast.tree.update.UpdateStatement}</li>
* <li>{@link org.hibernate.sql.ast.tree.delete.DeleteStatement}</li>
* </ul>
*/
@Deprecated( since = "6.2" )
String getTableName(int j);
String[] getKeyColumns(int j);
int getTableSpan();
boolean isInverseTable(int j);
boolean isNullableTable(int j);
boolean hasDuplicateTables();
int getSubclassTableSpan();
String getSubclassTableName(int j);
String getTableNameForColumn(String columnName);
/**
* @return the column name for the discriminator as specified in the mapping.
*
* @deprecated Use {@link EntityDiscriminatorMapping#getSelectionExpression()} instead
*/
@Deprecated
String getDiscriminatorColumnName();
/**
* Get the discriminator type
*/
Type getDiscriminatorType();
/**
* Does the result set contain rowids?
*/
boolean hasRowId();
String[] getSubclassPropertyColumnNames(int i);
/**
* Return the column alias names used to persist/query the named property of the
|
managed
|
java
|
bumptech__glide
|
third_party/gif_decoder/src/main/java/com/bumptech/glide/gifdecoder/GifHeaderParser.java
|
{
"start": 717,
"end": 16079
}
|
class ____ {
private static final String TAG = "GifHeaderParser";
private static final int MASK_INT_LOWEST_BYTE = 0x000000FF;
/** Identifies the beginning of an Image Descriptor. */
private static final int IMAGE_SEPARATOR = 0x2C;
/** Identifies the beginning of an extension block. */
private static final int EXTENSION_INTRODUCER = 0x21;
/** This block is a single-field block indicating the end of the GIF Data Stream. */
private static final int TRAILER = 0x3B;
// Possible labels that identify the current extension block.
private static final int LABEL_GRAPHIC_CONTROL_EXTENSION = 0xF9;
private static final int LABEL_APPLICATION_EXTENSION = 0xFF;
private static final int LABEL_COMMENT_EXTENSION = 0xFE;
private static final int LABEL_PLAIN_TEXT_EXTENSION = 0x01;
// Graphic Control Extension packed field masks
/**
* Mask (bits 4-2) to extract Disposal Method of the current frame.
*
* @see GifFrame.GifDisposalMethod possible values
*/
private static final int GCE_MASK_DISPOSAL_METHOD = 0b00011100;
/**
* Shift so the Disposal Method extracted from the packed value is on the least significant bit.
*/
private static final int GCE_DISPOSAL_METHOD_SHIFT = 2;
/**
* Mask (bit 0) to extract Transparent Color Flag of the current frame.
* <p><b>GIF89a</b>: <i>Indicates whether a transparency index is given
* in the Transparent Index field.</i></p>
* Possible values are:<ul>
* <li>0 - Transparent Index is not given.</li>
* <li>1 - Transparent Index is given.</li>
* </ul>
*/
private static final int GCE_MASK_TRANSPARENT_COLOR_FLAG = 0b00000001;
// Image Descriptor packed field masks (describing Local Color Table)
/**
* Mask (bit 7) to extract Local Color Table Flag of the current image.
* <p><b>GIF89a</b>: <i>Indicates the presence of a Local Color Table
* immediately following this Image Descriptor.</i></p>
*/
private static final int DESCRIPTOR_MASK_LCT_FLAG = 0b10000000;
/**
* Mask (bit 6) to extract Interlace Flag of the current image.
* <p><b>GIF89a</b>: <i>Indicates if the image is interlaced.
* An image is interlaced in a four-pass interlace pattern.</i></p>
* Possible values are:<ul>
* <li>0 - Image is not interlaced.</li>
* <li>1 - Image is interlaced.</li>
* </ul>
*/
private static final int DESCRIPTOR_MASK_INTERLACE_FLAG = 0b01000000;
/**
* Mask (bits 2-0) to extract Size of the Local Color Table of the current image.
* <p><b>GIF89a</b>: <i>If the Local Color Table Flag is set to 1, the value in this
* field is used to calculate the number of bytes contained in the Local Color Table.
* To determine that actual size of the color table, raise 2 to [the value of the field + 1].
* This value should be 0 if there is no Local Color Table specified.</i></p>
*/
private static final int DESCRIPTOR_MASK_LCT_SIZE = 0b00000111;
// Logical Screen Descriptor packed field masks (describing Global Color Table)
/**
* Mask (bit 7) to extract Global Color Table Flag of the current image.
* <p><b>GIF89a</b>: <i>Indicates the presence of a Global Color Table
* immediately following this Image Descriptor.</i></p>
* Possible values are:<ul>
* <li>0 - No Global Color Table follows, the Background Color Index field is meaningless.</li>
* <li>1 - A Global Color Table will immediately follow,
* the Background Color Index field is meaningful.</li>
* </ul>
*/
private static final int LSD_MASK_GCT_FLAG = 0b10000000;
/**
* Mask (bits 2-0) to extract Size of the Global Color Table of the current image.
* <p><b>GIF89a</b>: <i>If the Global Color Table Flag is set to 1, the value in this
* field is used to calculate the number of bytes contained in the Global Color Table.
* To determine that actual size of the color table, raise 2 to [the value of the field + 1].
* Even if there is no Global Color Table specified, set this field according to the above
* formula so that decoders can choose the best graphics mode to display the stream in.</i></p>
*/
private static final int LSD_MASK_GCT_SIZE = 0b00000111;
/** The minimum frame delay in hundredths of a second. */
static final int MIN_FRAME_DELAY = 2;
/**
* The default frame delay in hundredths of a second.
* This is used for GIFs with frame delays less than the minimum.
*/
static final int DEFAULT_FRAME_DELAY = 10;
private static final int MAX_BLOCK_SIZE = 256;
// Raw data read working array.
private final byte[] block = new byte[MAX_BLOCK_SIZE];
private ByteBuffer rawData;
private GifHeader header;
private int blockSize = 0;
public GifHeaderParser setData(@NonNull ByteBuffer data) {
  reset();
  // Work on a read-only view so parsing never mutates the caller's buffer state.
  rawData = data.asReadOnlyBuffer();
  rawData.position(0);
  // GIF stores all multi-byte values little-endian.
  rawData.order(ByteOrder.LITTLE_ENDIAN);
  return this;
}
public GifHeaderParser setData(@Nullable byte[] data) {
  if (data != null) {
    setData(ByteBuffer.wrap(data));
  } else {
    // No data: drop the buffer and flag the header as unreadable.
    rawData = null;
    header.status = GifDecoder.STATUS_OPEN_ERROR;
  }
  return this;
}
public void clear() {
  // Drop references so the parser does not pin the underlying byte buffer.
  rawData = null;
  header = null;
}
private void reset() {
  rawData = null;
  Arrays.fill(block, (byte) 0);
  header = new GifHeader();
  blockSize = 0;
}
@NonNull
public GifHeader parseHeader() {
  if (rawData == null) {
    throw new IllegalStateException("You must call setData() before parseHeader()");
  }
  if (err()) {
    return header;
  }
  readHeader();
  if (!err()) {
    readContents();
    // A negative frame count means block parsing went wrong somewhere.
    if (header.frameCount < 0) {
      header.status = STATUS_FORMAT_ERROR;
    }
  }
  return header;
}
/**
 * Determines if the GIF is animated by trying to read in the first 2 frames
 * This method re-parses the data even if the header has already been read.
 */
public boolean isAnimated() {
  readHeader();
  if (!err()) {
    readContents(2 /* maxFrames */);
  }
  return header.frameCount > 1;
}
/**
 * Main file parser. Reads GIF content blocks.
 */
private void readContents() {
  readContents(Integer.MAX_VALUE /* maxFrames */);
}
/**
 * Main file parser. Reads GIF content blocks. Stops after reading maxFrames
 */
private void readContents(int maxFrames) {
  // Read GIF file content blocks.
  boolean done = false;
  // Stop on trailer, on error, or once the frame count exceeds maxFrames.
  while (!(done || err() || header.frameCount > maxFrames)) {
    int code = read();
    switch (code) {
      case IMAGE_SEPARATOR:
        // The Graphic Control Extension is optional, but will always come first if it exists.
        // If one did exist, there will be a non-null current frame which we should use.
        // However if one did not exist, the current frame will be null
        // and we must create it here. See issue #134.
        if (header.currentFrame == null) {
          header.currentFrame = new GifFrame();
        }
        readBitmap();
        break;
      case EXTENSION_INTRODUCER:
        int extensionLabel = read();
        switch (extensionLabel) {
          case LABEL_GRAPHIC_CONTROL_EXTENSION:
            // Start a new frame.
            header.currentFrame = new GifFrame();
            readGraphicControlExt();
            break;
          case LABEL_APPLICATION_EXTENSION:
            readBlock();
            // The application identifier and auth code occupy the first 11 bytes of the block.
            StringBuilder app = new StringBuilder();
            for (int i = 0; i < 11; i++) {
              app.append((char) block[i]);
            }
            if (app.toString().equals("NETSCAPE2.0")) {
              readNetscapeExt();
            } else {
              // Don't care.
              skip();
            }
            break;
          case LABEL_COMMENT_EXTENSION:
            skip();
            break;
          case LABEL_PLAIN_TEXT_EXTENSION:
            skip();
            break;
          default:
            // Uninteresting extension.
            skip();
        }
        break;
      case TRAILER:
        // This block is a single-field block indicating the end of the GIF Data Stream.
        done = true;
        break;
      // Bad byte, but keep going and see what happens
      case 0x00:
      default:
        header.status = STATUS_FORMAT_ERROR;
    }
  }
}
/**
 * Reads Graphic Control Extension values.
 */
private void readGraphicControlExt() {
  // Block size.
  read();
  /*
   * Graphic Control Extension packed field:
   *      7 6 5 4 3 2 1 0
   *     +---------------+
   *  1  |     |     | | |
   *
   * Reserved                   3 Bits
   * Disposal Method            3 Bits
   * User Input Flag            1 Bit
   * Transparent Color Flag     1 Bit
   */
  int packed = read();
  // Disposal method.
  //noinspection WrongConstant field has to be extracted from packed value
  header.currentFrame.dispose = (packed & GCE_MASK_DISPOSAL_METHOD) >> GCE_DISPOSAL_METHOD_SHIFT;
  if (header.currentFrame.dispose == DISPOSAL_UNSPECIFIED) {
    // Elect to keep old image if discretionary.
    header.currentFrame.dispose = DISPOSAL_NONE;
  }
  header.currentFrame.transparency = (packed & GCE_MASK_TRANSPARENT_COLOR_FLAG) != 0;
  // Delay, stored in hundredths of a second (the value below is converted to milliseconds).
  int delayInHundredthsOfASecond = readShort();
  // TODO: consider allowing -1 to indicate show forever.
  if (delayInHundredthsOfASecond < MIN_FRAME_DELAY) {
    delayInHundredthsOfASecond = DEFAULT_FRAME_DELAY;
  }
  header.currentFrame.delay = delayInHundredthsOfASecond * 10;
  // Transparent color index
  header.currentFrame.transIndex = read();
  // Block terminator
  read();
}
/**
 * Reads next frame image.
 */
private void readBitmap() {
  // (sub)image position & size.
  header.currentFrame.ix = readShort();
  header.currentFrame.iy = readShort();
  header.currentFrame.iw = readShort();
  header.currentFrame.ih = readShort();
  /*
   * Image Descriptor packed field:
   *     7 6 5 4 3 2 1 0
   *    +---------------+
   * 9  | | | |   |     |
   *
   * Local Color Table Flag     1 Bit
   * Interlace Flag             1 Bit
   * Sort Flag                  1 Bit
   * Reserved                   2 Bits
   * Size of Local Color Table  3 Bits
   */
  int packed = read();
  boolean lctFlag = (packed & DESCRIPTOR_MASK_LCT_FLAG) != 0;
  // Table size is encoded as an exponent: actual size is 2^(field + 1).
  int lctSize = (int) Math.pow(2, (packed & DESCRIPTOR_MASK_LCT_SIZE) + 1);
  header.currentFrame.interlace = (packed & DESCRIPTOR_MASK_INTERLACE_FLAG) != 0;
  if (lctFlag) {
    header.currentFrame.lct = readColorTable(lctSize);
  } else {
    // No local color table.
    header.currentFrame.lct = null;
  }
  // Save this as the decoding position pointer.
  header.currentFrame.bufferFrameStart = rawData.position();
  // False decode pixel data to advance buffer.
  skipImageData();
  if (err()) {
    return;
  }
  header.frameCount++;
  // Add image to frame.
  header.frames.add(header.currentFrame);
}
/**
 * Reads Netscape extension to obtain iteration count.
 */
private void readNetscapeExt() {
  do {
    readBlock();
    // Sub-block id 1 carries the 16-bit little-endian loop count.
    if (block[0] == 1) {
      // Loop count sub-block.
      int b1 = ((int) block[1]) & MASK_INT_LOWEST_BYTE;
      int b2 = ((int) block[2]) & MASK_INT_LOWEST_BYTE;
      header.loopCount = (b2 << 8) | b1;
    }
  } while (blockSize > 0 && !err());
}
/**
 * Reads the GIF signature and Logical Screen Descriptor, then the Global Color
 * Table when one is present. Records a status on {@code header} for bad input.
 */
private void readHeader() {
  StringBuilder id = new StringBuilder();
  for (int i = 0; i < 6; i++) {
    id.append((char) read());
  }
  // Signature is "GIF" followed by a version ("87a"/"89a"); only the prefix is checked.
  if (!id.toString().startsWith("GIF")) {
    header.status = STATUS_FORMAT_ERROR;
    return;
  }
  readLSD();
  if (header.gctFlag && !err()) {
    header.gct = readColorTable(header.gctSize);
    // readColorTable() returns null (and records STATUS_FORMAT_ERROR) on truncated
    // input; guard the dereference instead of throwing a NullPointerException.
    if (header.gct != null) {
      header.bgColor = header.gct[header.bgIndex];
    }
  }
}
/**
 * Reads Logical Screen Descriptor.
 */
private void readLSD() {
  // Logical screen size.
  header.width = readShort();
  header.height = readShort();
  /*
   * Logical Screen Descriptor packed field:
   *      7 6 5 4 3 2 1 0
   *     +---------------+
   *  4  | |     | |     |
   *
   * Global Color Table Flag     1 Bit
   * Color Resolution            3 Bits
   * Sort Flag                   1 Bit
   * Size of Global Color Table  3 Bits
   */
  int packed = read();
  header.gctFlag = (packed & LSD_MASK_GCT_FLAG) != 0;
  // Table size is encoded as an exponent: actual size is 2^(field + 1).
  header.gctSize = (int) Math.pow(2, (packed & LSD_MASK_GCT_SIZE) + 1);
  // Background color index.
  header.bgIndex = read();
  // Pixel aspect ratio
  header.pixelAspect = read();
}
/**
 * Reads color table as 256 RGB integer values.
 *
 * @param nColors int number of colors to read.
 * @return int array containing 256 colors (packed ARGB with full alpha),
 *         or null if the buffer was truncated (status is then set to an error).
 */
@Nullable
private int[] readColorTable(int nColors) {
  int nBytes = 3 * nColors;
  int[] tab = null;
  byte[] c = new byte[nBytes];
  try {
    rawData.get(c);
    // TODO: what bounds checks are we avoiding if we know the number of colors?
    // Max size to avoid bounds checks.
    tab = new int[MAX_BLOCK_SIZE];
    int i = 0;
    int j = 0;
    while (i < nColors) {
      // Entries are stored as 3 bytes (R, G, B); repack as opaque ARGB.
      int r = ((int) c[j++]) & MASK_INT_LOWEST_BYTE;
      int g = ((int) c[j++]) & MASK_INT_LOWEST_BYTE;
      int b = ((int) c[j++]) & MASK_INT_LOWEST_BYTE;
      tab[i++] = 0xFF000000 | (r << 16) | (g << 8) | b;
    }
  } catch (BufferUnderflowException e) {
    if (Log.isLoggable(TAG, Log.DEBUG)) {
      Log.d(TAG, "Format Error Reading Color Table", e);
    }
    header.status = STATUS_FORMAT_ERROR;
  }
  return tab;
}
/**
 * Skips LZW image data for a single frame to advance buffer.
 */
private void skipImageData() {
  // lzwMinCodeSize
  read();
  // data sub-blocks
  skip();
}
/**
 * Skips variable length blocks up to and including the next zero length block.
 */
private void skip() {
  while (true) {
    int subBlockLength = read();
    // Clamp to the buffer limit so a lying length byte cannot push the position out of bounds.
    rawData.position(Math.min(rawData.position() + subBlockLength, rawData.limit()));
    if (subBlockLength <= 0) {
      break;
    }
  }
}
/**
 * Reads next variable length block from input.
 */
private void readBlock() {
  blockSize = read();
  int n = 0;
  if (blockSize > 0) {
    int count = 0;
    try {
      // Bulk-read the remaining bytes; the loop guards against partial reads.
      while (n < blockSize) {
        count = blockSize - n;
        rawData.get(block, n, count);
        n += count;
      }
    } catch (Exception e) {
      if (Log.isLoggable(TAG, Log.DEBUG)) {
        Log.d(TAG,
            "Error Reading Block n: " + n + " count: " + count + " blockSize: " + blockSize, e);
      }
      header.status = STATUS_FORMAT_ERROR;
    }
  }
}
/**
 * Reads a single byte from the input stream.
 */
private int read() {
  int currByte = 0;
  try {
    currByte = rawData.get() & MASK_INT_LOWEST_BYTE;
  } catch (Exception e) {
    // On underflow, record the error and return 0 so callers can keep a simple control flow.
    header.status = STATUS_FORMAT_ERROR;
  }
  return currByte;
}
/**
 * Reads the next 16-bit value, LSB first, as an unsigned value.
 */
private int readShort() {
  // Mask to unsigned: GIF 16-bit fields (dimensions, delays) range 0..65535, but
  // ByteBuffer.getShort() sign-extends, which would turn values > 32767 negative.
  return rawData.getShort() & 0xFFFF;
}
private boolean err() {
  // True once any parse step has recorded a non-OK status.
  return header.status != GifDecoder.STATUS_OK;
}
}
|
GifHeaderParser
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-reactive-messaging/src/main/java/io/quarkus/it/opentelemetry/TracedService.java
|
{
"start": 181,
"end": 284
}
|
class TracedService {
  // Message returned by every traced call.
  private static final String CHAINED_TRACE_MESSAGE = "Chained trace";

  /**
   * Returns a fixed message; {@code @WithSpan} makes the invocation show up
   * as a child span of the current trace.
   */
  @WithSpan
  public String call() {
    return CHAINED_TRACE_MESSAGE;
  }
}
|
TracedService
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/LogEventFactoryTest.java
|
{
"start": 1892,
"end": 3904
}
|
class ____ {
private static final String CONFIG = "log4j2-config.xml";
private static final LoggerContextRule context = new LoggerContextRule(CONFIG);
private ListAppender app;
// this would look so cool using lambdas
@ClassRule
public static RuleChain chain = RuleChain.outerRule((base, description) -> new Statement() {
@Override
public void evaluate() throws Throwable {
System.setProperty(Constants.LOG4J_LOG_EVENT_FACTORY, TestLogEventFactory.class.getName());
resetLogEventFactory(new TestLogEventFactory());
try {
base.evaluate();
} finally {
System.clearProperty(Constants.LOG4J_LOG_EVENT_FACTORY);
resetLogEventFactory(new DefaultLogEventFactory());
}
}
private void resetLogEventFactory(final LogEventFactory logEventFactory) throws IllegalAccessException {
final Field field = FieldUtils.getField(LoggerConfig.class, "LOG_EVENT_FACTORY", true);
FieldUtils.removeFinalModifier(field);
FieldUtils.writeStaticField(field, logEventFactory, false);
}
})
.around(context);
@Before
public void before() {
  // Start each test from an empty appender so assertions only see this test's events.
  app = context.getListAppender("List").clear();
}
@Test
public void testEvent() {
  final org.apache.logging.log4j.Logger logger = context.getLogger("org.apache.test.LogEventFactory");
  logger.error("error message");
  final List<LogEvent> events = app.getEvents();
  assertNotNull("No events", events);
  assertEquals("Incorrect number of events. Expected 1, actual " + events.size(), 1, events.size());
  final LogEvent event = events.get(0);
  // TestLogEventFactory rewrites the logger name to "Test", proving the custom factory ran.
  assertEquals("TestLogEventFactory wasn't used", "Test", event.getLoggerName());
}
public static
|
LogEventFactoryTest
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/util/HierarchicalUriComponents.java
|
{
"start": 18223,
"end": 22525
}
|
enum ____ {
SCHEME {
@Override
public boolean isAllowed(int c) {
return isAlpha(c) || isDigit(c) || '+' == c || '-' == c || '.' == c;
}
},
AUTHORITY {
@Override
public boolean isAllowed(int c) {
return (isUnreservedOrSubDelimiter(c) || ':' == c || '@' == c);
}
},
USER_INFO {
@Override
public boolean isAllowed(int c) {
return (isUnreservedOrSubDelimiter(c) || ':' == c);
}
},
HOST_IPV4 {
@Override
public boolean isAllowed(int c) {
return isUnreservedOrSubDelimiter(c);
}
},
HOST_IPV6 {
@Override
public boolean isAllowed(int c) {
return (isUnreservedOrSubDelimiter(c) || '[' == c || ']' == c || ':' == c);
}
},
PORT {
@Override
public boolean isAllowed(int c) {
return isDigit(c);
}
},
PATH {
@Override
public boolean isAllowed(int c) {
return (isPchar(c) || '/' == c);
}
},
PATH_SEGMENT {
@Override
public boolean isAllowed(int c) {
return isPchar(c);
}
},
QUERY {
@Override
public boolean isAllowed(int c) {
return (isPchar(c) || '/' == c || '?' == c);
}
},
QUERY_PARAM {
@Override
public boolean isAllowed(int c) {
if ('=' == c || '&' == c) {
return false;
}
else {
return (isPchar(c) || '/' == c || '?' == c);
}
}
},
FRAGMENT {
@Override
public boolean isAllowed(int c) {
return (isPchar(c) || '/' == c || '?' == c);
}
},
URI {
@Override
public boolean isAllowed(int c) {
return isUnreserved(c);
}
};
// Precomputed ASCII membership table: indexing a boolean array is cheaper than
// re-evaluating the unreserved/sub-delimiter predicates for every character.
private static final boolean[] unreservedOrSubDelimiterArray = new boolean[128];
static {
  for (int i = 0; i < 128; i++) {
    char c = (char) i;
    unreservedOrSubDelimiterArray[i] = (URI.isUnreserved(c) || URI.isSubDelimiter(c));
  }
}
/**
* Indicates whether the given character is allowed in this URI component.
* @return {@code true} if the character is allowed; {@code false} otherwise
*/
public abstract boolean isAllowed(int c);
/**
* Indicates whether the given character is in the {@code ALPHA} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isAlpha(int c) {
return (c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z');
}
/**
* Indicates whether the given character is in the {@code DIGIT} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isDigit(int c) {
return (c >= '0' && c <= '9');
}
/**
* Indicates whether the given character is in the {@code gen-delims} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isGenericDelimiter(int c) {
return (':' == c || '/' == c || '?' == c || '#' == c || '[' == c || ']' == c || '@' == c);
}
/**
* Indicates whether the given character is in the {@code sub-delims} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isSubDelimiter(int c) {
return ('!' == c || '$' == c || '&' == c || '\'' == c ||
'(' == c || ')' == c || '*' == c || '+' == c || ',' == c || ';' == c || '=' == c);
}
/**
* Indicates whether the given character is in the {@code reserved} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isReserved(int c) {
return (isGenericDelimiter(c) || isSubDelimiter(c));
}
/**
* Indicates whether the given character is in the {@code unreserved} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isUnreserved(int c) {
return (isAlpha(c) || isDigit(c) || '-' == c || '.' == c || '_' == c || '~' == c);
}
/**
* Indicates whether the given character is in the {@code pchar} set.
* @see <a href="https://www.ietf.org/rfc/rfc3986.txt">RFC 3986, appendix A</a>
*/
protected boolean isPchar(int c) {
return (isUnreservedOrSubDelimiter(c) || ':' == c || '@' == c);
}
/**
* Combined check whether a character is unreserved or a sub-delimiter.
*/
protected boolean isUnreservedOrSubDelimiter(int c) {
return (c < unreservedOrSubDelimiterArray.length && c >= 0 && unreservedOrSubDelimiterArray[c]);
}
}
private
|
Type
|
java
|
grpc__grpc-java
|
examples/example-gcp-observability/src/main/java/io/grpc/examples/gcpobservability/GcpObservabilityServer.java
|
{
"start": 1229,
"end": 2800
}
|
class ____ {
private static final Logger logger = Logger.getLogger(GcpObservabilityServer.class.getName());
private Server server;
private void start() throws IOException {
int port = 50051;
server = Grpc.newServerBuilderForPort(port, InsecureServerCredentials.create())
.addService(new GreeterImpl())
.build()
.start();
logger.info("Server started, listening on " + port);
}
private void stop() throws InterruptedException {
if (server != null) {
server.shutdown().awaitTermination(30, TimeUnit.SECONDS);
}
}
private void blockUntilShutdown() throws InterruptedException {
if (server != null) {
server.awaitTermination();
}
}
/**
* Main launches the server from the command line.
*/
public static void main(String[] args) throws IOException, InterruptedException {
// Initialize observability
GcpObservability observability = GcpObservability.grpcInit();
final GcpObservabilityServer server = new GcpObservabilityServer();
server.start();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
System.err.println("*** shutting down gRPC server since JVM is shutting down");
try {
server.stop();
} catch (InterruptedException e) {
e.printStackTrace(System.err);
}
// Shut down observability
observability.close();
System.err.println("*** server shut down");
}
});
server.blockUntilShutdown();
}
static
|
GcpObservabilityServer
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
|
{
"start": 3342,
"end": 3524
}
|
class ____, " +
"use -DTestAccessControlListGroupMapping=$className to specify " +
"group mapping class (must implement GroupMappingServiceProvider " +
"
|
specified
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Sets.java
|
{
"start": 51031,
"end": 55364
}
|
class ____<E extends @Nullable Object>
extends FilteredSortedSet<E> implements NavigableSet<E> {
FilteredNavigableSet(NavigableSet<E> unfiltered, Predicate<? super E> predicate) {
super(unfiltered, predicate);
}
NavigableSet<E> unfiltered() {
return (NavigableSet<E>) unfiltered;
}
@Override
public @Nullable E lower(@ParametricNullness E e) {
return Iterators.find(unfiltered().headSet(e, false).descendingIterator(), predicate, null);
}
@Override
public @Nullable E floor(@ParametricNullness E e) {
return Iterators.find(unfiltered().headSet(e, true).descendingIterator(), predicate, null);
}
@Override
public @Nullable E ceiling(@ParametricNullness E e) {
return Iterables.find(unfiltered().tailSet(e, true), predicate, null);
}
@Override
public @Nullable E higher(@ParametricNullness E e) {
return Iterables.find(unfiltered().tailSet(e, false), predicate, null);
}
@Override
public @Nullable E pollFirst() {
return Iterables.removeFirstMatching(unfiltered(), predicate);
}
@Override
public @Nullable E pollLast() {
return Iterables.removeFirstMatching(unfiltered().descendingSet(), predicate);
}
@Override
public NavigableSet<E> descendingSet() {
return Sets.filter(unfiltered().descendingSet(), predicate);
}
@Override
public Iterator<E> descendingIterator() {
return Iterators.filter(unfiltered().descendingIterator(), predicate);
}
@Override
@ParametricNullness
public E last() {
return Iterators.find(unfiltered().descendingIterator(), predicate);
}
@Override
public NavigableSet<E> subSet(
@ParametricNullness E fromElement,
boolean fromInclusive,
@ParametricNullness E toElement,
boolean toInclusive) {
return filter(
unfiltered().subSet(fromElement, fromInclusive, toElement, toInclusive), predicate);
}
@Override
public NavigableSet<E> headSet(@ParametricNullness E toElement, boolean inclusive) {
return filter(unfiltered().headSet(toElement, inclusive), predicate);
}
@Override
public NavigableSet<E> tailSet(@ParametricNullness E fromElement, boolean inclusive) {
return filter(unfiltered().tailSet(fromElement, inclusive), predicate);
}
}
/**
* Returns every possible list that can be formed by choosing one element from each of the given
* sets in order; the "n-ary <a href="http://en.wikipedia.org/wiki/Cartesian_product">Cartesian
* product</a>" of the sets. For example:
*
* {@snippet :
* Sets.cartesianProduct(ImmutableList.of(
* ImmutableSet.of(1, 2),
* ImmutableSet.of("A", "B", "C")))
* }
*
* <p>returns a set containing six lists:
*
* <ul>
* <li>{@code ImmutableList.of(1, "A")}
* <li>{@code ImmutableList.of(1, "B")}
* <li>{@code ImmutableList.of(1, "C")}
* <li>{@code ImmutableList.of(2, "A")}
* <li>{@code ImmutableList.of(2, "B")}
* <li>{@code ImmutableList.of(2, "C")}
* </ul>
*
* <p>The result is guaranteed to be in the "traditional", lexicographical order for Cartesian
* products that you would get from nesting for loops:
*
* {@snippet :
* for (B b0 : sets.get(0)) {
* for (B b1 : sets.get(1)) {
* ...
* ImmutableList<B> tuple = ImmutableList.of(b0, b1, ...);
* // operate on tuple
* }
* }
* }
*
* <p>Note that if any input set is empty, the Cartesian product will also be empty. If no sets at
* all are provided (an empty list), the resulting Cartesian product has one element, an empty
* list (counter-intuitive, but mathematically consistent).
*
* <p><i>Performance notes:</i> while the cartesian product of sets of size {@code m, n, p} is a
* set of size {@code m x n x p}, its actual memory consumption is much smaller. When the
* cartesian set is constructed, the input sets are merely copied. Only as the resulting set is
* iterated are the individual lists created, and these are not retained after iteration.
*
* @param sets the sets to choose elements from, in the order that the elements chosen from those
* sets should appear in the resulting lists
* @param <B> any common base
|
FilteredNavigableSet
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/main/java/io/quarkus/rest/client/reactive/deployment/RestClientReactiveProcessor.java
|
{
"start": 14852,
"end": 23772
}
|
class ____ a constructor that:
* <ul>
* <li>puts all the providers registered by the @RegisterProvider annotation in a
* map using the {@link AnnotationRegisteredProviders#addProviders(String, Map)} method</li>
* <li>registers all the provider implementations annotated with @Provider using
* {@link AnnotationRegisteredProviders#addGlobalProvider(Class, int)}</li>
* </ul>
*/
@BuildStep
void registerProvidersFromAnnotations(CombinedIndexBuildItem indexBuildItem,
List<RegisterProviderAnnotationInstanceBuildItem> registerProviderAnnotationInstances,
List<AnnotationToRegisterIntoClientContextBuildItem> annotationsToRegisterIntoClientContext,
List<ClientRequestFilterBuildItem> clientRequestFilters,
List<ClientResponseFilterBuildItem> clientResponseFilters,
BuildProducer<GeneratedBeanBuildItem> generatedBeansProducer,
BuildProducer<GeneratedClassBuildItem> generatedClassesProducer,
BuildProducer<UnremovableBeanBuildItem> unremovableBeansProducer,
BuildProducer<ReflectiveClassBuildItem> reflectiveClassesProducer,
BuildProducer<ExecutionModelAnnotationsAllowedBuildItem> executionModelAnnotationsAllowedProducer,
RestClientReactiveConfig clientConfig) {
String annotationRegisteredProvidersImpl = AnnotationRegisteredProviders.class.getName() + "Implementation";
IndexView index = indexBuildItem.getIndex();
Map<String, List<AnnotationInstance>> annotationsByClassName = new HashMap<>();
for (RegisterProviderAnnotationInstanceBuildItem bi : registerProviderAnnotationInstances) {
annotationsByClassName.computeIfAbsent(bi.getTargetClass(), key -> new ArrayList<>())
.add(bi.getAnnotationInstance());
}
try (ClassCreator classCreator = ClassCreator.builder()
.className(annotationRegisteredProvidersImpl)
.classOutput(new GeneratedBeanGizmoAdaptor(generatedBeansProducer))
.superClass(AnnotationRegisteredProviders.class)
.build()) {
classCreator.addAnnotation(Singleton.class.getName());
MethodCreator constructor = classCreator
.getMethodCreator(MethodDescriptor.ofConstructor(annotationRegisteredProvidersImpl));
constructor.invokeSpecialMethod(MethodDescriptor.ofConstructor(AnnotationRegisteredProviders.class),
constructor.getThis());
if (clientConfig.providerAutodiscovery()) {
for (AnnotationInstance instance : index.getAnnotations(ResteasyReactiveDotNames.PROVIDER)) {
ClassInfo providerClass = instance.target().asClass();
// ignore providers annotated with `@ConstrainedTo(SERVER)`
AnnotationInstance constrainedToInstance = providerClass
.declaredAnnotation(ResteasyReactiveDotNames.CONSTRAINED_TO);
if (constrainedToInstance != null) {
if (RuntimeType.valueOf(constrainedToInstance.value().asEnum()) == RuntimeType.SERVER) {
continue;
}
}
if (skipAutoDiscoveredProvider(providerClass.interfaceNames())) {
continue;
}
registerGlobalProvider(providerClass.name(), index, constructor, reflectiveClassesProducer);
}
}
Set<DotName> providersFromBuildItems = new HashSet<>();
providersFromBuildItems.addAll(clientRequestFilters.stream().map(ClientRequestFilterBuildItem::getClassName)
.map(DotName::createSimple).collect(
Collectors.toSet()));
providersFromBuildItems.addAll(clientResponseFilters.stream().map(ClientResponseFilterBuildItem::getClassName)
.map(DotName::createSimple).collect(
Collectors.toSet()));
if (!providersFromBuildItems.isEmpty()) {
for (DotName dotName : providersFromBuildItems) {
registerGlobalProvider(dotName, index, constructor, reflectiveClassesProducer);
}
unremovableBeansProducer.produce(UnremovableBeanBuildItem.beanTypes(providersFromBuildItems));
}
MultivaluedMap<String, GeneratedClassResult> generatedProviders = new QuarkusMultivaluedHashMap<>();
populateClientExceptionMapperFromAnnotations(index, generatedClassesProducer, reflectiveClassesProducer,
executionModelAnnotationsAllowedProducer)
.forEach(generatedProviders::add);
populateClientRedirectHandlerFromAnnotations(generatedClassesProducer, reflectiveClassesProducer, index)
.forEach(generatedProviders::add);
for (AnnotationToRegisterIntoClientContextBuildItem annotation : annotationsToRegisterIntoClientContext) {
populateClientProviderFromAnnotations(annotation, generatedClassesProducer, reflectiveClassesProducer, index)
.forEach(generatedProviders::add);
}
addGeneratedProviders(index, classCreator, constructor, annotationsByClassName, generatedProviders);
constructor.returnValue(null);
}
unremovableBeansProducer.produce(UnremovableBeanBuildItem.beanClassNames(annotationRegisteredProvidersImpl));
}
private void registerGlobalProvider(DotName providerClassName,
IndexView index, MethodCreator methodCreator,
BuildProducer<ReflectiveClassBuildItem> reflectiveClassesProducer) {
int priority = getAnnotatedPriority(index, providerClassName.toString(), Priorities.USER);
methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(AnnotationRegisteredProviders.class, "addGlobalProvider",
void.class, Class.class,
int.class),
methodCreator.getThis(), methodCreator.loadClassFromTCCL(providerClassName.toString()),
methodCreator.load(priority));
// when the server is not included, providers are not automatically registered for reflection,
// so we need to always do it for the client to be on the safe side
reflectiveClassesProducer.produce(ReflectiveClassBuildItem.builder(providerClassName.toString()).build());
}
@BuildStep
AdditionalBeanBuildItem registerProviderBeans(CombinedIndexBuildItem combinedIndex) {
IndexView index = combinedIndex.getIndex();
List<AnnotationInstance> allInstances = new ArrayList<>(index.getAnnotations(REGISTER_PROVIDER));
for (AnnotationInstance annotation : index.getAnnotations(REGISTER_PROVIDERS)) {
allInstances.addAll(asList(annotation.value().asNestedArray()));
}
allInstances.addAll(index.getAnnotations(REGISTER_CLIENT_HEADERS));
AdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder().setUnremovable();
for (AnnotationInstance annotationInstance : allInstances) {
// Make sure all providers not annotated with @Provider but used in @RegisterProvider are registered as beans
AnnotationValue value = annotationInstance.value();
if (value != null) {
builder.addBeanClass(value.asClass().name().toString());
}
}
return builder.build();
}
@BuildStep
void handleSseEventFilter(BuildProducer<ReflectiveClassBuildItem> reflectiveClasses,
BeanArchiveIndexBuildItem beanArchiveIndexBuildItem) {
var index = beanArchiveIndexBuildItem.getIndex();
Collection<AnnotationInstance> instances = index.getAnnotations(DotNames.SSE_EVENT_FILTER);
if (instances.isEmpty()) {
return;
}
List<String> filterClassNames = new ArrayList<>(instances.size());
for (AnnotationInstance instance : instances) {
if (instance.target().kind() != AnnotationTarget.Kind.METHOD) {
continue;
}
if (instance.value() == null) {
continue; // can't happen
}
Type filterType = instance.value().asClass();
DotName filterClassName = filterType.name();
ClassInfo filterClassInfo = index.getClassByName(filterClassName.toString());
if (filterClassInfo == null) {
log.warn("Unable to find class '" + filterType.name() + "' in index");
} else if (!filterClassInfo.hasNoArgsConstructor()) {
throw new RestClientDefinitionException(
"Classes used in @SseEventFilter must have a no-args constructor. Offending
|
with
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/FirstValueAggFunctionWithOrderTest.java
|
{
"start": 3049,
"end": 3532
}
|
class ____
extends NumberFirstValueAggFunctionWithOrderTestBase<Short> {
@Override
protected Short getValue(String v) {
return Short.valueOf(v);
}
@Override
protected AggregateFunction<Short, RowData> getAggregator() {
return new FirstValueAggFunction<>(DataTypes.SMALLINT().getLogicalType());
}
}
/** Test for {@link IntType}. */
@Nested
final
|
ShortFirstValueAggFunctionWithOrderTest
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/context/properties/bind/Bindable.java
|
{
"start": 11373,
"end": 11500
}
|
enum ____ {
/**
* Do not bind direct {@link ConfigurationProperty} matches.
*/
NO_DIRECT_PROPERTY
}
}
|
BindRestriction
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Ses2EndpointBuilderFactory.java
|
{
"start": 20906,
"end": 22917
}
|
interface ____ {
/**
* AWS Simple Email Service (SES) (camel-aws2-ses)
* Send e-mails through AWS SES service.
*
* Category: cloud,mail
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-ses
*
* @return the dsl builder for the headers' name.
*/
default Ses2HeaderNameBuilder aws2Ses() {
return Ses2HeaderNameBuilder.INSTANCE;
}
/**
* AWS Simple Email Service (SES) (camel-aws2-ses)
* Send e-mails through AWS SES service.
*
* Category: cloud,mail
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-ses
*
* Syntax: <code>aws2-ses:from</code>
*
* Path parameter: from (required)
* The sender's email address.
*
* @param path from
* @return the dsl builder
*/
default Ses2EndpointBuilder aws2Ses(String path) {
return Ses2EndpointBuilderFactory.endpointBuilder("aws2-ses", path);
}
/**
* AWS Simple Email Service (SES) (camel-aws2-ses)
* Send e-mails through AWS SES service.
*
* Category: cloud,mail
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-ses
*
* Syntax: <code>aws2-ses:from</code>
*
* Path parameter: from (required)
* The sender's email address.
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path from
* @return the dsl builder
*/
default Ses2EndpointBuilder aws2Ses(String componentName, String path) {
return Ses2EndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the AWS Simple Email Service (SES) component.
*/
public static
|
Ses2Builders
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/UnsafeWildcardTest.java
|
{
"start": 1568,
"end": 1722
}
|
interface ____<T extends Marker> {
List<?> convert(T input);
}
// error below can be avoided here with "
|
Converter
|
java
|
mockito__mockito
|
mockito-extensions/mockito-junit-jupiter/src/test/java/org/mockitousage/GenericTypeMockTest.java
|
{
"start": 7152,
"end": 7596
}
|
class ____<T> {
List<T> tList;
}
@Mock List<Integer> intList;
@InjectMocks
UnderTestWithTypeParameter<String> underTestWithTypeParameterNoMatch =
new UnderTestWithTypeParameter<String>();
@Test
void testWithTypeParameterNoMatch() {
assertNotNull(intList);
// verify that when no candidate matches by type parameter of
|
UnderTestWithTypeParameter
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/AnyGetterOrdering4388Test.java
|
{
"start": 1940,
"end": 2143
}
|
class ____ {
public int a = 1, b = 2;
@JsonAnyGetter
public Map<String, Object> map = new HashMap<>();
}
@JsonPropertyOrder({"a", "b"})
static
|
IgnorePropertiesOnFieldPojo
|
java
|
netty__netty
|
codec-http2/src/test/java/io/netty/handler/codec/http2/HpackHuffmanTest.java
|
{
"start": 1766,
"end": 8360
}
|
class ____ {
@Test
public void testHuffman() throws Http2Exception {
String s = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
for (int i = 0; i < s.length(); i++) {
roundTrip(s.substring(0, i));
}
Random random = new Random(123456789L);
byte[] buf = new byte[4096];
random.nextBytes(buf);
roundTrip(buf);
}
@Test
public void testDecodeEOS() throws Http2Exception {
final byte[] buf = new byte[4];
for (int i = 0; i < 4; i++) {
buf[i] = (byte) 0xFF;
}
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeIllegalPadding() throws Http2Exception {
final byte[] buf = new byte[1];
buf[0] = 0x00; // '0', invalid padding
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeExtraPadding() throws Http2Exception {
final byte[] buf = makeBuf(0x0f, 0xFF); // '1', 'EOS'
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeExtraPadding1byte() throws Http2Exception {
final byte[] buf = makeBuf(0xFF);
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeExtraPadding2byte() throws Http2Exception {
final byte[] buf = makeBuf(0x1F, 0xFF); // 'a'
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeExtraPadding3byte() throws Http2Exception {
final byte[] buf = makeBuf(0x1F, 0xFF, 0xFF); // 'a'
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeExtraPadding4byte() throws Http2Exception {
final byte[] buf = makeBuf(0x1F, 0xFF, 0xFF, 0xFF); // 'a'
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodeExtraPadding29bit() throws Http2Exception {
final byte[] buf = makeBuf(0xFF, 0x9F, 0xFF, 0xFF, 0xFF); // '|'
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testDecodePartialSymbol() throws Http2Exception {
final byte[] buf =
makeBuf(0x52, 0xBC, 0x30, 0xFF, 0xFF, 0xFF, 0xFF); // " pFA\x00", 31 bits of padding, a.k.a. EOS
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(buf);
}
});
}
@Test
public void testEncoderSanitizingMultiByteCharacters() throws Http2Exception {
final int inputLen = 500;
StringBuilder sb = new StringBuilder();
for (int i = 0; i < inputLen; i++) {
// Starts with 0x4E01 because certain suboptimal sanitization could cause some problem with this input.
// For example, if a multibyte character C is sanitized by doing (C & OxFF), if C == 0x4E01, then
// (0x4E01 & OxFF) is greater than zero which indicates insufficient sanitization.
sb.append((char) (0x4E01 + i));
}
HpackHuffmanEncoder encoder = new HpackHuffmanEncoder();
String toBeEncoded = sb.toString();
ByteBuf buffer = Unpooled.buffer();
byte[] bytes;
try {
encoder.encode(buffer, toBeEncoded);
bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
} finally {
buffer.release(); // Release as soon as possible.
}
byte[] actualBytes = decode(bytes);
String actualDecoded = new String(actualBytes);
char[] charArray = new char[inputLen];
Arrays.fill(charArray, '?');
String expectedDecoded = new String(charArray);
assertEquals(
expectedDecoded,
actualDecoded,
"Expect the decoded string to be sanitized and contains only '?' characters."
);
}
private static byte[] makeBuf(int ... bytes) {
byte[] buf = new byte[bytes.length];
for (int i = 0; i < buf.length; i++) {
buf[i] = (byte) bytes[i];
}
return buf;
}
private static void roundTrip(String s) throws Http2Exception {
roundTrip(new HpackHuffmanEncoder(), s);
}
private static void roundTrip(HpackHuffmanEncoder encoder, String s)
throws Http2Exception {
roundTrip(encoder, s.getBytes());
}
private static void roundTrip(byte[] buf) throws Http2Exception {
roundTrip(new HpackHuffmanEncoder(), buf);
}
private static void roundTrip(HpackHuffmanEncoder encoder, byte[] buf)
throws Http2Exception {
ByteBuf buffer = Unpooled.buffer();
try {
encoder.encode(buffer, new AsciiString(buf, false));
byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
byte[] actualBytes = decode(bytes);
assertArrayEquals(buf, actualBytes);
} finally {
buffer.release();
}
}
private static byte[] decode(byte[] bytes) throws Http2Exception {
ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
try {
AsciiString decoded = new HpackHuffmanDecoder().decode(buffer, buffer.readableBytes());
assertFalse(buffer.isReadable());
return decoded.toByteArray();
} finally {
buffer.release();
}
}
}
|
HpackHuffmanTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/BytecodeEnhancedLazyLoadingOnDeletedEntityTest.java
|
{
"start": 3428,
"end": 3908
}
|
class ____ {
@Id
Integer id;
@ManyToMany(fetch = FetchType.LAZY)
List<AssociationNonOwner> nonOwners = new ArrayList<>();
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public List<AssociationNonOwner> getNonOwners() {
return nonOwners;
}
public void setNonOwners(
List<AssociationNonOwner> nonOwners) {
this.nonOwners = nonOwners;
}
}
@Entity(name = "ANonOwner")
@Table
static
|
AssociationOwner
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/JpaQueryMethod.java
|
{
"start": 2581,
"end": 16258
}
|
class ____ extends QueryMethod {
/**
* @see <a href=
* "https://jakarta.ee/specifications/persistence/3.1/jakarta-persistence-spec-3.1#persistent-fields-and-properties">Jakarta
* Persistence Specification: Persistent Fields and Properties - Paragraph starting with
* "Collection-valued persistent...".</a>
*/
private static final Set<Class<?>> NATIVE_ARRAY_TYPES = Set.of(byte[].class, Byte[].class, char[].class,
Character[].class);
private static final StoredProcedureAttributeSource storedProcedureAttributeSource = StoredProcedureAttributeSource.INSTANCE;
private final QueryExtractor extractor;
private final Method method;
private final Class<?> returnType;
private @Nullable StoredProcedureAttributes storedProcedureAttributes;
private final Lazy<LockModeType> lockModeType;
private final Lazy<QueryHints> queryHints;
private final Lazy<JpaEntityGraph> jpaEntityGraph;
private final Lazy<Modifying> modifying;
private final Lazy<Boolean> isNativeQuery;
private final Lazy<Boolean> isCollectionQuery;
private final Lazy<Boolean> isProcedureQuery;
private final Lazy<JpaEntityMetadata<?>> entityMetadata;
private final Lazy<Optional<Meta>> metaAnnotation;
/**
* Creates a {@link JpaQueryMethod}.
*
* @param method must not be {@literal null}.
* @param metadata must not be {@literal null}.
* @param factory must not be {@literal null}.
* @param extractor must not be {@literal null}.
*/
public JpaQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
QueryExtractor extractor) {
this(method, metadata, factory, extractor, JpaParameters::new);
}
/**
* Creates a {@link JpaQueryMethod}.
*
* @param method must not be {@literal null}.
* @param metadata must not be {@literal null}.
* @param factory must not be {@literal null}.
* @param extractor must not be {@literal null}.
* @param parametersFunction function to obtain {@link JpaParameters}, must not be {@literal null}.
* @since 3.5
*/
public JpaQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
QueryExtractor extractor, Function<ParametersSource, JpaParameters> parametersFunction) {
super(method, metadata, factory, parametersFunction);
Assert.notNull(method, "Method must not be null");
Assert.notNull(extractor, "Query extractor must not be null");
this.method = method;
this.returnType = potentiallyUnwrapReturnTypeFor(metadata, method);
this.extractor = extractor;
this.lockModeType = Lazy
.of(() -> (LockModeType) Optional.ofNullable(AnnotatedElementUtils.findMergedAnnotation(method, Lock.class)) //
.map(AnnotationUtils::getValue) //
.orElse(null));
this.queryHints = Lazy.of(() -> AnnotatedElementUtils.findMergedAnnotation(method, QueryHints.class));
this.modifying = Lazy.of(() -> AnnotatedElementUtils.findMergedAnnotation(method, Modifying.class));
this.jpaEntityGraph = Lazy.of(() -> {
EntityGraph entityGraph = AnnotatedElementUtils.findMergedAnnotation(method, EntityGraph.class);
if (entityGraph == null) {
return null;
}
return new JpaEntityGraph(entityGraph, getNamedQueryName());
});
this.isNativeQuery = Lazy.of(() -> getAnnotationValue("nativeQuery", Boolean.class));
this.isCollectionQuery = Lazy.of(() -> super.isCollectionQuery() && !NATIVE_ARRAY_TYPES.contains(this.returnType));
this.isProcedureQuery = Lazy.of(() -> AnnotationUtils.findAnnotation(method, Procedure.class) != null);
this.entityMetadata = Lazy.of(() -> new DefaultJpaEntityMetadata<>(getDomainClass()));
this.metaAnnotation = Lazy
.of(() -> Optional.ofNullable(AnnotatedElementUtils.findMergedAnnotation(method, Meta.class)));
if (isModifyingQuery() && getParameters().hasSpecialParameter()) {
throw QueryCreationException.create(this,
String.format("Modifying method must not contain %s", Parameters.TYPES));
}
}
private static Class<?> potentiallyUnwrapReturnTypeFor(RepositoryMetadata metadata, Method method) {
TypeInformation<?> returnType = metadata.getReturnType(method);
while (QueryExecutionConverters.supports(returnType.getType())
|| QueryExecutionConverters.supportsUnwrapping(returnType.getType())) {
returnType = returnType.getRequiredComponentType();
}
return returnType.getType();
}
@Override
public JpaEntityMetadata<?> getEntityInformation() {
return this.entityMetadata.get();
}
/**
* Returns whether the finder is a modifying one.
*
* @return
*/
@Override
public boolean isModifyingQuery() {
return modifying.getNullable() != null;
}
/**
* Returns all {@link QueryHint}s annotated at this class. Note, that {@link QueryHints}
*
* @return
*/
List<QueryHint> getHints() {
QueryHints hints = this.queryHints.getNullable();
if (hints != null) {
return Arrays.asList(hints.value());
}
return Collections.emptyList();
}
/**
* Returns the {@link LockModeType} to be used for the query.
*
* @return
*/
@Nullable
LockModeType getLockModeType() {
return lockModeType.getNullable();
}
/**
* Returns the {@link EntityGraph} to be used for the query.
*
* @return
* @since 1.6
*/
@Nullable
JpaEntityGraph getEntityGraph() {
return jpaEntityGraph.getNullable();
}
/**
* Returns whether the potentially configured {@link QueryHint}s shall be applied when triggering the count query for
* pagination.
*
* @return
*/
boolean applyHintsToCountQuery() {
QueryHints hints = this.queryHints.getNullable();
return hints != null ? hints.forCounting() : false;
}
/**
* Returns the {@link QueryExtractor}.
*
* @return
*/
public QueryExtractor getQueryExtractor() {
return extractor;
}
/**
* Returns the {@link Method}.
*
* @return
*/
Method getMethod() {
return method;
}
/**
* Returns the actual return type of the method.
*
* @return
*/
Class<?> getReturnType() {
return returnType;
}
/**
* @return return true if {@link Meta} annotation is available.
* @since 3.0
*/
public boolean hasQueryMetaAttributes() {
return getMetaAnnotation() != null;
}
/**
* Returns the {@link Meta} annotation that is applied to the method or {@code null} if not available.
*
* @return
* @since 3.0
*/
@Nullable
Meta getMetaAnnotation() {
return metaAnnotation.get().orElse(null);
}
/**
* Returns the {@link org.springframework.data.jpa.repository.query.Meta} attributes to be applied.
*
* @return never {@literal null}.
* @since 1.6
*/
public org.springframework.data.jpa.repository.query.Meta getQueryMetaAttributes() {
Meta meta = getMetaAnnotation();
if (meta == null) {
return new org.springframework.data.jpa.repository.query.Meta();
}
org.springframework.data.jpa.repository.query.Meta metaAttributes = new org.springframework.data.jpa.repository.query.Meta();
if (StringUtils.hasText(meta.comment())) {
metaAttributes.setComment(meta.comment());
}
return metaAttributes;
}
/**
* @return {@code true} if this method is annotated with {@code @Query(value=…)}.
*/
boolean hasAnnotatedQuery() {
return StringUtils.hasText(getAnnotationValue("value", String.class));
}
/**
* Returns the query string declared in a {@link Query} annotation or {@literal null} if neither the annotation found
* nor the attribute was specified.
*
* @return
*/
public @Nullable String getAnnotatedQuery() {
String query = getAnnotationValue("value", String.class);
return StringUtils.hasText(query) ? query : null;
}
/**
* @return {@code true} if this method is annotated with {@code @Query(name=…)}.
*/
boolean hasAnnotatedQueryName() {
return StringUtils.hasText(getAnnotationValue("name", String.class));
}
/**
* Returns the required query string declared in a {@link Query} annotation or throws {@link IllegalStateException} if
* neither the annotation found nor the attribute was specified.
*
* @return
* @throws IllegalStateException if no {@link Query} annotation is present or the query is empty.
* @since 2.0
*/
public String getRequiredAnnotatedQuery() throws IllegalStateException {
String query = getAnnotatedQuery();
if (query != null) {
return query;
}
throw new IllegalStateException(String.format("No annotated query found for query method %s", getName()));
}
/**
* Returns the required {@link DeclaredQuery} from a {@link Query} annotation or throws {@link IllegalStateException}
* if neither the annotation found nor the attribute was specified.
*
* @return
* @throws IllegalStateException if no {@link Query} annotation is present or the query is empty.
* @since 4.0
*/
public DeclaredQuery getRequiredDeclaredQuery() throws IllegalStateException {
String query = getAnnotatedQuery();
if (query != null) {
return getDeclaredQuery(query);
}
throw new IllegalStateException(String.format("No annotated query found for query method %s", getName()));
}
/**
* Returns the countQuery string declared in a {@link Query} annotation or {@literal null} if neither the annotation
* found nor the attribute was specified.
*
* @return
*/
public @Nullable String getCountQuery() {
String countQuery = getAnnotationValue("countQuery", String.class);
return StringUtils.hasText(countQuery) ? countQuery : null;
}
/**
* Returns the {@link DeclaredQuery declared count query} from a {@link Query} annotation or {@literal null} if
* neither the annotation found nor the attribute was specified.
*
* @return
* @since 4.0
*/
public @Nullable DeclaredQuery getDeclaredCountQuery() {
String countQuery = getAnnotationValue("countQuery", String.class);
return StringUtils.hasText(countQuery) ? getDeclaredQuery(countQuery) : null;
}
/**
* Returns the count query projection string declared in a {@link Query} annotation or {@literal null} if neither the
* annotation found nor the attribute was specified.
*
* @return
* @since 1.6
*/
@Nullable
String getCountQueryProjection() {
String countProjection = getAnnotationValue("countProjection", String.class);
return StringUtils.hasText(countProjection) ? countProjection : null;
}
/**
* Returns whether the backing query is a native one.
*
* @return
*/
boolean isNativeQuery() {
return this.isNativeQuery.get();
}
/**
* Utility method that returns a {@link DeclaredQuery} object for the given {@code queryString}.
*
* @param query the query string to wrap.
* @return a {@link DeclaredQuery} object for the given {@code queryString}.
* @since 4.0
*/
DeclaredQuery getDeclaredQuery(String query) {
return isNativeQuery() ? DeclaredQuery.nativeQuery(query) : DeclaredQuery.jpqlQuery(query);
}
@Override
public String getNamedQueryName() {
String annotatedName = getAnnotationValue("name", String.class);
return StringUtils.hasText(annotatedName) ? annotatedName : super.getNamedQueryName();
}
/**
* Returns the name of the {@link NamedQuery} that shall be used for count queries.
*
* @return
*/
public String getNamedCountQueryName() {
String annotatedName = getAnnotationValue("countName", String.class);
return StringUtils.hasText(annotatedName) ? annotatedName : getNamedQueryName() + ".count";
}
/**
* Returns whether we should flush automatically for modifying queries.
*
* @return whether we should flush automatically.
*/
boolean getFlushAutomatically() {
return getMergedOrDefaultAnnotationValue("flushAutomatically", Modifying.class, Boolean.class);
}
/**
* Returns whether we should clear automatically for modifying queries.
*
* @return whether we should clear automatically.
*/
boolean getClearAutomatically() {
return getMergedOrDefaultAnnotationValue("clearAutomatically", Modifying.class, Boolean.class);
}
/**
* Returns the {@link Query} annotation's attribute casted to the given type or default value if no annotation
* available.
*
* @param attribute
* @param type
* @return
*/
private <T> T getAnnotationValue(String attribute, Class<T> type) {
return getMergedOrDefaultAnnotationValue(attribute, Query.class, type);
}
@SuppressWarnings({ "rawtypes", "unchecked", "NullAway" })
private <T> T getMergedOrDefaultAnnotationValue(String attribute, Class annotationType, Class<T> targetType) {
Annotation annotation = AnnotatedElementUtils.findMergedAnnotation(method, annotationType);
if (annotation == null) {
return targetType.cast(AnnotationUtils.getDefaultValue(annotationType, attribute));
}
return targetType.cast(AnnotationUtils.getValue(annotation, attribute));
}
@Override
public JpaParameters getParameters() {
return (JpaParameters) super.getParameters();
}
@Override
public boolean isCollectionQuery() {
return this.isCollectionQuery.get();
}
/**
* Return {@literal true} if the method contains a {@link Procedure} annotation.
*
* @return
*/
public boolean isProcedureQuery() {
return this.isProcedureQuery.get();
}
/**
* Returns a new {@link StoredProcedureAttributes} representing the stored procedure meta-data for this
* {@link JpaQueryMethod}.
*
* @return
*/
StoredProcedureAttributes getProcedureAttributes() {
if (storedProcedureAttributes == null) {
this.storedProcedureAttributes = storedProcedureAttributeSource.createFrom(method, getEntityInformation());
}
return storedProcedureAttributes;
}
/**
* Returns the {@link QueryRewriter} type.
*
* @return type of the {@link QueryRewriter}
* @since 3.0
*/
public Class<? extends QueryRewriter> getQueryRewriter() {
return getMergedOrDefaultAnnotationValue("queryRewriter", Query.class, Class.class);
}
}
|
JpaQueryMethod
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/routebuilder/SpringRouteTemplateLocalBeanTest.java
|
{
"start": 1194,
"end": 2056
}
|
class ____ extends SpringTestSupport {
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/spring/routebuilder/SpringRouteTemplateLocalBeanTest.xml");
}
@Test
public void testLocalBean() throws Exception {
Map<String, Object> parameters = new HashMap<>();
parameters.put("foo", "one");
parameters.put("bar", "cheese");
parameters.put("greeting", "Davs");
parameters.put("suffix", "!!!");
context.addRouteFromTemplate("first", "myTemplate", parameters);
MockEndpoint mock = getMockEndpoint("mock:cheese");
mock.expectedBodiesReceived("Davs World!!!");
template.sendBody("direct:one", "World");
assertMockEndpointsSatisfied();
}
}
|
SpringRouteTemplateLocalBeanTest
|
java
|
elastic__elasticsearch
|
modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramTests.java
|
{
"start": 608,
"end": 1376
}
|
class ____ extends AggregationBuilderTestCase<AutoDateHistogramAggregationBuilder> {
@Override
protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() {
AutoDateHistogramAggregationBuilder builder = new AutoDateHistogramAggregationBuilder(randomAlphaOfLengthBetween(1, 10));
builder.field(INT_FIELD_NAME);
builder.setNumBuckets(randomIntBetween(1, 100000));
// TODO[PCS]: add builder pattern here
if (randomBoolean()) {
builder.format("###.##");
}
if (randomBoolean()) {
builder.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
builder.timeZone(randomZone());
}
return builder;
}
}
|
AutoDateHistogramTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
|
{
"start": 29880,
"end": 31921
}
|
class ____ extends StatementContext {
public LikePatternContext clusterLike;
public LikePatternContext tableLike;
public TableIdentifierContext tableIdent;
public TerminalNode SYS() {
return getToken(SqlBaseParser.SYS, 0);
}
public TerminalNode TABLES() {
return getToken(SqlBaseParser.TABLES, 0);
}
public TerminalNode CATALOG() {
return getToken(SqlBaseParser.CATALOG, 0);
}
public TerminalNode TYPE() {
return getToken(SqlBaseParser.TYPE, 0);
}
public List<StringContext> string() {
return getRuleContexts(StringContext.class);
}
public StringContext string(int i) {
return getRuleContext(StringContext.class, i);
}
public List<LikePatternContext> likePattern() {
return getRuleContexts(LikePatternContext.class);
}
public LikePatternContext likePattern(int i) {
return getRuleContext(LikePatternContext.class, i);
}
public TableIdentifierContext tableIdentifier() {
return getRuleContext(TableIdentifierContext.class, 0);
}
public SysTablesContext(StatementContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterSysTables(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitSysTables(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitSysTables(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static
|
SysTablesContext
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metadata/StaticMetadataTest.java
|
{
"start": 1049,
"end": 5340
}
|
class ____ {
@Test
public void testInjections(EntityManagerFactoryScope scope) {
// Make sure the entity manager factory is properly initialized
scope.getEntityManagerFactory();
// Address (embeddable)
assertNotNull( Address_.address1 );
assertNotNull( Address_.address2 );
assertNotNull( Address_.city );
final EmbeddableType addressType = (EmbeddableType) House_.address.getType();
assertEquals( addressType.getDeclaredSingularAttribute( "address1" ), Address_.address1 );
assertEquals( addressType.getDeclaredSingularAttribute( "address2" ), Address_.address2 );
assertTrue( Address_.address1.isOptional() );
assertFalse( Address_.address2.isOptional() );
// Animal (mapped superclass)
assertNotNull( Animal_.id );
assertTrue( Animal_.id.isId() );
assertEquals( Long.class, Animal_.id.getJavaType() );
assertNotNull( Animal_.legNbr );
assertEquals( int.class, Animal_.legNbr.getJavaType() );
// Cat (hierarchy)
assertNotNull( Cat_.id );
assertTrue( Cat_.id.isId() );
assertEquals( Animal.class, Cat_.id.getJavaMember().getDeclaringClass() );
assertNotNull( Cat_.nickname );
// FoodItem
assertNotNull( FoodItem_.version );
assertTrue( FoodItem_.version.isVersion() );
// Fridge
assertNotNull( Fridge_.id );
assertTrue( Fridge_.id.isId() );
assertEquals( Long.class, Fridge_.id.getJavaType() );
assertNotNull( Fridge_.temperature );
assertEquals( "temperature", Fridge_.temperature.getName() );
assertEquals( Fridge.class, Fridge_.temperature.getDeclaringType().getJavaType() );
assertEquals( int.class, Fridge_.temperature.getJavaType() );
assertEquals( int.class, Fridge_.temperature.getJavaType() );
assertEquals( int.class, Fridge_.temperature.getType().getJavaType() );
assertEquals( Bindable.BindableType.SINGULAR_ATTRIBUTE, Fridge_.temperature.getBindableType() );
assertEquals( Type.PersistenceType.BASIC, Fridge_.temperature.getType().getPersistenceType() );
assertEquals( Attribute.PersistentAttributeType.BASIC, Fridge_.temperature.getPersistentAttributeType() );
assertFalse( Fridge_.temperature.isId() );
assertFalse( Fridge_.temperature.isOptional() );
assertFalse( Fridge_.temperature.isAssociation() );
assertFalse( Fridge_.temperature.isCollection() );
assertFalse( Fridge_.brand.isOptional() );
// House (embedded id)
assertNotNull( House_.key );
assertTrue( House_.key.isId() );
assertEquals( Attribute.PersistentAttributeType.EMBEDDED, House_.key.getPersistentAttributeType() );
assertNotNull( House_.address );
assertEquals( Attribute.PersistentAttributeType.EMBEDDED, House_.address.getPersistentAttributeType() );
assertFalse( House_.address.isCollection() );
assertFalse( House_.address.isAssociation() );
assertNotNull( House_.rooms );
assertFalse( House_.rooms.isAssociation() );
assertTrue( House_.rooms.isCollection() );
assertEquals( Attribute.PersistentAttributeType.ELEMENT_COLLECTION, House_.rooms.getPersistentAttributeType() );
assertEquals( Room.class, House_.rooms.getBindableJavaType() );
assertEquals( Set.class, House_.rooms.getJavaType() );
assertEquals( Bindable.BindableType.PLURAL_ATTRIBUTE, House_.rooms.getBindableType() );
assertEquals( Set.class, House_.rooms.getJavaType() );
assertEquals( PluralAttribute.CollectionType.SET, House_.rooms.getCollectionType() );
assertEquals( Type.PersistenceType.EMBEDDABLE, House_.rooms.getElementType().getPersistenceType() );
assertNotNull( House_.roomsByName );
assertEquals( String.class, House_.roomsByName.getKeyJavaType() );
assertEquals( Type.PersistenceType.BASIC, House_.roomsByName.getKeyType().getPersistenceType() );
assertEquals( PluralAttribute.CollectionType.MAP, House_.roomsByName.getCollectionType() );
assertNotNull( House_.roomsBySize );
assertEquals( Type.PersistenceType.EMBEDDABLE, House_.roomsBySize.getElementType().getPersistenceType() );
assertEquals( PluralAttribute.CollectionType.LIST, House_.roomsBySize.getCollectionType() );
// Person (mapped id)
assertNotNull( Person_.firstName );
assertNotNull( Person_.lastName );
assertTrue( Person_.firstName.isId() );
assertTrue( Person_.lastName.isId() );
assertTrue( Person_.lastName.isId() );
//Garden List as bag
assertNotNull( Garden_.flowers );
}
}
|
StaticMetadataTest
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableCreate.java
|
{
"start": 10464,
"end": 11276
}
|
class ____<T> extends BaseEmitter<T> {
private static final long serialVersionUID = 4127754106204442833L;
NoOverflowBaseAsyncEmitter(Subscriber<? super T> downstream) {
super(downstream);
}
@Override
public final void onNext(T t) {
if (isCancelled()) {
return;
}
if (t == null) {
onError(ExceptionHelper.createNullPointerException("onNext called with a null value."));
return;
}
if (get() != 0) {
downstream.onNext(t);
BackpressureHelper.produced(this, 1);
} else {
onOverflow();
}
}
abstract void onOverflow();
}
static final
|
NoOverflowBaseAsyncEmitter
|
java
|
grpc__grpc-java
|
core/src/jmh/java/io/grpc/internal/SerializingExecutorBenchmark.java
|
{
"start": 1808,
"end": 3150
}
|
class ____ implements Runnable {
int val;
@Override
public void run() {
val++;
}
}
private final IncrRunnable incrRunnable = new IncrRunnable();
private final Phaser phaser = new Phaser(2);
private final Runnable phaserRunnable = new Runnable() {
@Override
public void run() {
phaser.arrive();
}
};
@TearDown
public void tearDown() throws Exception {
executorService.shutdownNow();
if (!executorService.awaitTermination(1, TimeUnit.SECONDS)) {
throw new RuntimeException("executor failed to shut down in a timely fashion");
}
}
@Benchmark
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public void oneRunnableLatency() throws Exception {
executor.execute(phaserRunnable);
phaser.arriveAndAwaitAdvance();
}
/**
* Queue many runnables, to better see queuing/consumption cost instead of just context switch.
*/
@Benchmark
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public void manyRunnables() throws Exception {
incrRunnable.val = 0;
for (int i = 0; i < 500; i++) {
executor.execute(incrRunnable);
}
executor.execute(phaserRunnable);
phaser.arriveAndAwaitAdvance();
if (incrRunnable.val != 500) {
throw new AssertionError();
}
}
}
|
IncrRunnable
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/namingstrategy/collectionJoinTableNaming/CollectionJoinTableNamingTest.java
|
{
"start": 6389,
"end": 6890
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "increment")
@GenericGenerator(name = "increment", strategy = "increment")
private Integer id;
@OrderColumn
@ManyToMany(cascade = {CascadeType.MERGE, CascadeType.PERSIST}, fetch = FetchType.EAGER)
private List<Input> inputs1;
@OrderColumn
@ManyToMany(cascade = {CascadeType.MERGE, CascadeType.PERSIST}, fetch = FetchType.EAGER)
private List<Input> inputs2;
}
@Entity
@Table(name = "input")
public
|
Ptx
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/bugs/ImplementationOfGenericAbstractMethodNotInvokedOnSpyTest.java
|
{
"start": 580,
"end": 1032
}
|
class ____<T extends Number> extends GenericAbstract<T> {
@Override
protected String method_to_implement(T value) {
return "concrete value";
}
}
@Test
public void should_invoke_method_to_implement() {
GenericAbstract<Number> spy = spy(new ImplementsGenericMethodOfAbstract<Number>());
assertThat(spy.public_method(73L)).isEqualTo("concrete value");
}
}
|
ImplementsGenericMethodOfAbstract
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/struct/BackReference1878Test.java
|
{
"start": 558,
"end": 872
}
|
class ____ {
@JsonManagedReference
public Child a;
}
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testChildDeserialization() throws Exception {
Child child = MAPPER.readValue("{\"b\": {}}", Child.class);
assertNotNull(child.b);
}
}
|
Parent
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.