language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/query/SpanContainingQueryBuilderTests.java
|
{
"start": 850,
"end": 6475
}
|
// Tests for the span_containing query builder: random construction, translation
// to the Lucene SpanContainingQuery, JSON round-tripping, and rejection of
// non-default boost values on nested span clauses.
class ____ extends AbstractQueryTestCase<SpanContainingQueryBuilder> {
@Override
protected SpanContainingQueryBuilder doCreateTestQueryBuilder() {
// Build two random span_term clauses and nest them as big/little.
SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(2);
return new SpanContainingQueryBuilder(spanTermQueries[0], spanTermQueries[1]);
}
@Override
protected SpanContainingQueryBuilder createQueryWithInnerQuery(QueryBuilder queryBuilder) {
// Wrap the given builder as both the big and little clause when it is itself
// a span_containing query; otherwise fall back to a fixed span_term pair.
if (queryBuilder instanceof SpanContainingQueryBuilder) {
return new SpanContainingQueryBuilder((SpanContainingQueryBuilder) queryBuilder, (SpanContainingQueryBuilder) queryBuilder);
}
return new SpanContainingQueryBuilder(new SpanTermQueryBuilder("field", "value"), new SpanTermQueryBuilder("field", "value"));
}
@Override
protected void doAssertLuceneQuery(SpanContainingQueryBuilder queryBuilder, Query query, SearchExecutionContext context) {
// The builder must always produce a Lucene SpanContainingQuery.
assertThat(query, instanceOf(SpanContainingQuery.class));
}
public void testIllegalArguments() {
// Both the big and the little clause are mandatory: null is rejected.
SpanTermQueryBuilder spanTermQuery = new SpanTermQueryBuilder("field", "value");
expectThrows(IllegalArgumentException.class, () -> new SpanContainingQueryBuilder(null, spanTermQuery));
expectThrows(IllegalArgumentException.class, () -> new SpanContainingQueryBuilder(spanTermQuery, null));
}
public void testFromJson() throws IOException {
// Round-trip: parse JSON, re-serialize, and check selected parsed values.
String json = """
{
"span_containing" : {
"big" : {
"span_near" : {
"clauses" : [ {
"span_term" : {
"field1" : {
"value" : "bar"
}
}
}, {
"span_term" : {
"field1" : {
"value" : "baz"
}
}
} ],
"slop" : 5,
"in_order" : true
}
},
"little" : {
"span_term" : {
"field1" : {
"value" : "foo"
}
}
},
"boost" : 2.0
}
}""";
SpanContainingQueryBuilder parsed = (SpanContainingQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, 2, ((SpanNearQueryBuilder) parsed.bigQuery()).clauses().size());
assertEquals(json, "foo", ((SpanTermQueryBuilder) parsed.littleQuery()).value());
assertEquals(json, 2.0, parsed.boost(), 0.0);
}
// Renamed from testFromJsoWithNonDefaultBoostInBigQuery (typo: missing "n").
public void testFromJsonWithNonDefaultBoostInBigQuery() {
// A non-default boost on the nested "big" clause must fail parsing.
String json = """
{
"span_containing" : {
"big" : {
"span_near" : {
"clauses" : [ {
"span_term" : {
"field1" : {
"value" : "bar",
"boost" : 1.0
}
}
}, {
"span_term" : {
"field1" : {
"value" : "baz",
"boost" : 1.0
}
}
} ],
"slop" : 5,
"in_order" : true,
"boost" : 2.0
}
},
"little" : {
"span_term" : {
"field1" : {
"value" : "foo",
"boost" : 1.0
}
}
},
"boost" : 1.0
}
}""";
Exception exception = expectThrows(ParsingException.class, () -> parseQuery(json));
assertThat(
exception.getMessage(),
equalTo("span_containing [big] as a nested span clause can't have non-default boost value [2.0]")
);
}
public void testFromJsonWithNonDefaultBoostInLittleQuery() {
// A non-default boost on the nested "little" clause must fail parsing too.
String json = """
{
"span_containing" : {
"little" : {
"span_near" : {
"clauses" : [ {
"span_term" : {
"field1" : {
"value" : "bar",
"boost" : 1.0
}
}
}, {
"span_term" : {
"field1" : {
"value" : "baz",
"boost" : 1.0
}
}
} ],
"slop" : 5,
"in_order" : true,
"boost" : 2.0
}
},
"big" : {
"span_term" : {
"field1" : {
"value" : "foo",
"boost" : 1.0
}
}
},
"boost" : 1.0
}
}""";
Exception exception = expectThrows(ParsingException.class, () -> parseQuery(json));
assertThat(
exception.getMessage(),
equalTo("span_containing [little] as a nested span clause can't have non-default boost value [2.0]")
);
}
}
|
SpanContainingQueryBuilderTests
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/biginteger/BigIntegerAssert_isNotNegative_Test.java
|
{
"start": 803,
"end": 1140
}
|
// Template-based test for BigIntegerAssert.isNotNegative(): the base test
// invokes the API method and then verifies it delegated to the internal
// BigIntegers verifier with the assertion's info and actual value.
class ____ extends BigIntegerAssertBaseTest {
@Override
protected BigIntegerAssert invoke_api_method() {
// Invoke the method under test on the shared assertion fixture.
return assertions.isNotNegative();
}
@Override
protected void verify_internal_effects() {
// The call must have been forwarded to BigIntegers.assertIsNotNegative.
verify(bigIntegers).assertIsNotNegative(getInfo(assertions), getActual(assertions));
}
}
|
BigIntegerAssert_isNotNegative_Test
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-search-orm-elasticsearch-outbox-polling/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/coordination/outboxpolling/HibernateSearchOutboxPollingTest.java
|
{
"start": 261,
"end": 2059
}
|
// End-to-end exercise of the Hibernate Search outbox-polling endpoints:
// agent check, data init, event processing, search, purge, and mass indexing.
class ____ {

private static final String ROOT = "/test/hibernate-search-outbox-polling";

@Test
public void testSearch() {
// If agents are running, we know we are actually using the outbox-polling coordination strategy
putExpectingOk("/check-agents-running");
putExpectingNoContent("/init-data");
putExpectingNoContent("/await-event-processing");
putExpectingOk("/refresh");
getExpectingOk("/search");
putExpectingOk("/purge");
putExpectingOk("/refresh");
// search-empty only reports a status code, no body to check.
RestAssured.when().get(ROOT + "/search-empty").then()
.statusCode(200);
// Mass indexing involves additional steps when using outbox-polling;
// let's just check it doesn't fail.
putExpectingOk("/mass-indexer");
getExpectingOk("/search");
}

// PUT that must answer 200 with body "OK".
private static void putExpectingOk(String endpoint) {
RestAssured.when().put(ROOT + endpoint).then()
.statusCode(200)
.body(is("OK"));
}

// PUT that must answer 204 (no content).
private static void putExpectingNoContent(String endpoint) {
RestAssured.when().put(ROOT + endpoint).then()
.statusCode(204);
}

// GET that must answer 200 with body "OK".
private static void getExpectingOk(String endpoint) {
RestAssured.when().get(ROOT + endpoint).then()
.statusCode(200)
.body(is("OK"));
}
}
|
HibernateSearchOutboxPollingTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/reflect/MethodUtilsTest.java
|
{
"start": 22064,
"end": 69618
}
|
class ____ package-private
final int modifiers = Class.forName("org.apache.commons.lang3.reflect.testbed.PackageBeanOtherPackage").getModifiers();
assertFalse(Modifier.isPrivate(modifiers));
assertFalse(Modifier.isProtected(modifiers));
assertFalse(Modifier.isPublic(modifiers));
// make sure that bean does what it should: compile
new PublicSubBeanOtherPackage().setBar("");
// make sure that bean does what it should
final PublicSubBeanOtherPackage bean = new PublicSubBeanOtherPackage();
assertEquals(bean.getFoo(), "This is foo", "Start value (foo)");
assertEquals(bean.getBar(), "This is bar", "Start value (bar)");
bean.setFoo("new foo");
bean.setBar("new bar");
assertEquals(bean.getFoo(), "new foo", "Set value (foo)");
assertEquals(bean.getBar(), "new bar", "Set value (bar)");
// see if we can access public methods in a default access superclass
// from a public access subclass instance
MethodUtils.invokeExactMethod(bean, "setFoo", "alpha");
assertEquals(bean.getFoo(), "alpha", "Set value (foo:2)");
MethodUtils.invokeExactMethod(bean, "setBar", "beta");
assertEquals(bean.getBar(), "beta", "Set value (bar:2)");
// PublicSubBean.setFoo(String)
Method method = MethodUtils.getAccessibleMethod(PublicSubBeanOtherPackage.class, "setFoo", String.class);
assertNotNull(method, "getAccessibleMethod() setFoo is Null");
method.invoke(bean, "1111");
assertEquals("1111", bean.getFoo(), "Set value (foo:3)");
// PublicSubBean.setBar(String)
method = MethodUtils.getAccessibleMethod(PublicSubBeanOtherPackage.class, "setBar", String.class);
assertNotNull(method, "getAccessibleMethod() setBar is Null");
method.invoke(bean, "2222");
assertEquals("2222", bean.getBar(), "Set value (bar:3)");
}
@Test
void testGetAccessiblePublicMethod() throws Exception {
// A public method on a public class resolves to its own declaring class,
// whether looked up from a Method object or from class + Method.
assertSame(MutableObject.class,
MethodUtils.getAccessibleMethod(MutableObject.class.getMethod("getValue", ArrayUtils.EMPTY_CLASS_ARRAY)).getDeclaringClass());
assertSame(MutableObject.class, MethodUtils
.getAccessibleMethod(MutableObject.class, MutableObject.class.getMethod("getValue", ArrayUtils.EMPTY_CLASS_ARRAY)).getDeclaringClass());
}
@Test
void testGetAccessiblePublicMethodFromDescription() {
// Same lookup, but by method name + parameter types instead of a Method.
assertSame(MutableObject.class, MethodUtils.getAccessibleMethod(MutableObject.class, "getValue", ArrayUtils.EMPTY_CLASS_ARRAY).getDeclaringClass());
}
// Renamed from testGetAnnotationIllegalArgumentException1..3: these tests
// assert NullPointerException (via assertNullPointerException), matching the
// sibling getMethodsListWithAnnotation NPE tests' naming convention.
@Test
void testGetAnnotationNullPointerException1() {
// A null annotation class is rejected with NullPointerException.
assertNullPointerException(() -> MethodUtils.getAnnotation(FieldUtilsTest.class.getDeclaredMethods()[0], null, true, true));
}
@Test
void testGetAnnotationNullPointerException2() {
// A null method is rejected with NullPointerException.
assertNullPointerException(() -> MethodUtils.getAnnotation(null, Annotated.class, true, true));
}
@Test
void testGetAnnotationNullPointerException3() {
// Both arguments null: still NullPointerException.
assertNullPointerException(() -> MethodUtils.getAnnotation(null, null, true, true));
}
@Test
void testGetAnnotationNotSearchSupersAndNotIgnoreAccess() throws NoSuchMethodException {
// searchSupers=false, ignoreAccess=false: only annotations on accessible
// methods declared by the class itself are found.
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentNotAnnotatedMethod"), Annotated.class, false, false));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("doIt"), Annotated.class, false, false));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentProtectedAnnotatedMethod"), Annotated.class, false, false));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getDeclaredMethod("privateAnnotatedMethod"), Annotated.class, false, false));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("publicAnnotatedMethod"), Annotated.class, false, false));
}
@Test
void testGetAnnotationNotSearchSupersButIgnoreAccess() throws NoSuchMethodException {
// searchSupers=false, ignoreAccess=true: access is ignored, so the class's
// own private annotated method is now found as well.
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentNotAnnotatedMethod"), Annotated.class, false, true));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("doIt"), Annotated.class, false, true));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentProtectedAnnotatedMethod"), Annotated.class, false, true));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getDeclaredMethod("privateAnnotatedMethod"), Annotated.class, false, true));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("publicAnnotatedMethod"), Annotated.class, false, true));
}
@Test
void testGetAnnotationSearchSupersAndIgnoreAccess() throws NoSuchMethodException {
// searchSupers=true, ignoreAccess=true: annotations inherited from
// superclasses/interfaces and on non-public methods are all visible.
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentNotAnnotatedMethod"), Annotated.class, true, true));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("doIt"), Annotated.class, true, true));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentProtectedAnnotatedMethod"), Annotated.class, true, true));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getDeclaredMethod("privateAnnotatedMethod"), Annotated.class, true, true));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("publicAnnotatedMethod"), Annotated.class, true, true));
// Same expectations hold for a generically-parameterized child class.
assertNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getMethod("parentNotAnnotatedMethod", String.class), Annotated.class, true, true));
assertNotNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getMethod("parentProtectedAnnotatedMethod", String.class), Annotated.class, true,
true));
assertNotNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getDeclaredMethod("privateAnnotatedMethod", String.class), Annotated.class, true,
true));
assertNotNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getMethod("publicAnnotatedMethod", String.class), Annotated.class, true, true));
}
@Test
void testGetAnnotationSearchSupersButNotIgnoreAccess() throws NoSuchMethodException {
// searchSupers=true, ignoreAccess=false: supertypes are searched but only
// public annotated methods are reported.
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentNotAnnotatedMethod"), Annotated.class, true, false));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("doIt"), Annotated.class, true, false));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("parentProtectedAnnotatedMethod"), Annotated.class, true, false));
assertNull(MethodUtils.getAnnotation(PublicChild.class.getDeclaredMethod("privateAnnotatedMethod"), Annotated.class, true, false));
assertNotNull(MethodUtils.getAnnotation(PublicChild.class.getMethod("publicAnnotatedMethod"), Annotated.class, true, false));
// Same expectations hold for a generically-parameterized child class.
assertNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getMethod("parentNotAnnotatedMethod", String.class), Annotated.class, true, false));
assertNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getMethod("parentProtectedAnnotatedMethod", String.class), Annotated.class, true,
false));
assertNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getDeclaredMethod("privateAnnotatedMethod", String.class), Annotated.class, true,
false));
assertNotNull(MethodUtils.getAnnotation(StringParameterizedChild.class.getMethod("publicAnnotatedMethod", String.class), Annotated.class, true, false));
}
@Test
void testGetMatchingAccessibleMethod() {
// Each line asserts which overload of TestBean.foo (or the inheritance
// beans) is chosen for the requested parameter types, including widening,
// boxing, null (match-anything), and varargs resolution.
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", ArrayUtils.EMPTY_CLASS_ARRAY, ArrayUtils.EMPTY_CLASS_ARRAY);
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", null, ArrayUtils.EMPTY_CLASS_ARRAY);
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(String.class), singletonArray(String.class));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Object.class), singletonArray(Object.class));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Boolean.class), singletonArray(Object.class));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Byte.class), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Byte.TYPE), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Short.class), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Short.TYPE), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Character.class), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Character.TYPE), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Integer.class), singletonArray(Integer.class));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Integer.TYPE), singletonArray(Integer.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Long.class), singletonArray(Long.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Long.TYPE), singletonArray(Long.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Float.class), singletonArray(Double.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Float.TYPE), singletonArray(Double.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Double.class), singletonArray(Double.TYPE));
// Removed an exact duplicate of the following assertion (same arguments
// were asserted twice in a row).
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", singletonArray(Double.TYPE), singletonArray(Double.TYPE));
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", new Class[] { String.class, String.class }, new Class[] { String[].class });
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "foo", new Class[] { Integer.TYPE, String.class, String.class },
new Class[] { Integer.class, String[].class });
expectMatchingAccessibleMethodParameterTypes(InheritanceBean.class, "testOne", singletonArray(ParentObject.class), singletonArray(ParentObject.class));
expectMatchingAccessibleMethodParameterTypes(InheritanceBean.class, "testOne", singletonArray(ChildObject.class), singletonArray(ParentObject.class));
expectMatchingAccessibleMethodParameterTypes(InheritanceBean.class, "testTwo", singletonArray(ParentObject.class),
singletonArray(GrandParentObject.class));
expectMatchingAccessibleMethodParameterTypes(InheritanceBean.class, "testTwo", singletonArray(ChildObject.class), singletonArray(PackagePrivateEmptyInterface.class));
// LANG-1757
expectMatchingAccessibleMethodParameterTypes(Files.class, "exists", singletonArray(Path.class), new Class[] { Path.class, LinkOption[].class });
}
@Test
void testGetMatchingMethod() throws NoSuchMethodException {
// Exercises getMatchingMethod: a null parameter type acts as a wildcard,
// and an ambiguous wildcard match raises IllegalStateException.
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod"), GetMatchingMethodClass.class.getMethod("testMethod"));
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod", Long.TYPE),
GetMatchingMethodClass.class.getMethod("testMethod", Long.TYPE));
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod", Long.class),
GetMatchingMethodClass.class.getMethod("testMethod", Long.class));
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod", (Class<?>) null),
GetMatchingMethodClass.class.getMethod("testMethod", Long.class));
// Ambiguous: null could match more than one overload of testMethod2.
assertThrows(IllegalStateException.class, () -> MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod2", (Class<?>) null));
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod3", Long.TYPE, Long.class),
GetMatchingMethodClass.class.getMethod("testMethod3", Long.TYPE, Long.class));
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod3", Long.class, Long.TYPE),
GetMatchingMethodClass.class.getMethod("testMethod3", Long.class, Long.TYPE));
// A null slot is disambiguated by the non-null neighboring parameter type.
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod3", null, Long.TYPE),
GetMatchingMethodClass.class.getMethod("testMethod3", Long.class, Long.TYPE));
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod3", Long.TYPE, null),
GetMatchingMethodClass.class.getMethod("testMethod3", Long.TYPE, Long.class));
assertThrows(IllegalStateException.class, () -> MethodUtils.getMatchingMethod(GetMatchingMethodClass.class, "testMethod4", null, null));
// A RuntimeException argument matches the Exception-typed overload.
assertEquals(MethodUtils.getMatchingMethod(GetMatchingMethodImpl.class, "testMethod5", RuntimeException.class),
GetMatchingMethodImpl.class.getMethod("testMethod5", Exception.class));
assertEquals(GetMatchingMethodImpl.class.getMethod("testMethod6"), MethodUtils.getMatchingMethod(GetMatchingMethodImpl.class, "testMethod6"));
assertNullPointerException(() -> MethodUtils.getMatchingMethod(null, "testMethod5", RuntimeException.class));
// Inherited methods resolve to the declaring class in the hierarchy.
Method testMethod6 = MethodUtils.getMatchingMethod(ConcreteGetMatchingMethod2.class, "testMethod6");
assertNotNull(testMethod6);
assertEquals(AbstractGetMatchingMethod2.class, testMethod6.getDeclaringClass());
testMethod6 = MethodUtils.getMatchingMethod(ConcreteGetMatchingMethod22.class, "testMethod6");
assertNotNull(testMethod6);
assertEquals(ConcreteGetMatchingMethod22.class, testMethod6.getDeclaringClass());
}
@Test
void testGetMethodObject() throws Exception {
// getMethodObject returns the Method on success and null (never throws)
// for unknown names, null class/name, or null parameter-type slots.
assertEquals(MutableObject.class.getMethod("getValue", ArrayUtils.EMPTY_CLASS_ARRAY),
MethodUtils.getMethodObject(MutableObject.class, "getValue", ArrayUtils.EMPTY_CLASS_ARRAY));
assertNull(MethodUtils.getMethodObject(MutableObject.class, "does not exist, at all", ArrayUtils.EMPTY_CLASS_ARRAY));
assertNull(MethodUtils.getMethodObject(null, "does not exist, at all", ArrayUtils.EMPTY_CLASS_ARRAY));
assertNull(MethodUtils.getMethodObject(null, null, ArrayUtils.EMPTY_CLASS_ARRAY));
assertNull(MethodUtils.getMethodObject(MutableObject.class, null, ArrayUtils.EMPTY_CLASS_ARRAY));
// A null parameter type never matches a zero-arg method...
assertNull(MethodUtils.getMethodObject(MutableObject.class, "getValue", new Class[] { null }));
// ...nor a method with real parameters, in any position.
assertNull(MethodUtils.getMethodObject(MutableObject.class, "equals", new Class[] { null }));
assertNull(MethodUtils.getMethodObject(MutableObject.class, "equals", new Class[] { String.class, null, String.class }));
}
/**
 * Tests a {@code public} method.
 *
 * <p>This test method is itself {@code @Annotated} and {@code public} on
 * purpose: the assertions below expect the reflection scan to find exactly
 * this method and {@code testGetMethodsWithAnnotation}.</p>
 */
@Test
@Annotated
public void testGetMethodsListWithAnnotation() throws NoSuchMethodException {
// Object has no @Annotated methods at all.
assertEquals(0, MethodUtils.getMethodsListWithAnnotation(Object.class, Annotated.class).size());
final List<Method> methodWithAnnotation = MethodUtils.getMethodsListWithAnnotation(MethodUtilsTest.class, Annotated.class);
assertEquals(2, methodWithAnnotation.size());
assertTrue(methodWithAnnotation.contains(MethodUtilsTest.class.getMethod("testGetMethodsWithAnnotation")));
assertTrue(methodWithAnnotation.contains(MethodUtilsTest.class.getMethod("testGetMethodsListWithAnnotation")));
}
// Null arguments to getMethodsListWithAnnotation are rejected with NPE.
@Test
void testGetMethodsListWithAnnotationNullPointerException1() {
assertNullPointerException(() -> MethodUtils.getMethodsListWithAnnotation(FieldUtilsTest.class, null));
}
@Test
void testGetMethodsListWithAnnotationNullPointerException2() {
assertNullPointerException(() -> MethodUtils.getMethodsListWithAnnotation(null, Annotated.class));
}
@Test
void testGetMethodsListWithAnnotationNullPointerException3() {
assertNullPointerException(() -> MethodUtils.getMethodsListWithAnnotation(null, null));
}
// This test method is itself public and @Annotated on purpose: the scan
// below expects to find exactly it and testGetMethodsListWithAnnotation.
@Test
@Annotated
public void testGetMethodsWithAnnotation() throws NoSuchMethodException {
// Object has no @Annotated methods at all.
assertArrayEquals(new Method[0], MethodUtils.getMethodsWithAnnotation(Object.class, Annotated.class));
final Method[] methodsWithAnnotation = MethodUtils.getMethodsWithAnnotation(MethodUtilsTest.class, Annotated.class);
assertEquals(2, methodsWithAnnotation.length);
assertTrue(ArrayUtils.contains(methodsWithAnnotation, MethodUtilsTest.class.getMethod("testGetMethodsWithAnnotation")));
assertTrue(ArrayUtils.contains(methodsWithAnnotation, MethodUtilsTest.class.getMethod("testGetMethodsListWithAnnotation")));
}
// Renamed from testGetMethodsWithAnnotationIllegalArgumentException1..3:
// these tests assert NullPointerException (via assertNullPointerException),
// matching the getMethodsListWithAnnotation NPE tests' naming convention.
@Test
void testGetMethodsWithAnnotationNullPointerException1() {
// A null annotation class is rejected with NullPointerException.
assertNullPointerException(() -> MethodUtils.getMethodsWithAnnotation(FieldUtilsTest.class, null));
}
@Test
void testGetMethodsWithAnnotationNullPointerException2() {
// A null class is rejected with NullPointerException.
assertNullPointerException(() -> MethodUtils.getMethodsWithAnnotation(null, Annotated.class));
}
@Test
void testGetMethodsWithAnnotationNullPointerException3() {
// Both arguments null: still NullPointerException.
assertNullPointerException(() -> MethodUtils.getMethodsWithAnnotation(null, null));
}
@Test
void testGetMethodsWithAnnotationNotSearchSupersAndNotIgnoreAccess() {
// searchSupers=false, ignoreAccess=false: only the class's own public
// annotated method is reported.
assertArrayEquals(new Method[0], MethodUtils.getMethodsWithAnnotation(Object.class, Annotated.class, false, false));
final Method[] methodsWithAnnotation = MethodUtils.getMethodsWithAnnotation(PublicChild.class, Annotated.class, false, false);
assertEquals(1, methodsWithAnnotation.length);
assertEquals("PublicChild.publicAnnotatedMethod",
methodsWithAnnotation[0].getDeclaringClass().getSimpleName() + '.' + methodsWithAnnotation[0].getName());
}
@Test
void testGetMethodsWithAnnotationNotSearchSupersButIgnoreAccess() {
// searchSupers=false, ignoreAccess=true: both of PublicChild's own
// annotated methods (including the non-public one) are reported.
assertArrayEquals(new Method[0], MethodUtils.getMethodsWithAnnotation(Object.class, Annotated.class, false, true));
final Method[] methodsWithAnnotation = MethodUtils.getMethodsWithAnnotation(PublicChild.class, Annotated.class, false, true);
assertEquals(2, methodsWithAnnotation.length);
assertEquals("PublicChild", methodsWithAnnotation[0].getDeclaringClass().getSimpleName());
assertEquals("PublicChild", methodsWithAnnotation[1].getDeclaringClass().getSimpleName());
assertTrue(methodsWithAnnotation[0].getName().endsWith("AnnotatedMethod"));
assertTrue(methodsWithAnnotation[1].getName().endsWith("AnnotatedMethod"));
}
@Test
void testGetMethodsWithAnnotationSearchSupersAndIgnoreAccess() {
// searchSupers=true, ignoreAccess=true: the two PublicChild methods come
// first, followed by inherited annotated methods from Foo and Parent.
assertArrayEquals(new Method[0], MethodUtils.getMethodsWithAnnotation(Object.class, Annotated.class, true, true));
final Method[] methodsWithAnnotation = MethodUtils.getMethodsWithAnnotation(PublicChild.class, Annotated.class, true, true);
assertEquals(4, methodsWithAnnotation.length);
assertEquals("PublicChild", methodsWithAnnotation[0].getDeclaringClass().getSimpleName());
assertEquals("PublicChild", methodsWithAnnotation[1].getDeclaringClass().getSimpleName());
assertTrue(methodsWithAnnotation[0].getName().endsWith("AnnotatedMethod"));
assertTrue(methodsWithAnnotation[1].getName().endsWith("AnnotatedMethod"));
assertEquals("Foo.doIt", methodsWithAnnotation[2].getDeclaringClass().getSimpleName() + '.' + methodsWithAnnotation[2].getName());
assertEquals("Parent.parentProtectedAnnotatedMethod",
methodsWithAnnotation[3].getDeclaringClass().getSimpleName() + '.' + methodsWithAnnotation[3].getName());
}
@Test
void testGetMethodsWithAnnotationSearchSupersButNotIgnoreAccess() {
// searchSupers=true, ignoreAccess=false: only the public annotated methods
// across the hierarchy are reported.
assertArrayEquals(new Method[0], MethodUtils.getMethodsWithAnnotation(Object.class, Annotated.class, true, false));
final Method[] methodsWithAnnotation = MethodUtils.getMethodsWithAnnotation(PublicChild.class, Annotated.class, true, false);
assertEquals(2, methodsWithAnnotation.length);
assertEquals("PublicChild.publicAnnotatedMethod",
methodsWithAnnotation[0].getDeclaringClass().getSimpleName() + '.' + methodsWithAnnotation[0].getName());
assertEquals("Foo.doIt", methodsWithAnnotation[1].getDeclaringClass().getSimpleName() + '.' + methodsWithAnnotation[1].getName());
}
@Test
void testGetOverrideHierarchyExcludingInterfaces() {
// With Interfaces.EXCLUDE, the override hierarchy of consume(String) is
// only the concrete class and its generic superclass — no interface entry.
final Method method = MethodUtils.getAccessibleMethod(StringParameterizedChild.class, "consume", String.class);
final Iterator<MethodDescriptor> expected =
Arrays.asList(new MethodDescriptor(StringParameterizedChild.class, "consume", String.class),
new MethodDescriptor(GenericParent.class, "consume", GenericParent.class.getTypeParameters()[0]))
.iterator();
// Walk the hierarchy and compare declaring class, name, and generic
// parameter types against the expected descriptors, in order.
for (final Method m : MethodUtils.getOverrideHierarchy(method, Interfaces.EXCLUDE)) {
assertTrue(expected.hasNext());
final MethodDescriptor md = expected.next();
assertEquals(md.declaringClass, m.getDeclaringClass());
assertEquals(md.name, m.getName());
assertEquals(md.parameterTypes.length, m.getParameterTypes().length);
for (int i = 0; i < md.parameterTypes.length; i++) {
assertTrue(TypeUtils.equals(md.parameterTypes[i], m.getGenericParameterTypes()[i]));
}
}
// Every expected descriptor must have been consumed.
assertFalse(expected.hasNext());
}
@Test
void testGetOverrideHierarchyIncludingInterfaces() {
// With Interfaces.INCLUDE, the GenericConsumer interface declaration is
// appended after the class hierarchy entries.
final Method method = MethodUtils.getAccessibleMethod(StringParameterizedChild.class, "consume", String.class);
final Iterator<MethodDescriptor> expected =
Arrays.asList(new MethodDescriptor(StringParameterizedChild.class, "consume", String.class),
new MethodDescriptor(GenericParent.class, "consume", GenericParent.class.getTypeParameters()[0]),
new MethodDescriptor(GenericConsumer.class, "consume", GenericConsumer.class.getTypeParameters()[0]))
.iterator();
// Walk the hierarchy and compare declaring class, name, and generic
// parameter types against the expected descriptors, in order.
for (final Method m : MethodUtils.getOverrideHierarchy(method, Interfaces.INCLUDE)) {
assertTrue(expected.hasNext());
final MethodDescriptor md = expected.next();
assertEquals(md.declaringClass, m.getDeclaringClass());
assertEquals(md.name, m.getName());
assertEquals(md.parameterTypes.length, m.getParameterTypes().length);
for (int i = 0; i < md.parameterTypes.length; i++) {
assertTrue(TypeUtils.equals(md.parameterTypes[i], m.getGenericParameterTypes()[i]));
}
}
// Every expected descriptor must have been consumed.
assertFalse(expected.hasNext());
}
@Test
void testInvokeExactMethod() throws Exception {
// invokeExactMethod requires exact parameter-type matches: no-arg calls
// and exact overloads succeed; widening/boxing candidates (Byte, Long,
// Boolean below) throw NoSuchMethodException.
assertEquals("foo()", MethodUtils.invokeExactMethod(testBean, "foo", (Object[]) ArrayUtils.EMPTY_CLASS_ARRAY));
assertEquals("foo()", MethodUtils.invokeExactMethod(testBean, "foo"));
assertEquals("foo()", MethodUtils.invokeExactMethod(testBean, "foo", (Object[]) null));
assertEquals("foo()", MethodUtils.invokeExactMethod(testBean, "foo", null, null));
assertEquals("foo(String)", MethodUtils.invokeExactMethod(testBean, "foo", ""));
assertEquals("foo(Object)", MethodUtils.invokeExactMethod(testBean, "foo", new Object()));
assertEquals("foo(Integer)", MethodUtils.invokeExactMethod(testBean, "foo", NumberUtils.INTEGER_ONE));
// Explicit parameter types can select the primitive overload.
assertEquals("foo(double)", MethodUtils.invokeExactMethod(testBean, "foo", new Object[] { NumberUtils.DOUBLE_ONE }, new Class[] { Double.TYPE }));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactMethod(testBean, "foo", NumberUtils.BYTE_ONE));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactMethod(testBean, "foo", NumberUtils.LONG_ONE));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactMethod(testBean, "foo", Boolean.TRUE));
// Null target object -> NPE; null method name -> NoSuchMethodException.
assertThrows(NullPointerException.class, () -> MethodUtils.invokeExactMethod(null, "foo", NumberUtils.BYTE_ONE));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactMethod(testBean, null, NumberUtils.BYTE_ONE));
assertThrows(NullPointerException.class,
() -> MethodUtils.invokeExactMethod(null, "foo", new Object[] { NumberUtils.DOUBLE_ONE }, new Class[] { Double.TYPE }));
assertThrows(NoSuchMethodException.class,
() -> MethodUtils.invokeExactMethod(testBean, null, new Object[] { NumberUtils.DOUBLE_ONE }, new Class[] { Double.TYPE }));
}
@Test
void testInvokeExactStaticMethod() throws Exception {
// Static counterpart of invokeExactMethod: exact matches succeed, any
// widening/boxing candidate throws NoSuchMethodException.
assertEquals("bar()", MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", (Object[]) ArrayUtils.EMPTY_CLASS_ARRAY));
assertEquals("bar()", MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", (Object[]) null));
assertEquals("bar()", MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", null, null));
assertEquals("bar(String)", MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", ""));
assertEquals("bar(Object)", MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", new Object()));
assertEquals("bar(Integer)", MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", NumberUtils.INTEGER_ONE));
// Explicit parameter types can select the primitive overload.
assertEquals("bar(double)",
MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", new Object[] { NumberUtils.DOUBLE_ONE }, new Class[] { Double.TYPE }));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", NumberUtils.BYTE_ONE));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", NumberUtils.LONG_ONE));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeExactStaticMethod(TestBean.class, "bar", Boolean.TRUE));
}
@Test
void testInvokeJavaVarArgsOverloadingResolution() throws Exception {
// Each assertion checks which varargs overload of TestBean.varOverload
// (or numOverload) is selected for the given argument combination; the
// returned string names the chosen overload.
// Primitive wrappers
assertEquals("Byte...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", (byte) 1, (byte) 2));
assertEquals("Short...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", (short) 1, (short) 2));
assertEquals("Integer...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1, 2));
assertEquals("Long...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1L, 2L));
assertEquals("Float...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1f, 2f));
assertEquals("Double...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1d, 2d));
assertEquals("Boolean...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", true, false));
// Number
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1, 1.1));
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1, 1L));
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1d, 1f));
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", (short) 1, (byte) 1));
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "numOverload", ArrayUtils.EMPTY_OBJECT_ARRAY));
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "numOverload", (Object[]) ArrayUtils.EMPTY_CLASS_ARRAY));
assertEquals("Number...", MethodUtils.invokeStaticMethod(TestBean.class, "numOverload", (Object[]) ArrayUtils.EMPTY_INTEGER_OBJECT_ARRAY));
// Object
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1, "s"));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1, true));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1.1, true));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 'c', true));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 1, 'c'));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 'c', "s"));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", ArrayUtils.EMPTY_OBJECT_ARRAY));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", (Object[]) ArrayUtils.EMPTY_CLASS_ARRAY));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", (Object[]) ArrayUtils.EMPTY_INTEGER_OBJECT_ARRAY));
assertEquals("Object...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload"));
// Other
assertEquals("String...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", "a", "b"));
assertEquals("Character...", MethodUtils.invokeStaticMethod(TestBean.class, "varOverload", 'a', 'b'));
}
@Test
void testInvokeMethod() throws Exception {
assertEquals("foo()", MethodUtils.invokeMethod(testBean, "foo", (Object[]) ArrayUtils.EMPTY_CLASS_ARRAY));
assertEquals("foo()", MethodUtils.invokeMethod(testBean, "foo"));
assertEquals("foo()", MethodUtils.invokeMethod(testBean, "foo", (Object[]) null));
assertEquals("foo()", MethodUtils.invokeMethod(testBean, "foo", null, null));
assertEquals("foo(String)", MethodUtils.invokeMethod(testBean, "foo", ""));
assertEquals("foo(Object)", MethodUtils.invokeMethod(testBean, "foo", new Object()));
assertEquals("foo(Object)", MethodUtils.invokeMethod(testBean, "foo", Boolean.TRUE));
assertEquals("foo(Integer)", MethodUtils.invokeMethod(testBean, "foo", NumberUtils.INTEGER_ONE));
assertEquals("foo(int)", MethodUtils.invokeMethod(testBean, "foo", NumberUtils.BYTE_ONE));
assertEquals("foo(long)", MethodUtils.invokeMethod(testBean, "foo", NumberUtils.LONG_ONE));
assertEquals("foo(double)", MethodUtils.invokeMethod(testBean, "foo", NumberUtils.DOUBLE_ONE));
assertEquals("foo(String...)", MethodUtils.invokeMethod(testBean, "foo", "a", "b", "c"));
assertEquals("foo(String...)", MethodUtils.invokeMethod(testBean, "foo", "a", "b", "c"));
assertEquals("foo(int, String...)", MethodUtils.invokeMethod(testBean, "foo", 5, "a", "b", "c"));
assertEquals("foo(long...)", MethodUtils.invokeMethod(testBean, "foo", 1L, 2L));
assertEquals("foo(long...)", MethodUtils.invokeMethod(testBean, "foo", 1, 2));
assertEquals("foo(long...)", MethodUtils.invokeMethod(testBean, "foo", (byte) 1, (byte) 2)); // widen
assertEquals("foo(long...)", MethodUtils.invokeMethod(testBean, "foo", (short) 1, (short) 2)); // widen
assertEquals("foo(long...)", MethodUtils.invokeMethod(testBean, "foo", (char) 1, (char) 2)); // widen
TestBean.verify(new ImmutablePair<>("String...", new String[] { "x", "y" }), MethodUtils.invokeMethod(testBean, "varOverloadEcho", "x", "y"));
TestBean.verify(new ImmutablePair<>("Number...", new Number[] { 17, 23, 42 }), MethodUtils.invokeMethod(testBean, "varOverloadEcho", 17, 23, 42));
TestBean.verify(new ImmutablePair<>("String...", new String[] { "x", "y" }), MethodUtils.invokeMethod(testBean, "varOverloadEcho", "x", "y"));
TestBean.verify(new ImmutablePair<>("Number...", new Number[] { 17, 23, 42 }), MethodUtils.invokeMethod(testBean, "varOverloadEcho", 17, 23, 42));
assertNullPointerException(() -> MethodUtils.invokeMethod(null, "foo", 1, 2));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(testBean, null, 1, 2));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(new Object(), "nonExistent", new Object[] { "val" }, new Class<?>[] { null }));
}
@Test
void testInvokeMethod_VarArgsWithNullValues() throws Exception {
assertEquals("String...", MethodUtils.invokeMethod(testBean, "varOverload", "a", null, "c"));
assertEquals("String...", MethodUtils.invokeMethod(testBean, "varOverload", "a", "b", null));
assertEquals("String...", MethodUtils.invokeMethod(testBean, "varOverload", new String[] { "a" }, new Class<?>[] { String.class }));
assertThrows(NoSuchMethodException.class,
() -> assertEquals("String...", MethodUtils.invokeMethod(testBean, "doesn't exist", new String[] { "a" }, new Class<?>[] { null })));
}
@Test
void testInvokeMethod1PlusVarArgs() throws Exception {
// intStringVarArg
assertEquals("int, String...", MethodUtils.invokeMethod(testBean, "intStringVarArg", 1));
assertEquals("int, String...", MethodUtils.invokeMethod(testBean, "intStringVarArg", 1, "s"));
assertEquals("int, String...", MethodUtils.invokeMethod(testBean, "intStringVarArg", 1, "s1", "s2"));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(testBean, "intStringVarArg", 1, "s1", 5));
// intLongVarArg
assertEquals("int, long...", MethodUtils.invokeMethod(testBean, "intLongVarArg", 1));
assertEquals("int, long...", MethodUtils.invokeMethod(testBean, "intLongVarArg", 1, 2L));
assertEquals("int, long...", MethodUtils.invokeMethod(testBean, "intLongVarArg", 1, 2L, 3L));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(testBean, "intLongVarArg", 1, "s1", 5));
// intIntVarArg
assertEquals("int, int...", MethodUtils.invokeMethod(testBean, "intIntVarArg", 1));
assertEquals("int, int...", MethodUtils.invokeMethod(testBean, "intIntVarArg", 1, 2));
assertEquals("int, int...", MethodUtils.invokeMethod(testBean, "intIntVarArg", 1, 2, 3));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(testBean, "intLongVarArg", 1, "s1", 5));
}
@Test
void testInvokeMethodForceAccessNoArgs() throws Exception {
assertEquals("privateStringStuff()", MethodUtils.invokeMethod(testBean, true, "privateStringStuff"));
}
@Test
void testInvokeMethodForceAccessWithArgs() throws Exception {
assertEquals("privateStringStuff(Integer)", MethodUtils.invokeMethod(testBean, true, "privateStringStuff", 5));
assertEquals("privateStringStuff(double)", MethodUtils.invokeMethod(testBean, true, "privateStringStuff", 5.0d));
assertEquals("privateStringStuff(String)", MethodUtils.invokeMethod(testBean, true, "privateStringStuff", "Hi There"));
assertEquals("privateStringStuff(Object)", MethodUtils.invokeMethod(testBean, true, "privateStringStuff", new Date()));
assertNullPointerException(() -> MethodUtils.invokeMethod(null, true, "privateStringStuff", "Hi There"));
assertNullPointerException(() -> MethodUtils.invokeMethod(testBean, true, null, "Hi There"));
}
@Test
void testInvokeMethodVarArgsNotUniqueResolvable() throws Exception {
assertEquals("Boolean...", MethodUtils.invokeMethod(testBean, "varOverload", new Object[] { null }));
assertEquals("Object...", MethodUtils.invokeMethod(testBean, "varOverload", (Object[]) null));
}
@Test
void testInvokeMethodVarArgsOfInterface() throws Exception {
// packagePrivateEmptyInterface
assertEquals("PackagePrivateEmptyInterface...", MethodUtils.invokeMethod(testBean, "packagePrivateEmptyInterface",
new PublicImpl1OfPackagePrivateEmptyInterface(), new PublicImpl2OfPackagePrivateEmptyInterface()));
assertEquals("PackagePrivateEmptyInterface...", MethodUtils.invokeMethod(testBean, "packagePrivateEmptyInterface", new PackagePrivateEmptyInterface() {
// empty
}, new PackagePrivateEmptyInterface() {
// empty
}));
}
@Test
void testInvokeMethodVarArgsUnboxingBooleanArray() throws Exception {
final TestBean testBean = new TestBean();
final boolean[] actual = (boolean[]) MethodUtils.invokeMethod(testBean, "unboxing", Boolean.TRUE, Boolean.FALSE);
assertArrayEquals(new boolean[] { true, false }, actual);
assertTrue(testBean.unboxBooleanArray);
}
@Test
void testInvokeMethodVarArgsUnboxingByteArray() throws Exception {
final TestBean testBean = new TestBean();
final byte[] actual = (byte[]) MethodUtils.invokeMethod(testBean, "unboxing", Byte.valueOf((byte) 1), Byte.valueOf((byte) 2));
assertArrayEquals(new byte[] { 1, 2 }, actual);
assertTrue(testBean.unboxByteArray);
}
@Test
void testInvokeMethodVarArgsUnboxingCharArray() throws Exception {
final TestBean testBean = new TestBean();
final char[] actual = (char[]) MethodUtils.invokeMethod(testBean, "unboxing", Character.valueOf((char) 1), Character.valueOf((char) 2));
assertArrayEquals(new char[] { 1, 2 }, actual);
assertTrue(testBean.unboxCharArray);
}
@Test
void testInvokeMethodVarArgsUnboxingDoubleArray() throws Exception {
final TestBean testBean = new TestBean();
final double[] actual = (double[]) MethodUtils.invokeMethod(testBean, "unboxing", Double.valueOf(1), Double.valueOf(2));
assertArrayEquals(new double[] { 1, 2 }, actual);
assertTrue(testBean.unboxDoubleArray);
}
@Test
void testInvokeMethodVarArgsUnboxingFloatArray() throws Exception {
final TestBean testBean = new TestBean();
final float[] actual = (float[]) MethodUtils.invokeMethod(testBean, "unboxing", Float.valueOf(1), Float.valueOf(2));
assertArrayEquals(new float[] { 1, 2 }, actual);
assertTrue(testBean.unboxFloatArray);
}
@Test
void testInvokeMethodVarArgsUnboxingIntArray() throws Exception {
final TestBean testBean = new TestBean();
final int[] actual = (int[]) MethodUtils.invokeMethod(testBean, "unboxing", Integer.valueOf(1), Integer.valueOf(2));
assertArrayEquals(new int[] { 1, 2 }, actual);
assertTrue(testBean.unboxIntArray);
}
@Test
void testInvokeMethodVarArgsUnboxingLongArray() throws Exception {
final TestBean testBean = new TestBean();
final long[] actual = (long[]) MethodUtils.invokeMethod(testBean, "unboxing", Long.valueOf(1), Long.valueOf(2));
assertArrayEquals(new long[] { 1, 2 }, actual);
assertTrue(testBean.unboxLongArray);
}
@Test
void testInvokeMethodVarArgsUnboxingShortArray() throws Exception {
final TestBean testBean = new TestBean();
final short[] actual = (short[]) MethodUtils.invokeMethod(testBean, "unboxing", Short.valueOf((short) 1), Short.valueOf((short) 2));
assertArrayEquals(new short[] { 1, 2 }, actual);
assertTrue(testBean.unboxShortArray);
}
@Test
void testInvokeStaticMethod() throws Exception {
assertEquals("bar()", MethodUtils.invokeStaticMethod(TestBean.class, "bar"));
assertEquals("bar()", MethodUtils.invokeStaticMethod(TestBean.class, "bar", (Object[]) ArrayUtils.EMPTY_CLASS_ARRAY));
assertEquals("bar()", MethodUtils.invokeStaticMethod(TestBean.class, "bar", (Object[]) null));
assertEquals("bar()", MethodUtils.invokeStaticMethod(TestBean.class, "bar", null, null));
assertEquals("bar(String)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", ""));
assertEquals("bar(Object)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", new Object()));
assertEquals("bar(Object)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", Boolean.TRUE));
assertEquals("bar(Integer)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", NumberUtils.INTEGER_ONE));
assertEquals("bar(int)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", NumberUtils.BYTE_ONE));
assertEquals("static int", MethodUtils.invokeStaticMethod(TestBean.class, "staticInt", NumberUtils.BYTE_ONE));
assertEquals("static int", MethodUtils.invokeStaticMethod(TestBean.class, "staticInt", NumberUtils.SHORT_ONE));
assertEquals("static int", MethodUtils.invokeStaticMethod(TestBean.class, "staticInt", NumberUtils.INTEGER_ONE));
assertEquals("static int", MethodUtils.invokeStaticMethod(TestBean.class, "staticInt", 'a'));
assertEquals("bar(double)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", NumberUtils.DOUBLE_ONE));
assertEquals("bar(String...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", "a", "b"));
assertEquals("bar(long...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", 1L, 2L));
assertEquals("bar(long...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", (byte) 1, (byte) 2)); // widen
assertEquals("bar(long...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", (short) 1, (short) 2)); // widen
assertEquals("bar(long...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", 1, 2)); // widen
assertEquals("bar(Integer, String...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", NumberUtils.INTEGER_ONE, "a", "b"));
// You cannot widen a Short to an Integer in Java source, but you can a short to an int but this API declares an Integer, not an int.
assertThrows(NoSuchMethodException.class,
() -> assertEquals("bar(Integer, String...)", MethodUtils.invokeStaticMethod(TestBean.class, "bar", NumberUtils.SHORT_ONE, "a", "b"))); // widen
TestBean.verify(new ImmutablePair<>("String...", new String[] { "x", "y" }),
MethodUtils.invokeStaticMethod(TestBean.class, "varOverloadEchoStatic", "x", "y"));
TestBean.verify(new ImmutablePair<>("Number...", new Number[] { 17, 23, 42 }),
MethodUtils.invokeStaticMethod(TestBean.class, "varOverloadEchoStatic", 17, 23, 42));
TestBean.verify(new ImmutablePair<>("String...", new String[] { "x", "y" }),
MethodUtils.invokeStaticMethod(TestBean.class, "varOverloadEchoStatic", "x", "y"));
TestBean.verify(new ImmutablePair<>("Number...", new Number[] { 17, 23, 42 }),
MethodUtils.invokeStaticMethod(TestBean.class, "varOverloadEchoStatic", 17, 23, 42));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeStaticMethod(TestBean.class, "does_not_exist"));
}
@Test
void testInvokeStaticMethod1PlusVarArgs() throws Exception {
// staticIntStringVarArg
assertEquals("static int, String...", MethodUtils.invokeStaticMethod(TestBean.class, "staticIntStringVarArg", 1));
assertEquals("static int, String...", MethodUtils.invokeStaticMethod(TestBean.class, "staticIntStringVarArg", 1, "s"));
assertEquals("static int, String...", MethodUtils.invokeStaticMethod(TestBean.class, "staticIntStringVarArg", 1, "s1", "s2"));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeStaticMethod(TestBean.class, "staticIntStringVarArg", 1, "s1", 5));
// staticIntLongVarArg
assertEquals("static int, long...", MethodUtils.invokeMethod(testBean, "staticIntLongVarArg", 1));
assertEquals("static int, long...", MethodUtils.invokeMethod(testBean, "staticIntLongVarArg", 1, 2L));
assertEquals("static int, long...", MethodUtils.invokeMethod(testBean, "staticIntLongVarArg", 1, 2L, 3L));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(testBean, "staticIntLongVarArg", 1, "s1", 5));
// staticIntIntVarArg
assertEquals("static int, int...", MethodUtils.invokeMethod(testBean, "staticIntIntVarArg", 1));
assertEquals("static int, int...", MethodUtils.invokeMethod(testBean, "staticIntIntVarArg", 1, 2));
assertEquals("static int, int...", MethodUtils.invokeMethod(testBean, "staticIntIntVarArg", 1, 2, 3));
assertThrows(NoSuchMethodException.class, () -> MethodUtils.invokeMethod(testBean, "staticIntIntVarArg", 1, "s1", 5));
}
@Test
void testInvokeStaticMethodVarArgsOfInterface() throws Exception {
// staticPackagePrivateEmptyInterface
assertEquals("static PackagePrivateEmptyInterface...", MethodUtils.invokeStaticMethod(TestBean.class, "staticPackagePrivateEmptyInterface",
new PublicImpl1OfPackagePrivateEmptyInterface(), new PublicImpl2OfPackagePrivateEmptyInterface()));
assertEquals("static PackagePrivateEmptyInterface...",
MethodUtils.invokeStaticMethod(TestBean.class, "staticPackagePrivateEmptyInterface", new PackagePrivateEmptyInterface() {
// empty
}, new PackagePrivateEmptyInterface() {
// empty
}));
}
@Test
void testNullArgument() {
expectMatchingAccessibleMethodParameterTypes(TestBean.class, "oneParameter", singletonArray(null), singletonArray(String.class));
}
@Test
void testVarargsOverloadingResolution() {
// This code is not a test of MethodUtils.
// Rather it makes explicit the behavior of the Java specification for
// various cases of overload resolution.
assertEquals("Byte...", TestBean.varOverload((byte) 1, (byte) 2));
assertEquals("Short...", TestBean.varOverload((short) 1, (short) 2));
assertEquals("Integer...", TestBean.varOverload(1, 2));
assertEquals("Long...", TestBean.varOverload(1L, 2L));
assertEquals("Float...", TestBean.varOverload(1f, 2f));
assertEquals("Double...", TestBean.varOverload(1d, 2d));
assertEquals("Character...", TestBean.varOverload('a', 'b'));
assertEquals("String...", TestBean.varOverload("a", "b"));
assertEquals("Boolean...", TestBean.varOverload(true, false));
assertEquals("Object...", TestBean.varOverload(1, "s"));
assertEquals("Object...", TestBean.varOverload(1, true));
assertEquals("Object...", TestBean.varOverload(1.1, true));
assertEquals("Object...", TestBean.varOverload('c', true));
assertEquals("Number...", TestBean.varOverload(1, 1.1));
assertEquals("Number...", TestBean.varOverload(1, 1L));
assertEquals("Number...", TestBean.varOverload(1d, 1f));
assertEquals("Number...", TestBean.varOverload((short) 1, (byte) 1));
assertEquals("Object...", TestBean.varOverload(1, 'c'));
assertEquals("Object...", TestBean.varOverload('c', "s"));
}
private String toString(final Class<?>[] c) {
return Arrays.asList(c).toString();
}
}
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/spi/CompositeTypeImplementor.java
|
{
"start": 353,
"end": 563
}
|
interface ____ extends CompositeType {
void injectMappingModelPart(EmbeddableValuedModelPart part, MappingModelCreationProcess process);
EmbeddableValuedModelPart getMappingModelPart();
}
|
CompositeTypeImplementor
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java
|
{
"start": 1538,
"end": 8734
}
|
class ____ extends SingleDimensionValuesSource<BytesRef> {
private final LongConsumer breakerConsumer;
private final CheckedFunction<LeafReaderContext, SortedBinaryDocValues, IOException> docValuesFunc;
private ObjectArray<BytesRef> values;
private ObjectArray<BytesRefBuilder> valueBuilders;
private BytesRef currentValue;
BinaryValuesSource(
BigArrays bigArrays,
LongConsumer breakerConsumer,
MappedFieldType fieldType,
CheckedFunction<LeafReaderContext, SortedBinaryDocValues, IOException> docValuesFunc,
DocValueFormat format,
boolean missingBucket,
MissingOrder missingOrder,
int size,
int reverseMul
) {
super(bigArrays, format, fieldType, missingBucket, missingOrder, reverseMul);
this.breakerConsumer = breakerConsumer;
this.docValuesFunc = docValuesFunc;
this.values = bigArrays.newObjectArray(Math.min(size, 100));
boolean success = false;
try {
this.valueBuilders = bigArrays.newObjectArray(Math.min(size, 100));
success = true;
} finally {
if (success == false) {
close();
}
}
}
@Override
void copyCurrent(int slot) {
values = bigArrays.grow(values, slot + 1);
valueBuilders = bigArrays.grow(valueBuilders, slot + 1);
BytesRefBuilder builder = valueBuilders.get(slot);
int byteSize = builder == null ? 0 : builder.bytes().length;
if (builder == null) {
builder = new BytesRefBuilder();
valueBuilders.set(slot, builder);
}
if (missingBucket && currentValue == null) {
values.set(slot, null);
} else {
assert currentValue != null;
builder.copyBytes(currentValue);
breakerConsumer.accept(builder.bytes().length - byteSize);
values.set(slot, builder.get());
}
}
@Override
int compare(int from, int to) {
if (missingBucket) {
if (values.get(from) == null) {
return values.get(to) == null ? 0 : -1 * missingOrder.compareAnyValueToMissing(reverseMul);
} else if (values.get(to) == null) {
return missingOrder.compareAnyValueToMissing(reverseMul);
}
}
return compareValues(values.get(from), values.get(to));
}
@Override
int compareCurrent(int slot) {
if (missingBucket) {
if (currentValue == null) {
return values.get(slot) == null ? 0 : -1 * missingOrder.compareAnyValueToMissing(reverseMul);
} else if (values.get(slot) == null) {
return missingOrder.compareAnyValueToMissing(reverseMul);
}
}
return compareValues(currentValue, values.get(slot));
}
@Override
int compareCurrentWithAfter() {
if (missingBucket) {
if (currentValue == null) {
return afterValue == null ? 0 : -1 * missingOrder.compareAnyValueToMissing(reverseMul);
} else if (afterValue == null) {
return missingOrder.compareAnyValueToMissing(reverseMul);
}
}
return compareValues(currentValue, afterValue);
}
@Override
int hashCode(int slot) {
if (missingBucket && values.get(slot) == null) {
return 0;
} else {
return values.get(slot).hashCode();
}
}
@Override
int hashCodeCurrent() {
if (missingBucket && currentValue == null) {
return 0;
} else {
return currentValue.hashCode();
}
}
int compareValues(BytesRef v1, BytesRef v2) {
return v1.compareTo(v2) * reverseMul;
}
@Override
void setAfter(Comparable<?> value) {
if (missingBucket && value == null) {
afterValue = null;
} else if (value.getClass() == String.class) {
afterValue = format.parseBytesRef(value);
} else if (value.getClass() == BytesRef.class) {
// The value may be a bytes reference (eg an encoded tsid field)
afterValue = (BytesRef) value;
} else {
throw new IllegalArgumentException("invalid value, expected string, got " + value.getClass().getSimpleName());
}
}
@Override
BytesRef toComparable(int slot) {
return values.get(slot);
}
@Override
LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollector next) throws IOException {
final SortedBinaryDocValues dvs = docValuesFunc.apply(context);
final BinaryDocValues singleton = FieldData.unwrapSingleton(dvs);
return singleton != null ? getLeafCollector(singleton, next) : getLeafCollector(dvs, next);
}
private LeafBucketCollector getLeafCollector(SortedBinaryDocValues dvs, LeafBucketCollector next) {
return new LeafBucketCollector() {
@Override
public void collect(int doc, long bucket) throws IOException {
if (dvs.advanceExact(doc)) {
int num = dvs.docValueCount();
for (int i = 0; i < num; i++) {
currentValue = dvs.nextValue();
next.collect(doc, bucket);
}
} else if (missingBucket) {
currentValue = null;
next.collect(doc, bucket);
}
}
};
}
private LeafBucketCollector getLeafCollector(BinaryDocValues dvs, LeafBucketCollector next) {
return new LeafBucketCollector() {
@Override
public void collect(int doc, long bucket) throws IOException {
if (dvs.advanceExact(doc)) {
currentValue = dvs.binaryValue();
next.collect(doc, bucket);
} else if (missingBucket) {
currentValue = null;
next.collect(doc, bucket);
}
}
};
}
@Override
LeafBucketCollector getLeafCollector(Comparable<BytesRef> value, LeafReaderContext context, LeafBucketCollector next) {
if (value.getClass() != BytesRef.class) {
throw new IllegalArgumentException("Expected BytesRef, got " + value.getClass());
}
currentValue = (BytesRef) value;
return new LeafBucketCollector() {
@Override
public void collect(int doc, long bucket) throws IOException {
next.collect(doc, bucket);
}
};
}
@Override
SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) {
if (checkIfSortedDocsIsApplicable(reader, fieldType) == false
|| fieldType instanceof StringFieldType == false
|| (query != null && query.getClass() != MatchAllDocsQuery.class)) {
return null;
}
return new TermsSortedDocsProducer(fieldType.name());
}
@Override
public void close() {
Releasables.close(values, valueBuilders);
}
}
|
BinaryValuesSource
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/network/Authenticator.java
|
{
"start": 1159,
"end": 2319
}
|
interface ____ extends Closeable {
/**
* Implements any authentication mechanism. Use transportLayer to read or write tokens.
* For security protocols PLAINTEXT and SSL, this is a no-op since no further authentication
* needs to be done. For SASL_PLAINTEXT and SASL_SSL, this performs the SASL authentication.
*
* @throws AuthenticationException if authentication fails due to invalid credentials or
* other security configuration errors
* @throws IOException if read/write fails due to an I/O error
*/
void authenticate() throws AuthenticationException, IOException;
/**
* Perform any processing related to authentication failure. This is invoked when the channel is about to be closed
* because of an {@link AuthenticationException} thrown from a prior {@link #authenticate()} call.
* @throws IOException if read/write fails due to an I/O error
*/
default void handleAuthenticationFailure() throws IOException {
}
/**
* Returns Principal using PrincipalBuilder
*/
KafkaPrincipal principal();
/**
* Returns the serializer/deserializer
|
Authenticator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingWithCompositeTypeAssociation.java
|
{
"start": 2483,
"end": 3126
}
|
class ____ {
@Id
private String id;
private String title;
@Embedded
private IntermediateObject intermediateObject;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public IntermediateObject getIntermediateObject() {
return intermediateObject;
}
public void setIntermediateObject(IntermediateObject intermediateObject) {
this.intermediateObject = intermediateObject;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}
@Entity(name = "Comment")
@Table(name = "COMMENT_TABLE")
public static
|
Book
|
java
|
apache__kafka
|
clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/TransactionsExpirationTest.java
|
{
"start": 5022,
"end": 17550
}
|
class ____ {
private static final String TOPIC1 = "topic1";
private static final String TOPIC2 = "topic2";
private static final String TRANSACTION_ID = "transactionalProducer";
private static final String HEADER_KEY = "transactionStatus";
private static final byte[] ABORTED_VALUE = "aborted".getBytes();
private static final byte[] COMMITTED_VALUE = "committed".getBytes();
private static final TopicPartition TOPIC1_PARTITION0 = new TopicPartition(TOPIC1, 0);
@ClusterTest(features = {@ClusterFeature(feature = Feature.TRANSACTION_VERSION, version = 1)})
public void testFatalErrorAfterInvalidProducerIdMappingWithTV1(ClusterInstance clusterInstance) throws InterruptedException {
testFatalErrorAfterInvalidProducerIdMapping(clusterInstance);
}
@ClusterTest(features = {@ClusterFeature(feature = Feature.TRANSACTION_VERSION, version = 2)})
public void testFatalErrorAfterInvalidProducerIdMappingWithTV2(ClusterInstance clusterInstance) throws InterruptedException {
testFatalErrorAfterInvalidProducerIdMapping(clusterInstance);
}
@ClusterTest(features = {@ClusterFeature(feature = Feature.TRANSACTION_VERSION, version = 1)})
public void testTransactionAfterProducerIdExpiresWithTV1(ClusterInstance clusterInstance) throws InterruptedException {
testTransactionAfterProducerIdExpires(clusterInstance, false);
}
@ClusterTest(features = {@ClusterFeature(feature = Feature.TRANSACTION_VERSION, version = 2)})
public void testTransactionAfterProducerIdExpiresWithTV2(ClusterInstance clusterInstance) throws InterruptedException {
testTransactionAfterProducerIdExpires(clusterInstance, true);
}
private void testFatalErrorAfterInvalidProducerIdMapping(ClusterInstance clusterInstance) throws InterruptedException {
clusterInstance.createTopic(TOPIC1, 4, (short) 3);
clusterInstance.createTopic(TOPIC2, 4, (short) 3);
try (Producer<byte[], byte[]> producer = clusterInstance.producer(Map.of(
ProducerConfig.TRANSACTIONAL_ID_CONFIG, TRANSACTION_ID
))
) {
producer.initTransactions();
// Start and then abort a transaction to allow the transactional ID to expire.
producer.beginTransaction();
producer.send(new ProducerRecord<>(TOPIC1, 0, "2".getBytes(), "2".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, ABORTED_VALUE))));
producer.send(new ProducerRecord<>(TOPIC2, 0, "4".getBytes(), "4".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, ABORTED_VALUE))));
producer.abortTransaction();
// Check the transactional state exists and then wait for it to expire.
waitUntilTransactionalStateExists(clusterInstance);
waitUntilTransactionalStateExpires(clusterInstance);
// Start a new transaction and attempt to send, triggering an AddPartitionsToTxnRequest that will fail
// due to the expired transactional ID, resulting in a fatal error.
producer.beginTransaction();
Future<RecordMetadata> failedFuture = producer.send(
new ProducerRecord<>(TOPIC1, 3, "1".getBytes(), "1".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, ABORTED_VALUE))));
TestUtils.waitForCondition(failedFuture::isDone, "Producer future never completed.");
org.apache.kafka.test.TestUtils.assertFutureThrows(InvalidPidMappingException.class, failedFuture);
// Assert that aborting the transaction throws a KafkaException due to the fatal error.
assertThrows(KafkaException.class, producer::abortTransaction);
}
// Reinitialize to recover from the fatal error.
try (Producer<byte[], byte[]> producer = clusterInstance.producer(Map.of(
ProducerConfig.TRANSACTIONAL_ID_CONFIG, TRANSACTION_ID
))
) {
producer.initTransactions();
// Proceed with a new transaction after reinitializing.
producer.beginTransaction();
producer.send(new ProducerRecord<>(TOPIC2, null, "2".getBytes(), "2".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, COMMITTED_VALUE))));
producer.send(new ProducerRecord<>(TOPIC1, 2, "4".getBytes(), "4".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, COMMITTED_VALUE))));
producer.send(new ProducerRecord<>(TOPIC2, null, "1".getBytes(), "1".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, COMMITTED_VALUE))));
producer.send(new ProducerRecord<>(TOPIC1, 3, "3".getBytes(), "3".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, COMMITTED_VALUE))));
producer.commitTransaction();
waitUntilTransactionalStateExists(clusterInstance);
}
assertConsumeRecords(clusterInstance, List.of(TOPIC1, TOPIC2), 4);
}
private void testTransactionAfterProducerIdExpires(ClusterInstance clusterInstance, boolean isTV2Enabled) throws InterruptedException {
clusterInstance.createTopic(TOPIC1, 4, (short) 3);
long oldProducerId;
long oldProducerEpoch;
try (Producer<byte[], byte[]> producer = clusterInstance.producer(Map.of(
ProducerConfig.TRANSACTIONAL_ID_CONFIG, TRANSACTION_ID
))
) {
producer.initTransactions();
// Start and then abort a transaction to allow the producer ID to expire.
producer.beginTransaction();
producer.send(new ProducerRecord<>(TOPIC1, 0, "2".getBytes(), "2".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, ABORTED_VALUE))));
producer.flush();
// Ensure producer IDs are added.
List<ProducerState> producerStates = new ArrayList<>();
TestUtils.waitForCondition(() -> {
try {
producerStates.addAll(producerState(clusterInstance));
return !producerStates.isEmpty();
} catch (ExecutionException | InterruptedException e) {
return false;
}
}, "Producer IDs for " + TOPIC1_PARTITION0 + " did not propagate quickly");
assertEquals(1, producerStates.size(), "Unexpected producer to " + TOPIC1_PARTITION0);
oldProducerId = producerStates.get(0).producerId();
oldProducerEpoch = producerStates.get(0).producerEpoch();
producer.abortTransaction();
// Wait for the producer ID to expire.
TestUtils.waitForCondition(() -> {
try {
return producerState(clusterInstance).isEmpty();
} catch (ExecutionException | InterruptedException e) {
return false;
}
}, "Producer IDs for " + TOPIC1_PARTITION0 + " did not expire.");
}
// Create a new producer to check that we retain the producer ID in transactional state.
try (Producer<byte[], byte[]> producer = clusterInstance.producer(Map.of(
ProducerConfig.TRANSACTIONAL_ID_CONFIG, TRANSACTION_ID
))
) {
producer.initTransactions();
// Start a new transaction and attempt to send. This should work since only the producer ID was removed from its mapping in ProducerStateManager.
producer.beginTransaction();
producer.send(new ProducerRecord<>(TOPIC1, 0, "4".getBytes(), "4".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, COMMITTED_VALUE))));
producer.send(new ProducerRecord<>(TOPIC1, 3, "3".getBytes(), "3".getBytes(), Collections.singleton(new RecordHeader(HEADER_KEY, COMMITTED_VALUE))));
producer.commitTransaction();
// Producer IDs should repopulate.
List<ProducerState> producerStates = new ArrayList<>();
TestUtils.waitForCondition(() -> {
try {
producerStates.addAll(producerState(clusterInstance));
return !producerStates.isEmpty();
} catch (ExecutionException | InterruptedException e) {
return false;
}
}, "Producer IDs for " + TOPIC1_PARTITION0 + " did not propagate quickly");
assertEquals(1, producerStates.size(), "Unexpected producer to " + TOPIC1_PARTITION0);
long newProducerId = producerStates.get(0).producerId();
long newProducerEpoch = producerStates.get(0).producerEpoch();
// Because the transaction IDs outlive the producer IDs, creating a producer with the same transactional id
// soon after the first will re-use the same producerId, while bumping the epoch to indicate that they are distinct.
assertEquals(oldProducerId, newProducerId);
if (isTV2Enabled) {
// TV2 bumps epoch on EndTxn, and the final commit may or may not have bumped the epoch in the producer state.
// The epoch should be at least oldProducerEpoch + 2 for the first commit and the restarted producer.
assertTrue(oldProducerEpoch + 2 <= newProducerEpoch);
} else {
assertEquals(oldProducerEpoch + 1, newProducerEpoch);
}
assertConsumeRecords(clusterInstance, List.of(TOPIC1), 2);
}
}
private void waitUntilTransactionalStateExists(ClusterInstance clusterInstance) throws InterruptedException {
try (Admin admin = clusterInstance.admin()) {
TestUtils.waitForCondition(() -> {
try {
admin.describeTransactions(List.of(TRANSACTION_ID)).description(TRANSACTION_ID).get();
return true;
} catch (Exception e) {
return false;
}
}, "Transactional state was never added.");
}
}
private void waitUntilTransactionalStateExpires(ClusterInstance clusterInstance) throws InterruptedException {
try (Admin admin = clusterInstance.admin()) {
TestUtils.waitForCondition(() -> {
try {
admin.describeTransactions(List.of(TRANSACTION_ID)).description(TRANSACTION_ID).get();
return false;
} catch (Exception e) {
return e.getCause() instanceof TransactionalIdNotFoundException;
}
}, "Transaction state never expired.");
}
}
private List<ProducerState> producerState(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
try (Admin admin = clusterInstance.admin()) {
return admin.describeProducers(List.of(TOPIC1_PARTITION0)).partitionResult(TOPIC1_PARTITION0).get().activeProducers();
}
}
private void assertConsumeRecords(
ClusterInstance clusterInstance,
List<String> topics,
int expectedCount
) throws InterruptedException {
for (GroupProtocol groupProtocol : clusterInstance.supportedGroupProtocols()) {
ArrayList<ConsumerRecord<byte[], byte[]>> consumerRecords = new ArrayList<>();
try (Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(
ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name(),
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false",
ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed"
)
)) {
consumer.subscribe(topics);
TestUtils.waitForCondition(() -> {
ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(100));
records.forEach(consumerRecords::add);
return consumerRecords.size() == expectedCount;
}, 15_000, () -> "Consumer with protocol " + groupProtocol.name + " should consume " + expectedCount + " records, but get " + consumerRecords.size());
}
consumerRecords.forEach(record -> {
Iterator<Header> headers = record.headers().headers(HEADER_KEY).iterator();
assertTrue(headers.hasNext());
Header header = headers.next();
assertArrayEquals(COMMITTED_VALUE, header.value(), "Record does not have the expected header value.");
});
}
}
}
|
TransactionsExpirationTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
|
{
"start": 64107,
"end": 65433
}
|
interface ____ {}
/**
* Create an option to specify the path name of the sequence file.
* @param value the path to read
* @return a new option
*/
public static Option file(Path value) {
return new FileOption(value);
}
/**
* Create an option to specify the stream with the sequence file.
* @param value the stream to read.
* @return a new option
*/
public static Option stream(FSDataInputStream value) {
return new InputStreamOption(value);
}
/**
* Create an option to specify the starting byte to read.
* @param value the number of bytes to skip over
* @return a new option
*/
public static Option start(long value) {
return new StartOption(value);
}
/**
* Create an option to specify the number of bytes to read.
* @param value the number of bytes to read
* @return a new option
*/
public static Option length(long value) {
return new LengthOption(value);
}
/**
* Create an option with the buffer size for reading the given pathname.
* @param value the number of bytes to buffer
* @return a new option
*/
public static Option bufferSize(int value) {
return new BufferSizeOption(value);
}
private static
|
Option
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/ParameterExtractor.java
|
{
"start": 187,
"end": 490
}
|
interface ____ {
/**
* Extracts a parameter from the request.
*
* If this returns a {@link ParameterCallback} then the value must be obtained from the listener
*
*/
Object extractParameter(ResteasyReactiveRequestContext context);
/**
* listener
|
ParameterExtractor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/processing/HQL.java
|
{
"start": 416,
"end": 666
}
|
interface ____ defining
* the signature of a method which is used to execute the given
* {@linkplain #value HQL query}, with an implementation generated
* automatically by the Hibernate Metamodel Generator.
* <p>
* For example:
* <pre>
* public
|
as
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/merge/CompositeMergeTest.java
|
{
"start": 3000,
"end": 3221
}
|
class ____ {
@Id
@GeneratedValue
Long id;
String description;
@Embedded
Address address;
@Basic( fetch = FetchType.LAZY )
byte[] lazyField;
}
@Embeddable
@Table( name = "ADDRESS" )
static
|
ParentEntity
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/api/connector/source/lib/util/RateLimitedSourceReaderITCase.java
|
{
"start": 2073,
"end": 3895
}
|
class ____ extends TestLogger {
private static final int PARALLELISM = 4;
@RegisterExtension
private static final MiniClusterExtension miniClusterExtension =
new MiniClusterExtension(
new MiniClusterResourceConfiguration.Builder()
.setNumberTaskManagers(1)
.setNumberSlotsPerTaskManager(PARALLELISM)
.build());
// ------------------------------------------------------------------------
@Test
@DisplayName("Rate limiter is used correctly.")
public void testRateLimitingParallelExecution() throws Exception {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(PARALLELISM);
final int count = 10;
final MockRateLimiterStrategy rateLimiterStrategy = new MockRateLimiterStrategy();
final DataGeneratorSource<Long> dataGeneratorSource =
new DataGeneratorSource<>(index -> index, 10, rateLimiterStrategy, Types.LONG);
final DataStream<Long> stream =
env.fromSource(
dataGeneratorSource, WatermarkStrategy.noWatermarks(), "generator source");
final List<Long> result = stream.executeAndCollect(10000);
int rateLimiterCallCount = MockRateLimiterStrategy.getRateLimitersCallCount();
assertThat(result).containsExactlyInAnyOrderElementsOf(range(0, 9));
assertThat(rateLimiterCallCount).isGreaterThan(count);
}
private List<Long> range(int startInclusive, int endInclusive) {
return LongStream.rangeClosed(startInclusive, endInclusive)
.boxed()
.collect(Collectors.toList());
}
private static final
|
RateLimitedSourceReaderITCase
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4412OfflineModeInPluginTest.java
|
{
"start": 1040,
"end": 3368
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that plugins using the 2.x style artifact resolver directly are subject to the offline mode of the
* current Maven session.
*
* @throws Exception in case of failure
*/
@Test
public void testitResolver() throws Exception {
File testDir = extractResources("/mng-4412");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifacts("org.apache.maven.its.mng4412");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.addCliArgument("-Presolver");
verifier.addCliArgument("--offline");
verifier.addCliArgument("-s");
verifier.addCliArgument("settings.xml");
verifier.setLogFileName("log-resolver.txt");
try {
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
fail("Plugin could resolve artifact from remote repository despite Maven being offline");
} catch (VerificationException e) {
// expected
}
}
/**
* Verify that plugins using the 2.x style artifact collector directly are subject to the offline mode of the
* current Maven session.
*
* @throws Exception in case of failure
*/
@Test
public void testitCollector() throws Exception {
File testDir = extractResources("/mng-4412");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifacts("org.apache.maven.its.mng4412");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.addCliArgument("-Pcollector");
verifier.addCliArgument("--offline");
verifier.addCliArgument("-s");
verifier.addCliArgument("settings.xml");
verifier.setLogFileName("log-collector.txt");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyArtifactNotPresent("org.apache.maven.its.mng4412", "dep", "0.1", "pom");
}
}
|
MavenITmng4412OfflineModeInPluginTest
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/RedissonMapReduceTest.java
|
{
"start": 697,
"end": 1076
}
|
class ____ implements RMapper<String, String, String, Integer> {
@Override
public void map(String key, String value, RCollector<String, Integer> collector) {
String[] words = value.split("[^a-zA-Z]");
for (String word : words) {
collector.emit(word, 1);
}
}
}
public static
|
WordMapper
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/selection/generics/ErroneousSourceTargetMapper5.java
|
{
"start": 316,
"end": 524
}
|
interface ____ {
ErroneousSourceTargetMapper5 INSTANCE = Mappers.getMapper( ErroneousSourceTargetMapper5.class );
ErroneousTarget5 sourceToTarget(ErroneousSource5 source);
}
|
ErroneousSourceTargetMapper5
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/cluster/api/sync/NodeSelectionSetCommands.java
|
{
"start": 1285,
"end": 11705
}
|
interface ____<K, V> {
/**
* Add one or more members to a set.
*
* @param key the key.
* @param members the member type: value.
* @return Long integer-reply the number of elements that were added to the set, not including all the elements already
* present into the set.
*/
Executions<Long> sadd(K key, V... members);
/**
* Get the number of members in a set.
*
* @param key the key.
* @return Long integer-reply the cardinality (number of elements) of the set, or {@code false} if {@code key} does not
* exist.
*/
Executions<Long> scard(K key);
/**
* Subtract multiple sets.
*
* @param keys the key.
* @return Set<V> array-reply list with members of the resulting set.
*/
Executions<Set<V>> sdiff(K... keys);
/**
* Subtract multiple sets.
*
* @param channel the channel.
* @param keys the keys.
* @return Long count of members of the resulting set.
*/
Executions<Long> sdiff(ValueStreamingChannel<V> channel, K... keys);
/**
* Subtract multiple sets and store the resulting set in a key.
*
* @param destination the destination type: key.
* @param keys the key.
* @return Long integer-reply the number of elements in the resulting set.
*/
Executions<Long> sdiffstore(K destination, K... keys);
/**
* Intersect multiple sets.
*
* @param keys the key.
* @return Set<V> array-reply list with members of the resulting set.
*/
Executions<Set<V>> sinter(K... keys);
/**
* Intersect multiple sets.
*
* @param channel the channel.
* @param keys the keys.
* @return Long count of members of the resulting set.
*/
Executions<Long> sinter(ValueStreamingChannel<V> channel, K... keys);
/**
* This command works exactly like {@link #sinter(java.lang.Object[])} but instead of returning the result set, it returns
* just the cardinality of the result.
*
* @param keys the keys.
* @return The cardinality of the set which would result from the intersection of all the given sets.
* @since 6.2
*/
Executions<Long> sintercard(K... keys);
/**
* This command works exactly like {@link #sinter(java.lang.Object[])} but instead of returning the result set, it returns
* just the cardinality of the result.
*
* @param limit If the intersection cardinality reaches limit partway through the computation, the algorithm will exit and
* yield limit as the cardinality.
* @param keys the keys.
* @return The cardinality of the set which would result from the intersection of all the given sets.
* @since 6.2
*/
Executions<Long> sintercard(long limit, K... keys);
/**
* Intersect multiple sets and store the resulting set in a key.
*
* @param destination the destination type: key.
* @param keys the key.
* @return Long integer-reply the number of elements in the resulting set.
*/
Executions<Long> sinterstore(K destination, K... keys);
/**
* Determine if a given value is a member of a set.
*
* @param key the key.
* @param member the member type: value.
* @return Boolean integer-reply specifically:
*
* {@code true} if the element is a member of the set. {@code false} if the element is not a member of the set, or
* if {@code key} does not exist.
*/
Executions<Boolean> sismember(K key, V member);
/**
* Get all the members in a set.
*
* @param key the key.
* @return Set<V> array-reply all elements of the set.
*/
Executions<Set<V>> smembers(K key);
/**
* Get all the members in a set.
*
* @param channel the channel.
* @param key the keys.
* @return Long count of members of the resulting set.
*/
Executions<Long> smembers(ValueStreamingChannel<V> channel, K key);
/**
* Returns whether each member is a member of the set stored at key.
*
* @param key the key.
* @param members the member type: value.
* @return List<Boolean> array-reply list representing the membership of the given elements, in the same order as they
* are requested.
* @since 6.1
*/
Executions<List<Boolean>> smismember(K key, V... members);
/**
* Move a member from one set to another.
*
* @param source the source key.
* @param destination the destination type: key.
* @param member the member type: value.
* @return Boolean integer-reply specifically:
*
* {@code true} if the element is moved. {@code false} if the element is not a member of {@code source} and no
* operation was performed.
*/
Executions<Boolean> smove(K source, K destination, V member);
/**
* Remove and return a random member from a set.
*
* @param key the key.
* @return V bulk-string-reply the removed element, or {@code null} when {@code key} does not exist.
*/
Executions<V> spop(K key);
/**
* Remove and return one or multiple random members from a set.
*
* @param key the key.
* @param count number of members to pop.
* @return V bulk-string-reply the removed element, or {@code null} when {@code key} does not exist.
*/
Executions<Set<V>> spop(K key, long count);
/**
* Get one random member from a set.
*
* @param key the key.
* @return V bulk-string-reply without the additional {@code count} argument the command returns a Bulk Reply with the
* randomly selected element, or {@code null} when {@code key} does not exist.
*/
Executions<V> srandmember(K key);
/**
* Get one or multiple random members from a set.
*
* @param key the key.
* @param count the count type: long.
* @return Set<V> bulk-string-reply without the additional {@code count} argument the command returns a Bulk Reply
* with the randomly selected element, or {@code null} when {@code key} does not exist.
*/
Executions<List<V>> srandmember(K key, long count);
/**
* Get one or multiple random members from a set.
*
* @param channel streaming channel that receives a call for every value.
* @param key the key.
* @param count the count.
* @return Long count of members of the resulting set.
*/
Executions<Long> srandmember(ValueStreamingChannel<V> channel, K key, long count);
/**
* Remove one or more members from a set.
*
* @param key the key.
* @param members the member type: value.
* @return Long integer-reply the number of members that were removed from the set, not including non existing members.
*/
Executions<Long> srem(K key, V... members);
/**
* Add multiple sets.
*
* @param keys the key.
* @return Set<V> array-reply list with members of the resulting set.
*/
Executions<Set<V>> sunion(K... keys);
/**
* Add multiple sets.
*
* @param channel streaming channel that receives a call for every value.
* @param keys the keys.
* @return Long count of members of the resulting set.
*/
Executions<Long> sunion(ValueStreamingChannel<V> channel, K... keys);
/**
* Add multiple sets and store the resulting set in a key.
*
* @param destination the destination type: key.
* @param keys the key.
* @return Long integer-reply the number of elements in the resulting set.
*/
Executions<Long> sunionstore(K destination, K... keys);
/**
* Incrementally iterate Set elements.
*
* @param key the key.
* @return ValueScanCursor<V> scan cursor.
*/
Executions<ValueScanCursor<V>> sscan(K key);
/**
* Incrementally iterate Set elements.
*
* @param key the key.
* @param scanArgs scan arguments.
* @return ValueScanCursor<V> scan cursor.
*/
Executions<ValueScanCursor<V>> sscan(K key, ScanArgs scanArgs);
/**
* Incrementally iterate Set elements.
*
* @param key the key.
* @param scanCursor cursor to resume from a previous scan, must not be {@code null}.
* @param scanArgs scan arguments.
* @return ValueScanCursor<V> scan cursor.
*/
Executions<ValueScanCursor<V>> sscan(K key, ScanCursor scanCursor, ScanArgs scanArgs);
/**
* Incrementally iterate Set elements.
*
* @param key the key.
* @param scanCursor cursor to resume from a previous scan, must not be {@code null}.
* @return ValueScanCursor<V> scan cursor.
*/
Executions<ValueScanCursor<V>> sscan(K key, ScanCursor scanCursor);
/**
* Incrementally iterate Set elements.
*
* @param channel streaming channel that receives a call for every value.
* @param key the key.
* @return StreamScanCursor scan cursor.
*/
Executions<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key);
/**
* Incrementally iterate Set elements.
*
* @param channel streaming channel that receives a call for every value.
* @param key the key.
* @param scanArgs scan arguments.
* @return StreamScanCursor scan cursor.
*/
Executions<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key, ScanArgs scanArgs);
/**
* Incrementally iterate Set elements.
*
* @param channel streaming channel that receives a call for every value.
* @param key the key.
* @param scanCursor cursor to resume from a previous scan, must not be {@code null}.
* @param scanArgs scan arguments.
* @return StreamScanCursor scan cursor.
*/
Executions<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key, ScanCursor scanCursor, ScanArgs scanArgs);
/**
* Incrementally iterate Set elements.
*
* @param channel streaming channel that receives a call for every value.
* @param key the key.
* @param scanCursor cursor to resume from a previous scan, must not be {@code null}.
* @return StreamScanCursor scan cursor.
*/
Executions<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key, ScanCursor scanCursor);
}
|
NodeSelectionSetCommands
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1300/Issue1319.java
|
{
"start": 793,
"end": 864
}
|
interface ____{
}
@JSONType(typeName = "myEnum")
|
EnumInterface
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/convention/TestBeanOverrideProcessorTests.java
|
{
"start": 4797,
"end": 5706
}
|
class ____ fully-qualified method name: %s", methodName)
.withCauseInstanceOf(ClassNotFoundException.class);
}
@Test
void findTestBeanFactoryMethodByFullyQualifiedNameWithMissingMethodName() {
Class<?> clazz = getClass();
Class<?> returnType = String.class;
String methodName = TestBeanFactory.class.getName() + "#";
assertThatIllegalArgumentException()
.isThrownBy(() -> this.processor.findTestBeanFactoryMethod(clazz, returnType, methodName))
.withMessage("No method name present in fully-qualified method name: %s", methodName);
}
@Test
void findTestBeanFactoryMethodByFullyQualifiedNameWithMissingClassName() {
Class<?> clazz = getClass();
Class<?> returnType = String.class;
String methodName = "#createTestBean";
assertThatIllegalArgumentException()
.isThrownBy(() -> this.processor.findTestBeanFactoryMethod(clazz, returnType, methodName))
.withMessage("No
|
for
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/ProcessorDefinitionHelper.java
|
{
"start": 1182,
"end": 1258
}
|
class ____ ProcessorDefinition and the other model classes.
*/
public final
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocations.java
|
{
"start": 587,
"end": 1568
}
|
class ____ extends AbstractPreserveAllocations {
protected PreserveAllAllocations(List<Node> nodes, List<Deployment> deployments) {
super(nodes, deployments);
}
@Override
protected int calculateUsedCores(Node n, Deployment m) {
return m.currentAllocationsByNodeId().get(n.id()) * m.threadsPerAllocation();
}
@Override
protected Map<String, Integer> calculateAllocationsPerNodeToPreserve(Deployment m) {
return m.currentAllocationsByNodeId().entrySet().stream().collect(Collectors.toMap(e -> e.getKey(), e -> 0));
}
@Override
protected int calculatePreservedAllocations(Deployment m) {
return m.currentAllocationsByNodeId().values().stream().mapToInt(Integer::intValue).sum();
}
@Override
protected int addPreservedAllocations(Node n, Deployment m) {
return m.currentAllocationsByNodeId().containsKey(n.id()) ? m.currentAllocationsByNodeId().get(n.id()) : 0;
}
}
|
PreserveAllAllocations
|
java
|
apache__flink
|
flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/canal/CanalJsonDecodingFormat.java
|
{
"start": 2058,
"end": 5920
}
|
class ____
implements ProjectableDecodingFormat<DeserializationSchema<RowData>> {
// --------------------------------------------------------------------------------------------
// Mutable attributes
// --------------------------------------------------------------------------------------------
private List<String> metadataKeys;
// --------------------------------------------------------------------------------------------
// Canal-specific attributes
// --------------------------------------------------------------------------------------------
private final @Nullable String database;
private final @Nullable String table;
private final boolean ignoreParseErrors;
private final TimestampFormat timestampFormat;
public CanalJsonDecodingFormat(
String database,
String table,
boolean ignoreParseErrors,
TimestampFormat timestampFormat) {
this.database = database;
this.table = table;
this.ignoreParseErrors = ignoreParseErrors;
this.timestampFormat = timestampFormat;
this.metadataKeys = Collections.emptyList();
}
@Override
public DeserializationSchema<RowData> createRuntimeDecoder(
DynamicTableSource.Context context, DataType physicalDataType, int[][] projections) {
physicalDataType = Projection.of(projections).project(physicalDataType);
final List<ReadableMetadata> readableMetadata =
metadataKeys.stream()
.map(
k ->
Stream.of(ReadableMetadata.values())
.filter(rm -> rm.key.equals(k))
.findFirst()
.orElseThrow(IllegalStateException::new))
.collect(Collectors.toList());
final List<DataTypes.Field> metadataFields =
readableMetadata.stream()
.map(m -> DataTypes.FIELD(m.key, m.dataType))
.collect(Collectors.toList());
final DataType producedDataType =
DataTypeUtils.appendRowFields(physicalDataType, metadataFields);
final TypeInformation<RowData> producedTypeInfo =
context.createTypeInformation(producedDataType);
return CanalJsonDeserializationSchema.builder(
physicalDataType, readableMetadata, producedTypeInfo)
.setDatabase(database)
.setTable(table)
.setIgnoreParseErrors(ignoreParseErrors)
.setTimestampFormat(timestampFormat)
.build();
}
@Override
public Map<String, DataType> listReadableMetadata() {
final Map<String, DataType> metadataMap = new LinkedHashMap<>();
Stream.of(ReadableMetadata.values())
.forEachOrdered(m -> metadataMap.put(m.key, m.dataType));
return metadataMap;
}
@Override
public void applyReadableMetadata(List<String> metadataKeys) {
this.metadataKeys = metadataKeys;
}
@Override
public ChangelogMode getChangelogMode() {
return ChangelogMode.newBuilder()
.addContainedKind(RowKind.INSERT)
.addContainedKind(RowKind.UPDATE_BEFORE)
.addContainedKind(RowKind.UPDATE_AFTER)
.addContainedKind(RowKind.DELETE)
.build();
}
// --------------------------------------------------------------------------------------------
// Metadata handling
// --------------------------------------------------------------------------------------------
/** List of metadata that can be read with this format. */
|
CanalJsonDecodingFormat
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nullvaluepropertymapping/CustomerNvpmsPropertyMappingMapper.java
|
{
"start": 424,
"end": 956
}
|
interface ____ {
CustomerNvpmsPropertyMappingMapper INSTANCE = Mappers.getMapper( CustomerNvpmsPropertyMappingMapper.class );
@Mapping( target = "address", nullValuePropertyMappingStrategy = IGNORE)
@Mapping( target = "details", nullValuePropertyMappingStrategy = IGNORE)
void map(Customer customer, @MappingTarget CustomerDTO mappingTarget);
@Mapping(target = "houseNo", source = "houseNumber")
void mapCustomerHouse(Address address, @MappingTarget AddressDTO addrDTO);
}
|
CustomerNvpmsPropertyMappingMapper
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeFlatMapNotificationTest.java
|
{
"start": 944,
"end": 3625
}
|
class ____ extends RxJavaTest {
@Test
public void dispose() {
TestHelper.checkDisposed(Maybe.just(1)
.flatMap(Functions.justFunction(Maybe.just(1)),
Functions.justFunction(Maybe.just(1)), Functions.justSupplier(Maybe.just(1))));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybe(new Function<Maybe<Integer>, MaybeSource<Integer>>() {
@Override
public MaybeSource<Integer> apply(Maybe<Integer> m) throws Exception {
return m
.flatMap(Functions.justFunction(Maybe.just(1)),
Functions.justFunction(Maybe.just(1)), Functions.justSupplier(Maybe.just(1)));
}
});
}
@Test
public void onSuccessNull() {
Maybe.just(1)
.flatMap(Functions.justFunction((Maybe<Integer>)null),
Functions.justFunction(Maybe.just(1)),
Functions.justSupplier(Maybe.just(1)))
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void onErrorNull() {
TestObserverEx<Integer> to = Maybe.<Integer>error(new TestException())
.flatMap(Functions.justFunction(Maybe.just(1)),
Functions.justFunction((Maybe<Integer>)null),
Functions.justSupplier(Maybe.just(1)))
.to(TestHelper.<Integer>testConsumer())
.assertFailure(CompositeException.class);
List<Throwable> ce = TestHelper.compositeList(to.errors().get(0));
TestHelper.assertError(ce, 0, TestException.class);
TestHelper.assertError(ce, 1, NullPointerException.class);
}
@Test
public void onCompleteNull() {
Maybe.<Integer>empty()
.flatMap(Functions.justFunction(Maybe.just(1)),
Functions.justFunction(Maybe.just(1)),
Functions.justSupplier((Maybe<Integer>)null))
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void onSuccessEmpty() {
Maybe.just(1)
.flatMap(Functions.justFunction(Maybe.<Integer>empty()),
Functions.justFunction(Maybe.just(1)),
Functions.justSupplier(Maybe.just(1)))
.test()
.assertResult();
}
@Test
public void onSuccessError() {
Maybe.just(1)
.flatMap(Functions.justFunction(Maybe.<Integer>error(new TestException())),
Functions.justFunction((Maybe<Integer>)null),
Functions.justSupplier(Maybe.just(1)))
.test()
.assertFailure(TestException.class);
}
}
|
MaybeFlatMapNotificationTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java
|
{
"start": 11070,
"end": 11158
}
|
interface ____ {
<T> T deduplicate(T object);
}
private static
|
Deduplicator
|
java
|
quarkusio__quarkus
|
extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanServiceBindingConverter.java
|
{
"start": 1133,
"end": 1651
}
|
class ____:
* <ul>
* <li>quarkus.infinispan-client.uri (<i>if uri is provided</i>)</li>
* <li>quarkus.infinispan-client.hosts (<i>if hosts is provided and uri is not provided</i>)</li>
* <li>quarkus.infinispan-client.use-auth (<i>if useauth is provided and uri is not provided</i>)</li>
* <li>quarkus.infinispan-client.username (<i>if username is provided and uri is not provided</i>)</li>
* <li>quarkus.infinispan-client.password (<i>if password is provided and uri is not provided</i>)</li>
* </ul>
*/
public
|
are
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda/event-server/src/main/java/io/quarkus/amazon/lambda/runtime/MockBodyHandler.java
|
{
"start": 3789,
"end": 6840
}
|
class ____ implements Handler<Buffer> {
private static final int MAX_PREALLOCATED_BODY_BUFFER_BYTES = 65535;
final RoutingContext context;
final long contentLength;
Buffer body;
boolean failed;
AtomicInteger uploadCount = new AtomicInteger();
AtomicBoolean cleanup = new AtomicBoolean(false);
boolean ended;
long uploadSize = 0L;
public BHandler(RoutingContext context, long contentLength) {
this.context = context;
this.contentLength = contentLength;
// the request clearly states that there should
// be a body, so we respect the client and ensure
// that the body will not be null
if (contentLength != -1) {
initBodyBuffer();
}
context.request().exceptionHandler(t -> {
if (t instanceof DecoderException) {
// bad request
context.fail(400, t.getCause());
} else {
context.fail(t);
}
});
}
private void initBodyBuffer() {
int initialBodyBufferSize;
if (contentLength < 0) {
initialBodyBufferSize = DEFAULT_INITIAL_BODY_BUFFER_SIZE;
} else if (contentLength > MAX_PREALLOCATED_BODY_BUFFER_BYTES) {
initialBodyBufferSize = MAX_PREALLOCATED_BODY_BUFFER_BYTES;
} else {
initialBodyBufferSize = (int) contentLength;
}
if (bodyLimit != -1) {
initialBodyBufferSize = (int) Math.min(initialBodyBufferSize, bodyLimit);
}
this.body = Buffer.buffer(initialBodyBufferSize);
}
@Override
public void handle(Buffer buff) {
if (failed) {
return;
}
uploadSize += buff.length();
if (bodyLimit != -1 && uploadSize > bodyLimit) {
failed = true;
context.fail(413);
} else {
if (body == null) {
initBodyBuffer();
}
body.appendBuffer(buff);
}
}
void end() {
// this marks the end of body parsing, calling doEnd should
// only be possible from this moment onwards
ended = true;
// only if parsing is done and count is 0 then all files have been processed
if (uploadCount.get() == 0) {
doEnd();
}
}
void doEnd() {
if (failed) {
return;
}
HttpServerRequest req = context.request();
if (mergeFormAttributes && req.isExpectMultipart()) {
req.params().addAll(req.formAttributes());
}
context.setBody(body);
// release body as it may take lots of memory
body = null;
context.next();
}
}
}
|
BHandler
|
java
|
apache__camel
|
components/camel-azure/camel-azure-storage-queue/src/main/java/org/apache/camel/component/azure/storage/queue/QueueConsumer.java
|
{
"start": 1935,
"end": 8250
}
|
class ____ extends ScheduledBatchPollingConsumer {
private static final Logger LOG = LoggerFactory.getLogger(QueueConsumer.class);
private QueueClientWrapper clientWrapper;
private QueueOperations queueOperations;
public QueueConsumer(final QueueEndpoint endpoint, final Processor processor) {
super(endpoint, processor);
}
@Override
protected void doStart() throws Exception {
super.doStart();
clientWrapper = new QueueClientWrapper(getServiceClient().getQueueClient(getConfiguration().getQueueName()));
queueOperations = new QueueOperations(getConfiguration(), clientWrapper);
}
@Override
protected int poll() throws Exception {
// must reset for each poll
shutdownRunningTask = null;
pendingExchanges = 0;
try {
final List<QueueMessageItem> messageItems = clientWrapper.receiveMessages(getConfiguration().getMaxMessages(),
getConfiguration().getVisibilityTimeout(),
getConfiguration().getTimeout());
// okay we have some response from azure so lets mark the consumer as ready
forceConsumerAsReady();
LOG.trace("Receiving messages [{}]...", messageItems);
final Queue<Exchange> exchanges = createExchanges(messageItems);
return processBatch(CastUtils.cast(exchanges));
} catch (QueueStorageException ex) {
if (404 == ex.getStatusCode()) {
return 0;
} else {
throw ex;
}
}
}
private Queue<Exchange> createExchanges(final List<QueueMessageItem> messageItems) {
return messageItems
.stream()
.map(this::createExchange)
.collect(Collectors.toCollection(LinkedList::new));
}
private QueueServiceClient getServiceClient() {
return getEndpoint().getQueueServiceClient();
}
private QueueConfiguration getConfiguration() {
return getEndpoint().getConfiguration();
}
@Override
public QueueEndpoint getEndpoint() {
return (QueueEndpoint) super.getEndpoint();
}
@Override
public int processBatch(Queue<Object> exchanges) {
final int total = exchanges.size();
for (int index = 0; index < total && isBatchAllowed(); index++) {
// only loop if we are started (allowed to run)
final Exchange exchange = ObjectHelper.cast(Exchange.class, exchanges.poll());
// add current index and total as properties
exchange.setProperty(ExchangePropertyKey.BATCH_INDEX, index);
exchange.setProperty(ExchangePropertyKey.BATCH_SIZE, total);
exchange.setProperty(ExchangePropertyKey.BATCH_COMPLETE, index == total - 1);
// update pending number of exchanges
pendingExchanges = total - index - 1;
// copy messageId, popReceipt, timeout for fix exchange override case
// azure storage blob can override this headers
final String messageId = exchange.getIn().getHeader(QueueConstants.MESSAGE_ID, String.class);
final String popReceipt = exchange.getIn().getHeader(QueueConstants.POP_RECEIPT, String.class);
final Duration timeout = exchange.getIn().getHeader(QueueConstants.TIMEOUT, Duration.class);
// add on completion to handle after work when the exchange is done
exchange.getExchangeExtension().addOnCompletion(new Synchronization() {
@Override
public void onComplete(Exchange exchange) {
// past messageId, popReceipt, timeout for fix exchange override case
exchange.getIn().setHeader(QueueConstants.MESSAGE_ID, messageId);
exchange.getIn().setHeader(QueueConstants.POP_RECEIPT, popReceipt);
exchange.getIn().setHeader(QueueConstants.TIMEOUT, timeout);
processCommit(exchange);
}
@Override
public void onFailure(Exchange exchange) {
processRollback(exchange);
}
});
LOG.trace("Processing exchange [{}]...", exchange);
// use default consumer callback
AsyncCallback cb = defaultConsumerCallback(exchange, true);
getAsyncProcessor().process(exchange, cb);
}
return total;
}
private Exchange createExchange(final QueueMessageItem messageItem) {
final Exchange exchange = createExchange(true);
final Message message = exchange.getIn();
BinaryData data = messageItem.getBody();
InputStream is = data == null ? null : data.toStream();
message.setBody(is);
message.setHeaders(QueueExchangeHeaders.createQueueExchangeHeadersFromQueueMessageItem(messageItem).toMap());
exchange.getExchangeExtension().addOnCompletion(new SynchronizationAdapter() {
@Override
public void onDone(Exchange exchange) {
IOHelper.close(is);
}
});
return exchange;
}
/**
* Strategy to delete the message after being processed.
*
* @param exchange the exchange
*/
private void processCommit(final Exchange exchange) {
try {
if (LOG.isTraceEnabled()) {
LOG.trace("Deleting message with pop receipt handle {}...",
QueueExchangeHeaders.getPopReceiptFromHeaders(exchange));
}
queueOperations.deleteMessage(exchange);
} catch (QueueStorageException ex) {
getExceptionHandler().handleException("Error occurred during deleting message. This exception is ignored.",
exchange, ex);
}
}
/**
* Strategy when processing the exchange failed.
*
* @param exchange the exchange
*/
private void processRollback(Exchange exchange) {
final Exception cause = exchange.getException();
if (cause != null) {
getExceptionHandler().handleException(
"Error during processing exchange. Will attempt to process the message on next poll.", exchange, cause);
}
}
}
|
QueueConsumer
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/jsonrpc/ByteArrayInputStreamDeserializer.java
|
{
"start": 482,
"end": 1077
}
|
class ____ extends JsonDeserializer<ByteArrayInputStream> {
@Override
public ByteArrayInputStream deserialize(JsonParser p, DeserializationContext ctxt)
throws IOException, JsonProcessingException {
String text = p.getText();
try {
byte[] decode = BASE64_DECODER.decode(text);
return new ByteArrayInputStream(decode);
} catch (IllegalArgumentException e) {
throw new InvalidFormatException(p, "Expected a base64 encoded byte array", text, ByteArrayInputStream.class);
}
}
}
|
ByteArrayInputStreamDeserializer
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/ParameterizedInvocationNameFormatter.java
|
{
"start": 9632,
"end": 12930
}
|
class ____ implements PartialFormatter {
@SuppressWarnings("UnnecessaryUnicodeEscape")
private static final char ELLIPSIS = '\u2026';
private final MessageFormat messageFormat;
private final int argumentMaxLength;
private final boolean generateNameValuePairs;
private final @Nullable ResolverFacade resolverFacade;
MessageFormatPartialFormatter(String pattern, int argumentMaxLength) {
this(pattern, argumentMaxLength, false, null);
}
MessageFormatPartialFormatter(String pattern, int argumentMaxLength, boolean generateNameValuePairs,
@Nullable ResolverFacade resolverFacade) {
this.messageFormat = new MessageFormat(pattern);
this.argumentMaxLength = argumentMaxLength;
this.generateNameValuePairs = generateNameValuePairs;
this.resolverFacade = resolverFacade;
}
// synchronized because MessageFormat is not thread-safe
@Override
public synchronized void append(ArgumentsContext context, StringBuffer result) {
this.messageFormat.format(makeReadable(context.consumedArguments, context.quoteTextArguments), result,
new FieldPosition(0));
}
private @Nullable Object[] makeReadable(@Nullable Object[] arguments, boolean quoteTextArguments) {
@Nullable
Format[] formats = messageFormat.getFormatsByArgumentIndex();
@Nullable
Object[] result = Arrays.copyOf(arguments, Math.min(arguments.length, formats.length), Object[].class);
for (int i = 0; i < result.length; i++) {
if (formats[i] == null) {
Object argument = arguments[i];
String prefix = "";
if (argument instanceof ParameterNameAndArgument parameterNameAndArgument) {
// This supports the useHeadersInDisplayName attributes in @CsvSource and @CsvFileSource.
prefix = parameterNameAndArgument.getName() + " = ";
argument = parameterNameAndArgument.getPayload();
}
else if (this.generateNameValuePairs && this.resolverFacade != null) {
Optional<String> parameterName = this.resolverFacade.getParameterName(i);
if (parameterName.isPresent()) {
// This supports the {argumentsWithNames} pattern.
prefix = parameterName.get() + " = ";
}
}
if (argument instanceof Character ch) {
result[i] = prefix + (quoteTextArguments ? QuoteUtils.quote(ch) : ch);
}
else {
String argumentText = (argument == null ? "null"
: truncateIfExceedsMaxLength(StringUtils.nullSafeToString(argument)));
result[i] = prefix + (quoteTextArguments && argument instanceof CharSequence//
? QuoteUtils.quote(argumentText)
: argumentText);
}
}
}
return result;
}
private String truncateIfExceedsMaxLength(String argument) {
if (argument.length() > this.argumentMaxLength) {
return argument.substring(0, this.argumentMaxLength - 1) + ELLIPSIS;
}
return argument;
}
}
/**
* Caches formatters by the length of the consumed <em>arguments</em> which
* may differ from the number of declared parameters.
*
* <p>For example, when using multiple providers or a provider that returns
* argument arrays of different length, such as:
*
* <pre>
* @ParameterizedTest
* @CsvSource({"a", "a,b", "a,b,c"})
* void test(ArgumentsAccessor accessor) {}
* </pre>
*/
private static
|
MessageFormatPartialFormatter
|
java
|
spring-projects__spring-boot
|
test-support/spring-boot-test-support/src/main/java/org/springframework/boot/testsupport/classpath/ModifiedClassPathClassLoader.java
|
{
"start": 2646,
"end": 11428
}
|
class ____ extends URLClassLoader {
private static final Map<List<AnnotatedElement>, ModifiedClassPathClassLoader> cache = new ConcurrentReferenceHashMap<>();
private static final Pattern INTELLIJ_CLASSPATH_JAR_PATTERN = Pattern.compile(".*classpath(\\d+)?\\.jar");
private static final int MAX_RESOLUTION_ATTEMPTS = 5;
private final Set<String> excludedPackages;
private final ClassLoader junitLoader;
ModifiedClassPathClassLoader(URL[] urls, Set<String> excludedPackages, ClassLoader parent,
ClassLoader junitLoader) {
super(urls, parent);
this.excludedPackages = excludedPackages;
this.junitLoader = junitLoader;
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
if (name.startsWith("org.junit.") || name.startsWith("org.hamcrest.")
|| name.startsWith("io.netty.internal.tcnative.")) {
return Class.forName(name, false, this.junitLoader);
}
String packageName = ClassUtils.getPackageName(name);
if (this.excludedPackages.contains(packageName)) {
throw new ClassNotFoundException();
}
return super.loadClass(name);
}
@SuppressWarnings("resource")
static ModifiedClassPathClassLoader get(Class<?> testClass, Method testMethod, List<Object> arguments) {
Set<AnnotatedElement> candidates = new LinkedHashSet<>();
candidates.add(testClass);
candidates.add(testMethod);
candidates.addAll(getAnnotatedElements(arguments.toArray()));
List<AnnotatedElement> annotatedElements = candidates.stream()
.filter(ModifiedClassPathClassLoader::hasAnnotation)
.toList();
if (annotatedElements.isEmpty()) {
return null;
}
return cache.computeIfAbsent(annotatedElements, (key) -> compute(testClass.getClassLoader(), key));
}
private static Collection<AnnotatedElement> getAnnotatedElements(Object[] array) {
Set<AnnotatedElement> result = new LinkedHashSet<>();
for (Object item : array) {
if (item instanceof AnnotatedElement annotatedElement) {
result.add(annotatedElement);
}
else if (ObjectUtils.isArray(item)) {
result.addAll(getAnnotatedElements(ObjectUtils.toObjectArray(item)));
}
}
return result;
}
private static boolean hasAnnotation(AnnotatedElement element) {
MergedAnnotations annotations = MergedAnnotations.from(element,
MergedAnnotations.SearchStrategy.TYPE_HIERARCHY);
return annotations.isPresent(ForkedClassPath.class) || annotations.isPresent(ClassPathOverrides.class)
|| annotations.isPresent(ClassPathExclusions.class);
}
private static ModifiedClassPathClassLoader compute(ClassLoader classLoader,
List<AnnotatedElement> annotatedClasses) {
List<MergedAnnotations> annotations = annotatedClasses.stream()
.map((source) -> MergedAnnotations.from(source, MergedAnnotations.SearchStrategy.TYPE_HIERARCHY))
.toList();
return new ModifiedClassPathClassLoader(processUrls(extractUrls(classLoader), annotations),
excludedPackages(annotations), classLoader.getParent(), classLoader);
}
private static URL[] extractUrls(ClassLoader classLoader) {
List<URL> extractedUrls = new ArrayList<>();
doExtractUrls(classLoader).forEach((URL url) -> {
if (isManifestOnlyJar(url)) {
extractedUrls.addAll(extractUrlsFromManifestClassPath(url));
}
else {
extractedUrls.add(url);
}
});
return extractedUrls.toArray(new URL[0]);
}
private static Stream<URL> doExtractUrls(ClassLoader classLoader) {
if (classLoader instanceof URLClassLoader urlClassLoader) {
return Stream.of(urlClassLoader.getURLs());
}
return Stream.of(ManagementFactory.getRuntimeMXBean().getClassPath().split(File.pathSeparator))
.map(ModifiedClassPathClassLoader::toURL);
}
private static URL toURL(String entry) {
try {
return new File(entry).toURI().toURL();
}
catch (Exception ex) {
throw new IllegalArgumentException(ex);
}
}
private static boolean isManifestOnlyJar(URL url) {
return isShortenedIntelliJJar(url);
}
private static boolean isShortenedIntelliJJar(URL url) {
String urlPath = url.getPath();
boolean isCandidate = INTELLIJ_CLASSPATH_JAR_PATTERN.matcher(urlPath).matches();
if (isCandidate) {
try {
Attributes attributes = getManifestMainAttributesFromUrl(url);
String createdBy = attributes.getValue("Created-By");
return createdBy != null && createdBy.contains("IntelliJ");
}
catch (Exception ex) {
// Ignore
}
}
return false;
}
private static List<URL> extractUrlsFromManifestClassPath(URL booterJar) {
List<URL> urls = new ArrayList<>();
try {
for (String entry : getClassPath(booterJar)) {
urls.add(new URL(entry));
}
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
return urls;
}
private static String[] getClassPath(URL booterJar) throws Exception {
Attributes attributes = getManifestMainAttributesFromUrl(booterJar);
return StringUtils.delimitedListToStringArray(attributes.getValue(Attributes.Name.CLASS_PATH), " ");
}
private static Attributes getManifestMainAttributesFromUrl(URL url) throws Exception {
try (JarFile jarFile = new JarFile(new File(url.toURI()))) {
return jarFile.getManifest().getMainAttributes();
}
}
private static URL[] processUrls(URL[] urls, List<MergedAnnotations> annotations) {
ClassPathEntryFilter filter = new ClassPathEntryFilter(annotations);
List<URL> additionalUrls = getAdditionalUrls(annotations);
List<URL> processedUrls = new ArrayList<>(additionalUrls);
for (URL url : urls) {
if (!filter.isExcluded(url)) {
processedUrls.add(url);
}
}
return processedUrls.toArray(new URL[0]);
}
private static List<URL> getAdditionalUrls(List<MergedAnnotations> annotations) {
Set<URL> urls = new LinkedHashSet<>();
for (MergedAnnotations candidate : annotations) {
MergedAnnotation<ClassPathOverrides> annotation = candidate.get(ClassPathOverrides.class);
if (annotation.isPresent()) {
urls.addAll(resolveCoordinates(annotation.getStringArray(MergedAnnotation.VALUE)));
}
}
return urls.stream().toList();
}
private static List<URL> resolveCoordinates(String[] coordinates) {
Exception latestFailure = null;
RepositorySystem repositorySystem = createRepositorySystem();
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
session.setSystemProperties(System.getProperties());
LocalRepository localRepository = new LocalRepository(System.getProperty("user.home") + "/.m2/repository");
RemoteRepository remoteRepository = new RemoteRepository.Builder("central", "default",
"https://repo.maven.apache.org/maven2")
.build();
session.setLocalRepositoryManager(repositorySystem.newLocalRepositoryManager(session, localRepository));
for (int i = 0; i < MAX_RESOLUTION_ATTEMPTS; i++) {
CollectRequest collectRequest = new CollectRequest(null, Arrays.asList(remoteRepository));
collectRequest.setDependencies(createDependencies(coordinates));
DependencyRequest dependencyRequest = new DependencyRequest(collectRequest, null);
try {
DependencyResult result = repositorySystem.resolveDependencies(session, dependencyRequest);
List<URL> resolvedArtifacts = new ArrayList<>();
for (ArtifactResult artifact : result.getArtifactResults()) {
resolvedArtifacts.add(artifact.getArtifact().getFile().toURI().toURL());
}
return resolvedArtifacts;
}
catch (Exception ex) {
latestFailure = ex;
}
}
throw new IllegalStateException("Resolution failed after " + MAX_RESOLUTION_ATTEMPTS + " attempts",
latestFailure);
}
@SuppressWarnings("deprecation")
private static RepositorySystem createRepositorySystem() {
org.eclipse.aether.impl.DefaultServiceLocator serviceLocator = MavenRepositorySystemUtils.newServiceLocator();
serviceLocator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
serviceLocator.addService(TransporterFactory.class, HttpTransporterFactory.class);
RepositorySystem repositorySystem = serviceLocator.getService(RepositorySystem.class);
return repositorySystem;
}
private static List<Dependency> createDependencies(String[] allCoordinates) {
List<Dependency> dependencies = new ArrayList<>();
for (String coordinate : allCoordinates) {
dependencies.add(new Dependency(new DefaultArtifact(coordinate), null));
}
return dependencies;
}
private static Set<String> excludedPackages(List<MergedAnnotations> annotations) {
Set<String> excludedPackages = new HashSet<>();
for (MergedAnnotations candidate : annotations) {
MergedAnnotation<ClassPathExclusions> annotation = candidate.get(ClassPathExclusions.class);
if (annotation.isPresent()) {
excludedPackages.addAll(Arrays.asList(annotation.getStringArray("packages")));
}
}
return excludedPackages;
}
/**
* Filter for
|
ModifiedClassPathClassLoader
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/cluster/CLusterSendMsgRTCommand.java
|
{
"start": 1603,
"end": 8020
}
|
class ____ implements SubCommand {
public static void main(String[] args) {
}
@Override
public String commandName() {
return "clusterRT";
}
@Override
public String commandDesc() {
return "List All clusters Message Send RT.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("a", "amount", true, "message amount | default 100");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("s", "size", true, "message size | default 128 Byte");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("c", "cluster", true, "cluster name | default display all cluster");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("p", "print log", true, "print as tlog | default false");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("m", "machine room", true, "machine room name | default noname");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("i", "interval", true, "print interval | default 10 seconds");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
DefaultMQProducer producer = new DefaultMQProducer(rpcHook);
producer.setProducerGroup(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
producer.start();
ClusterInfo clusterInfoSerializeWrapper = defaultMQAdminExt.examineBrokerClusterInfo();
Map<String, Set<String>> clusterAddr = clusterInfoSerializeWrapper
.getClusterAddrTable();
Set<String> clusterNames = null;
long amount = !commandLine.hasOption('a') ? 100 : Long.parseLong(commandLine
.getOptionValue('a').trim());
long size = !commandLine.hasOption('s') ? 128 : Long.parseLong(commandLine
.getOptionValue('s').trim());
long interval = !commandLine.hasOption('i') ? 10 : Long.parseLong(commandLine
.getOptionValue('i').trim());
boolean printAsTlog = commandLine.hasOption('p') && Boolean.parseBoolean(commandLine.getOptionValue('p').trim());
String machineRoom = !commandLine.hasOption('m') ? "noname" : commandLine
.getOptionValue('m').trim();
if (commandLine.hasOption('c')) {
clusterNames = new TreeSet<>();
clusterNames.add(commandLine.getOptionValue('c').trim());
} else {
clusterNames = clusterAddr.keySet();
}
if (!printAsTlog) {
System.out.printf("%-24s %-24s %-4s %-8s %-8s%n",
"#Cluster Name",
"#Broker Name",
"#RT",
"#successCount",
"#failCount"
);
}
while (true) {
for (String clusterName : clusterNames) {
Set<String> brokerNames = clusterAddr.get(clusterName);
if (brokerNames == null) {
System.out.printf("cluster [%s] not exist", clusterName);
break;
}
for (String brokerName : brokerNames) {
Message msg = new Message(brokerName, getStringBySize(size).getBytes(MixAll.DEFAULT_CHARSET));
long start = 0;
long end = 0;
long elapsed = 0;
int successCount = 0;
int failCount = 0;
for (int i = 0; i < amount; i++) {
start = System.currentTimeMillis();
try {
producer.send(msg);
successCount++;
end = System.currentTimeMillis();
} catch (Exception e) {
failCount++;
end = System.currentTimeMillis();
}
if (i != 0) {
elapsed += end - start;
}
}
double rt = (double) elapsed / (amount - 1);
if (!printAsTlog) {
System.out.printf("%-24s %-24s %-8s %-16s %-16s%n",
clusterName,
brokerName,
String.format("%.2f", rt),
successCount,
failCount
);
} else {
System.out.printf("%s", String.format("%s|%s|%s|%s|%s%n", getCurTime(),
machineRoom, clusterName, brokerName,
new BigDecimal(rt).setScale(0, BigDecimal.ROUND_HALF_UP)));
}
}
}
Thread.sleep(interval * 1000);
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
producer.shutdown();
}
}
public String getStringBySize(long size) {
StringBuilder res = new StringBuilder();
for (int i = 0; i < size; i++) {
res.append('a');
}
return res.toString();
}
public String getCurTime() {
String fromTimeZone = "GMT+8";
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date();
format.setTimeZone(TimeZone.getTimeZone(fromTimeZone));
String chinaDate = format.format(date);
return chinaDate;
}
}
|
CLusterSendMsgRTCommand
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/rpc/RpcException.java
|
{
"start": 918,
"end": 1419
}
|
class ____ extends RemotingException {
private int errorCode;
public RpcException(int errorCode, String message) {
super(message);
this.errorCode = errorCode;
}
public RpcException(int errorCode, String message, Throwable cause) {
super(message, cause);
this.errorCode = errorCode;
}
public int getErrorCode() {
return errorCode;
}
public void setErrorCode(int errorCode) {
this.errorCode = errorCode;
}
}
|
RpcException
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java
|
{
"start": 1779,
"end": 5980
}
|
class ____ {
private static final String FROM_DIR_NAME = "fromDir";
private static final String TO_DIR_NAME = "toDir";
private static FileSystem fs;
private static Path testDir;
private static Configuration conf;
private Path dir = null;
private int numFiles = 0;
private static int initialize(Path dir) throws Exception {
fs.mkdirs(dir);
Path fromDirPath = new Path(dir, FROM_DIR_NAME);
fs.mkdirs(fromDirPath);
Path toDirPath = new Path(dir, TO_DIR_NAME);
fs.mkdirs(toDirPath);
int numTotalFiles = 0;
int numDirs = RandomUtils.nextInt(0, 5);
for (int dirCount = 0; dirCount < numDirs; ++dirCount) {
Path subDirPath = new Path(fromDirPath, "subdir" + dirCount);
fs.mkdirs(subDirPath);
int numFiles = RandomUtils.nextInt(0, 10);
for (int fileCount = 0; fileCount < numFiles; ++fileCount) {
numTotalFiles++;
Path subFile = new Path(subDirPath, "file" + fileCount);
fs.createNewFile(subFile);
FSDataOutputStream output = fs.create(subFile, true);
for (int i = 0; i < 100; ++i) {
output.writeInt(i);
output.writeChar('\n');
}
output.close();
}
}
return numTotalFiles;
}
@BeforeAll
public static void init() throws Exception {
conf = new Configuration(false);
conf.set("fs.file.impl", LocalFileSystem.class.getName());
fs = FileSystem.getLocal(conf);
testDir = new FileSystemTestHelper().getTestRootPath(fs);
// don't want scheme on the path, just an absolute path
testDir = new Path(fs.makeQualified(testDir).toUri().getPath());
FileSystem.setDefaultUri(conf, fs.getUri());
fs.setWorkingDirectory(testDir);
}
@AfterAll
public static void cleanup() throws Exception {
fs.delete(testDir, true);
fs.close();
}
private void run(CopyCommandWithMultiThread cmd, String... args) {
cmd.setConf(conf);
assertEquals(0, cmd.run(args));
}
@BeforeEach
public void initDirectory() throws Exception {
dir = new Path("dir" + RandomStringUtils.randomNumeric(4));
numFiles = initialize(dir);
}
@Test
@Timeout(value = 10)
public void testCopy() throws Exception {
MultiThreadedCopy copy = new MultiThreadedCopy(1, DEFAULT_QUEUE_SIZE, 0);
run(copy, new Path(dir, FROM_DIR_NAME).toString(),
new Path(dir, TO_DIR_NAME).toString());
assert copy.getExecutor() == null;
}
@Test
@Timeout(value = 10)
public void testCopyWithThreads() {
run(new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5",
new Path(dir, FROM_DIR_NAME).toString(),
new Path(dir, TO_DIR_NAME).toString());
}
@Test
@Timeout(value = 10)
public void testCopyWithThreadWrong() {
run(new MultiThreadedCopy(1, DEFAULT_QUEUE_SIZE, 0), "-t", "0",
new Path(dir, FROM_DIR_NAME).toString(),
new Path(dir, TO_DIR_NAME).toString());
}
@Test
@Timeout(value = 10)
public void testCopyWithThreadsAndQueueSize() {
int queueSize = 256;
run(new MultiThreadedCopy(5, queueSize, numFiles), "-t", "5", "-q",
Integer.toString(queueSize),
new Path(dir, FROM_DIR_NAME).toString(),
new Path(dir, TO_DIR_NAME).toString());
}
@Test
@Timeout(value = 10)
public void testCopyWithThreadsAndQueueSizeWrong() {
int queueSize = 0;
run(new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", "-q",
Integer.toString(queueSize),
new Path(dir, FROM_DIR_NAME).toString(),
new Path(dir, TO_DIR_NAME).toString());
}
@Test
@Timeout(value = 10)
public void testCopySingleFile() throws Exception {
Path fromDirPath = new Path(dir, FROM_DIR_NAME);
Path subFile = new Path(fromDirPath, "file0");
fs.createNewFile(subFile);
FSDataOutputStream output = fs.create(subFile, true);
for (int i = 0; i < 100; ++i) {
output.writeInt(i);
output.writeChar('\n');
}
output.close();
MultiThreadedCopy copy = new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, 0);
run(copy, "-t", "5", subFile.toString(),
new Path(dir, TO_DIR_NAME).toString());
assert copy.getExecutor() == null;
}
private static
|
TestCopyToLocal
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/sockjs/transport/SockJsSessionFactory.java
|
{
"start": 965,
"end": 1359
}
|
interface ____ {
/**
* Create a new SockJS session.
* @param sessionId the ID of the session
* @param handler the underlying {@link WebSocketHandler}
* @param attributes handshake request specific attributes
* @return a new session, never {@code null}
*/
SockJsSession createSession(String sessionId, WebSocketHandler handler, Map<String, Object> attributes);
}
|
SockJsSessionFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/testdata/template/SuppressWarningsTemplate.java
|
{
"start": 877,
"end": 1046
}
|
class ____ {
@BeforeTemplate
int abs(int x) {
return x < 0 ? -x : x;
}
@AfterTemplate
int math(int x) {
return Math.abs(x);
}
}
|
SuppressWarningsTemplate
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/BinderHelper.java
|
{
"start": 33226,
"end": 40618
}
|
class ____ not found in the inheritance state hierarchy: " + declaringClass
);
}
if ( inheritanceState.isEmbeddableSuperclass() ) {
return context.getMetadataCollector().getMappedSuperclass( declaringClass.toJavaClass() );
}
}
return null;
}
public static String getPath(PropertyHolder holder, PropertyData property) {
return qualify( holder.getPath(), property.getPropertyName() );
}
public static Map<String,String> toAliasTableMap(SqlFragmentAlias[] aliases){
final Map<String,String> result = new HashMap<>();
for ( var aliasAnnotation : aliases ) {
final String table = aliasAnnotation.table();
if ( !table.isBlank() ) {
result.put( aliasAnnotation.alias(), table );
}
}
return result;
}
public static Map<String,String> toAliasEntityMap(SqlFragmentAlias[] aliases){
final Map<String,String> result = new HashMap<>();
for ( var aliasAnnotation : aliases ) {
final var entityClass = aliasAnnotation.entity();
if ( entityClass != void.class ) {
result.put( aliasAnnotation.alias(), entityClass.getName() );
}
}
return result;
}
public static boolean hasToOneAnnotation(AnnotationTarget property) {
return property.hasDirectAnnotationUsage(ManyToOne.class)
|| property.hasDirectAnnotationUsage(OneToOne.class);
}
public static FetchMode getFetchMode(FetchType fetch) {
return switch ( fetch ) {
case EAGER -> FetchMode.JOIN;
case LAZY -> FetchMode.SELECT;
};
}
public static EnumSet<CascadeType> aggregateCascadeTypes(
jakarta.persistence.CascadeType[] cascadeTypes,
Cascade cascadeAnnotation,
boolean orphanRemoval,
MetadataBuildingContext context) {
final var cascades = convertToHibernateCascadeType( cascadeTypes );
final var hibernateCascades = cascadeAnnotation == null ? null : cascadeAnnotation.value();
if ( !isEmpty( hibernateCascades ) ) {
addAll( cascades, hibernateCascades );
}
if ( orphanRemoval ) {
cascades.add( CascadeType.DELETE_ORPHAN );
cascades.add( CascadeType.REMOVE );
}
if ( cascades.contains( CascadeType.REPLICATE ) ) {
warnAboutDeprecatedCascadeType( CascadeType.REPLICATE );
}
cascades.addAll( context.getEffectiveDefaults().getDefaultCascadeTypes() );
return cascades;
}
private static EnumSet<CascadeType> convertToHibernateCascadeType(jakarta.persistence.CascadeType[] cascades) {
final var cascadeTypes = EnumSet.noneOf( CascadeType.class );
if ( cascades != null ) {
for ( var cascade: cascades ) {
cascadeTypes.add( convertCascadeType( cascade ) );
}
}
return cascadeTypes;
}
private static CascadeType convertCascadeType(jakarta.persistence.CascadeType cascade) {
return switch (cascade) {
case ALL -> CascadeType.ALL;
case PERSIST -> CascadeType.PERSIST;
case MERGE -> CascadeType.MERGE;
case REMOVE -> CascadeType.REMOVE;
case REFRESH -> CascadeType.REFRESH;
case DETACH -> CascadeType.DETACH;
};
}
public static String renderCascadeTypeList(EnumSet<CascadeType> cascadeTypes) {
final var cascade = new StringBuilder();
for ( var cascadeType : cascadeTypes ) {
cascade.append( "," );
cascade.append( switch ( cascadeType ) {
case ALL -> "all";
case PERSIST -> "persist";
case MERGE -> "merge";
case LOCK -> "lock";
case REFRESH -> "refresh";
case DETACH -> "evict";
case REMOVE -> "delete";
case DELETE_ORPHAN -> "delete-orphan";
case REPLICATE -> "replicate";
} );
}
return cascade.isEmpty() ? "none" : cascade.substring(1);
}
private static void warnAboutDeprecatedCascadeType(CascadeType cascadeType) {
DEPRECATION_LOGGER.warnf( "CascadeType.%s is deprecated", cascadeType.name() );
}
static boolean isGlobalGeneratorNameGlobal(MetadataBuildingContext context) {
return context.getBootstrapContext().getJpaCompliance().isGlobalGeneratorScopeEnabled();
}
public static boolean isDefault(ClassDetails clazz) {
return clazz == ClassDetails.VOID_CLASS_DETAILS;
}
public static boolean isDefault(TypeDetails clazz) {
return resolveRawClass( clazz ) == ClassDetails.VOID_CLASS_DETAILS;
}
public static void checkMappedByType(
String mappedBy,
Value targetValue,
String propertyName,
PropertyHolder propertyHolder,
Map<String, PersistentClass> persistentClasses) {
if ( targetValue instanceof Collection collection ) {
final var element = (ToOne) collection.getElement();
checkMappedByType( mappedBy, propertyName, propertyHolder, persistentClasses, element );
}
else if ( targetValue instanceof ToOne toOne ) {
checkMappedByType( mappedBy, propertyName, propertyHolder, persistentClasses, toOne );
}
}
/**
 * Verifies that the entity referenced by a {@code mappedBy} to-one mapping
 * is the owning entity, or at least shares a table with it (or with one of
 * its superclasses).
 *
 * @throws AnnotationException if no class in the owner's hierarchy maps to
 *         the same table as the referenced entity
 */
private static void checkMappedByType(
        String mappedBy,
        String propertyName,
        PropertyHolder propertyHolder,
        Map<String, PersistentClass> persistentClasses,
        ToOne toOne) {
    final String referencedEntityName = toOne.getReferencedEntityName();
    final PersistentClass referencedClass = persistentClasses.get( referencedEntityName );
    // Walk up the owner's superclass chain; a table match at any level is
    // sufficient for the association to be considered well-typed.
    PersistentClass ownerClass = propertyHolder.getPersistentClass();
    while ( ownerClass != null ) {
        if ( checkReferencedClass( ownerClass, referencedClass ) ) {
            // the two entities map to the same table, so we are good
            return;
        }
        ownerClass = ownerClass.getSuperPersistentClass();
    }
    // we could not find any entity mapping to the same table
    throw new AnnotationException(
            "Association '" + qualify( propertyHolder.getPath(), propertyName )
                    + "' is 'mappedBy' a property named '" + mappedBy
                    + "' which references the wrong entity type '" + referencedEntityName
                    + "', expected '" + propertyHolder.getEntityName() + "'"
    );
}
// Reports whether the referenced entity — or any of its superclasses — maps
// to the same table as the owner (identity comparison on the Table object).
private static boolean checkReferencedClass(PersistentClass ownerClass, PersistentClass referencedClass) {
    for ( var candidate = referencedClass; candidate != null; candidate = candidate.getSuperPersistentClass() ) {
        // Different entity types are acceptable as long as they share a table
        if ( candidate.getTable() == ownerClass.getTable() ) {
            return true;
        }
    }
    return false;
}
// True when the given @ForeignKey explicitly disables the constraint, or
// defers to the provider while the global default is "no constraint".
// A missing annotation never disables anything by itself.
static boolean noConstraint(ForeignKey foreignKey, boolean noConstraintByDefault) {
    if ( foreignKey == null ) {
        return false;
    }
    final var mode = foreignKey.value();
    return mode == NO_CONSTRAINT
            || ( mode == PROVIDER_DEFAULT && noConstraintByDefault );
}
/**
 * Applies {@code @ForeignKey} settings to a key value. An explicit
 * NO_CONSTRAINT on either annotation (or PROVIDER_DEFAULT combined with the
 * global "no constraint by default" option) disables the constraint;
 * otherwise the outer annotation takes precedence over the nested one.
 * Branch order matters — do not reorder.
 */
static void handleForeignKeyConstraint(
        SimpleValue key,
        ForeignKey foreignKey,
        ForeignKey nestedForeignKey,
        MetadataBuildingContext context) {
    final boolean noConstraintByDefault = context.getBuildingOptions().isNoConstraintByDefault();
    if ( noConstraint( foreignKey, noConstraintByDefault )
            || noConstraint( nestedForeignKey, noConstraintByDefault ) ) {
        // either annotation explicitly opts out of the constraint
        key.disableForeignKey();
    }
    else if ( foreignKey != null ) {
        // outer annotation wins; empty strings become null (no override)
        key.setForeignKeyName( nullIfEmpty( foreignKey.name() ) );
        key.setForeignKeyDefinition( nullIfEmpty( foreignKey.foreignKeyDefinition() ) );
        key.setForeignKeyOptions( foreignKey.options() );
    }
    else if ( noConstraintByDefault ) {
        // no annotation at all and the global default disables constraints
        key.disableForeignKey();
    }
    else if ( nestedForeignKey != null ) {
        key.setForeignKeyName( nullIfEmpty( nestedForeignKey.name() ) );
        key.setForeignKeyDefinition( nullIfEmpty( nestedForeignKey.foreignKeyDefinition() ) );
        key.setForeignKeyOptions( nestedForeignKey.options() );
    }
    // else: no annotation and constraints enabled by default — nothing to do
}
/**
* Extract an annotation from the package-info for the package the given
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingWithJoinedTableMultiLevelInheritance.java
|
{
"start": 1102,
"end": 3543
}
|
class ____ extends BaseInsertOrderingTest {
// Entity types registered for this test's session factory: the Person
// hierarchy plus the Address and Office associations it references.
@Override
protected Class<?>[] getAnnotatedClasses() {
    return new Class<?>[] {
            Address.class,
            Person.class,
            SpecialPerson.class,
            AnotherPerson.class,
            President.class,
            Office.class
    };
}
/**
 * Persists two rounds of entities across the inheritance hierarchy and
 * verifies both the number of prepared statements (insert ordering should
 * batch per table) and the row counts written by each round.
 */
@Test
public void testBatchingAmongstSubClasses() {
    sessionFactoryScope().inTransaction( session -> {
        int iterations = 2;
        for ( int i = 0; i < iterations; i++ ) {
            final President president = new President();
            president.addAddress( new Address() );
            session.persist( president );
            final AnotherPerson anotherPerson = new AnotherPerson();
            Office office = new Office();
            session.persist( office );
            anotherPerson.office = office;
            session.persist( anotherPerson );
            final Person person = new Person();
            session.persist( person );
            final SpecialPerson specialPerson = new SpecialPerson();
            specialPerson.addAddress( new Address() );
            session.persist( specialPerson );
        }
        clearBatches();
    } );
    // 1 for Person (1)
    // 2 for SpecialPerson (3)
    // 2 for AnotherPerson (5)
    // 3 for President (8)
    // 1 for Address (9)
    // 1 for Office (10)
    verifyPreparedStatementCount( 10 );
    sessionFactoryScope().inTransaction( (session) -> {
        // 2 Address per loop (4)
        // 1 Office per loop (2)
        // 1 Person per loop (2)
        // 1 SpecialPerson per loop (2)
        // 1 AnotherPerson per loop (2)
        // 1 President per loop (2)
        final Long addressCount = session
                .createSelectionQuery( "select count(1) from Address", Long.class )
                .getSingleResult();
        assertThat( addressCount ).isEqualTo( 4L );
        final Long officeCount = session
                .createSelectionQuery( "select count(1) from Office", Long.class )
                .getSingleResult();
        assertThat( officeCount ).isEqualTo( 2L );
        final Long presidentCount = session
                .createSelectionQuery( "select count(1) from President", Long.class )
                .getSingleResult();
        assertThat( presidentCount ).isEqualTo( 2L );
        final Long anotherPersonCount = session
                .createSelectionQuery( "select count(1) from AnotherPerson", Long.class )
                .getSingleResult();
        // fixed copy-paste bug: previously re-asserted presidentCount here,
        // leaving anotherPersonCount queried but never checked
        assertThat( anotherPersonCount ).isEqualTo( 2L );
    } );
}
// Truncates all mapped tables after each test so runs stay independent.
// Widened to public: JUnit 4 validates that @After lifecycle methods are
// public — a protected method fails class validation and never runs.
@After
public void cleanupTestData() {
    sessionFactoryScope().getSessionFactory().getSchemaManager().truncate();
}
@Entity(name = "Address")
@Table(name = "ADDRESS")
public static
|
InsertOrderingWithJoinedTableMultiLevelInheritance
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/registration/RelyingPartyRegistrations.java
|
{
"start": 1110,
"end": 1290
}
|
class ____ constructing instances of {@link RelyingPartyRegistration}
*
* @author Josh Cummings
* @author Ryan Cassar
* @author Marcus da Coregio
* @since 5.4
*/
public final
|
for
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/namingstrategy/CamelCaseToUnderscoresNamingStrategyTest.java
|
{
"start": 2459,
"end": 3457
}
|
/**
 * Serializable entity whose field names deliberately mix upper-case runs,
 * digits, and an explicitly quoted column, exercising how the naming
 * strategy derives column names from property names.
 */
class ____ implements java.io.Serializable {
    // intentionally starts with an upper-case run (AbcdEfgh + I21)
    @Id
    protected String AbcdEfghI21;
    protected String wordWithDigitD1;
    protected String hello1;
    protected String hello1D2;
    protected String hello3d4;
    // explicitly quoted column name — must be preserved verbatim
    @Column(name = "\"Quoted-ColumnName\"")
    protected String quoted;
    public String getAbcdEfghI21() {
        return AbcdEfghI21;
    }
    public void setAbcdEfghI21(String abcdEfghI21) {
        AbcdEfghI21 = abcdEfghI21;
    }
    public String getWordWithDigitD1() {
        return wordWithDigitD1;
    }
    public void setWordWithDigitD1(String wordWithDigitD1) {
        this.wordWithDigitD1 = wordWithDigitD1;
    }
    public String getHello1() {
        return hello1;
    }
    public void setHello1(String hello1) {
        this.hello1 = hello1;
    }
    public String getHello1D2() {
        return hello1D2;
    }
    public void setHello1D2(String hello1D2) {
        this.hello1D2 = hello1D2;
    }
    public String getHello3d4() {
        return hello3d4;
    }
    public void setHello3d4(String hello3d4) {
        this.hello3d4 = hello3d4;
    }
}
}
|
B
|
java
|
elastic__elasticsearch
|
x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
|
{
"start": 52663,
"end": 55343
}
|
class ____ implements SyntheticSourceSupport {
private final Integer ignoreAbove = randomBoolean() ? null : between(10, 100);
private final boolean allIgnored = ignoreAbove != null && rarely();
private final String nullValue = usually() ? null : randomAlphaOfLength(2);
@Override
public SyntheticSourceExample example(int maxValues) {
if (randomBoolean()) {
Tuple<String, String> v = generateValue();
return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping);
}
List<Tuple<String, String>> values = randomList(1, maxValues, this::generateValue);
List<String> in = values.stream().map(Tuple::v1).toList();
List<String> docValuesValues = new ArrayList<>();
List<String> outExtraValues = new ArrayList<>();
values.stream().map(Tuple::v2).forEach(v -> {
if (ignoreAbove != null && v.length() > ignoreAbove) {
outExtraValues.add(v);
} else {
docValuesValues.add(v);
}
});
List<String> outList = new ArrayList<>(new HashSet<>(docValuesValues));
Collections.sort(outList);
outList.addAll(outExtraValues);
Object out = outList.size() == 1 ? outList.get(0) : outList;
return new SyntheticSourceExample(in, out, this::mapping);
}
private Tuple<String, String> generateValue() {
if (nullValue != null && randomBoolean()) {
return Tuple.tuple(null, nullValue);
}
int length = 5;
if (ignoreAbove != null && (allIgnored || randomBoolean())) {
length = ignoreAbove + 5;
}
String v = randomAlphaOfLength(length);
return Tuple.tuple(v, v);
}
private void mapping(XContentBuilder b) throws IOException {
b.field("type", "wildcard");
if (nullValue != null) {
b.field("null_value", nullValue);
}
if (ignoreAbove != null) {
b.field("ignore_above", ignoreAbove);
}
}
@Override
public List<SyntheticSourceInvalidExample> invalidExample() throws IOException {
return List.of();
}
}
@Override
protected List<SortShortcutSupport> getSortShortcutSupport() {
return List.of(new SortShortcutSupport(this::minimalMapping, this::writeField, false));
}
@Override
protected boolean supportsDocValuesSkippers() {
return false;
}
}
|
WildcardSyntheticSourceSupport
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/Verticle.java
|
{
"start": 511,
"end": 1322
}
|
class ____ not deprecated, however we encourage instead to use {@link VerticleBase}
*
* A verticle is a piece of code that can be deployed by Vert.x.
* <p>
* Use of verticles with Vert.x is entirely optional, but if you use them they provide an <i>actor-like</i>
* deployment and concurrency model, out of the box.
* <p>
* Vert.x does not provide a strict actor implementation, but there are significant similarities.
* <p>
* You can think of verticle instances as a bit like actors in the Actor Model. A typical verticle-based Vert.x application
* will be composed of many verticle instances in each Vert.x instance.
* <p>
* The verticles communicate with each other by sending messages over the {@link io.vertx.core.eventbus.EventBus}.
*
* @author <a href="http://tfox.org">Tim Fox</a>
*/
public
|
is
|
java
|
apache__camel
|
test-infra/camel-test-infra-couchdb/src/main/java/org/apache/camel/test/infra/couchdb/services/CouchDbInfraService.java
|
{
"start": 982,
"end": 1186
}
|
/**
 * Test-infra contract for a CouchDB service: exposes the host and port the
 * instance listens on.
 */
interface ____ extends InfrastructureService {

    String host();

    int port();

    /** The service address in {@code host:port} form. */
    default String getServiceAddress() {
        return host() + ":" + port();
    }
}
|
CouchDbInfraService
|
java
|
netty__netty
|
codec-dns/src/main/java/io/netty/handler/codec/dns/DnsOpCode.java
|
{
"start": 850,
"end": 3389
}
|
/**
 * A DNS OpCode identifying the kind of operation carried by a DNS message.
 * Well-known codes are exposed as constants; {@link #valueOf(int)} returns
 * the shared constant where possible and an "UNKNOWN"-named instance
 * otherwise. Equality, hashing, and ordering are based solely on the byte
 * value — the name is purely cosmetic.
 */
class ____ implements Comparable<DnsOpCode> {
    /**
     * The 'Query' DNS OpCode, as defined in <a href="https://tools.ietf.org/html/rfc1035">RFC1035</a>.
     */
    public static final DnsOpCode QUERY = new DnsOpCode(0x00, "QUERY");
    /**
     * The 'IQuery' DNS OpCode, as defined in <a href="https://tools.ietf.org/html/rfc1035">RFC1035</a>.
     */
    public static final DnsOpCode IQUERY = new DnsOpCode(0x01, "IQUERY");
    /**
     * The 'Status' DNS OpCode, as defined in <a href="https://tools.ietf.org/html/rfc1035">RFC1035</a>.
     */
    public static final DnsOpCode STATUS = new DnsOpCode(0x02, "STATUS");
    /**
     * The 'Notify' DNS OpCode, as defined in <a href="https://tools.ietf.org/html/rfc1996">RFC1996</a>.
     */
    public static final DnsOpCode NOTIFY = new DnsOpCode(0x04, "NOTIFY");
    /**
     * The 'Update' DNS OpCode, as defined in <a href="https://tools.ietf.org/html/rfc2136">RFC2136</a>.
     */
    public static final DnsOpCode UPDATE = new DnsOpCode(0x05, "UPDATE");
    /**
     * Returns the {@link DnsOpCode} instance of the specified byte value.
     * Unknown values yield a new instance named "UNKNOWN".
     */
    public static DnsOpCode valueOf(int b) {
        switch (b) {
        case 0x00:
            return QUERY;
        case 0x01:
            return IQUERY;
        case 0x02:
            return STATUS;
        case 0x04:
            return NOTIFY;
        case 0x05:
            return UPDATE;
        default:
            break;
        }
        return new DnsOpCode(b);
    }
    // stored as a signed byte; toString prints it unsigned via (byteValue & 0xFF)
    private final byte byteValue;
    private final String name;
    // Lazily built toString cache. Deliberately unsynchronized: under a race
    // several threads may build the same immutable String; the result is
    // identical either way, so the last write simply wins.
    private String text;
    private DnsOpCode(int byteValue) {
        this(byteValue, "UNKNOWN");
    }
    public DnsOpCode(int byteValue, String name) {
        this.byteValue = (byte) byteValue;
        this.name = checkNotNull(name, "name");
    }
    public byte byteValue() {
        return byteValue;
    }
    @Override
    public int hashCode() {
        return byteValue;
    }
    @Override
    public boolean equals(Object obj) {
        // equality ignores the name: two codes with the same byte are equal
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DnsOpCode)) {
            return false;
        }
        return byteValue == ((DnsOpCode) obj).byteValue;
    }
    @Override
    public int compareTo(DnsOpCode o) {
        // NOTE(review): compares signed byte values (no overflow possible for
        // the -128..127 range), so codes >= 0x80 order before 0x00 even though
        // toString() renders them unsigned — confirm this ordering is intended.
        return byteValue - o.byteValue;
    }
    @Override
    public String toString() {
        String text = this.text;
        if (text == null) {
            this.text = text = name + '(' + (byteValue & 0xFF) + ')';
        }
        return text;
    }
}
|
DnsOpCode
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/transaction/JtaReusingEntityTransactionTest.java
|
{
"start": 1263,
"end": 2164
}
|
class ____ {
/**
 * An {@code EntityTransaction} obtained from an entity manager should be
 * reusable: the same instance is begun and committed twice, and the data
 * persisted in both units of work must be visible afterwards.
 */
@Test
public void entityTransactionShouldBeReusableTest(EntityManagerFactoryScope scope) {
    scope.inEntityManager(
            entityManager -> {
                EntityTransaction transaction = null;
                try {
                    transaction = entityManager.getTransaction();
                    // NOTE(review): persist is issued before begin(); presumably
                    // the pending operation is flushed once the (JTA) transaction
                    // starts — confirm this ordering is intentional.
                    entityManager.persist( new TestEntity() );
                    transaction.begin();
                    transaction.commit();
                    // reuse the very same EntityTransaction instance
                    transaction.begin();
                    entityManager.persist( new TestEntity() );
                    transaction.commit();
                }
                finally {
                    // roll back if an exception left the transaction open
                    if ( transaction != null && transaction.isActive() ) {
                        transaction.rollback();
                    }
                }
            }
    );
    // both persisted entities must be visible from a fresh transaction
    scope.inTransaction(
            entityManager -> {
                List<TestEntity> results = entityManager.createQuery( "from TestEntity", TestEntity.class ).getResultList();
                assertThat( results.size(), is( 2 ) );
            }
    );
}
@Entity(name = "TestEntity")
public static
|
JtaReusingEntityTransactionTest
|
java
|
google__dagger
|
dagger-runtime/main/java/dagger/internal/SingleCheck.java
|
{
"start": 975,
"end": 2827
}
|
/**
 * A {@link Provider} that memoizes the first value produced by its delegate
 * using the lock-free "single-check" idiom: both fields are volatile and no
 * synchronization is used, so under a race the delegate may be invoked more
 * than once, with each caller observing a value it produced. After the value
 * is cached the delegate reference is nulled out so it can be collected.
 */
class ____<T extends @Nullable Object> implements Provider<T> {
    // Sentinel distinguishing "not computed yet" from a legitimately null value.
    private static final Object UNINITIALIZED = new Object();
    private volatile @Nullable Provider<T> provider;
    private volatile @Nullable Object instance = UNINITIALIZED;
    private SingleCheck(Provider<T> provider) {
        assert provider != null;
        this.provider = provider;
    }
    @SuppressWarnings("unchecked") // cast only happens when result comes from the delegate provider
    @Override
    public T get() {
        @Nullable Object local = instance;
        if (local == UNINITIALIZED) {
            // provider is volatile and might become null after the check, so retrieve the provider first
            @Nullable Provider<T> providerReference = provider;
            if (providerReference == null) {
                // The provider was null, so the instance must already be set
                local = instance;
            } else {
                local = providerReference.get();
                instance = local;
                // Null out the reference to the provider. We are never going to need it again, so we can
                // make it eligible for GC.
                provider = null;
            }
        }
        return (T) local;
    }
    /** Returns a {@link Provider} that caches the value from the given delegate provider. */
    public static <T> Provider<T> provider(Provider<T> provider) {
        // If a scoped @Binds delegates to a scoped binding, don't cache the value again.
        if (provider instanceof SingleCheck || provider instanceof DoubleCheck) {
            return provider;
        }
        return new SingleCheck<T>(checkNotNull(provider));
    }
    /**
     * Legacy javax version of the method to support libraries compiled with an older version of
     * Dagger. Do not use directly.
     */
    public static <P extends javax.inject.Provider<T>, T> javax.inject.Provider<T> provider(
            P delegate) {
        return provider(asDaggerProvider(delegate));
    }
}
|
SingleCheck
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/interceptor/InterceptorJtaTransactionTest.java
|
{
"start": 11834,
"end": 13581
}
|
class ____ implements Interceptor {
private boolean afterTransactionBeginMethodCalled;
private Boolean afterTransactionBeginAssertionPassed;
private boolean beforeTransactionCompletionMethodCalled;
private Boolean beforeTransactionCompletionAssertionPassed;
private boolean afterTransactionCompletionMethodCalled;
private Boolean afterTransactionCompletionAssertionPassed;
public void reset() {
afterTransactionBeginMethodCalled = false;
afterTransactionBeginAssertionPassed = null;
beforeTransactionCompletionMethodCalled = false;
beforeTransactionCompletionAssertionPassed = null;
afterTransactionCompletionMethodCalled = false;
afterTransactionCompletionAssertionPassed = null;
}
@Override
public void afterTransactionBegin(org.hibernate.Transaction tx) {
afterTransactionBeginMethodCalled = true;
if ( tx != null ) {
afterTransactionBeginAssertionPassed = false;
assertEquals( TransactionStatus.ACTIVE, tx.getStatus() );
afterTransactionBeginAssertionPassed = true;
}
}
@Override
public void beforeTransactionCompletion(org.hibernate.Transaction tx) {
beforeTransactionCompletionMethodCalled = true;
if ( tx != null ) {
beforeTransactionCompletionAssertionPassed = false;
assertEquals( TransactionStatus.ACTIVE, tx.getStatus() );
beforeTransactionCompletionAssertionPassed = true;
}
}
@Override
public void afterTransactionCompletion(org.hibernate.Transaction tx) {
afterTransactionCompletionMethodCalled = true;
if ( tx != null ) {
afterTransactionCompletionAssertionPassed = false;
assertEquals( TransactionStatus.COMMITTED, tx.getStatus() );
afterTransactionCompletionAssertionPassed = true;
}
}
};
}
|
TransactionInterceptor
|
java
|
grpc__grpc-java
|
binder/src/main/java/io/grpc/binder/internal/PendingAuthListener.java
|
{
"start": 491,
"end": 2803
}
|
class ____<ReqT, RespT> extends ServerCall.Listener<ReqT> {
private final ConcurrentLinkedQueue<ListenerConsumer<ReqT>> pendingSteps =
new ConcurrentLinkedQueue<>();
private final AtomicReference<ServerCall.Listener<ReqT>> delegateRef =
new AtomicReference<>(null);
PendingAuthListener() {}
void startCall(
ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) {
ServerCall.Listener<ReqT> delegate;
try {
delegate = next.startCall(call, headers);
} catch (RuntimeException e) {
call.close(
Status.INTERNAL
.withCause(e)
.withDescription("Failed to start server call after authorization check"),
new Metadata());
return;
}
delegateRef.set(delegate);
maybeRunPendingSteps();
}
/**
* Runs any enqueued step in this ServerCall listener as long as the authorization check is
* complete. Otherwise, no-op and returns immediately.
*/
private void maybeRunPendingSteps() {
@Nullable ServerCall.Listener<ReqT> delegate = delegateRef.get();
if (delegate == null) {
return;
}
// This section is synchronized so that no 2 threads may attempt to retrieve elements from the
// queue in order but end up executing the steps out of order.
synchronized (this) {
ListenerConsumer<ReqT> nextStep;
while ((nextStep = pendingSteps.poll()) != null) {
nextStep.accept(delegate);
}
}
}
@Override
public void onCancel() {
pendingSteps.offer(ServerCall.Listener::onCancel);
maybeRunPendingSteps();
}
@Override
public void onComplete() {
pendingSteps.offer(ServerCall.Listener::onComplete);
maybeRunPendingSteps();
}
@Override
public void onHalfClose() {
pendingSteps.offer(ServerCall.Listener::onHalfClose);
maybeRunPendingSteps();
}
@Override
public void onMessage(ReqT message) {
pendingSteps.offer(delegate -> delegate.onMessage(message));
maybeRunPendingSteps();
}
@Override
public void onReady() {
pendingSteps.offer(ServerCall.Listener::onReady);
maybeRunPendingSteps();
}
/**
* Similar to Java8's {@link java.util.function.Consumer}, but redeclared in order to support
* Android SDK 21.
*/
private
|
PendingAuthListener
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/config/MvcNamespaceTests.java
|
{
"start": 49431,
"end": 49880
}
|
/**
 * Controller capturing what the MVC namespace configuration binds: a date
 * converted via the {@code @IsoDate} format annotation, an optional percent
 * number, and whether bean validation recorded exactly one error.
 */
class ____ {
    private Date date;
    private Double percent;
    private boolean recordedValidationError;
    @RequestMapping
    public void testBind(@RequestParam @IsoDate Date date,
            @RequestParam(required = false) @PercentNumber Double percent,
            @MyValid TestBean bean, BindingResult result) {
        this.date = date;
        this.percent = percent;
        // the test scenario expects exactly one validation error
        this.recordedValidationError = (result.getErrorCount() == 1);
    }
}
public static
|
TestController
|
java
|
quarkusio__quarkus
|
extensions/smallrye-reactive-messaging/runtime/src/main/java/io/quarkus/smallrye/reactivemessaging/runtime/QuarkusParameterDescriptor.java
|
{
"start": 392,
"end": 1245
}
|
/**
 * {@link MethodParameterDescriptor} backed by a mutable list of
 * {@link TypeInfo}, kept bean-style (no-arg constructor plus getter/setter)
 * so runtime proxies can instantiate and populate it.
 */
class ____ implements MethodParameterDescriptor {

    private List<TypeInfo> infos;

    public QuarkusParameterDescriptor() {
        // Bean-style no-arg constructor, required for the proxies
    }

    public QuarkusParameterDescriptor(List<TypeInfo> infos) {
        this.infos = infos;
    }

    public List<TypeInfo> getInfos() {
        return infos;
    }

    public void setInfos(List<TypeInfo> infos) {
        this.infos = infos;
    }

    @Override
    public List<Class<?>> getTypes() {
        // not populated yet: report no parameter types (mutable, as before)
        if (infos == null) {
            return new ArrayList<>();
        }
        final List<Class<?>> types = new ArrayList<>(infos.size());
        for (TypeInfo info : infos) {
            types.add(info.getName());
        }
        return types;
    }

    @Override
    public Class<?> getGenericParameterType(int paramIndex, int genericIndex) {
        final TypeInfo info = infos.get(paramIndex);
        return info.getGenerics().get(genericIndex);
    }
}
|
QuarkusParameterDescriptor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java
|
{
"start": 1191,
"end": 2681
}
|
/**
 * Request to delete a scheduled event from a calendar. Both identifiers are
 * mandatory. Wire format: calendar id first, then event id — the stream
 * constructor and {@link #writeTo} must stay in sync.
 */
class ____ extends AcknowledgedRequest<Request> {
    private String calendarId;
    private String eventId;
    public Request(StreamInput in) throws IOException {
        super(in);
        // read order mirrors writeTo
        calendarId = in.readString();
        eventId = in.readString();
    }
    public Request(String calendarId, String eventId) {
        super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
        // fail fast with the field's preferred name if an id is missing
        this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName());
        this.eventId = ExceptionsHelper.requireNonNull(eventId, ScheduledEvent.EVENT_ID.getPreferredName());
    }
    public String getCalendarId() {
        return calendarId;
    }
    public String getEventId() {
        return eventId;
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeString(calendarId);
        out.writeString(eventId);
    }
    @Override
    public int hashCode() {
        return Objects.hash(eventId, calendarId);
    }
    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        Request other = (Request) obj;
        return Objects.equals(eventId, other.eventId) && Objects.equals(calendarId, other.calendarId);
    }
}
}
|
Request
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerCommand.java
|
{
"start": 1685,
"end": 4766
}
|
/**
 * Base representation of a docker sub-command (e.g. {@code run}) and its
 * arguments, stored as a sorted map of option name to value list.
 */
class ____ {
    private final String command;
    // TreeMap keeps options in deterministic (sorted) order for toString()
    // and the serialized command file.
    private final Map<String, List<String>> commandArguments;

    protected DockerCommand(String command) {
        this.command = command;
        this.commandArguments = new TreeMap<>();
        // register the sub-command itself under the reserved key; safe to call
        // from the constructor because addCommandArguments is final
        addCommandArguments("docker-command", command);
    }

    /**
     * Returns the docker sub-command string being used
     * e.g 'run'.
     */
    public final String getCommandOption() {
        return this.command;
    }

    /**
     * Add command commandWithArguments - this method is only meant for use by
     * sub-classes.
     *
     * @param key name of the key to be added
     * @param value value of the key
     */
    protected final void addCommandArguments(String key, String value) {
        // computeIfAbsent replaces the previous manual get/null-check/put dance
        commandArguments.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
    }

    /** Returns an unmodifiable view of the accumulated arguments. */
    public Map<String, List<String>> getDockerCommandWithArguments() {
        return Collections.unmodifiableMap(commandArguments);
    }

    @Override
    public String toString() {
        StringBuilder ret = new StringBuilder(this.command);
        for (Map.Entry<String, List<String>> entry : commandArguments.entrySet()) {
            ret.append(" ").append(entry.getKey());
            ret.append("=").append(StringUtils.join(",", entry.getValue()));
        }
        return ret.toString();
    }

    /**
     * Add the client configuration directory to the docker command.
     *
     * The client configuration option proceeds any of the docker subcommands
     * (such as run, load, pull, etc). Ordering will be handled by
     * container-executor. Docker expects the value to be a directory containing
     * the file config.json. This file is typically generated via docker login.
     *
     * @param clientConfigDir - directory containing the docker client config.
     */
    public void setClientConfigDir(String clientConfigDir) {
        if (clientConfigDir != null) {
            addCommandArguments("docker-config", clientConfigDir);
        }
    }

    /**
     * Prepare the privileged operation object that will be used to invoke
     * the container-executor.
     *
     * @param dockerCommand Specific command to be run by docker.
     * @param containerName
     * @param env
     * @param nmContext
     * @return Returns the PrivilegedOperation object to be used.
     * @throws ContainerExecutionException
     */
    public PrivilegedOperation preparePrivilegedOperation(
            DockerCommand dockerCommand, String containerName, Map<String,
            String> env, Context nmContext)
            throws ContainerExecutionException {
        DockerClient dockerClient = new DockerClient();
        String commandFile =
                dockerClient.writeCommandToTempFile(dockerCommand,
                        ContainerId.fromString(containerName),
                        nmContext);
        PrivilegedOperation dockerOp = new PrivilegedOperation(
                PrivilegedOperation.OperationType.RUN_DOCKER_CMD);
        dockerOp.appendArgs(commandFile);
        return dockerOp;
    }
}
|
DockerCommand
|
java
|
elastic__elasticsearch
|
x-pack/plugin/wildcard/src/internalClusterTest/java/org/elasticsearch/xpack/wildcard/search/WildcardSearchIT.java
|
{
"start": 1096,
"end": 5649
}
|
class ____ extends ESIntegTestCase {
private List<String> terms = null;
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(Wildcard.class);
}
@Before
public void setup() throws IOException {
terms = new ArrayList<>();
XContentBuilder xcb = XContentFactory.jsonBuilder()
.startObject()
.startObject("properties")
.startObject("wildcard")
.field("type", "wildcard")
.endObject()
.startObject("keyword")
.field("type", "keyword")
.endObject()
.endObject()
.endObject();
indicesAdmin().prepareCreate("test").setMapping(xcb).get();
final int numDocs = randomIntBetween(100, 1000);
final BulkRequestBuilder builder = client().prepareBulk();
for (int i = 0; i < numDocs; i++) {
if (rarely()) {
indexMultiValue(builder);
} else {
indexSingleValue(builder);
}
}
assertFalse(builder.get().hasFailures());
indicesAdmin().prepareRefresh("test").get();
}
private void indexSingleValue(BulkRequestBuilder builder) {
String term = randomIndexString();
builder.add(
new IndexRequest("test").source("{\"wildcard\" : \"" + term + "\", \"keyword\" : \"" + term + "\"}", XContentType.JSON)
);
terms.add(term);
}
private void indexMultiValue(BulkRequestBuilder builder) {
int docSize = randomIntBetween(1, 10);
String[] docTerms = new String[docSize];
for (int i = 0; i < docSize; i++) {
String term = randomIndexString();
terms.add(term);
docTerms[i] = "\"" + term + "\"";
}
builder.add(
new IndexRequest("test").source(
"{\"wildcard\" : " + Arrays.toString(docTerms) + ", \"keyword\" : " + Arrays.toString(docTerms) + "}",
XContentType.JSON
)
);
}
public void testTermQueryDuel() {
for (int i = 0; i < 50; i++) {
String term = randomQueryString(terms);
TermQueryBuilder termQueryBuilder1 = new TermQueryBuilder("wildcard", term);
TermQueryBuilder termQueryBuilder2 = new TermQueryBuilder("keyword", term);
assertResponse(
client().prepareSearch("test").setQuery(termQueryBuilder1),
response -> assertResponse(
client().prepareSearch("test").setQuery(termQueryBuilder2),
response2 -> assertThat(
response.getHits().getTotalHits().value(),
Matchers.equalTo(response2.getHits().getTotalHits().value())
)
)
);
}
}
public void testTermsQueryDuel() {
for (int i = 0; i < 10; i++) {
String[] terms = new String[randomIntBetween(2, 8192)];
for (int j = 0; j < terms.length; j++) {
terms[j] = randomQueryString(this.terms);
}
TermsQueryBuilder termsQueryBuilder1 = new TermsQueryBuilder("wildcard", terms);
TermsQueryBuilder termsQueryBuilder2 = new TermsQueryBuilder("keyword", terms);
assertResponse(
client().prepareSearch("test").setQuery(termsQueryBuilder1),
response -> assertResponse(
client().prepareSearch("test").setQuery(termsQueryBuilder2),
response2 -> assertThat(
response.getHits().getTotalHits().value(),
Matchers.equalTo(response2.getHits().getTotalHits().value())
)
)
);
}
}
private static String randomIndexString() {
String string = randomAlphaOfLength(randomIntBetween(0, 30));
if (rarely()) {
return string + "*";
} else if (rarely()) {
return "*" + string;
} else if (rarely()) {
return "*" + string + "*";
} else {
return string;
}
}
private static String randomQueryString(List<String> terms) {
if (rarely()) {
return terms.get(randomIntBetween(0, terms.size() - 1));
} else if (randomBoolean()) {
return randomAlphaOfLength(randomIntBetween(0, 30));
} else {
return randomAlphaOfLength(1) + "*";
}
}
}
|
WildcardSearchIT
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/ValueObjectBinderTests.java
|
{
"start": 23032,
"end": 23569
}
|
/**
 * Immutable constructor-bound bean whose two nested members are annotated
 * with {@code @DefaultValue}, so instances are supplied even when no
 * matching configuration properties exist.
 */
class ____ {
    private final NestedImmutable nestedImmutable;
    private final NestedJavaBean nestedJavaBean;
    NestedConstructorBeanWithDefaultValue(@DefaultValue NestedImmutable nestedImmutable,
            @DefaultValue NestedJavaBean nestedJavaBean) {
        this.nestedImmutable = nestedImmutable;
        this.nestedJavaBean = nestedJavaBean;
    }
    NestedImmutable getNestedImmutable() {
        return this.nestedImmutable;
    }
    NestedJavaBean getNestedJavaBean() {
        return this.nestedJavaBean;
    }
}
static
|
NestedConstructorBeanWithDefaultValue
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/MissingSuperCallTest.java
|
{
"start": 4822,
"end": 5029
}
|
interface ____ {}
""")
.addSourceLines(
"Super.java",
"""
import edu.umd.cs.findbugs.annotations.OverrideMustInvoke;
public
|
OverrideMustInvoke
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigDataLocationResolversTests.java
|
{
"start": 11515,
"end": 11576
}
|
// Marker subclass used by the tests; adds no behavior of its own.
class ____ extends TestResolver {
}
static
|
LowestTestResolver
|
java
|
junit-team__junit5
|
platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/ManagedResource.java
|
{
"start": 1410,
"end": 1489
}
|
interface ____ {
@Target(ElementType.TYPE)
@Retention(RUNTIME)
@
|
ManagedResource
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/VoidMissingNullableTest.java
|
{
"start": 10493,
"end": 10877
}
|
class ____ {
@Nullable Void v;
void f() {
var v = this.v;
}
}
""")
.doTest();
}
@Test
public void negativeOtherLocalVariable() {
aggressiveCompilationHelper
.addSourceLines(
"Test.java",
"""
import javax.annotation.Nullable;
|
Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
|
{
"start": 44605,
"end": 46256
}
|
enum ____ type {@code E} in terms of the value of its ordinal. Enums serialized like this must have a corresponding
* test which uses {@code EnumSerializationTestUtils#assertEnumSerialization} to fix the wire protocol.
*/
public <E extends Enum<E>> void writeEnumSet(EnumSet<E> enumSet) throws IOException {
writeVInt(enumSet.size());
for (E e : enumSet) {
writeEnum(e);
}
}
/**
* Write a {@link TimeValue} to the stream
*/
public void writeTimeValue(TimeValue timeValue) throws IOException {
writeZLong(timeValue.duration());
writeByte((byte) timeValue.timeUnit().ordinal());
}
/**
* Write an optional {@link TimeValue} to the stream.
*/
public void writeOptionalTimeValue(@Nullable TimeValue timeValue) throws IOException {
if (timeValue == null) {
writeBoolean(false);
} else {
writeBoolean(true);
writeTimeValue(timeValue);
}
}
/**
* Similar to {@link #writeOptionalWriteable} but for use when the value is always missing.
*/
public <T extends Writeable> void writeMissingWriteable(Class<T> ignored) throws IOException {
writeBoolean(false);
}
/**
* Similar to {@link #writeOptionalString} but for use when the value is always missing.
*/
public void writeMissingString() throws IOException {
writeBoolean(false);
}
/**
* Write a {@link BigInteger} to the stream
*/
public void writeBigInteger(BigInteger bigInteger) throws IOException {
writeString(bigInteger.toString());
}
}
|
with
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/NestedCompositeIdWithOrderedUpdatesTest.java
|
{
"start": 2495,
"end": 3212
}
|
/**
 * Composite identifier combining a lazy, non-optional association to
 * {@link B} with a string component.
 */
class ____
{
    @ManyToOne(cascade={}, // cascade nothing
            fetch=FetchType.LAZY,
            optional=false)
    private B b;
    /**
     * "key" won't work because h2 database considers it a reserved word, and hibernate doesn't escape it.
     * furthermore, the variable name must be after {@link #b}, alphabetically, to account for hibernate's internal sorting.
     */
    private String zkey;
    // no-arg constructor required for id classes
    public AId()
    {
    }
    public AId(B b, String key)
    {
        this.b=b;
        this.zkey=key;
    }
    public B getB()
    {
        return b;
    }
    public void setB(B b)
    {
        this.b=b;
    }
    public String getZkey()
    {
        return zkey;
    }
    public void setZkey(String zkey)
    {
        this.zkey=zkey;
    }
}
@Entity(name = "B")
public static
|
AId
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsInfo.java
|
{
"start": 1144,
"end": 1534
}
|
interface ____ {
/**
* Typically name corresponds to annotation {@link Metric#value()} or
* the name of the class.
* @return the name of the metric/tag
*/
String name();
/**
* Typically the description corresponds to annotation {@link Metric#about()}
* or the name of the class.
* @return the description of the metric/tag
*/
String description();
}
|
MetricsInfo
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/SecureFieldDetectionTest.java
|
{
"start": 11461,
"end": 11734
}
|
class ____ {
private String notSecured;
public String getNotSecured() {
return notSecured;
}
public void setNotSecured(String notSecured) {
this.notSecured = notSecured;
}
}
public static
|
NoSecureField
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeMapData.java
|
{
"start": 1899,
"end": 5980
}
|
class ____ extends MapData implements Externalizable, KryoSerializable {
private Object baseObject;
private long baseOffset;
// The size of this map's backing data, in bytes.
// The 4-bytes header of key array `numBytes` is also included, so it's actually equal to
// 4 + key array numBytes + value array numBytes.
private int sizeInBytes;
public Object getBaseObject() { return baseObject; }
public long getBaseOffset() { return baseOffset; }
public int getSizeInBytes() { return sizeInBytes; }
private final UnsafeArrayData keys;
private final UnsafeArrayData values;
/**
* Construct a new UnsafeMapData. The resulting UnsafeMapData won't be usable until
* `pointTo()` has been called, since the value returned by this constructor is equivalent
* to a null pointer.
*/
public UnsafeMapData() {
keys = new UnsafeArrayData();
values = new UnsafeArrayData();
}
/**
* Update this UnsafeMapData to point to different backing data.
*
* @param baseObject the base object
* @param baseOffset the offset within the base object
* @param sizeInBytes the size of this map's backing data, in bytes
*/
public void pointTo(Object baseObject, long baseOffset, int sizeInBytes) {
// Read the numBytes of key array from the first 8 bytes.
final long keyArraySize = Platform.getLong(baseObject, baseOffset);
assert keyArraySize >= 0 : "keyArraySize (" + keyArraySize + ") should >= 0";
assert keyArraySize <= Integer.MAX_VALUE :
"keyArraySize (" + keyArraySize + ") should <= Integer.MAX_VALUE";
final int valueArraySize = sizeInBytes - (int)keyArraySize - 8;
assert valueArraySize >= 0 : "valueArraySize (" + valueArraySize + ") should >= 0";
keys.pointTo(baseObject, baseOffset + 8, (int)keyArraySize);
values.pointTo(baseObject, baseOffset + 8 + keyArraySize, valueArraySize);
assert keys.numElements() == values.numElements();
this.baseObject = baseObject;
this.baseOffset = baseOffset;
this.sizeInBytes = sizeInBytes;
}
@Override
public int numElements() {
return keys.numElements();
}
@Override
public UnsafeArrayData keyArray() {
return keys;
}
@Override
public UnsafeArrayData valueArray() {
return values;
}
public void writeToMemory(Object target, long targetOffset) {
Platform.copyMemory(baseObject, baseOffset, target, targetOffset, sizeInBytes);
}
public void writeTo(ByteBuffer buffer) {
assert(buffer.hasArray());
byte[] target = buffer.array();
int offset = buffer.arrayOffset();
int pos = buffer.position();
writeToMemory(target, Platform.BYTE_ARRAY_OFFSET + offset + pos);
buffer.position(pos + sizeInBytes);
}
@Override
public UnsafeMapData copy() {
UnsafeMapData mapCopy = new UnsafeMapData();
final byte[] mapDataCopy = new byte[sizeInBytes];
Platform.copyMemory(
baseObject, baseOffset, mapDataCopy, Platform.BYTE_ARRAY_OFFSET, sizeInBytes);
mapCopy.pointTo(mapDataCopy, Platform.BYTE_ARRAY_OFFSET, sizeInBytes);
return mapCopy;
}
@Override
public void writeExternal(ObjectOutput out) throws IOException {
byte[] bytes = UnsafeDataUtils.getBytes(baseObject, baseOffset, sizeInBytes);
out.writeInt(bytes.length);
out.write(bytes);
}
@Override
public void readExternal(ObjectInput in) throws IOException {
this.baseOffset = BYTE_ARRAY_OFFSET;
this.sizeInBytes = in.readInt();
this.baseObject = new byte[sizeInBytes];
in.readFully((byte[]) baseObject);
pointTo(baseObject, baseOffset, sizeInBytes);
}
@Override
public void write(Kryo kryo, Output output) {
byte[] bytes = UnsafeDataUtils.getBytes(baseObject, baseOffset, sizeInBytes);
output.writeInt(bytes.length);
output.write(bytes);
}
@Override
public void read(Kryo kryo, Input input) {
this.baseOffset = BYTE_ARRAY_OFFSET;
this.sizeInBytes = input.readInt();
this.baseObject = new byte[sizeInBytes];
input.read((byte[]) baseObject);
pointTo(baseObject, baseOffset, sizeInBytes);
}
}
|
UnsafeMapData
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/MathAbsoluteNegativeTest.java
|
{
"start": 876,
"end": 1199
}
|
class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(MathAbsoluteNegative.class, getClass());
@Test
public void random() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Random;
|
MathAbsoluteNegativeTest
|
java
|
apache__rocketmq
|
broker/src/main/java/org/apache/rocketmq/broker/client/DefaultConsumerIdsChangeListener.java
|
{
"start": 6545,
"end": 7066
}
|
class ____ {
private final AtomicBoolean interrupted = new AtomicBoolean(false);
private final List<Channel> channels;
public NotifyTaskControl(List<Channel> channels) {
this.channels = channels;
}
public boolean isInterrupted() {
return interrupted.get();
}
public void interrupt() {
interrupted.set(true);
}
public List<Channel> getChannels() {
return channels;
}
}
}
|
NotifyTaskControl
|
java
|
apache__flink
|
flink-connectors/flink-connector-base/src/test/java/org/apache/flink/connector/base/source/reader/SourceReaderBaseTest.java
|
{
"start": 12418,
"end": 26720
}
|
class ____
implements org.apache.flink.api.connector.source.util.ratelimit.RateLimiter<
TestingSourceSplit> {
private final Executor limiter =
Executors.newSingleThreadExecutor(new ExecutorThreadFactory("flink-rate-limiter"));
private RateLimiter rateLimiter;
private final double maxPerSecond;
public SplitAwaredRateLimiter(double maxPerSecond) {
this.maxPerSecond = maxPerSecond;
this.rateLimiter = RateLimiter.create(maxPerSecond);
}
@Override
public CompletionStage<Void> acquire(int numberOfEvents) {
return CompletableFuture.runAsync(() -> rateLimiter.acquire(numberOfEvents), limiter);
}
@Override
public void notifyAddingSplit(TestingSourceSplit split) {
if (!split.splitId().equals("test-split1")) {
this.rateLimiter = RateLimiter.create(maxPerSecond / 2);
}
}
}
@Test
void testRecordsWithSplitsRecycledWhenEmpty() throws Exception {
final TestingRecordsWithSplitIds<String> records =
new TestingRecordsWithSplitIds<>("test-split", "value1", "value2");
final SourceReader<?, ?> reader =
createReaderAndAwaitAvailable(
Collections.singletonList("test-split"),
Collections.singletonList(records),
RateLimiterStrategy.noOp());
// poll thrice: twice to get all records, one more to trigger recycle and moving to the next
// split
reader.pollNext(new TestingReaderOutput<>());
reader.pollNext(new TestingReaderOutput<>());
reader.pollNext(new TestingReaderOutput<>());
assertThat(records.isRecycled()).isTrue();
}
@Test
void testMultipleSplitsWithDifferentFinishingMoments() throws Exception {
MockSplitReader mockSplitReader =
MockSplitReader.newBuilder()
.setNumRecordsPerSplitPerFetch(2)
.setSeparatedFinishedRecord(false)
.setBlockingFetch(false)
.build();
MockSourceReader reader =
new MockSourceReader(
() -> mockSplitReader, getConfig(), new TestingReaderContext());
reader.start();
List<MockSourceSplit> splits =
Arrays.asList(
getSplit(0, 10, Boundedness.BOUNDED), getSplit(1, 12, Boundedness.BOUNDED));
reader.addSplits(splits);
reader.notifyNoMoreSplits();
while (true) {
InputStatus status = reader.pollNext(new TestingReaderOutput<>());
if (status == InputStatus.END_OF_INPUT) {
break;
}
if (status == InputStatus.NOTHING_AVAILABLE) {
reader.isAvailable().get();
}
}
}
@Test
void testMultipleSplitsWithSeparatedFinishedRecord() throws Exception {
MockSplitReader mockSplitReader =
MockSplitReader.newBuilder()
.setNumRecordsPerSplitPerFetch(2)
.setSeparatedFinishedRecord(true)
.setBlockingFetch(false)
.build();
MockSourceReader reader =
new MockSourceReader(
() -> mockSplitReader, getConfig(), new TestingReaderContext());
reader.start();
List<MockSourceSplit> splits =
Arrays.asList(
getSplit(0, 10, Boundedness.BOUNDED), getSplit(1, 10, Boundedness.BOUNDED));
reader.addSplits(splits);
reader.notifyNoMoreSplits();
while (true) {
InputStatus status = reader.pollNext(new TestingReaderOutput<>());
if (status == InputStatus.END_OF_INPUT) {
break;
}
if (status == InputStatus.NOTHING_AVAILABLE) {
reader.isAvailable().get();
}
}
}
@Test
void testPollNextReturnMoreAvailableWhenAllSplitFetcherCloseWithLeftoverElementInQueue()
throws Exception {
MockSplitReader mockSplitReader =
MockSplitReader.newBuilder()
.setNumRecordsPerSplitPerFetch(1)
.setBlockingFetch(true)
.build();
BlockingShutdownSplitFetcherManager<int[], MockSourceSplit> splitFetcherManager =
new BlockingShutdownSplitFetcherManager<>(() -> mockSplitReader, getConfig());
final MockSourceReader sourceReader =
new MockSourceReader(splitFetcherManager, getConfig(), new TestingReaderContext());
// Create and add a split that only contains one record
final MockSourceSplit split = new MockSourceSplit(0, 0, 1);
sourceReader.addSplits(Collections.singletonList(split));
sourceReader.notifyNoMoreSplits();
// Add the last record to the split when the splitFetcherManager shutting down SplitFetchers
splitFetcherManager.getInShutdownSplitFetcherFuture().thenRun(() -> split.addRecord(1));
assertThat(sourceReader.pollNext(new TestingReaderOutput<>()))
.isEqualTo(InputStatus.MORE_AVAILABLE);
}
@ParameterizedTest(name = "Emit record before split addition: {0}")
@ValueSource(booleans = {true, false})
void testPerSplitWatermark(boolean emitRecordBeforeSplitAddition) throws Exception {
MockSplitReader mockSplitReader =
MockSplitReader.newBuilder()
.setNumRecordsPerSplitPerFetch(3)
.setBlockingFetch(true)
.build();
MockSourceReader reader =
new MockSourceReader(
() -> mockSplitReader, new Configuration(), new TestingReaderContext());
SourceOperator<Integer, MockSourceSplit> sourceOperator =
createTestOperator(
reader,
WatermarkStrategy.forGenerator(
(context) -> new OnEventWatermarkGenerator()),
true);
MockSourceSplit splitA = new MockSourceSplit(0, 0, 3);
splitA.addRecord(100);
splitA.addRecord(200);
splitA.addRecord(300);
MockSourceSplit splitB = new MockSourceSplit(1, 0, 3);
splitB.addRecord(150);
splitB.addRecord(250);
splitB.addRecord(350);
WatermarkCollectingDataOutput output = new WatermarkCollectingDataOutput();
if (emitRecordBeforeSplitAddition) {
sourceOperator.emitNext(output);
}
AddSplitEvent<MockSourceSplit> addSplitsEvent =
new AddSplitEvent<>(Arrays.asList(splitA, splitB), new MockSourceSplitSerializer());
sourceOperator.handleOperatorEvent(addSplitsEvent);
// First 3 records from split A should not generate any watermarks
CommonTestUtils.waitUtil(
() -> {
try {
sourceOperator.emitNext(output);
} catch (Exception e) {
LOG.warn("Exception caught at emitting records", e);
return false;
}
return output.numRecords == 3;
},
Duration.ofSeconds(10),
String.format(
"%d out of 3 records are received within timeout", output.numRecords));
assertThat(output.watermarks).isEmpty();
CommonTestUtils.waitUtil(
() -> {
try {
sourceOperator.emitNext(output);
} catch (Exception e) {
LOG.warn("Exception caught at emitting records", e);
return false;
}
return output.numRecords == 6;
},
Duration.ofSeconds(10),
String.format(
"%d out of 6 records are received within timeout", output.numRecords));
assertThat(output.watermarks).hasSize(3);
assertThat(output.watermarks).containsExactly(150L, 250L, 300L);
}
@Test
void testMultipleSplitsAndFinishedByRecordEvaluator() throws Exception {
int split0End = 7;
int split1End = 15;
MockSplitReader mockSplitReader =
MockSplitReader.newBuilder()
.setNumRecordsPerSplitPerFetch(2)
.setSeparatedFinishedRecord(false)
.setBlockingFetch(false)
.build();
MockSourceReader reader =
new MockSourceReader(
new SingleThreadFetcherManager<>(() -> mockSplitReader, getConfig()),
getConfig(),
new TestingReaderContext(),
i -> i == split0End || i == split1End);
reader.start();
List<MockSourceSplit> splits =
Arrays.asList(
getSplit(0, NUM_RECORDS_PER_SPLIT, Boundedness.BOUNDED),
getSplit(1, NUM_RECORDS_PER_SPLIT, Boundedness.BOUNDED));
reader.addSplits(splits);
reader.notifyNoMoreSplits();
TestingReaderOutput<Integer> output = new TestingReaderOutput<>();
while (true) {
InputStatus status = reader.pollNext(output);
if (status == InputStatus.END_OF_INPUT) {
break;
}
if (status == InputStatus.NOTHING_AVAILABLE) {
reader.isAvailable().get();
}
}
List<Integer> excepted =
IntStream.concat(
IntStream.range(0, split0End),
IntStream.range(NUM_RECORDS_PER_SPLIT, split1End))
.boxed()
.collect(Collectors.toList());
assertThat(output.getEmittedRecords())
.containsExactlyInAnyOrder(excepted.toArray(new Integer[excepted.size()]));
}
// ---------------- helper methods -----------------
@Override
protected MockSourceReader createReader() {
MockSplitReader mockSplitReader =
MockSplitReader.newBuilder()
.setNumRecordsPerSplitPerFetch(2)
.setBlockingFetch(true)
.build();
return new MockSourceReader(() -> mockSplitReader, getConfig(), new TestingReaderContext());
}
@Override
protected List<MockSourceSplit> getSplits(
int numSplits, int numRecordsPerSplit, Boundedness boundedness) {
List<MockSourceSplit> mockSplits = new ArrayList<>();
for (int i = 0; i < numSplits; i++) {
mockSplits.add(getSplit(i, numRecordsPerSplit, boundedness));
}
return mockSplits;
}
@Override
protected MockSourceSplit getSplit(int splitId, int numRecords, Boundedness boundedness) {
MockSourceSplit mockSplit;
if (boundedness == Boundedness.BOUNDED) {
mockSplit = new MockSourceSplit(splitId, 0, numRecords);
} else {
mockSplit = new MockSourceSplit(splitId);
}
for (int j = 0; j < numRecords; j++) {
mockSplit.addRecord(splitId * 10 + j);
}
return mockSplit;
}
@Override
protected long getNextRecordIndex(MockSourceSplit split) {
return split.index();
}
private Configuration getConfig() {
Configuration config = new Configuration();
config.set(SourceReaderOptions.ELEMENT_QUEUE_CAPACITY, 1);
config.set(SourceReaderOptions.SOURCE_READER_CLOSE_TIMEOUT, 30000L);
return config;
}
// ------------------------------------------------------------------------
// Testing Setup Helpers
// ------------------------------------------------------------------------
private static <E> SourceReader<E, ?> createReaderAndAwaitAvailable(
final List<String> splitIds,
final List<RecordsWithSplitIds<E>> records,
RateLimiterStrategy<TestingSourceSplit> rateLimiterStrategy)
throws Exception {
final SourceReader<E, TestingSourceSplit> reader =
new SingleThreadMultiplexSourceReaderBase<
E, E, TestingSourceSplit, TestingSourceSplit>(
() -> new TestingSplitReader<>(records.toArray(new RecordsWithSplitIds[0])),
new PassThroughRecordEmitter<>(),
new Configuration(),
new TestingReaderContext(),
rateLimiterStrategy) {
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
super.notifyCheckpointComplete(checkpointId);
}
@Override
protected void onSplitFinished(
Map<String, TestingSourceSplit> finishedSplitIds) {}
@Override
protected TestingSourceSplit initializedState(TestingSourceSplit split) {
return split;
}
@Override
protected TestingSourceSplit toSplitType(
String splitId, TestingSourceSplit splitState) {
return splitState;
}
};
reader.start();
final List<TestingSourceSplit> splits =
splitIds.stream().map(TestingSourceSplit::new).collect(Collectors.toList());
reader.addSplits(splits);
reader.isAvailable().get();
return reader;
}
// ------------------ Test helper classes -------------------
/**
* When maybeShutdownFinishedFetchers is invoke, BlockingShutdownSplitFetcherManager will
* complete the inShutdownSplitFetcherFuture and ensures that all the split fetchers are
* shutdown.
*/
private static
|
SplitAwaredRateLimiter
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java
|
{
"start": 40373,
"end": 40549
}
|
class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return definesFunction( dialect, "vector_norm" );
}
}
public static
|
SupportsVectorNorm
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/Protocol.java
|
{
"start": 2240,
"end": 2710
}
|
class ____ extends JsonProperties {
/** The version of the protocol specification implemented here. */
public static final long VERSION = 1;
// Support properties for both Protocol and Message objects
private static final Set<String> MESSAGE_RESERVED = Set.of("doc", "response", "request", "errors", "one-way");
private static final Set<String> FIELD_RESERVED = Set.of("name", "type", "doc", "default", "aliases");
/** A protocol message. */
public
|
Protocol
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BinderTests.java
|
{
"start": 16527,
"end": 16600
}
|
enum ____ {
FOO_BAR, BAR_BAZ, BAZ_BOO
}
@Validated
static
|
ExampleEnum
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/BasicValueBinder.java
|
{
"start": 46294,
"end": 46753
}
|
class ____ implements BasicMappingAccess {
private static final AnyDiscriminatorMappingAccess INSTANCE = new AnyDiscriminatorMappingAccess();
@Override
public Class<? extends UserType<?>> customType(MemberDetails attribute, ModelsContext context) {
return null;
}
@Override
public Map<String,String> customTypeParameters(MemberDetails attribute, ModelsContext context) {
return emptyMap();
}
}
private static
|
AnyDiscriminatorMappingAccess
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/subapplication/SubApplicationColumnPrefix.java
|
{
"start": 3649,
"end": 5372
}
|
enum ____.
*
* @param columnFamily that this column is stored in.
* @param columnPrefix for this column.
* @param converter used to encode/decode values to be stored in HBase for
* this column prefix.
*/
SubApplicationColumnPrefix(ColumnFamily<SubApplicationTable> columnFamily,
String columnPrefix, boolean compondColQual, ValueConverter converter) {
this.valueConverter = converter;
this.columnFamily = columnFamily;
this.columnPrefix = columnPrefix;
if (columnPrefix == null) {
this.columnPrefixBytes = null;
} else {
// Future-proof by ensuring the right column prefix hygiene.
this.columnPrefixBytes =
Bytes.toBytes(Separator.SPACE.encode(columnPrefix));
}
}
/**
* @return the column name value
*/
public String getColumnPrefix() {
return columnPrefix;
}
@Override
public byte[] getColumnPrefixBytes(byte[] qualifierPrefix) {
return ColumnHelper.getColumnQualifier(
this.columnPrefixBytes, qualifierPrefix);
}
@Override
public byte[] getColumnPrefixBytes(String qualifierPrefix) {
return ColumnHelper.getColumnQualifier(
this.columnPrefixBytes, qualifierPrefix);
}
@Override
public byte[] getColumnPrefixInBytes() {
return columnPrefixBytes != null ? columnPrefixBytes.clone() : null;
}
@Override
public byte[] getColumnFamilyBytes() {
return columnFamily.getBytes();
}
@Override
public ValueConverter getValueConverter() {
return valueConverter;
}
@Override
public Attribute[] getCombinedAttrsWithAggr(Attribute... attributes) {
return attributes;
}
@Override
public boolean supplementCellTimeStamp() {
return false;
}
}
|
definition
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/GraphMatching.java
|
{
"start": 1211,
"end": 3091
}
|
class ____ {
/**
* Finds a <a
* href="https://en.wikipedia.org/wiki/Matching_(graph_theory)#In_unweighted_bipartite_graphs">
* maximum cardinality matching of a bipartite graph</a>. The vertices of one part of the
* bipartite graph are identified by objects of type {@code U} using object equality. The vertices
* of the other part are similarly identified by objects of type {@code V}. The input bipartite
* graph is represented as a {@code Multimap<U, V>}: each entry represents an edge, with the key
* representing the vertex in the first part and the value representing the value in the second
* part. (Note that, even if {@code U} and {@code V} are the same type, equality between a key and
* a value has no special significance: effectively, they are in different domains.) Fails if any
* of the vertices (keys or values) are null. The output matching is similarly represented as a
* {@code BiMap<U, V>} (the property that a matching has no common vertices translates into the
* bidirectional uniqueness property of the {@link BiMap}).
*
* <p>If there are multiple matchings which share the maximum cardinality, an arbitrary one is
* returned.
*/
static <U, V> ImmutableBiMap<U, V> maximumCardinalityBipartiteMatching(Multimap<U, V> graph) {
return HopcroftKarp.overBipartiteGraph(graph).perform();
}
private GraphMatching() {}
/**
* Helper which implements the <a
* href="https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm">Hopcroft–Karp</a>
* algorithm.
*
* <p>The worst-case complexity is {@code O(E V^0.5)} where the graph contains {@code E} edges and
* {@code V} vertices. For dense graphs, where {@code E} is {@code O(V^2)}, this is {@code V^2.5}
* (and non-dense graphs perform better than dense graphs with the same number of vertices).
*/
private static
|
GraphMatching
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/junit4/rules/FailingBeforeAndAfterMethodsSpringRuleTests.java
|
{
"start": 3384,
"end": 3648
}
|
class ____ extends BaseSpringRuleTestCase {
}
@Ignore("TestCase classes are run manually by the enclosing test class")
@TestExecutionListeners(AlwaysFailingPrepareTestInstanceTestExecutionListener.class)
public static
|
AlwaysFailingAfterTestClassSpringRuleTestCase
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/GetAllTopicConfigRequestHeader.java
|
{
"start": 1384,
"end": 2174
}
|
class ____ implements CommandCustomHeader {
@Override
public void checkFields() throws RemotingCommandException {
// nothing
}
@CFNotNull
private Integer topicSeq;
private String dataVersion;
private Integer maxTopicNum;
public Integer getTopicSeq() {
return topicSeq;
}
public void setTopicSeq(Integer topicSeq) {
this.topicSeq = topicSeq;
}
public String getDataVersion() {
return dataVersion;
}
public void setDataVersion(String dataVersion) {
this.dataVersion = dataVersion;
}
public Integer getMaxTopicNum() {
return maxTopicNum;
}
public void setMaxTopicNum(Integer maxTopicNum) {
this.maxTopicNum = maxTopicNum;
}
}
|
GetAllTopicConfigRequestHeader
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/inject/anninheritance/AnnotationInheritanceSpec.java
|
{
"start": 410,
"end": 1007
}
|
class ____ {
@Test
void testAnnotationInheritance() {
final Map<String, Object> config = Collections.singletonMap("datasource.url", "jdbc://someurl");
try (ApplicationContext context = ApplicationContext.run(config)) {
final BeanDefinition<BookRepository> beanDefinition = context.getBeanDefinition(BookRepository.class);
final String name = beanDefinition.stringValue(AnnotationUtil.NAMED).orElse(null);
assertEquals("bookRepository", name);
assertTrue(beanDefinition.isSingleton());
}
}
}
|
AnnotationInheritanceSpec
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/FactoryFinder.java
|
{
"start": 986,
"end": 1237
}
|
interface ____ {
String DEFAULT_PATH = "META-INF/services/org/apache/camel/";
/**
* Gets the resource classpath.
*
* @return the resource classpath.
*/
String getResourcePath();
/**
* Creates a new
|
FactoryFinder
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledForJreRange.java
|
{
"start": 5264,
"end": 5357
}
|
class ____ method should
* be disabled, specified as an integer.
*
* <p>If a {@code JRE}
|
or
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java
|
{
"start": 26690,
"end": 27358
}
|
interface ____ by clients to remove an existing Reservation.
* </p>
*
* @param request to remove an existing Reservation (the
* {@link ReservationDeleteRequest} should refer to an existing valid
* {@link ReservationId})
* @return response empty on successfully deleting the existing reservation
* @throws YarnException if the request is invalid or reservation cannot be
* deleted successfully
* @throws IOException
*
*/
@Public
@Unstable
public abstract ReservationDeleteResponse deleteReservation(
ReservationDeleteRequest request) throws YarnException, IOException;
/**
* <p>
* The
|
used
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/javatime/GlobalJavaTimeJdbcTypeTests.java
|
{
"start": 9870,
"end": 10148
}
|
class ____ {
@Id
private Integer id;
private String name;
private OffsetDateTime theOffsetDateTime;
private Instant theInstant;
private LocalDateTime theLocalDateTime;
private LocalDate theLocalDate;
private LocalTime theLocalTime;
}
}
|
EntityWithJavaTimeValues
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/time/DurationFormatUtils.java
|
{
"start": 3929,
"end": 31539
}
|
class ____ {
/** Empty array. */
private static final Token[] EMPTY_ARRAY = {};
/**
* Helper method to determine if a set of tokens contain a value
*
* @param tokens set to look in
* @param value to look for
* @return boolean {@code true} if contained
*/
static boolean containsTokenWithValue(final Token[] tokens, final Object value) {
return Stream.of(tokens).anyMatch(token -> token.getValue() == value);
}
private final CharSequence value;
private int count;
private int optionalIndex = -1;
/**
* Wraps a token around a value. A value would be something like a 'Y'.
*
* @param value value to wrap, non-null.
* @param optional whether the token is optional
* @param optionalIndex the index of the optional token within the pattern
*/
Token(final CharSequence value, final boolean optional, final int optionalIndex) {
this.value = Objects.requireNonNull(value, "value");
this.count = 1;
if (optional) {
this.optionalIndex = optionalIndex;
}
}
/**
* Supports equality of this Token to another Token.
*
* @param obj2 Object to consider equality of
* @return boolean {@code true} if equal
*/
@Override
public boolean equals(final Object obj2) {
if (obj2 instanceof Token) {
final Token tok2 = (Token) obj2;
if (this.value.getClass() != tok2.value.getClass()) {
return false;
}
if (this.count != tok2.count) {
return false;
}
if (this.value instanceof StringBuilder) {
return this.value.toString().equals(tok2.value.toString());
}
if (this.value instanceof Number) {
return this.value.equals(tok2.value);
}
return this.value == tok2.value;
}
return false;
}
/**
* Gets the current number of values represented
*
* @return int number of values represented
*/
int getCount() {
return count;
}
/**
* Gets the particular value this token represents.
*
* @return Object value, non-null.
*/
Object getValue() {
return value;
}
/**
* Returns a hash code for the token equal to the
* hash code for the token's value. Thus 'TT' and 'TTTT'
* will have the same hash code.
*
* @return The hash code for the token
*/
@Override
public int hashCode() {
return this.value.hashCode();
}
/**
* Adds another one of the value
*/
void increment() {
count++;
}
/**
* Represents this token as a String.
*
* @return String representation of the token
*/
@Override
public String toString() {
return StringUtils.repeat(this.value.toString(), this.count);
}
}
private static final int MINUTES_PER_HOUR = 60;
private static final int SECONDS_PER_MINUTES = 60;
private static final int HOURS_PER_DAY = 24;
/**
* Pattern used with {@link FastDateFormat} and {@link SimpleDateFormat}
* for the ISO 8601 period format used in durations.
*
* @see org.apache.commons.lang3.time.FastDateFormat
* @see java.text.SimpleDateFormat
*/
public static final String ISO_EXTENDED_FORMAT_PATTERN = "'P'yyyy'Y'M'M'd'DT'H'H'm'M's.SSS'S'";
static final String y = "y";
static final String M = "M";
static final String d = "d";
static final String H = "H";
static final String m = "m";
static final String s = "s";
static final String S = "S";
/**
* The internal method to do the formatting.
*
* @param tokens the tokens
* @param years the number of years
* @param months the number of months
* @param days the number of days
* @param hours the number of hours
* @param minutes the number of minutes
* @param seconds the number of seconds
* @param milliseconds the number of millis
* @param padWithZeros whether to pad
* @return the formatted string
*/
static String format(final Token[] tokens, final long years, final long months, final long days, final long hours, final long minutes,
final long seconds,
final long milliseconds, final boolean padWithZeros) {
final StringBuilder buffer = new StringBuilder();
boolean lastOutputSeconds = false;
boolean lastOutputZero = false;
int optionalStart = -1;
boolean firstOptionalNonLiteral = false;
int optionalIndex = -1;
boolean inOptional = false;
for (final Token token : tokens) {
final Object value = token.getValue();
final boolean isLiteral = value instanceof StringBuilder;
final int count = token.getCount();
if (optionalIndex != token.optionalIndex) {
optionalIndex = token.optionalIndex;
if (optionalIndex > -1) {
//entering new optional block
optionalStart = buffer.length();
lastOutputZero = false;
inOptional = true;
firstOptionalNonLiteral = false;
} else {
//leaving optional block
inOptional = false;
}
}
if (isLiteral) {
if (!inOptional || !lastOutputZero) {
buffer.append(value.toString());
}
} else if (value.equals(y)) {
lastOutputSeconds = false;
lastOutputZero = years == 0;
if (!inOptional || !lastOutputZero) {
buffer.append(paddedValue(years, padWithZeros, count));
}
} else if (value.equals(M)) {
lastOutputSeconds = false;
lastOutputZero = months == 0;
if (!inOptional || !lastOutputZero) {
buffer.append(paddedValue(months, padWithZeros, count));
}
} else if (value.equals(d)) {
lastOutputSeconds = false;
lastOutputZero = days == 0;
if (!inOptional || !lastOutputZero) {
buffer.append(paddedValue(days, padWithZeros, count));
}
} else if (value.equals(H)) {
lastOutputSeconds = false;
lastOutputZero = hours == 0;
if (!inOptional || !lastOutputZero) {
buffer.append(paddedValue(hours, padWithZeros, count));
}
} else if (value.equals(m)) {
lastOutputSeconds = false;
lastOutputZero = minutes == 0;
if (!inOptional || !lastOutputZero) {
buffer.append(paddedValue(minutes, padWithZeros, count));
}
} else if (value.equals(s)) {
lastOutputSeconds = true;
lastOutputZero = seconds == 0;
if (!inOptional || !lastOutputZero) {
buffer.append(paddedValue(seconds, padWithZeros, count));
}
} else if (value.equals(S)) {
lastOutputZero = milliseconds == 0;
if (!inOptional || !lastOutputZero) {
if (lastOutputSeconds) {
// ensure at least 3 digits are displayed even if padding is not selected
final int width = padWithZeros ? Math.max(3, count) : 3;
buffer.append(paddedValue(milliseconds, true, width));
} else {
buffer.append(paddedValue(milliseconds, padWithZeros, count));
}
}
lastOutputSeconds = false;
}
//as soon as we hit first nonliteral in optional, check for literal prefix
if (inOptional && !isLiteral && !firstOptionalNonLiteral) {
firstOptionalNonLiteral = true;
if (lastOutputZero) {
buffer.delete(optionalStart, buffer.length());
}
}
}
return buffer.toString();
}
/**
 * Formats a duration as a string using the supplied pattern, with numeric
 * fields zero-padded to their pattern width.
 *
 * <p>Only the day token and smaller are populated; month and year tokens
 * in the pattern are not used by this method.</p>
 *
 * @param durationMillis the duration to format, in milliseconds, not negative
 * @param format the pattern describing the output, not null
 * @return the formatted duration, not null
 * @throws IllegalArgumentException if durationMillis is negative
 */
public static String formatDuration(final long durationMillis, final String format) {
    // Delegate to the three-argument overload with zero padding switched on.
    final boolean padWithZeros = true;
    return formatDuration(durationMillis, format, padWithZeros);
}
/**
 * Formats a duration as a string using the supplied pattern.
 * Zero-padding of the numeric fields is optional.
 *
 * <p>Only the day token and smaller are populated; month and year tokens
 * in the pattern are not used by this method. Each unit is only split out
 * if its token actually appears in the pattern — otherwise its value rolls
 * down into the next smaller unit that does appear.</p>
 *
 * @param durationMillis the duration to format, in milliseconds, not negative
 * @param format the pattern describing the output, not null
 * @param padWithZeros whether to left-pad the numbers with zeros
 * @return the formatted duration, not null
 * @throws IllegalArgumentException if durationMillis is negative
 */
public static String formatDuration(final long durationMillis, final String format, final boolean padWithZeros) {
    Validate.inclusiveBetween(0, Long.MAX_VALUE, durationMillis, "durationMillis must not be negative");
    final Token[] tokens = lexx(format);
    long days = 0;
    long hours = 0;
    long minutes = 0;
    long seconds = 0;
    // Whatever is not claimed by a larger unit below remains as milliseconds.
    long remainder = durationMillis;
    if (Token.containsTokenWithValue(tokens, d)) {
        days = remainder / DateUtils.MILLIS_PER_DAY;
        remainder %= DateUtils.MILLIS_PER_DAY;
    }
    if (Token.containsTokenWithValue(tokens, H)) {
        hours = remainder / DateUtils.MILLIS_PER_HOUR;
        remainder %= DateUtils.MILLIS_PER_HOUR;
    }
    if (Token.containsTokenWithValue(tokens, m)) {
        minutes = remainder / DateUtils.MILLIS_PER_MINUTE;
        remainder %= DateUtils.MILLIS_PER_MINUTE;
    }
    if (Token.containsTokenWithValue(tokens, s)) {
        seconds = remainder / DateUtils.MILLIS_PER_SECOND;
        remainder %= DateUtils.MILLIS_PER_SECOND;
    }
    // Years and months are always zero for pure durations.
    return format(tokens, 0, 0, days, hours, minutes, seconds, remainder, padWithZeros);
}
/**
 * Formats a duration as an ISO-8601-like string: {@code HH:mm:ss.SSS}.
 *
 * @param durationMillis the duration to format, in milliseconds, not negative
 * @return the formatted duration, not null
 * @throws IllegalArgumentException if durationMillis is negative
 */
public static String formatDurationHMS(final long durationMillis) {
    // Fixed pattern, always zero-padded.
    return formatDuration(durationMillis, "HH:mm:ss.SSS", true);
}
/**
 * Formats a duration as an ISO 8601 period string, for example
 * {@code P0Y0M7DT6H5M4.321S}.
 *
 * <p>Only the day field and smaller of the ISO pattern are populated;
 * months and years are always zero for pure durations.</p>
 *
 * @param durationMillis the duration to format, in milliseconds, not negative
 * @return the formatted duration, not null
 * @throws IllegalArgumentException if durationMillis is negative
 */
public static String formatDurationISO(final long durationMillis) {
    // ISO output must not be zero-padded beyond the pattern itself.
    final boolean padWithZeros = false;
    return formatDuration(durationMillis, ISO_EXTENDED_FORMAT_PATTERN, padWithZeros);
}
/**
 * Formats an elapsed time into a pluralization-correct English phrase,
 * for example {@code "1 day 2 hours 3 minutes 4 seconds"}.
 *
 * <p>Only the day field and smaller are used; months and larger are not.</p>
 *
 * @param durationMillis the elapsed time to report, in milliseconds, not negative
 * @param suppressLeadingZeroElements whether to drop leading zero-valued units
 * @param suppressTrailingZeroElements whether to drop trailing zero-valued units
 * @return the formatted text in days/hours/minutes/seconds, not null
 * @throws IllegalArgumentException if durationMillis is negative
 */
public static String formatDurationWords(
    final long durationMillis,
    final boolean suppressLeadingZeroElements,
    final boolean suppressTrailingZeroElements) {
    // This method is generally replaceable by the format method, but
    // there are a series of tweaks and special cases that require
    // trickery to replicate.
    String duration = formatDuration(durationMillis, "d' days 'H' hours 'm' minutes 's' seconds'");
    if (suppressLeadingZeroElements) {
        // Temporary leading-space marker (like ^ in a regexp) so the
        // replacements below only match at the very front of the string.
        duration = " " + duration;
        String stripped = Strings.CS.replaceOnce(duration, " 0 days", StringUtils.EMPTY);
        if (stripped.length() != duration.length()) {
            duration = stripped;
            stripped = Strings.CS.replaceOnce(duration, " 0 hours", StringUtils.EMPTY);
            if (stripped.length() != duration.length()) {
                duration = stripped;
                // Once days and hours are gone, zero minutes go too, unconditionally.
                duration = Strings.CS.replaceOnce(duration, " 0 minutes", StringUtils.EMPTY);
            }
        }
        if (!duration.isEmpty()) {
            // Strip the marker space off again.
            duration = duration.substring(1);
        }
    }
    if (suppressTrailingZeroElements) {
        // Work from seconds upwards; each larger unit is only removed if
        // every smaller unit was also zero and removed.
        String stripped = Strings.CS.replaceOnce(duration, " 0 seconds", StringUtils.EMPTY);
        if (stripped.length() != duration.length()) {
            duration = stripped;
            stripped = Strings.CS.replaceOnce(duration, " 0 minutes", StringUtils.EMPTY);
            if (stripped.length() != duration.length()) {
                duration = stripped;
                stripped = Strings.CS.replaceOnce(duration, " 0 hours", StringUtils.EMPTY);
                if (stripped.length() != duration.length()) {
                    duration = Strings.CS.replaceOnce(stripped, " 0 days", StringUtils.EMPTY);
                }
            }
        }
    }
    // Handle plurals; leading-space marker makes the match word-anchored.
    duration = " " + duration;
    duration = Strings.CS.replaceOnce(duration, " 1 seconds", " 1 second");
    duration = Strings.CS.replaceOnce(duration, " 1 minutes", " 1 minute");
    duration = Strings.CS.replaceOnce(duration, " 1 hours", " 1 hour");
    duration = Strings.CS.replaceOnce(duration, " 1 days", " 1 day");
    return duration.trim();
}
/**
 * Formats the gap between two instants as a string using the supplied
 * pattern, zero-padding the numeric fields, in the default time zone.
 *
 * @param startMillis the start of the period, in milliseconds
 * @param endMillis the end of the period, in milliseconds
 * @param format the pattern describing the output, not null
 * @return the formatted period, not null
 * @throws IllegalArgumentException if startMillis is greater than endMillis
 */
public static String formatPeriod(final long startMillis, final long endMillis, final String format) {
    // Delegate with zero padding on and the JVM's default time zone.
    final boolean padWithZeros = true;
    return formatPeriod(startMillis, endMillis, format, padWithZeros, TimeZone.getDefault());
}
/**
 * Formats the gap between two instants as a string, using the specified format.
 * Padding the left-hand side of numbers with zeros is optional and
 * the time zone may be specified.
 *
 * <p>When calculating the difference between months/days, it chooses to
 * calculate months first. So when working out the number of months and
 * days between January 15th and March 10th, it chooses 1 month and
 * 23 days gained by choosing January-&gt;February = 1 month and then
 * calculating days forwards, and not the 1 month and 26 days gained by
 * choosing March -&gt; February = 1 month and then calculating days
 * backwards.</p>
 *
 * <p>For more control, the <a href="https://www.joda.org/joda-time/">Joda-Time</a>
 * library is recommended.</p>
 *
 * @param startMillis the start of the duration
 * @param endMillis the end of the duration
 * @param format the way in which to format the duration, not null
 * @param padWithZeros whether to pad the left-hand side of numbers with 0's
 * @param timezone the time zone the millis are defined in
 * @return the formatted duration, not null
 * @throws IllegalArgumentException if startMillis is greater than endMillis
 */
public static String formatPeriod(final long startMillis, final long endMillis, final String format, final boolean padWithZeros,
    final TimeZone timezone) {
    Validate.isTrue(startMillis <= endMillis, "startMillis must not be greater than endMillis");
    // Used to optimize for differences under 28 days and
    // called formatDuration(millis, format); however this did not work
    // over leap years.
    // TODO: Compare performance to see if anything was lost by
    // losing this optimization.
    final Token[] tokens = lexx(format);
    // time zones get funky around 0, so normalizing everything to GMT
    // stops the hours being off
    final Calendar start = Calendar.getInstance(timezone);
    start.setTime(new Date(startMillis));
    final Calendar end = Calendar.getInstance(timezone);
    end.setTime(new Date(endMillis));
    // initial estimates: raw per-field differences, any of which may be negative
    long milliseconds = end.get(Calendar.MILLISECOND) - start.get(Calendar.MILLISECOND);
    int seconds = end.get(Calendar.SECOND) - start.get(Calendar.SECOND);
    int minutes = end.get(Calendar.MINUTE) - start.get(Calendar.MINUTE);
    int hours = end.get(Calendar.HOUR_OF_DAY) - start.get(Calendar.HOUR_OF_DAY);
    int days = end.get(Calendar.DAY_OF_MONTH) - start.get(Calendar.DAY_OF_MONTH);
    int months = end.get(Calendar.MONTH) - start.get(Calendar.MONTH);
    int years = end.get(Calendar.YEAR) - start.get(Calendar.YEAR);
    // each initial estimate is adjusted in case it is under 0:
    // borrow one unit from the next larger field until non-negative
    while (milliseconds < 0) {
        milliseconds += DateUtils.MILLIS_PER_SECOND;
        seconds -= 1;
    }
    while (seconds < 0) {
        seconds += SECONDS_PER_MINUTES;
        minutes -= 1;
    }
    while (minutes < 0) {
        minutes += MINUTES_PER_HOUR;
        hours -= 1;
    }
    while (hours < 0) {
        hours += HOURS_PER_DAY;
        days -= 1;
    }
    if (Token.containsTokenWithValue(tokens, M)) {
        // pattern uses months: borrow days from the month length of the
        // (advancing) start month, then months from years
        while (days < 0) {
            days += start.getActualMaximum(Calendar.DAY_OF_MONTH);
            months -= 1;
            start.add(Calendar.MONTH, 1);
        }
        while (months < 0) {
            months += 12;
            years -= 1;
        }
        if (!Token.containsTokenWithValue(tokens, y) && years != 0) {
            // no year token: fold whole years into the month count
            while (years != 0) {
                months += 12 * years;
                years = 0;
            }
        }
    } else {
        // there are no M's in the format string
        if (!Token.containsTokenWithValue(tokens, y)) {
            // no year token either: convert whole years into days by
            // walking the start calendar forward a year at a time
            int target = end.get(Calendar.YEAR);
            if (months < 0) {
                // target is end-year -1
                target -= 1;
            }
            while (start.get(Calendar.YEAR) != target) {
                days += start.getActualMaximum(Calendar.DAY_OF_YEAR) - start.get(Calendar.DAY_OF_YEAR);
                // Not sure I grok why this is needed, but the brutal tests show it is
                if (start instanceof GregorianCalendar &&
                    start.get(Calendar.MONTH) == Calendar.FEBRUARY &&
                    start.get(Calendar.DAY_OF_MONTH) == 29) {
                    days += 1;
                }
                start.add(Calendar.YEAR, 1);
                days += start.get(Calendar.DAY_OF_YEAR);
            }
            years = 0;
        }
        // convert the remaining month gap into days, month by month
        while (start.get(Calendar.MONTH) != end.get(Calendar.MONTH)) {
            days += start.getActualMaximum(Calendar.DAY_OF_MONTH);
            start.add(Calendar.MONTH, 1);
        }
        months = 0;
        while (days < 0) {
            days += start.getActualMaximum(Calendar.DAY_OF_MONTH);
            months -= 1;
            start.add(Calendar.MONTH, 1);
        }
    }
    // The rest of this code adds in values that
    // aren't requested. This allows the user to ask for the
    // number of months and get the real count and not just 0->11.
    if (!Token.containsTokenWithValue(tokens, d)) {
        hours += HOURS_PER_DAY * days;
        days = 0;
    }
    if (!Token.containsTokenWithValue(tokens, H)) {
        minutes += MINUTES_PER_HOUR * hours;
        hours = 0;
    }
    if (!Token.containsTokenWithValue(tokens, m)) {
        seconds += SECONDS_PER_MINUTES * minutes;
        minutes = 0;
    }
    if (!Token.containsTokenWithValue(tokens, s)) {
        milliseconds += DateUtils.MILLIS_PER_SECOND * seconds;
        seconds = 0;
    }
    return format(tokens, years, months, days, hours, minutes, seconds, milliseconds, padWithZeros);
}
/**
 * Formats the gap between two instants as an ISO 8601 period string,
 * in the default time zone.
 *
 * @param startMillis the start of the period to format, in milliseconds
 * @param endMillis the end of the period to format, in milliseconds
 * @return the formatted period, not null
 * @throws IllegalArgumentException if startMillis is greater than endMillis
 */
public static String formatPeriodISO(final long startMillis, final long endMillis) {
    // ISO output must not be zero-padded beyond the pattern itself.
    final boolean padWithZeros = false;
    return formatPeriod(startMillis, endMillis, ISO_EXTENDED_FORMAT_PATTERN, padWithZeros, TimeZone.getDefault());
}
/**
 * Parses a classic date format string into Tokens.
 *
 * <p>Recognized field characters are {@code y M d H m s S}; runs of the
 * same character are collapsed into one Token with an incremented count.
 * Text between single quotes is a literal, {@code [...]} delimits an
 * optional block, and any other character starts/extends a literal.</p>
 *
 * @param format the format to parse, not null
 * @return array of Token[]
 * @throws IllegalArgumentException if a quote or optional block is unclosed,
 *         or optional blocks are nested
 */
static Token[] lexx(final String format) {
    final ArrayList<Token> list = new ArrayList<>(format.length());
    boolean inLiteral = false;
    // Although the buffer is stored in a Token, the Tokens are only
    // used internally, so cannot be accessed by other threads
    StringBuilder buffer = null;
    // last field Token emitted, so repeated characters can increment it
    Token previous = null;
    boolean inOptional = false;
    // index of the current optional block; -1 when none seen yet
    int optionalIndex = -1;
    for (int i = 0; i < format.length(); i++) {
        final char ch = format.charAt(i);
        // inside a quoted literal, everything except the closing quote is text
        if (inLiteral && ch != '\'') {
            buffer.append(ch); // buffer can't be null if inLiteral is true
            continue;
        }
        String value = null;
        switch (ch) {
            // TODO: Need to handle escaping of '
            case '[':
                if (inOptional) {
                    throw new IllegalArgumentException("Nested optional block at index: " + i);
                }
                optionalIndex++;
                inOptional = true;
                break;
            case ']':
                if (!inOptional) {
                    throw new IllegalArgumentException("Attempting to close unopened optional block at index: " + i);
                }
                inOptional = false;
                break;
            case '\'':
                // toggle literal mode; opening quote starts a fresh literal Token
                if (inLiteral) {
                    buffer = null;
                    inLiteral = false;
                } else {
                    buffer = new StringBuilder();
                    list.add(new Token(buffer, inOptional, optionalIndex));
                    inLiteral = true;
                }
                break;
            case 'y':
                value = y;
                break;
            case 'M':
                value = M;
                break;
            case 'd':
                value = d;
                break;
            case 'H':
                value = H;
                break;
            case 'm':
                value = m;
                break;
            case 's':
                value = s;
                break;
            case 'S':
                value = S;
                break;
            default:
                // unquoted plain character: accumulate into a literal Token
                if (buffer == null) {
                    buffer = new StringBuilder();
                    list.add(new Token(buffer, inOptional, optionalIndex));
                }
                buffer.append(ch);
        }
        if (value != null) {
            // repeated field char extends the previous Token's count
            if (previous != null && previous.getValue().equals(value)) {
                previous.increment();
            } else {
                final Token token = new Token(value, inOptional, optionalIndex);
                list.add(token);
                previous = token;
            }
            // a field character terminates any in-progress plain literal
            buffer = null;
        }
    }
    if (inLiteral) { // i.e. we have not found the end of the literal
        throw new IllegalArgumentException("Unmatched quote in format: " + format);
    }
    if (inOptional) { // i.e. we have not found the end of the optional block
        throw new IllegalArgumentException("Unmatched optional in format: " + format);
    }
    return list.toArray(Token.EMPTY_ARRAY);
}
/**
 * Converts a {@code long} to a {@link String}, optionally left-padding
 * with zeros to a minimum width.
 *
 * @param value the value to convert
 * @param padWithZeros whether to pad with zeros
 * @param count the minimum width to pad to (ignored if {@code padWithZeros} is false)
 * @return the string result
 */
private static String paddedValue(final long value, final boolean padWithZeros, final int count) {
    final String digits = Long.toString(value);
    // Short-circuit when no padding is wanted or the value already fills the width.
    if (!padWithZeros || digits.length() >= count) {
        return digits;
    }
    final StringBuilder padded = new StringBuilder(count);
    for (int remaining = count - digits.length(); remaining > 0; remaining--) {
        padded.append('0');
    }
    return padded.append(digits).toString();
}
/**
 * DurationFormatUtils instances should NOT be constructed in standard programming;
 * all functionality is exposed through static methods.
 *
 * <p>This constructor is public to permit tools that require a JavaBean instance
 * to operate.</p>
 *
 * @deprecated TODO Make private in 4.0.
 */
@Deprecated
public DurationFormatUtils() {
    // intentionally empty: no instance state to initialize
}
}
|
Token
|
java
|
grpc__grpc-java
|
netty/src/main/java/io/grpc/netty/AbstractNettyHandler.java
|
{
"start": 9596,
"end": 9677
}
|
interface ____ {
boolean isPingAllowed();
}
private static final
|
PingLimiter
|
java
|
apache__camel
|
components/camel-sjms/src/test/java/org/apache/camel/component/sjms/JmsSelectorOptionTest.java
|
{
"start": 1179,
"end": 3881
}
|
class ____ extends JmsTestSupport {
@Test
public void testJmsMessageWithSelector() throws Exception {
MockEndpoint endpointA = getMockEndpoint("mock:a");
MockEndpoint endpointB = getMockEndpoint("mock:b");
MockEndpoint endpointC = getMockEndpoint("mock:c");
endpointA.expectedBodiesReceivedInAnyOrder("A blue car!", "A blue car, again!");
endpointA.expectedHeaderReceived("color", "blue");
endpointB.expectedHeaderReceived("color", "red");
endpointB.expectedBodiesReceived("A red car!");
endpointC.expectedBodiesReceived("Message1", "Message2");
endpointC.expectedMessageCount(2);
template.sendBodyAndHeader("sjms:queue:hello.JmsSelectorOptionTest", "A blue car!", "color", "blue");
template.sendBodyAndHeader("sjms:queue:hello.JmsSelectorOptionTest", "A red car!", "color", "red");
template.sendBodyAndHeader("sjms:queue:hello.JmsSelectorOptionTest", "A blue car, again!", "color", "blue");
template.sendBodyAndHeader("sjms:queue:hello.JmsSelectorOptionTest", "Message1", "SIZE_NUMBER", 1505);
template.sendBodyAndHeader("sjms:queue:hello.JmsSelectorOptionTest", "Message3", "SIZE_NUMBER", 1300);
template.sendBodyAndHeader("sjms:queue:hello.JmsSelectorOptionTest", "Message2", "SIZE_NUMBER", 1600);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testConsumerTemplate() {
template.sendBodyAndHeader("sjms:queue:consumer.JmsSelectorOptionTest", "Message1", "SIZE_NUMBER", 1505);
template.sendBodyAndHeader("sjms:queue:consumer.JmsSelectorOptionTest", "Message3", "SIZE_NUMBER", 1300);
template.sendBodyAndHeader("sjms:queue:consumer.JmsSelectorOptionTest", "Message2", "SIZE_NUMBER", 1600);
Exchange ex = consumer.receive("sjms:queue:consumer.JmsSelectorOptionTest?messageSelector=SIZE_NUMBER<1500", 5000L);
Message message = ex.getIn();
int size = message.getHeader("SIZE_NUMBER", int.class);
assertEquals(1300, size, "The message header SIZE_NUMBER should be less than 1500");
assertEquals("Message3", message.getBody(), "The message body is wrong");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("sjms:queue:hello.JmsSelectorOptionTest?messageSelector=color='blue'").to("mock:a");
from("sjms:queue:hello.JmsSelectorOptionTest?messageSelector=color='red'").to("mock:b");
from("sjms:queue:hello.JmsSelectorOptionTest?messageSelector=SIZE_NUMBER>1500").to("mock:c");
}
};
}
}
|
JmsSelectorOptionTest
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestSupportsStagingTableFactory.java
|
{
"start": 6808,
"end": 7460
}
|
class ____ implements StagedTable {
private final String dataDir;
public TestStagedTable(String dataDir) {
this.dataDir = dataDir;
}
@Override
public void begin() {
JOB_STATUS_CHANGE_PROCESS.add("begin");
}
@Override
public void commit() {
JOB_STATUS_CHANGE_PROCESS.add("commit");
// Change hidden file to official file
new File(dataDir, "_data").renameTo(new File(dataDir, "data"));
}
@Override
public void abort() {
JOB_STATUS_CHANGE_PROCESS.add("abort");
}
}
}
|
TestStagedTable
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/cacheable/annotation/ExplicitlyCacheableEntity.java
|
{
"start": 337,
"end": 614
}
|
class ____ {
private Long id;
private String name;
@Id
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
ExplicitlyCacheableEntity
|
java
|
apache__camel
|
components/camel-saxon/src/test/java/org/apache/camel/language/xpath/XPathHeaderEnableSaxonJavaDslTest.java
|
{
"start": 1080,
"end": 2717
}
|
class ____ extends CamelTestSupport {
@Test
public void testChoiceWithHeaderSelectCamel() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:camel");
mock.expectedBodiesReceived("<name>King</name>");
mock.expectedHeaderReceived("type", "Camel");
template.sendBodyAndHeader("direct:in", "<name>King</name>", "type", "Camel");
mock.assertIsSatisfied();
}
@Test
public void testChoiceWithNoHeaderSelectDonkey() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:donkey");
mock.expectedBodiesReceived("<name>Kong</name>");
template.sendBody("direct:in", "<name>Kong</name>");
mock.assertIsSatisfied();
}
@Test
public void testChoiceWithNoHeaderSelectOther() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:other");
mock.expectedBodiesReceived("<name>Other</name>");
template.sendBody("direct:in", "<name>Other</name>");
mock.assertIsSatisfied();
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:in")
.choice()
.when(XPathBuilder.xpath("$type = 'Camel'").saxon())
.to("mock:camel")
.when(XPathBuilder.xpath("//name = 'Kong'").saxon())
.to("mock:donkey")
.otherwise()
.to("mock:other");
}
};
}
}
|
XPathHeaderEnableSaxonJavaDslTest
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/DataformatComponentBuilderFactory.java
|
{
"start": 1389,
"end": 1893
}
|
interface ____ {
/**
* Data Format (camel-dataformat)
* Use a Camel Data Format as a regular Camel Component.
*
* Category: core,transformation
* Since: 2.12
* Maven coordinates: org.apache.camel:camel-dataformat
*
* @return the dsl builder
*/
static DataformatComponentBuilder dataformat() {
return new DataformatComponentBuilderImpl();
}
/**
* Builder for the Data Format component.
*/
|
DataformatComponentBuilderFactory
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/cluster/ShutdownHeaders.java
|
{
"start": 1381,
"end": 2567
}
|
class ____
implements RuntimeMessageHeaders<
EmptyRequestBody, EmptyResponseBody, EmptyMessageParameters> {
private static final ShutdownHeaders INSTANCE = new ShutdownHeaders();
@Override
public Class<EmptyResponseBody> getResponseClass() {
return EmptyResponseBody.class;
}
@Override
public HttpResponseStatus getResponseStatusCode() {
return HttpResponseStatus.OK;
}
@Override
public Class<EmptyRequestBody> getRequestClass() {
return EmptyRequestBody.class;
}
@Override
public EmptyMessageParameters getUnresolvedMessageParameters() {
return EmptyMessageParameters.getInstance();
}
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.DELETE;
}
@Override
public String getTargetRestEndpointURL() {
return "/cluster";
}
public static ShutdownHeaders getInstance() {
return INSTANCE;
}
@Override
public String getDescription() {
return "Shuts down the cluster";
}
@Override
public String operationId() {
return "shutdownCluster";
}
}
|
ShutdownHeaders
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/multitenancy/TestingConnectionProvider.java
|
{
"start": 1293,
"end": 1584
}
|
class ____ {
private final String name;
private final ConnectionProvider connectionProvider;
public NamedConnectionProviderPair(String name, ConnectionProvider connectionProvider) {
this.name = name;
this.connectionProvider = connectionProvider;
}
}
}
|
NamedConnectionProviderPair
|
java
|
junit-team__junit5
|
junit-platform-reporting/src/main/java/org/junit/platform/reporting/open/xml/OpenTestReportGeneratingListener.java
|
{
"start": 5559,
"end": 15829
}
|
class ____ implements TestExecutionListener {
static final String ENABLED_PROPERTY_NAME = "junit.platform.reporting.open.xml.enabled";
static final String GIT_ENABLED_PROPERTY_NAME = "junit.platform.reporting.open.xml.git.enabled";
static final String SOCKET_PROPERTY_NAME = "junit.platform.reporting.open.xml.socket";
private final AtomicInteger idCounter = new AtomicInteger();
private final Map<UniqueId, String> inProgressIds = new ConcurrentHashMap<>();
private DocumentWriter<Events> eventsFileWriter = DocumentWriter.noop();
private final Path workingDir;
private @Nullable Path outputDir;
@SuppressWarnings("unused") // Used via ServiceLoader
public OpenTestReportGeneratingListener() {
this(Path.of(".").toAbsolutePath());
}
OpenTestReportGeneratingListener(Path workingDir) {
this.workingDir = workingDir;
}
@Override
public void testPlanExecutionStarted(TestPlan testPlan) {
ConfigurationParameters config = testPlan.getConfigurationParameters();
if (isEnabled(config)) {
NamespaceRegistry namespaceRegistry = NamespaceRegistry.builder(Namespace.REPORTING_CORE) //
.add("e", Namespace.REPORTING_EVENTS) //
.add("git", Namespace.REPORTING_GIT) //
.add("java", Namespace.REPORTING_JAVA) //
.add("junit", JUnitFactory.NAMESPACE, "https://schemas.junit.org/open-test-reporting/junit-1.9.xsd") //
.build();
outputDir = testPlan.getOutputDirectoryCreator().getRootDirectory();
try {
eventsFileWriter = createDocumentWriter(config, namespaceRegistry);
reportInfrastructure(config);
}
catch (Exception e) {
throw new JUnitException("Failed to initialize XML events writer", e);
}
}
}
private DocumentWriter<Events> createDocumentWriter(ConfigurationParameters config,
NamespaceRegistry namespaceRegistry) throws Exception {
return config.get(SOCKET_PROPERTY_NAME, Integer::valueOf) //
.map(port -> {
try {
Socket socket = new Socket(InetAddress.getLoopbackAddress(), port);
Writer writer = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8);
return Events.createDocumentWriter(namespaceRegistry, writer);
}
catch (Exception e) {
throw new JUnitException("Failed to connect to socket on port " + port, e);
}
}) //
.orElseGet(() -> {
try {
Path eventsXml = requireNonNull(outputDir).resolve("open-test-report.xml");
return Events.createDocumentWriter(namespaceRegistry, eventsXml);
}
catch (Exception e) {
throw new JUnitException("Failed to create XML events file", e);
}
});
}
private boolean isEnabled(ConfigurationParameters config) {
return config.getBoolean(ENABLED_PROPERTY_NAME).orElse(false);
}
private boolean isGitEnabled(ConfigurationParameters config) {
return config.getBoolean(GIT_ENABLED_PROPERTY_NAME).orElse(false);
}
@SuppressWarnings("EmptyCatch")
private void reportInfrastructure(ConfigurationParameters config) {
eventsFileWriter.append(infrastructure(), infrastructure -> {
try {
String hostName = InetAddress.getLocalHost().getHostName();
infrastructure.append(hostName(hostName));
}
catch (UnknownHostException ignored) {
}
infrastructure //
.append(userName(System.getProperty("user.name"))) //
.append(operatingSystem(System.getProperty("os.name"))) //
.append(cpuCores(Runtime.getRuntime().availableProcessors())) //
.append(javaVersion(System.getProperty("java.version"))) //
.append(fileEncoding(System.getProperty("file.encoding"))) //
.append(heapSize(), heapSize -> heapSize.withMax(Runtime.getRuntime().maxMemory()));
if (isGitEnabled(config)) {
GitInfoCollector.get(workingDir).ifPresent(git -> addGitInfo(infrastructure, git));
}
});
}
private void addGitInfo(Infrastructure infrastructure, GitInfoCollector git) {
git.getOriginUrl() //
.ifPresent(
gitUrl -> infrastructure.append(repository(), repository -> repository.withOriginUrl(gitUrl)));
git.getBranch() //
.ifPresent(branch -> infrastructure.append(branch(branch)));
git.getCommitHash() //
.ifPresent(gitCommitHash -> infrastructure.append(commit(gitCommitHash)));
git.getStatus() //
.ifPresent(statusOutput -> infrastructure.append(status(statusOutput),
status -> status.withClean(statusOutput.isEmpty())));
}
@Override
public void testPlanExecutionFinished(TestPlan testPlan) {
try {
eventsFileWriter.close();
}
catch (IOException e) {
throw new UncheckedIOException("Failed to close XML events file", e);
}
finally {
eventsFileWriter = DocumentWriter.noop();
}
}
@Override
public void executionSkipped(TestIdentifier testIdentifier, String reason) {
String id = String.valueOf(idCounter.incrementAndGet());
reportStarted(testIdentifier, id);
eventsFileWriter.append(finished(id, Instant.now()), //
finished -> finished.append(result(Result.Status.SKIPPED), result -> {
if (isNotBlank(reason)) {
result.append(reason(reason));
}
}));
}
@Override
public void executionStarted(TestIdentifier testIdentifier) {
String id = String.valueOf(idCounter.incrementAndGet());
inProgressIds.put(testIdentifier.getUniqueIdObject(), id);
reportStarted(testIdentifier, id);
}
private void reportStarted(TestIdentifier testIdentifier, String id) {
eventsFileWriter.append(started(id, Instant.now(), testIdentifier.getDisplayName()), started -> {
testIdentifier.getParentIdObject().ifPresent(parentId -> started.withParentId(inProgressIds.get(parentId)));
started.append(metadata(), metadata -> {
if (!testIdentifier.getTags().isEmpty()) {
metadata.append(tags(), tags -> //
testIdentifier.getTags().forEach(tag -> tags.append(tag(tag.getName()))));
}
metadata.append(uniqueId(testIdentifier.getUniqueId())) //
.append(legacyReportingName(testIdentifier.getLegacyReportingName())) //
.append(type(testIdentifier.getType()));
});
testIdentifier.getSource().ifPresent(
source -> started.append(sources(), sources -> addTestSource(source, sources)));
});
}
private void addTestSource(TestSource source, Sources sources) {
if (source instanceof CompositeTestSource compositeSource) {
compositeSource.getSources().forEach(it -> addTestSource(it, sources));
}
else if (source instanceof ClassSource classSource) {
sources.append(classSource(classSource.getClassName()), //
element -> classSource.getPosition().ifPresent(
filePosition -> element.addFilePosition(filePosition.getLine(), filePosition.getColumn())));
}
else if (source instanceof MethodSource methodSource) {
sources.append(methodSource(methodSource.getClassName(), methodSource.getMethodName()), element -> {
String methodParameterTypes = methodSource.getMethodParameterTypes();
if (methodParameterTypes != null) {
element.withMethodParameterTypes(methodParameterTypes);
}
});
}
else if (source instanceof ClasspathResourceSource classpathResourceSource) {
sources.append(classpathResourceSource(classpathResourceSource.getClasspathResourceName()), //
element -> classpathResourceSource.getPosition().ifPresent(
filePosition -> element.addFilePosition(filePosition.getLine(), filePosition.getColumn())));
}
else if (source instanceof PackageSource packageSource) {
sources.append(packageSource(packageSource.getPackageName()));
}
else if (source instanceof FileSource fileSource) {
sources.append(fileSource(fileSource.getFile()), //
element -> fileSource.getPosition().ifPresent(
filePosition -> element.addFilePosition(filePosition.getLine(), filePosition.getColumn())));
}
else if (source instanceof DirectorySource directorySource) {
sources.append(directorySource(directorySource.getFile()));
}
else if (source instanceof UriSource uriSource) {
sources.append(uriSource(uriSource.getUri()));
}
}
@Override
public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry entry) {
String id = inProgressIds.get(testIdentifier.getUniqueIdObject());
eventsFileWriter.append(reported(id, Instant.now()), //
reported -> reported.append(attachments(), //
attachments -> {
Map<String, String> keyValuePairs = entry.getKeyValuePairs();
if (keyValuePairs.containsKey(STDOUT_REPORT_ENTRY_KEY)
|| keyValuePairs.containsKey(STDERR_REPORT_ENTRY_KEY)) {
attachOutput(attachments, entry.getTimestamp(), keyValuePairs.get(STDOUT_REPORT_ENTRY_KEY),
"stdout");
attachOutput(attachments, entry.getTimestamp(), keyValuePairs.get(STDERR_REPORT_ENTRY_KEY),
"stderr");
}
else {
attachments.append(data(entry.getTimestamp()), //
data -> keyValuePairs.forEach(data::addEntry));
}
}));
}
private static void attachOutput(Attachments attachments, LocalDateTime timestamp, @Nullable String content,
String source) {
if (content != null) {
attachments.append(output(timestamp), output -> output.withSource(source).withContent(content));
}
}
@Override
public void fileEntryPublished(TestIdentifier testIdentifier, FileEntry entry) {
String id = inProgressIds.get(testIdentifier.getUniqueIdObject());
eventsFileWriter.append(reported(id, Instant.now()), //
reported -> reported.append(attachments(), attachments -> attachments.append(file(entry.getTimestamp()), //
file -> {
file.withPath(requireNonNull(outputDir).relativize(entry.getPath()).toString());
entry.getMediaType().ifPresent(file::withMediaType);
})));
}
@Override
public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) {
String id = inProgressIds.remove(testIdentifier.getUniqueIdObject());
eventsFileWriter.append(finished(id, Instant.now()), //
finished -> finished.append(result(convertStatus(testExecutionResult.getStatus())), //
result -> testExecutionResult.getThrowable() //
.ifPresent(throwable -> result.append(throwable(throwable)))));
}
private Result.Status convertStatus(TestExecutionResult.Status status) {
return switch (status) {
case FAILED -> Result.Status.FAILED;
case SUCCESSFUL -> Result.Status.SUCCESSFUL;
case ABORTED -> Result.Status.ABORTED;
};
}
}
|
OpenTestReportGeneratingListener
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/core/src/main/java/io/quarkus/bootstrap/classloading/ClassLoaderLimiter.java
|
{
"start": 6231,
"end": 6323
}
|
class ____ the test, and produce a failure.
* <p>
* Limitations: if the
|
during
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawPathHandle.java
|
{
"start": 1354,
"end": 3397
}
|
class ____ implements PathHandle {
private static final long serialVersionUID = 0x12ba4689510L;
public static final int MAX_SIZE = 1 << 20;
private transient ByteBuffer fd;
/**
* Store a reference to the given bytes as the serialized form.
* @param fd serialized bytes
*/
public RawPathHandle(ByteBuffer fd) {
this.fd = null == fd
? ByteBuffer.allocate(0)
: fd.asReadOnlyBuffer();
}
/**
* Initialize using a copy of bytes from the serialized handle.
* @param handle PathHandle to preserve in serialized form.
*/
public RawPathHandle(PathHandle handle) {
ByteBuffer hb = null == handle
? ByteBuffer.allocate(0)
: handle.bytes();
fd = ByteBuffer.allocate(hb.remaining());
fd.put(hb);
fd.flip();
}
@Override
public ByteBuffer bytes() {
return fd.asReadOnlyBuffer();
}
@Override
public boolean equals(Object other) {
if (!(other instanceof PathHandle)) {
return false;
}
PathHandle o = (PathHandle) other;
return bytes().equals(o.bytes());
}
@Override
public int hashCode() {
return bytes().hashCode();
}
@Override
public String toString() {
return bytes().toString();
}
private void writeObject(ObjectOutputStream out) throws IOException {
out.defaultWriteObject();
out.writeInt(fd.remaining());
if (fd.hasArray()) {
out.write(fd.array(), fd.position(), fd.remaining());
} else {
byte[] x = new byte[fd.remaining()];
fd.slice().get(x);
out.write(x);
}
}
private void readObject(ObjectInputStream in)
throws IOException, ClassNotFoundException {
in.defaultReadObject();
int len = in.readInt();
if (len < 0 || len > MAX_SIZE) {
throw new IOException("Illegal buffer length " + len);
}
byte[] x = new byte[len];
in.readFully(x);
fd = ByteBuffer.wrap(x);
}
private void readObjectNoData() throws ObjectStreamException {
throw new InvalidObjectException("Stream data required");
}
}
|
RawPathHandle
|
java
|
apache__camel
|
components/camel-jsonapi/src/test/java/org/apache/camel/component/jsonapi/FooBar.java
|
{
"start": 855,
"end": 1021
}
|
class ____ {
private String foo;
public String getFoo() {
return foo;
}
public void setFoo(String foo) {
this.foo = foo;
}
}
|
FooBar
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/config/builder/impl/DefaultAppenderRefComponentBuilder.java
|
{
"start": 1175,
"end": 1713
}
|
class ____ extends DefaultComponentAndConfigurationBuilder<AppenderRefComponentBuilder>
implements AppenderRefComponentBuilder {
public DefaultAppenderRefComponentBuilder(
final DefaultConfigurationBuilder<? extends Configuration> builder, final String ref) {
super(builder, "AppenderRef");
addAttribute("ref", ref);
}
@Override
public AppenderRefComponentBuilder add(final FilterComponentBuilder builder) {
return addComponent(builder);
}
}
|
DefaultAppenderRefComponentBuilder
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/validation/AbstractNormalScopedFinalTest.java
|
{
"start": 1027,
"end": 1196
}
|
class ____ {
@Produces
@ApplicationScoped
public FinalFoo foo = new FinalFoo();
}
@ApplicationScoped
static
|
FieldProducerWithFinalClass
|
java
|
apache__camel
|
components/camel-pqc/src/test/java/org/apache/camel/component/pqc/PQCBIKEGenerateEncapsulationAESTest.java
|
{
"start": 1629,
"end": 4766
}
|
class ____ extends CamelTestSupport {
@EndpointInject("mock:sign")
protected MockEndpoint resultSign;
@Produce("direct:sign")
protected ProducerTemplate templateSign;
@EndpointInject("mock:verify")
protected MockEndpoint resultVerify;
public PQCBIKEGenerateEncapsulationAESTest() throws NoSuchAlgorithmException {
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:sign").to("pqc:keyenc?operation=generateSecretKeyEncapsulation&symmetricKeyAlgorithm=AES")
.to("mock:sign")
.to("pqc:keyenc?operation=extractSecretKeyEncapsulation&symmetricKeyAlgorithm=AES").to("mock:verify");
}
};
}
@BeforeAll
public static void startup() throws Exception {
Security.addProvider(new BouncyCastleProvider());
Security.addProvider(new BouncyCastlePQCProvider());
}
@Test
void testSignAndVerify() throws Exception {
resultSign.expectedMessageCount(1);
resultVerify.expectedMessageCount(1);
templateSign.sendBody("Hello");
resultSign.assertIsSatisfied();
assertNotNull(resultSign.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class));
assertEquals(PQCSymmetricAlgorithms.AES.getAlgorithm(),
resultSign.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class).getAlgorithm());
SecretKeyWithEncapsulation secEncrypted
= resultSign.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class);
assertNotNull(resultVerify.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class));
assertEquals(PQCSymmetricAlgorithms.AES.getAlgorithm(),
resultVerify.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class).getAlgorithm());
SecretKeyWithEncapsulation secEncryptedExtracted
= resultVerify.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class);
assertTrue(Arrays.areEqual(secEncrypted.getEncoded(), secEncryptedExtracted.getEncoded()));
}
@BindToRegistry("Keypair")
public KeyPair setKeyPair() throws NoSuchAlgorithmException, NoSuchProviderException, InvalidAlgorithmParameterException {
KeyPairGenerator kpg = KeyPairGenerator.getInstance(PQCKeyEncapsulationAlgorithms.BIKE.getAlgorithm(),
PQCKeyEncapsulationAlgorithms.BIKE.getBcProvider());
kpg.initialize(BIKEParameterSpec.bike192, new SecureRandom());
KeyPair kp = kpg.generateKeyPair();
return kp;
}
@BindToRegistry("KeyGenerator")
public KeyGenerator setKeyGenerator()
throws NoSuchAlgorithmException, NoSuchProviderException {
KeyGenerator kg = KeyGenerator.getInstance(PQCKeyEncapsulationAlgorithms.BIKE.getAlgorithm(),
PQCKeyEncapsulationAlgorithms.BIKE.getBcProvider());
return kg;
}
}
|
PQCBIKEGenerateEncapsulationAESTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/TaskManagerLogUrlHeaders.java
|
{
"start": 1150,
"end": 2588
}
|
class ____
implements RuntimeMessageHeaders<
EmptyRequestBody, LogUrlResponse, JobTaskManagerMessageParameters> {
private static final TaskManagerLogUrlHeaders INSTANCE = new TaskManagerLogUrlHeaders();
private static final String URL =
"/jobs/:"
+ JobIDPathParameter.KEY
+ "/taskmanagers/:"
+ TaskManagerIdPathParameter.KEY
+ "/log-url";
private TaskManagerLogUrlHeaders() {}
@Override
public Class<EmptyRequestBody> getRequestClass() {
return EmptyRequestBody.class;
}
@Override
public Class<LogUrlResponse> getResponseClass() {
return LogUrlResponse.class;
}
@Override
public HttpResponseStatus getResponseStatusCode() {
return HttpResponseStatus.OK;
}
@Override
public JobTaskManagerMessageParameters getUnresolvedMessageParameters() {
return new JobTaskManagerMessageParameters();
}
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.GET;
}
@Override
public String getTargetRestEndpointURL() {
return URL;
}
public static TaskManagerLogUrlHeaders getInstance() {
return INSTANCE;
}
@Override
public String getDescription() {
return "Returns the log url of jobmanager of a specific job.";
}
}
|
TaskManagerLogUrlHeaders
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/expression/StandardBeanExpressionResolver.java
|
{
"start": 2732,
"end": 4458
}
|
class ____ implements BeanExpressionResolver {
/**
* System property to configure the maximum length for SpEL expressions: {@value}.
* <p>Can also be configured via the {@link SpringProperties} mechanism.
* @since 6.1.3
* @see SpelParserConfiguration#getMaximumExpressionLength()
*/
public static final String MAX_SPEL_EXPRESSION_LENGTH_PROPERTY_NAME = "spring.context.expression.maxLength";
/** Default expression prefix: "#{". */
public static final String DEFAULT_EXPRESSION_PREFIX = "#{";
/** Default expression suffix: "}". */
public static final String DEFAULT_EXPRESSION_SUFFIX = "}";
private String expressionPrefix = DEFAULT_EXPRESSION_PREFIX;
private String expressionSuffix = DEFAULT_EXPRESSION_SUFFIX;
private ExpressionParser expressionParser;
private final Map<String, Expression> expressionCache = new ConcurrentHashMap<>(256);
private final Map<BeanExpressionContext, StandardEvaluationContext> evaluationCache = new ConcurrentHashMap<>(8);
private final ParserContext beanExpressionParserContext = new ParserContext() {
@Override
public boolean isTemplate() {
return true;
}
@Override
public String getExpressionPrefix() {
return expressionPrefix;
}
@Override
public String getExpressionSuffix() {
return expressionSuffix;
}
};
/**
* Create a new {@code StandardBeanExpressionResolver} with default settings.
* <p>As of Spring Framework 6.1.3, the maximum SpEL expression length can be
* configured via the {@link #MAX_SPEL_EXPRESSION_LENGTH_PROPERTY_NAME} property.
*/
public StandardBeanExpressionResolver() {
this(null);
}
/**
* Create a new {@code StandardBeanExpressionResolver} with the given bean
|
StandardBeanExpressionResolver
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/url/UrlOnUrlParameterTest.java
|
{
"start": 756,
"end": 1744
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot(
jar -> jar.addClasses(Resource.class, Client.class))
.overrideConfigKey(
"quarkus.rest-client.\"io.quarkus.rest.client.reactive.url.UrlOnUrlParameterTest$Client\".uri",
"http://does-not-exist.io");
@RestClient
Client client;
@ConfigProperty(name = "quarkus.http.test-port")
Integer testPort;
@Test
public void testOverride() throws MalformedURLException {
String result = client.test(new URL(String.format("http://localhost:%d/", testPort)), "bar");
assertEquals("bar", result);
}
@Test
public void testNoOverride() {
assertThatThrownBy(() -> client.test(null, "bar")).cause().isInstanceOf(UnknownHostException.class);
}
@Path("test")
@RegisterRestClient(configKey = "client")
public
|
UrlOnUrlParameterTest
|
java
|
apache__flink
|
flink-metrics/flink-metrics-core/src/main/java/org/apache/flink/metrics/groups/CacheMetricGroup.java
|
{
"start": 1255,
"end": 2036
}
|
interface ____ extends MetricGroup {
/** The number of cache hits. */
void hitCounter(Counter hitCounter);
/** The number of cache misses. */
void missCounter(Counter missCounter);
/** The number of times to load data into cache from external system. */
void loadCounter(Counter loadCounter);
/** The number of load failures. */
void numLoadFailuresCounter(Counter numLoadFailuresCounter);
/** The time spent for the latest load operation. */
void latestLoadTimeGauge(Gauge<Long> latestLoadTimeGauge);
/** The number of records in cache. */
void numCachedRecordsGauge(Gauge<Long> numCachedRecordsGauge);
/** The number of bytes used by cache. */
void numCachedBytesGauge(Gauge<Long> numCachedBytesGauge);
}
|
CacheMetricGroup
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.