language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/ScriptExternalTest.java
|
{
"start": 1067,
"end": 1925
}
|
class ____ extends ContextTestSupport {
protected MockEndpoint resultEndpoint;
@Test
public void testScript() throws Exception {
resultEndpoint.expectedBodiesReceived("Hello");
sendBody("direct:start", "Hello");
resultEndpoint.assertIsSatisfied();
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
resultEndpoint = getMockEndpoint("mock:result");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
// should not affect the body
.script(simple("resource:classpath:org/apache/camel/processor/mysimplescript.txt")).to("mock:result");
}
};
}
}
|
ScriptExternalTest
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/liveobject/condition/GTCondition.java
|
{
"start": 749,
"end": 1108
}
|
class ____ implements Condition {
private final String name;
private final Number value;
public GTCondition(String name, Number value) {
super();
this.name = name;
this.value = value;
}
public String getName() {
return name;
}
public Number getValue() {
return value;
}
}
|
GTCondition
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/NamespaceHttpX509Tests.java
|
{
"start": 7904,
"end": 8591
}
|
class ____ {
@Bean
UserDetailsService userDetailsService() {
UserDetails user = User.withDefaultPasswordEncoder()
.username("rod")
.password("password")
.roles("USER", "ADMIN")
.build();
return new InMemoryUserDetailsManager(user);
}
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.anyRequest().hasRole("USER"))
.x509((x509) -> x509
.subjectPrincipalRegex("CN=(.*?)@example.com(?:,|$)"));
// @formatter:on
return http.build();
}
}
@EnableWebMvc
@Configuration
@EnableWebSecurity
public static
|
SubjectPrincipalRegexConfig
|
java
|
apache__kafka
|
trogdor/src/test/java/org/apache/kafka/trogdor/workload/ShareConsumeBenchSpecTest.java
|
{
"start": 1123,
"end": 2561
}
|
class ____ {
@Test
public void testExpandTopicNames() {
ShareConsumeBenchSpec shareConsumeBenchSpec = shareConsumeBenchSpec(List.of("foo[1-3]", "bar"));
Set<String> expectedNames = new HashSet<>();
expectedNames.add("foo1");
expectedNames.add("foo2");
expectedNames.add("foo3");
expectedNames.add("bar");
assertEquals(expectedNames, shareConsumeBenchSpec.expandTopicNames());
}
@Test
public void testInvalidNameRaisesException() {
for (String invalidName : List.of("In:valid", "invalid:", ":invalid[]", "in:valid:", "invalid[1-3]:")) {
assertThrows(IllegalArgumentException.class, () -> shareConsumeBenchSpec(List.of(invalidName)).expandTopicNames());
}
}
@Test
public void testDefaultShareGroupName() {
ShareConsumeBenchSpec shareConsumeBenchSpec = new ShareConsumeBenchSpec(0, 0, "node", "localhost",
123, 1234, null, Map.of(), Map.of(), Map.of(), 1,
Optional.empty(), List.of("abc"));
assertEquals("share", shareConsumeBenchSpec.shareGroup());
}
private ShareConsumeBenchSpec shareConsumeBenchSpec(List<String> activeTopics) {
return new ShareConsumeBenchSpec(0, 0, "node", "localhost",
123, 1234, "sg-1",
Map.of(), Map.of(), Map.of(), 1,
Optional.empty(), activeTopics);
}
}
|
ShareConsumeBenchSpecTest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentFactoryTest.java
|
{
"start": 2628,
"end": 3418
}
|
interface ____ {",
" TestComponent newTestComponent(TestModule mod);",
" }",
"}");
CompilerTests.daggerCompiler(moduleFile, componentFile)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerTestComponent"));
});
}
@Test
public void testSetterMethodFails() {
Source componentFile =
CompilerTests.javaSource(
"test.SimpleComponent",
"package test;",
"",
"import dagger.Component;",
"import javax.inject.Provider;",
"",
"@Component",
"abstract
|
Factory
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/logging/DecorateStackUtil.java
|
{
"start": 292,
"end": 2162
}
|
class ____ {
public static String getDecoratedString(final Throwable throwable, String srcMainJava, List<String> knowClasses) {
if (srcMainJava != null) {
return DecorateStackUtil.getDecoratedString(throwable, Path.of(srcMainJava), knowClasses);
}
return null;
}
public static String getDecoratedString(final Throwable throwable, Path srcMainJava, List<String> knowClasses) {
if (knowClasses != null && !knowClasses.isEmpty() && throwable != null) {
StackTraceElement[] stackTrace = throwable.getStackTrace();
for (StackTraceElement elem : stackTrace) {
if (knowClasses.contains(elem.getClassName())) {
String decoratedString = DecorateStackUtil.getDecoratedString(srcMainJava, elem);
if (decoratedString != null) {
return decoratedString;
}
}
}
}
return null;
}
public static String getDecoratedString(Path srcMainJava, StackTraceElement stackTraceElement) {
int lineNumber = stackTraceElement.getLineNumber();
if (lineNumber > 0 && srcMainJava != null) {
String fullJavaFileName = getFullPath(stackTraceElement.getClassName(), stackTraceElement.getFileName());
Path f = srcMainJava.resolve(fullJavaFileName);
return getDecoratedString(stackTraceElement, f, lineNumber);
}
return null;
}
public static String getDecoratedString(StackTraceElement stackTraceElement, List<Path> workspacePaths) {
if (stackTraceElement == null || workspacePaths == null || workspacePaths.isEmpty()) {
return null;
}
int lineNumber = stackTraceElement.getLineNumber();
if (lineNumber > 0) {
// Convert the
|
DecorateStackUtil
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/ext/ExternalTypeId2588Test.java
|
{
"start": 773,
"end": 900
}
|
class ____ implements Animal {
public boolean alive;
}
@JsonIgnoreProperties(ignoreUnknown = true)
static
|
Wolf
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/ContextLossDetectionTest.java
|
{
"start": 9583,
"end": 10079
}
|
class ____<T> extends ForeignOperator<T> implements CoreSubscriber<T> {
private final @Nullable Context lossyContext;
CoreLossyOperator(Subscriber<? super T> subscriber, @Nullable Context lossyContext) {
super(subscriber);
this.lossyContext = lossyContext;
}
@Override
public Context currentContext() {
if (this.lossyContext == null) {
return CoreSubscriber.super.currentContext();
}
return this.lossyContext;
}
}
}
static abstract
|
CoreLossyOperator
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/service/repository/ConfigRowMapperInjector.java
|
{
"start": 16771,
"end": 17607
}
|
class ____ implements RowMapper<ConfigAdvanceInfo> {
@Override
public ConfigAdvanceInfo mapRow(ResultSet rs, int rowNum) throws SQLException {
ConfigAdvanceInfo info = new ConfigAdvanceInfo();
info.setCreateTime(rs.getTimestamp("gmt_modified").getTime());
info.setModifyTime(rs.getTimestamp("gmt_modified").getTime());
info.setCreateUser(rs.getString("src_user"));
info.setCreateIp(rs.getString("src_ip"));
info.setDesc(rs.getString("c_desc"));
info.setUse(rs.getString("c_use"));
info.setEffect(rs.getString("effect"));
info.setType(rs.getString("type"));
info.setSchema(rs.getString("c_schema"));
return info;
}
}
public static final
|
ConfigAdvanceInfoRowMapper
|
java
|
apache__camel
|
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailProducerUnsupportedCharsetTest.java
|
{
"start": 1428,
"end": 4340
}
|
class ____ extends CamelTestSupport {
private static final MailboxUser jones = Mailbox.getOrCreateUser("jones", "secret");
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testSencUnsupportedCharset() throws Exception {
Mailbox.clearAll();
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from(jones.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100&ignoreUnsupportedCharset=true")
.to("mock:result");
}
});
context.start();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World\r\n", "Bye World\r\n");
mock.allMessages().header("Content-Type").isEqualTo("text/plain");
Map<String, Object> headers = new HashMap<>();
headers.put("To", "jones@localhost");
headers.put("Content-Type", "text/plain");
template.sendBodyAndHeaders(jones.uriPrefix(Protocol.smtp) + "&ignoreUnsupportedCharset=true", "Hello World", headers);
headers.clear();
headers.put("To", "jones@localhost");
headers.put("Content-Type", "text/plain; charset=ansi_x3.110-1983");
template.sendBodyAndHeaders(jones.uriPrefix(Protocol.smtp) + "&ignoreUnsupportedCharset=true", "Bye World", headers);
mock.assertIsSatisfied();
}
@Test
public void testSencUnsupportedCharsetDisabledOption() throws Exception {
Mailbox.clearAll();
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from(jones.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100&ignoreUnsupportedCharset=false")
.to("mock:result");
}
});
context.start();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World\r\n");
mock.allMessages().header("Content-Type").isEqualTo("text/plain");
Map<String, Object> headers = new HashMap<>();
headers.put("To", "jones@localhost");
headers.put("Content-Type", "text/plain");
template.sendBodyAndHeaders(jones.uriPrefix(Protocol.smtp) + "&ignoreUnsupportedCharset=false", "Hello World", headers);
headers.clear();
headers.put("To", "jones@localhost");
headers.put("Content-Type", "text/plain; charset=XXX");
try {
template.sendBodyAndHeaders(jones.uriPrefix(Protocol.smtp) + "&ignoreUnsupportedCharset=false", "Bye World",
headers);
fail("Should have thrown an exception");
} catch (RuntimeCamelException e) {
assertIsInstanceOf(UnsupportedEncodingException.class, e.getCause());
}
mock.assertIsSatisfied();
}
}
|
MailProducerUnsupportedCharsetTest
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/JSONValidator.java
|
{
"start": 10462,
"end": 12335
}
|
class ____ extends JSONValidator {
private final static ThreadLocal<byte[]> bufLocal = new ThreadLocal<byte[]>();
private final InputStream is;
private byte[] buf;
private int end = -1;
private int readCount = 0;
public UTF8InputStreamValidator(InputStream is) {
this.is = is;
buf = bufLocal.get();
if (buf != null) {
bufLocal.set(null);
} else {
buf = new byte[1024 * 8];
}
next();
skipWhiteSpace();
}
void next() {
if (pos < end) {
ch = (char) buf[++pos];
} else {
if (!eof) {
int len;
try {
len = is.read(buf, 0, buf.length);
readCount++;
} catch (IOException ex) {
throw new JSONException("read error");
}
if (len > 0) {
ch = (char) buf[0];
pos = 0;
end = len - 1;
}
else if (len == -1) {
pos = 0;
end = 0;
buf = null;
ch = '\0';
eof = true;
} else {
pos = 0;
end = 0;
buf = null;
ch = '\0';
eof = true;
throw new JSONException("read error");
}
}
}
}
public void close() throws IOException {
bufLocal.set(buf);
is.close();
}
}
static
|
UTF8InputStreamValidator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/IllegalArgumentExceptionTest.java
|
{
"start": 1022,
"end": 7219
}
|
class ____ {
@Test
public void testCriteriaTupleQuerySameAlias(EntityManagerFactoryScope scope) {
final CriteriaQuery<Tuple> query = scope.getEntityManagerFactory().getCriteriaBuilder().createTupleQuery();
final Root<Person> person = query.from( Person.class );
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
List list = new ArrayList();
list.add( person.get( "id" ).alias( "a" ) );
list.add( person.get( "name" ).alias( "a" ) );
query.multiselect( list );
}
);
}
@Test
public void testCriteriaTupleQuerySameAlias1(EntityManagerFactoryScope scope) {
final CriteriaQuery<Tuple> query = scope.getEntityManagerFactory().getCriteriaBuilder().createTupleQuery();
final Root<Person> person = query.from( Person.class );
Selection[] selection = {
person.get( "id" ).alias( "a" ),
person.get( "name" ).alias( "a" )
};
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
query.multiselect( selection )
);
}
@Test
public void testCriteriaTupleQueryNonExistingAttributeNames(EntityManagerFactoryScope scope) {
final CriteriaQuery<Tuple> query = scope.getEntityManagerFactory().getCriteriaBuilder().createTupleQuery();
final Root<Person> person = query.from( Person.class );
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
query.multiselect(
person.get( "not_existing_attribute_name" ).alias( "a1" ),
person.get( "another_not_existing_attribute_name" ).alias( "a2" )
)
);
}
@Test
public void testCriteriaStringQuery(EntityManagerFactoryScope scope) {
final CriteriaQuery<String> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( String.class );
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
final Root<Person> person = query.from( Person.class );
person.get( "not_existing_attribute_name" );
}
);
}
@Test
public void testGetStringNonExistingAttributeName(EntityManagerFactoryScope scope) {
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
final CriteriaQuery<Person> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( Person.class );
query.from( Person.class ).get( "not_existing_attribute_name" );
}
);
}
@Test
public void testJoinANonExistingAttributeNameToAFrom(EntityManagerFactoryScope scope) {
final CriteriaQuery<Person> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( Person.class );
final From<Person, Person> customer = query.from( Person.class );
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
customer.join( "not_existing_attribute_name" )
);
}
@Test
public void testJoinANonExistingAttributeNameToAFrom2(EntityManagerFactoryScope scope) {
final CriteriaQuery<Person> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( Person.class );
final From<Person, Person> customer = query.from( Person.class );
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
customer.join( "not_existing_attribute_name", JoinType.INNER )
);
}
@Test
public void testJoinANonExistingAttributeNameToAJoin(EntityManagerFactoryScope scope) {
final CriteriaQuery<Person> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( Person.class );
final Root<Person> customer = query.from( Person.class );
final Join<Person, Address> address = customer.join( "address" );
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
address.join( "not_existing_attribute_name" )
);
}
@Test
public void testJoinANonExistingAttributeNameToAJoin2(EntityManagerFactoryScope scope) {
final CriteriaQuery<Person> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( Person.class );
final Root<Person> customer = query.from( Person.class );
final Join<Person, Address> address = customer.join( "address" );
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
address.join( "not_existing_attribute_name", JoinType.INNER )
);
}
@Test
public void fetchFetchStringIllegalArgumentExceptionTest(EntityManagerFactoryScope scope) {
final CriteriaQuery<Person> query = scope.getEntityManagerFactory()
.getCriteriaBuilder()
.createQuery( Person.class );
final From<Person, Person> customer = query.from( Person.class );
final Fetch f = customer.fetch( "address" );
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
f.fetch( "not_existing_attribute_name" )
);
}
@Test
public void testHqlQueryWithWrongSemantic(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager -> {
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
entityManager.createQuery( "Seletc p" ).getResultList()
);
}
);
}
@Test
public void testCriteriaNullReturnType(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager -> {
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
CriteriaBuilder criteriaBuilder = scope.getEntityManagerFactory().getCriteriaBuilder();
CriteriaQuery criteriaQuery = criteriaBuilder.createQuery( null );
entityManager.createQuery( criteriaQuery ).getResultList();
}
);
}
);
}
@Test
public void testNonExistingNativeQuery(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager ->
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
entityManager.createNamedQuery( "NonExisting_NativeQuery" );
}
)
);
}
@Test
public void testQueryWrongReturnType(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
entityManager.createQuery( "select p from Peron p", Integer.class ).getResultList();
}
);
}
);
}
@Entity(name = "Person")
public static
|
IllegalArgumentExceptionTest
|
java
|
apache__kafka
|
server/src/main/java/org/apache/kafka/server/AssignmentsManagerDeadlineFunction.java
|
{
"start": 1082,
"end": 1180
}
|
class ____ when the MaybeSendAssignmentsEvent should run for AssignmentsManager.
*/
public
|
calculates
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1552/Issue1552Mapper.java
|
{
"start": 369,
"end": 933
}
|
interface ____ {
Issue1552Mapper INSTANCE = Mappers.getMapper( Issue1552Mapper.class );
@Mappings({
@Mapping(target = "first.value", constant = "constant"),
@Mapping(target = "second.value", source = "sourceTwo")
})
Target twoArgsWithConstant(String sourceOne, String sourceTwo);
@Mappings({
@Mapping(target = "first.value", expression = "java(\"expression\")"),
@Mapping(target = "second.value", source = "sourceTwo")
})
Target twoArgsWithExpression(String sourceOne, String sourceTwo);
}
|
Issue1552Mapper
|
java
|
google__guice
|
core/src/com/google/inject/name/NamedImpl.java
|
{
"start": 812,
"end": 1673
}
|
class ____ implements Named, Serializable {
private final String value;
public NamedImpl(String value) {
this.value = checkNotNull(value, "name");
}
@Override
public String value() {
return this.value;
}
@Override
public int hashCode() {
// This is specified in java.lang.Annotation.
return (127 * "value".hashCode()) ^ value.hashCode();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Named)) {
return false;
}
Named other = (Named) o;
return value.equals(other.value());
}
@Override
public String toString() {
return '@' + Named.class.getName() + '(' + Annotations.memberValueString("value", value) + ')';
}
@Override
public Class<? extends Annotation> annotationType() {
return Named.class;
}
private static final long serialVersionUID = 0;
}
|
NamedImpl
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/metadata/InjectionPointMetadataTest.java
|
{
"start": 1327,
"end": 8148
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Controller.class, Controlled.class);
@SuppressWarnings({ "unchecked", "rawtypes", "serial" })
@Test
public void testInjectionPointMetadata() {
ArcContainer arc = Arc.container();
Controller controller = arc.instance(Controller.class).get();
// Field
InjectionPoint injectionPoint = controller.controlled.injectionPoint;
assertNotNull(injectionPoint);
assertEquals(Controlled.class, injectionPoint.getType());
Set<Annotation> qualifiers = injectionPoint.getQualifiers();
assertEquals(1, qualifiers.size());
assertEquals(Default.class, qualifiers.iterator().next().annotationType());
Bean<?> bean = injectionPoint.getBean();
assertNotNull(bean);
assertTrue(bean.getTypes().stream().anyMatch(t -> t.equals(Controller.class)));
assertNotNull(injectionPoint.getAnnotated());
assertTrue(injectionPoint.getAnnotated() instanceof AnnotatedField);
AnnotatedField<Controller> annotatedField = (AnnotatedField<Controller>) injectionPoint.getAnnotated();
assertEquals("controlled", annotatedField.getJavaMember().getName());
assertEquals(Controlled.class, annotatedField.getBaseType());
assertEquals(2, annotatedField.getAnnotations().size());
assertTrue(annotatedField.isAnnotationPresent(Inject.class));
assertTrue(annotatedField.isAnnotationPresent(FooAnnotation.class));
assertFalse(annotatedField.isAnnotationPresent(Deprecated.class));
assertTrue(annotatedField.getAnnotation(Singleton.class) == null);
assertTrue(annotatedField.getAnnotations(Singleton.class).isEmpty());
assertFalse(injectionPoint.isTransient());
// Transient field
InjectionPoint transientInjectionPoint = controller.transientControlled.injectionPoint;
assertNotNull(transientInjectionPoint);
assertTrue(transientInjectionPoint.isTransient());
// Method
InjectionPoint methodInjectionPoint = controller.controlledMethod.injectionPoint;
assertNotNull(methodInjectionPoint);
assertEquals(Controlled.class, methodInjectionPoint.getType());
assertTrue(methodInjectionPoint.getAnnotated() instanceof AnnotatedParameter);
assertEquals(bean, methodInjectionPoint.getBean());
AnnotatedParameter<Controller> methodParam = (AnnotatedParameter<Controller>) methodInjectionPoint.getAnnotated();
assertEquals(0, methodParam.getPosition());
assertEquals(Controller.class, methodParam.getDeclaringCallable().getJavaMember().getDeclaringClass());
assertEquals("setControlled", methodParam.getDeclaringCallable().getJavaMember().getName());
assertFalse(methodInjectionPoint.isTransient());
// Constructor
InjectionPoint ctorInjectionPoint = controller.controlledCtor.injectionPoint;
assertNotNull(ctorInjectionPoint);
assertEquals(Controlled.class, ctorInjectionPoint.getType());
assertTrue(ctorInjectionPoint.getAnnotated() instanceof AnnotatedParameter);
assertEquals(bean, ctorInjectionPoint.getBean());
AnnotatedParameter<Controller> ctorParam = (AnnotatedParameter<Controller>) ctorInjectionPoint.getAnnotated();
assertEquals(1, ctorParam.getPosition());
assertTrue(ctorParam.isAnnotationPresent(Singleton.class));
assertTrue(ctorParam.getAnnotation(Singleton.class) != null);
assertTrue(!ctorParam.getAnnotations(Singleton.class).isEmpty());
assertEquals(1, ctorParam.getAnnotations().size());
assertTrue(ctorParam.getDeclaringCallable() instanceof AnnotatedConstructor);
assertEquals(Controller.class, ctorParam.getDeclaringCallable().getJavaMember().getDeclaringClass());
assertFalse(ctorInjectionPoint.isTransient());
// Instance
InjectionPoint instanceInjectionPoint = controller.instanceControlled.get().injectionPoint;
assertNotNull(instanceInjectionPoint);
assertEquals(Controlled.class, instanceInjectionPoint.getType());
qualifiers = instanceInjectionPoint.getQualifiers();
assertEquals(1, qualifiers.size());
assertEquals(Default.class, qualifiers.iterator().next().annotationType());
bean = instanceInjectionPoint.getBean();
assertNotNull(bean);
assertTrue(bean.getTypes().stream().anyMatch(t -> t.equals(Controller.class)));
assertNotNull(instanceInjectionPoint.getAnnotated());
assertTrue(instanceInjectionPoint.getAnnotated() instanceof AnnotatedField);
annotatedField = (AnnotatedField) instanceInjectionPoint.getAnnotated();
assertEquals("instanceControlled", annotatedField.getJavaMember().getName());
assertEquals(new TypeLiteral<Instance<Controlled>>() {
}.getType(), annotatedField.getBaseType());
assertTrue(annotatedField.isAnnotationPresent(Inject.class));
assertTrue(annotatedField.getAnnotation(Singleton.class) == null);
assertTrue(annotatedField.getAnnotations(Singleton.class).isEmpty());
assertEquals(1, annotatedField.getAnnotations().size());
assertFalse(instanceInjectionPoint.isTransient());
}
@SuppressWarnings({ "unchecked", "serial" })
@Test
public void testObserverInjectionPointMetadata() {
AtomicReference<InjectionPoint> ip = new AtomicReference<>();
Arc.container().beanManager().getEvent().select(new TypeLiteral<AtomicReference<InjectionPoint>>() {
}).fire(ip);
InjectionPoint injectionPoint = ip.get();
assertNotNull(injectionPoint);
assertEquals(Controlled.class, injectionPoint.getType());
Set<Annotation> qualifiers = injectionPoint.getQualifiers();
assertEquals(1, qualifiers.size());
assertEquals(Default.class, qualifiers.iterator().next().annotationType());
Assertions.assertNull(injectionPoint.getBean());
assertNotNull(injectionPoint.getAnnotated());
assertTrue(injectionPoint.getAnnotated() instanceof AnnotatedParameter);
AnnotatedParameter<Controller> annotatedParam = (AnnotatedParameter<Controller>) injectionPoint.getAnnotated();
assertEquals(Controlled.class, annotatedParam.getBaseType());
assertEquals(1, annotatedParam.getAnnotations().size());
assertFalse(annotatedParam.isAnnotationPresent(Inject.class));
assertTrue(annotatedParam.isAnnotationPresent(FooAnnotation.class));
assertTrue(annotatedParam.getAnnotation(Singleton.class) == null);
assertTrue(annotatedParam.getAnnotations(Singleton.class).isEmpty());
assertFalse(injectionPoint.isTransient());
}
@Singleton
static
|
InjectionPointMetadataTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 134826,
"end": 136877
}
|
class ____ extends ParserRuleContext {
public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); }
public MapExpressionContext mapExpression() {
return getRuleContext(MapExpressionContext.class,0);
}
@SuppressWarnings("this-escape")
public CommandNamedParametersContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_commandNamedParameters; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCommandNamedParameters(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitCommandNamedParameters(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitCommandNamedParameters(this);
else return visitor.visitChildren(this);
}
}
public final CommandNamedParametersContext commandNamedParameters() throws RecognitionException {
CommandNamedParametersContext _localctx = new CommandNamedParametersContext(_ctx, getState());
enterRule(_localctx, 96, RULE_commandNamedParameters);
try {
enterOuterAlt(_localctx, 1);
{
setState(517);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) {
case 1:
{
setState(515);
match(WITH);
setState(516);
mapExpression();
}
break;
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
CommandNamedParametersContext
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/materializedtable/AlterMaterializedTableResumeOperation.java
|
{
"start": 1210,
"end": 2389
}
|
class ____ extends AlterMaterializedTableOperation {
private final Map<String, String> dynamicOptions;
public AlterMaterializedTableResumeOperation(
ObjectIdentifier tableIdentifier, Map<String, String> options) {
super(tableIdentifier);
this.dynamicOptions = options;
}
public Map<String, String> getDynamicOptions() {
return dynamicOptions;
}
@Override
public TableResultInternal execute(Context ctx) {
throw new UnsupportedOperationException(
"AlterMaterializedTableResumeOperation doesn't support ExecutableOperation yet.");
}
@Override
public String asSummaryString() {
StringBuilder builder =
new StringBuilder(
String.format(
"ALTER MATERIALIZED TABLE %s RESUME",
tableIdentifier.asSummaryString()));
if (!dynamicOptions.isEmpty()) {
builder.append(
String.format(" WITH (%s)", OperationUtils.formatProperties(dynamicOptions)));
}
return builder.toString();
}
}
|
AlterMaterializedTableResumeOperation
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/junit/jupiter/RegisterExtensionSpringExtensionTests.java
|
{
"start": 2098,
"end": 7928
}
|
class ____ {
@RegisterExtension
static final SpringExtension springExtension = new SpringExtension();
@Autowired
Person dilbert;
@Autowired
List<Person> people;
@Autowired
Dog dog;
@Autowired
Cat cat;
@Autowired
List<Cat> cats;
@Value("${enigma}")
Integer enigma;
@Test
void applicationContextInjectedIntoMethod(ApplicationContext applicationContext) {
assertThat(applicationContext).as("ApplicationContext should have been injected by Spring").isNotNull();
assertThat(applicationContext.getBean("dilbert", Person.class)).isEqualTo(this.dilbert);
}
@Test
void genericApplicationContextInjectedIntoMethod(GenericApplicationContext applicationContext) {
assertThat(applicationContext).as("GenericApplicationContext should have been injected by Spring").isNotNull();
assertThat(applicationContext.getBean("dilbert", Person.class)).isEqualTo(this.dilbert);
}
@Test
void autowiredFields() {
assertThat(this.dilbert).as("Dilbert should have been @Autowired by Spring").isNotNull();
assertThat(this.dilbert.getName()).as("Person's name").isEqualTo("Dilbert");
assertThat(this.people).as("Number of people in context").hasSize(2);
assertThat(this.dog).as("Dogbert should have been @Autowired by Spring").isNotNull();
assertThat(this.dog.getName()).as("Dog's name").isEqualTo("Dogbert");
assertThat(this.cat).as("Catbert should have been @Autowired by Spring as the @Primary cat").isNotNull();
assertThat(this.cat.getName()).as("Primary cat's name").isEqualTo("Catbert");
assertThat(this.cats).as("Number of cats in context").hasSize(2);
assertThat(this.enigma).as("Enigma should have been injected via @Value by Spring").isNotNull();
assertThat(this.enigma).as("enigma").isEqualTo(42);
}
@Test
void autowiredParameterByTypeForSingleBean(@Autowired Dog dog) {
assertThat(dog).as("Dogbert should have been @Autowired by Spring").isNotNull();
assertThat(dog.getName()).as("Dog's name").isEqualTo("Dogbert");
}
@Test
void autowiredParameterByTypeForPrimaryBean(@Autowired Cat primaryCat) {
assertThat(primaryCat).as("Primary cat should have been @Autowired by Spring").isNotNull();
assertThat(primaryCat.getName()).as("Primary cat's name").isEqualTo("Catbert");
}
@Test
void autowiredParameterWithExplicitQualifier(@Qualifier("wally") Person person) {
assertThat(person).as("Wally should have been @Autowired by Spring").isNotNull();
assertThat(person.getName()).as("Person's name").isEqualTo("Wally");
}
/**
* NOTE: Test code must be compiled with "-g" (debug symbols) or "-parameters" in
* order for the parameter name to be used as the qualifier; otherwise, use
* {@code @Qualifier("wally")}.
*/
@Test
void autowiredParameterWithImplicitQualifierBasedOnParameterName(@Autowired Person wally) {
assertThat(wally).as("Wally should have been @Autowired by Spring").isNotNull();
assertThat(wally.getName()).as("Person's name").isEqualTo("Wally");
}
@Test
void autowiredParameterAsJavaUtilOptional(@Autowired Optional<Dog> dog) {
assertThat(dog).as("Optional dog should have been @Autowired by Spring").isNotNull();
assertThat(dog).as("Value of Optional should be 'present'").isPresent();
assertThat(dog.get().getName()).as("Dog's name").isEqualTo("Dogbert");
}
@Test
void autowiredParameterThatDoesNotExistAsJavaUtilOptional(@Autowired Optional<Number> number) {
assertThat(number).as("Optional number should have been @Autowired by Spring").isNotNull();
assertThat(number).as("Value of Optional number should not be 'present'").isNotPresent();
}
@Test
void autowiredParameterThatDoesNotExistButIsNotRequired(@Autowired(required = false) Number number) {
assertThat(number).as("Non-required number should have been @Autowired as 'null' by Spring").isNull();
}
@Test
void autowiredParameterOfList(@Autowired List<Person> peopleParam) {
assertThat(peopleParam).as("list of people should have been @Autowired by Spring").isNotNull();
assertThat(peopleParam).as("Number of people in context").hasSize(2);
}
@Test
void valueParameterWithPrimitiveType(@Value("99") int num) {
assertThat(num).isEqualTo(99);
}
@Test
void valueParameterFromPropertyPlaceholder(@Value("${enigma}") Integer enigmaParam) {
assertThat(enigmaParam).as("Enigma should have been injected via @Value by Spring").isNotNull();
assertThat(enigmaParam).as("enigma").isEqualTo(42);
}
@Test
void valueParameterFromDefaultValueForPropertyPlaceholder(@Value("${bogus:false}") Boolean defaultValue) {
assertThat(defaultValue).as("Default value should have been injected via @Value by Spring").isNotNull();
assertThat(defaultValue).as("default value").isFalse();
}
@Test
void valueParameterFromSpelExpression(@Value("#{@dilbert.name}") String name) {
assertThat(name).as(
"Dilbert's name should have been injected via SpEL expression in @Value by Spring").isNotNull();
assertThat(name).as("name from SpEL expression").isEqualTo("Dilbert");
}
@Test
void valueParameterFromSpelExpressionWithNestedPropertyPlaceholder(@Value("#{'Hello ' + ${enigma}}") String hello) {
assertThat(hello).as("hello should have been injected via SpEL expression in @Value by Spring").isNotNull();
assertThat(hello).as("hello from SpEL expression").isEqualTo("Hello 42");
}
@Test
void junitAndSpringMethodInjectionCombined(@Autowired Cat kittyCat, TestInfo testInfo, ApplicationContext context,
TestReporter testReporter) {
assertThat(testInfo).as("TestInfo should have been injected by JUnit").isNotNull();
assertThat(testReporter).as("TestReporter should have been injected by JUnit").isNotNull();
assertThat(context).as("ApplicationContext should have been injected by Spring").isNotNull();
assertThat(kittyCat).as("Cat should have been @Autowired by Spring").isNotNull();
}
}
|
RegisterExtensionSpringExtensionTests
|
java
|
quarkusio__quarkus
|
integration-tests/mongodb-client/src/main/java/io/quarkus/it/mongodb/BookCodecProvider.java
|
{
"start": 299,
"end": 682
}
|
class ____ a bean and is properly instantiated by the CDI container
private final Jsonb jsonb;
public BookCodecProvider(Jsonb jsonb) {
this.jsonb = jsonb;
}
@Override
public <T> Codec<T> get(Class<T> clazz, CodecRegistry registry) {
if (clazz == Book.class) {
return (Codec<T>) new BookCodec();
}
return null;
}
}
|
is
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMReconnect.java
|
{
"start": 4292,
"end": 11314
}
|
class ____ implements
EventHandler<RMNodeEvent> {
@Override
public void handle(RMNodeEvent event) {
rmNodeEvents.add(event);
}
}
ResourceTrackerService resourceTrackerService;
public void setUp() {
Configuration conf = new Configuration();
// Dispatcher that processes events inline
dispatcher = new InlineDispatcher();
dispatcher.register(RMNodeEventType.class,
new TestRMNodeEventDispatcher());
context = new RMContextImpl(dispatcher, null,
null, null, null, null, null, null, null, null);
dispatcher.register(SchedulerEventType.class,
new InlineDispatcher.EmptyEventHandler());
dispatcher.register(RMNodeEventType.class,
new NodeEventDispatcher(context));
NMLivelinessMonitor nmLivelinessMonitor = new NMLivelinessMonitor(
dispatcher);
nmLivelinessMonitor.init(conf);
nmLivelinessMonitor.start();
NodesListManager nodesListManager = new NodesListManager(context);
nodesListManager.init(conf);
RMContainerTokenSecretManager containerTokenSecretManager =
new RMContainerTokenSecretManager(conf);
containerTokenSecretManager.start();
NMTokenSecretManagerInRM nmTokenSecretManager =
new NMTokenSecretManagerInRM(conf);
nmTokenSecretManager.start();
resourceTrackerService = new ResourceTrackerService(context,
nodesListManager, nmLivelinessMonitor, containerTokenSecretManager,
nmTokenSecretManager);
resourceTrackerService.init(conf);
resourceTrackerService.start();
}
@AfterEach
public void tearDown() {
resourceTrackerService.stop();
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
public void testReconnect(SchedulerType type) throws Exception {
initTestNMReconnect(type);
String hostname1 = "localhost1";
Resource capability = Resources.createResource(1024);
RegisterNodeManagerRequest request1 = recordFactory
.newRecordInstance(RegisterNodeManagerRequest.class);
NodeId nodeId1 = NodeId.newInstance(hostname1, 0);
request1.setNodeId(nodeId1);
request1.setHttpPort(0);
request1.setResource(capability);
resourceTrackerService.registerNodeManager(request1);
assertEquals(RMNodeEventType.STARTED, rmNodeEvents.get(0).getType());
rmNodeEvents.clear();
resourceTrackerService.registerNodeManager(request1);
assertEquals(RMNodeEventType.RECONNECTED,
rmNodeEvents.get(0).getType());
rmNodeEvents.clear();
resourceTrackerService.registerNodeManager(request1);
capability = Resources.createResource(1024, 2);
request1.setResource(capability);
assertEquals(RMNodeEventType.RECONNECTED,
rmNodeEvents.get(0).getType());
}
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
public void testCompareRMNodeAfterReconnect(SchedulerType type) throws Exception {
initTestNMReconnect(type);
AbstractYarnScheduler scheduler = getScheduler();
Configuration yarnConf = new YarnConfiguration();
ConfigurationProvider configurationProvider =
ConfigurationProviderFactory.getConfigurationProvider(yarnConf);
configurationProvider.init(yarnConf);
context.setConfigurationProvider(configurationProvider);
RMNodeLabelsManager nlm = new RMNodeLabelsManager();
nlm.init(yarnConf);
nlm.start();
context.setNodeLabelManager(nlm);
scheduler.setRMContext(context);
scheduler.init(yarnConf);
scheduler.start();
dispatcher.register(SchedulerEventType.class, scheduler);
String hostname1 = "localhost1";
Resource capability = Resources.createResource(4096, 4);
RegisterNodeManagerRequest request1 = recordFactory
.newRecordInstance(RegisterNodeManagerRequest.class);
NodeId nodeId1 = NodeId.newInstance(hostname1, 0);
NodeStatus mockNodeStatus = createMockNodeStatus();
request1.setNodeId(nodeId1);
request1.setHttpPort(0);
request1.setResource(capability);
request1.setNodeStatus(mockNodeStatus);
resourceTrackerService.registerNodeManager(request1);
assertNotNull(context.getRMNodes().get(nodeId1));
// verify Scheduler and RMContext use same RMNode reference.
assertTrue(scheduler.getSchedulerNode(nodeId1).getRMNode() ==
context.getRMNodes().get(nodeId1));
assertEquals(context.getRMNodes().get(nodeId1).
getTotalCapability(), capability);
Resource capability1 = Resources.createResource(2048, 2);
request1.setResource(capability1);
resourceTrackerService.registerNodeManager(request1);
assertNotNull(context.getRMNodes().get(nodeId1));
// verify Scheduler and RMContext use same RMNode reference
// after reconnect.
assertTrue(scheduler.getSchedulerNode(nodeId1).getRMNode() ==
context.getRMNodes().get(nodeId1));
// verify RMNode's capability is changed.
assertEquals(context.getRMNodes().get(nodeId1).
getTotalCapability(), capability1);
nlm.stop();
scheduler.stop();
}
@SuppressWarnings("unchecked")
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
@Timeout(10)
public void testDecommissioningNodeReconnect(SchedulerType type) throws Exception {
initTestNMReconnect(type);
MockRM rm = new MockRM();
rm.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
nm1.registerNode();
rm.waitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.getRMContext().getDispatcher().getEventHandler().handle(
new RMNodeEvent(nm1.getNodeId(),
RMNodeEventType.GRACEFUL_DECOMMISSION));
rm.waitForState(nm1.getNodeId(), NodeState.DECOMMISSIONING);
MockNM nm2 =
new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
RegisterNodeManagerResponse response = nm2.registerNode();
// not SHUTDOWN
assertTrue(response.getNodeAction().equals(NodeAction.NORMAL));
rm.stop();
}
@Timeout(10)
@ParameterizedTest(name = "{0}")
@MethodSource("getParameters")
public void testRMNodeStatusAfterReconnect(SchedulerType type) throws Exception {
initTestNMReconnect(type);
// The node(127.0.0.1:1234) reconnected with RM. When it registered with
// RM, RM set its lastNodeHeartbeatResponse's id to 0 asynchronously. But
// the node's heartbeat come before RM succeeded setting the id to 0.
MockRM rm = new MockRM();
rm.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
nm1.registerNode();
int i = 0;
while(i < 3) {
nm1.nodeHeartbeat(true);
rm.drainEvents();
i++;
}
MockNM nm2 =
new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
nm2.registerNode();
RMNode rmNode = rm.getRMContext().getRMNodes().get(nm2.getNodeId());
nm2.nodeHeartbeat(true);
rm.drainEvents();
assertEquals(NodeState.RUNNING, rmNode.getState(), "Node is Not in Running state.");
rm.stop();
}
}
|
TestRMNodeEventDispatcher
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/singletable/SingleTableOneToOneTest.java
|
{
"start": 7820,
"end": 7905
}
|
class ____ extends BaseClass {
}
@Entity(name = "Container1")
public static
|
SubClass2
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/net/impl/MessageWrite.java
|
{
"start": 515,
"end": 733
}
|
interface ____ {
/**
* Write the message.
*/
void write();
/**
* Cancel the write operation.
*
* @param cause the cancellation cause
*/
default void cancel(Throwable cause) {
}
}
|
MessageWrite
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/authentication/builders/AuthenticationManagerBuilder.java
|
{
"start": 2651,
"end": 12612
}
|
class ____
extends AbstractConfiguredSecurityBuilder<AuthenticationManager, AuthenticationManagerBuilder>
implements ProviderManagerBuilder<AuthenticationManagerBuilder> {
private final Log logger = LogFactory.getLog(getClass());
private AuthenticationManager parentAuthenticationManager;
private List<AuthenticationProvider> authenticationProviders = new ArrayList<>();
private UserDetailsService defaultUserDetailsService;
private Boolean eraseCredentials;
private AuthenticationEventPublisher eventPublisher;
/**
* Creates a new instance
* @param objectPostProcessor the {@link ObjectPostProcessor} instance to use.
*/
public AuthenticationManagerBuilder(ObjectPostProcessor<Object> objectPostProcessor) {
super(objectPostProcessor, true);
}
/**
* Allows providing a parent {@link AuthenticationManager} that will be tried if this
* {@link AuthenticationManager} was unable to attempt to authenticate the provided
* {@link Authentication}.
* @param authenticationManager the {@link AuthenticationManager} that should be used
* if the current {@link AuthenticationManager} was unable to attempt to authenticate
* the provided {@link Authentication}.
* @return the {@link AuthenticationManagerBuilder} for further adding types of
* authentication
*/
public AuthenticationManagerBuilder parentAuthenticationManager(AuthenticationManager authenticationManager) {
if (authenticationManager instanceof ProviderManager) {
eraseCredentials(((ProviderManager) authenticationManager).isEraseCredentialsAfterAuthentication());
}
this.parentAuthenticationManager = authenticationManager;
return this;
}
/**
* Sets the {@link AuthenticationEventPublisher}
* @param eventPublisher the {@link AuthenticationEventPublisher} to use
* @return the {@link AuthenticationManagerBuilder} for further customizations
*/
public AuthenticationManagerBuilder authenticationEventPublisher(AuthenticationEventPublisher eventPublisher) {
Assert.notNull(eventPublisher, "AuthenticationEventPublisher cannot be null");
this.eventPublisher = eventPublisher;
return this;
}
/**
* @param eraseCredentials true if {@link AuthenticationManager} should clear the
* credentials from the {@link Authentication} object after authenticating
* @return the {@link AuthenticationManagerBuilder} for further customizations
*/
public AuthenticationManagerBuilder eraseCredentials(boolean eraseCredentials) {
this.eraseCredentials = eraseCredentials;
return this;
}
/**
* Add in memory authentication to the {@link AuthenticationManagerBuilder} and return
* a {@link InMemoryUserDetailsManagerConfigurer} to allow customization of the in
* memory authentication.
*
* <p>
* This method also ensure that a {@link UserDetailsService} is available for the
* {@link #getDefaultUserDetailsService()} method. Note that additional
* {@link UserDetailsService}'s may override this {@link UserDetailsService} as the
* default.
* </p>
* @return a {@link InMemoryUserDetailsManagerConfigurer} to allow customization of
* the in memory authentication
* @throws Exception if an error occurs when adding the in memory authentication
*/
public InMemoryUserDetailsManagerConfigurer<AuthenticationManagerBuilder> inMemoryAuthentication() {
return apply(new InMemoryUserDetailsManagerConfigurer<>());
}
/**
* Add JDBC authentication to the {@link AuthenticationManagerBuilder} and return a
* {@link JdbcUserDetailsManagerConfigurer} to allow customization of the JDBC
* authentication.
*
* <p>
* When using with a persistent data store, it is best to add users external of
* configuration using something like <a href="https://flywaydb.org/">Flyway</a> or
* <a href="https://www.liquibase.org/">Liquibase</a> to create the schema and adding
* users to ensure these steps are only done once and that the optimal SQL is used.
* </p>
*
* <p>
* This method also ensure that a {@link UserDetailsService} is available for the
* {@link #getDefaultUserDetailsService()} method. Note that additional
* {@link UserDetailsService}'s may override this {@link UserDetailsService} as the
* default. See the <a href=
* "https://docs.spring.io/spring-security/reference/servlet/appendix/database-schema.html"
* >User Schema</a> section of the reference for the default schema.
* </p>
* @return a {@link JdbcUserDetailsManagerConfigurer} to allow customization of the
* JDBC authentication
* @throws Exception if an error occurs when adding the JDBC authentication
*/
public JdbcUserDetailsManagerConfigurer<AuthenticationManagerBuilder> jdbcAuthentication() {
return apply(new JdbcUserDetailsManagerConfigurer<>());
}
/**
* Add authentication based upon the custom {@link UserDetailsService} that is passed
* in. It then returns a {@link DaoAuthenticationConfigurer} to allow customization of
* the authentication.
*
* <p>
* This method also ensure that the {@link UserDetailsService} is available for the
* {@link #getDefaultUserDetailsService()} method. Note that additional
* {@link UserDetailsService}'s may override this {@link UserDetailsService} as the
* default.
* </p>
* @return a {@link DaoAuthenticationConfigurer} to allow customization of the DAO
* authentication
* @throws Exception if an error occurs when adding the {@link UserDetailsService}
* based authentication
*/
public <T extends UserDetailsService> DaoAuthenticationConfigurer<AuthenticationManagerBuilder, T> userDetailsService(
T userDetailsService) {
this.defaultUserDetailsService = userDetailsService;
return apply(new DaoAuthenticationConfigurer<>(userDetailsService));
}
/**
* Add LDAP authentication to the {@link AuthenticationManagerBuilder} and return a
* {@link LdapAuthenticationProviderConfigurer} to allow customization of the LDAP
* authentication.
*
* <p>
* This method <b>does NOT</b> ensure that a {@link UserDetailsService} is available
* for the {@link #getDefaultUserDetailsService()} method.
* @return a {@link LdapAuthenticationProviderConfigurer} to allow customization of
* the LDAP authentication
* @throws Exception if an error occurs when adding the LDAP authentication
*/
public LdapAuthenticationProviderConfigurer<AuthenticationManagerBuilder> ldapAuthentication() throws Exception {
LdapAuthenticationProviderConfigurer<AuthenticationManagerBuilder> ldap = new LdapAuthenticationProviderConfigurer<>();
with(ldap);
return ldap;
}
/**
* Add authentication based upon the custom {@link AuthenticationProvider} that is
* passed in. Since the {@link AuthenticationProvider} implementation is unknown, all
* customizations must be done externally and the {@link AuthenticationManagerBuilder}
* is returned immediately.
*
* <p>
* This method <b>does NOT</b> ensure that the {@link UserDetailsService} is available
* for the {@link #getDefaultUserDetailsService()} method.
*
* Note that an {@link Exception} might be thrown if an error occurs when adding the
* {@link AuthenticationProvider}.
* @return a {@link AuthenticationManagerBuilder} to allow further authentication to
* be provided to the {@link AuthenticationManagerBuilder}
*/
@Override
public AuthenticationManagerBuilder authenticationProvider(AuthenticationProvider authenticationProvider) {
this.authenticationProviders.add(authenticationProvider);
return this;
}
@Override
protected ProviderManager performBuild() {
if (!isConfigured()) {
this.logger.debug("No authenticationProviders and no parentAuthenticationManager defined. Returning null.");
return null;
}
ProviderManager providerManager = new ProviderManager(this.authenticationProviders,
this.parentAuthenticationManager);
if (this.eraseCredentials != null) {
providerManager.setEraseCredentialsAfterAuthentication(this.eraseCredentials);
}
if (this.eventPublisher != null) {
providerManager.setAuthenticationEventPublisher(this.eventPublisher);
}
providerManager = postProcess(providerManager);
return providerManager;
}
/**
* Determines if the {@link AuthenticationManagerBuilder} is configured to build a non
* null {@link AuthenticationManager}. This means that either a non-null parent is
* specified or at least one {@link AuthenticationProvider} has been specified.
*
* <p>
* When using {@link SecurityConfigurer} instances, the
* {@link AuthenticationManagerBuilder} will not be configured until the
* {@link SecurityConfigurer#configure(SecurityBuilder)} methods. This means a
* {@link SecurityConfigurer} that is last could check this method and provide a
* default configuration in the {@link SecurityConfigurer#configure(SecurityBuilder)}
* method.
* @return true, if {@link AuthenticationManagerBuilder} is configured, otherwise
* false
*/
public boolean isConfigured() {
return !this.authenticationProviders.isEmpty() || this.parentAuthenticationManager != null;
}
/**
* Gets the default {@link UserDetailsService} for the
* {@link AuthenticationManagerBuilder}. The result may be null in some circumstances.
* @return the default {@link UserDetailsService} for the
* {@link AuthenticationManagerBuilder}
*/
public UserDetailsService getDefaultUserDetailsService() {
return this.defaultUserDetailsService;
}
/**
* Captures the {@link UserDetailsService} from any {@link UserDetailsAwareConfigurer}
* .
* @param configurer the {@link UserDetailsAwareConfigurer} to capture the
* {@link UserDetailsService} from.
* @return the {@link UserDetailsAwareConfigurer} for further customizations
* @throws Exception if an error occurs
*/
private <C extends UserDetailsAwareConfigurer<AuthenticationManagerBuilder, ? extends UserDetailsService>> C apply(
C configurer) {
this.defaultUserDetailsService = configurer.getUserDetailsService();
with(configurer);
return configurer;
}
}
|
AuthenticationManagerBuilder
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RMapAsync.java
|
{
"start": 904,
"end": 1260
}
|
interface ____ Redis based implementation
* of {@link java.util.concurrent.ConcurrentMap} and {@link java.util.Map}
* <p>
* This map uses serialized state of key instead of hashCode or equals methods.
* This map doesn't allow to store <code>null</code> as key or value.
*
* @author Nikita Koksharov
*
* @param <K> key
* @param <V> value
*/
public
|
for
|
java
|
quarkusio__quarkus
|
extensions/elytron-security-ldap/deployment/src/test/java/io/quarkus/elytron/security/ldap/rest/ParametrizedPathsResource.java
|
{
"start": 220,
"end": 607
}
|
class ____ {
@GET
@Path("/my/{path}/admin")
@RolesAllowed("adminRole")
public String admin(@PathParam("path") String path) {
return "Admin accessed " + path;
}
@GET
@Path("/my/{path}/view")
@RolesAllowed("standardRole")
public String view(@PathParam("path") String path) {
return "View accessed " + path;
}
}
|
ParametrizedPathsResource
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/operators/AbstractOuterJoinDriver.java
|
{
"start": 2043,
"end": 8989
}
|
class ____<IT1, IT2, OT>
implements Driver<FlatJoinFunction<IT1, IT2, OT>, OT> {
protected static final Logger LOG = LoggerFactory.getLogger(AbstractOuterJoinDriver.class);
protected TaskContext<FlatJoinFunction<IT1, IT2, OT>, OT> taskContext;
protected volatile JoinTaskIterator<IT1, IT2, OT>
outerJoinIterator; // the iterator that does the actual outer join
protected volatile boolean running;
// ------------------------------------------------------------------------
@Override
public void setup(TaskContext<FlatJoinFunction<IT1, IT2, OT>, OT> context) {
this.taskContext = context;
this.running = true;
}
@Override
public int getNumberOfInputs() {
return 2;
}
@Override
public Class<FlatJoinFunction<IT1, IT2, OT>> getStubType() {
@SuppressWarnings("unchecked")
final Class<FlatJoinFunction<IT1, IT2, OT>> clazz =
(Class<FlatJoinFunction<IT1, IT2, OT>>) (Class<?>) FlatJoinFunction.class;
return clazz;
}
@Override
public int getNumberOfDriverComparators() {
return 2;
}
@Override
public void prepare() throws Exception {
final TaskConfig config = this.taskContext.getTaskConfig();
// obtain task manager's memory manager and I/O manager
final MemoryManager memoryManager = this.taskContext.getMemoryManager();
final IOManager ioManager = this.taskContext.getIOManager();
// set up memory and I/O parameters
final double driverMemFraction = config.getRelativeMemoryDriver();
final DriverStrategy ls = config.getDriverStrategy();
final Counter numRecordsIn =
this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
final MutableObjectIterator<IT1> in1 =
new CountingMutableObjectIterator<>(
this.taskContext.<IT1>getInput(0), numRecordsIn);
final MutableObjectIterator<IT2> in2 =
new CountingMutableObjectIterator<>(
this.taskContext.<IT2>getInput(1), numRecordsIn);
// get serializers and comparators
final TypeSerializer<IT1> serializer1 =
this.taskContext.<IT1>getInputSerializer(0).getSerializer();
final TypeSerializer<IT2> serializer2 =
this.taskContext.<IT2>getInputSerializer(1).getSerializer();
final TypeComparator<IT1> comparator1 = this.taskContext.getDriverComparator(0);
final TypeComparator<IT2> comparator2 = this.taskContext.getDriverComparator(1);
final TypePairComparatorFactory<IT1, IT2> pairComparatorFactory =
config.getPairComparatorFactory(this.taskContext.getUserCodeClassLoader());
if (pairComparatorFactory == null) {
throw new Exception("Missing pair comparator factory for outer join driver");
}
ExecutionConfig executionConfig = taskContext.getExecutionConfig();
boolean objectReuseEnabled = executionConfig.isObjectReuseEnabled();
if (LOG.isDebugEnabled()) {
LOG.debug(
"Outer Join Driver object reuse: "
+ (objectReuseEnabled ? "ENABLED" : "DISABLED")
+ ".");
}
// create and return outer join iterator according to provided local strategy.
if (objectReuseEnabled) {
this.outerJoinIterator =
getReusingOuterJoinIterator(
ls,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory,
memoryManager,
ioManager,
driverMemFraction);
} else {
this.outerJoinIterator =
getNonReusingOuterJoinIterator(
ls,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory,
memoryManager,
ioManager,
driverMemFraction);
}
this.outerJoinIterator.open();
if (LOG.isDebugEnabled()) {
LOG.debug(this.taskContext.formatLogString("outer join task iterator ready."));
}
}
@Override
public void run() throws Exception {
final Counter numRecordsOut =
this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
final FlatJoinFunction<IT1, IT2, OT> joinStub = this.taskContext.getStub();
final Collector<OT> collector =
new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
final JoinTaskIterator<IT1, IT2, OT> outerJoinIterator = this.outerJoinIterator;
while (this.running && outerJoinIterator.callWithNextKey(joinStub, collector)) {}
}
@Override
public void cleanup() throws Exception {
if (this.outerJoinIterator != null) {
this.outerJoinIterator.close();
this.outerJoinIterator = null;
}
}
@Override
public void cancel() {
this.running = false;
if (this.outerJoinIterator != null) {
this.outerJoinIterator.abort();
}
}
protected abstract JoinTaskIterator<IT1, IT2, OT> getReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction)
throws Exception;
protected abstract JoinTaskIterator<IT1, IT2, OT> getNonReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction)
throws Exception;
}
|
AbstractOuterJoinDriver
|
java
|
google__dagger
|
javatests/dagger/hilt/android/PackagePrivateConstructorTest.java
|
{
"start": 2240,
"end": 2385
}
|
class ____ extends Hilt_PackagePrivateConstructorTest_TestActivity {
}
@AndroidEntryPoint(BaseFragment.class)
public static final
|
TestActivity
|
java
|
google__dagger
|
javatests/dagger/hilt/processor/internal/root/MyTestPreviousCompilationTest.java
|
{
"start": 3805,
"end": 4804
}
|
class ____ extends Hilt_AppRoot {}");
// TODO(danysantiago): Add KSP test once b/288966076 is resolved.
compileWithKapt(
ImmutableList.of(appRoot),
processorOptions(),
tempFolderRule,
result -> {
if (disableCrossCompilationRootValidation) {
assertThat(result.getSuccess()).isTrue();
} else {
List<DiagnosticMessage> errors = result.getDiagnostics().get(Kind.ERROR);
assertThat(errors).hasSize(1);
assertThat(errors.get(0).getMsg())
.contains(
"Cannot process new roots when there are test roots from a previous "
+ "compilation unit:\n"
+ " Test roots from previous compilation unit: "
+ "dagger.hilt.processor.internal.root.MyTestPreviousCompilation.MyTest\n"
+ " All roots from this compilation unit: test.AppRoot");
}
});
}
}
|
AppRoot
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/tags/form/AbstractHtmlElementTag.java
|
{
"start": 1224,
"end": 1595
}
|
class ____ for rendering non-standard attributes
* as part of the tag's output. These attributes are accessible to subclasses if
* needed via the {@link AbstractHtmlElementTag#getDynamicAttributes() dynamicAttributes}
* map.
*
* @author Rob Harrop
* @author Jeremy Grelle
* @author Rossen Stoyanchev
* @since 2.0
*/
@SuppressWarnings("serial")
public abstract
|
allows
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java
|
{
"start": 1059,
"end": 2371
}
|
class ____ extends BinaryDateTimeProcessor {
public static final String NAME = "dtformat";
/**
* these characters have a meaning in MS date patterns.
* If a character is not in this set, then it's still allowed in MS FORMAT patters
* but not in Java, so it has to be translated or quoted
*/
private static final Set<Character> MS_DATETIME_PATTERN_CHARS = Set.of(
'd',
'f',
'F',
'g',
'h',
'H',
'K',
'm',
'M',
's',
't',
'y',
'z',
':',
'/',
' ',
'-'
);
/**
* characters that start a quoting block in MS patterns
*/
private static final Set<Character> MS_QUOTING_CHARS = Set.of('\\', '\'', '"');
/**
* list of MS datetime patterns with the corresponding translation in Java DateTimeFormat
* (patterns that are the same in Java and in MS are not listed here)
*/
private static final String[][] MS_TO_JAVA_PATTERNS = {
{ "tt", "a" },
{ "t", "a" },
{ "dddd", "eeee" },
{ "ddd", "eee" },
{ "K", "v" },
{ "g", "G" },
{ "f", "S" },
{ "F", "S" },
{ "z", "X" } };
private final Formatter formatter;
public
|
DateTimeFormatProcessor
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/main/java/org/apache/logging/log4j/test/junit/ThreadContextMapRule.java
|
{
"start": 1151,
"end": 1338
}
|
class ____ extends ThreadContextRule {
/**
* Constructs an initialized instance.
*/
public ThreadContextMapRule() {
super(true, false);
}
}
|
ThreadContextMapRule
|
java
|
apache__camel
|
components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMProducer.java
|
{
"start": 1484,
"end": 5771
}
|
class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(CMProducer.class);
private Validator validator;
/**
* sends a valid message to CM endpoints.
*/
private CMSender sender;
private final HttpClient client;
public CMProducer(final CMEndpoint endpoint, final CMSender sender) {
super(endpoint);
this.sender = sender;
this.client = endpoint.getHttpClient();
}
/**
* Producer is a exchange processor. This process is built in several steps. 1. Validate message receive from client
* 2. Send validated message to CM endpoints. 3. Process response from CM endpoints.
*/
@Override
public void process(final Exchange exchange) throws Exception {
// Immutable message receive from clients. Throws camel ' s
// InvalidPayloadException
final SMSMessage smsMessage = exchange.getIn().getMandatoryBody(SMSMessage.class);
// Validates Payload - SMSMessage
LOG.trace("Validating SMSMessage instance provided: {}", smsMessage);
final Set<ConstraintViolation<SMSMessage>> constraintViolations = getValidator().validate(smsMessage);
if (!constraintViolations.isEmpty()) {
final StringBuilder msg = new StringBuilder();
for (final ConstraintViolation<SMSMessage> cv : constraintViolations) {
msg.append(String.format("- Invalid value for %s: %s", cv.getPropertyPath().toString(), cv.getMessage()));
}
LOG.debug("SMS message: {}", msg);
throw new InvalidPayloadRuntimeException(exchange, SMSMessage.class);
}
LOG.trace("SMSMessage instance is valid: {}", smsMessage);
// We have a valid (immutable) SMSMessage instance, lets extend to
// CMMessage
// This is the instance we will use to build the XML document to be
// sent to CM SMS GW.
final CMMessage cmMessage = new CMMessage(smsMessage.getPhoneNumber(), smsMessage.getMessage());
LOG.debug("CMMessage instance build from valid SMSMessage instance");
if (smsMessage.getFrom() == null || smsMessage.getFrom().isEmpty()) {
String df = getConfiguration().getDefaultFrom();
cmMessage.setSender(df);
LOG.debug("Dynamic sender is set to default dynamic sender: {}", df);
}
// Remember, this can be null.
cmMessage.setIdAsString(smsMessage.getId());
// Unicode and multipart
cmMessage.setUnicodeAndMultipart(getConfiguration().getDefaultMaxNumberOfParts());
// 2. Send a validated sms message to CM endpoints
// for abnormal situations.
sender.send(cmMessage);
LOG.debug("Request accepted by CM Host: {}", cmMessage);
}
@Override
protected void doStart() throws Exception {
// log at debug level for singletons, for prototype scoped log at trace
// level to not spam logs
LOG.debug("Starting CMProducer");
final CMConfiguration configuration = getConfiguration();
if (configuration.isTestConnectionOnStartup()) {
try {
LOG.debug("Checking connection - {}", getEndpoint().getCMUrl());
client.execute(new HttpHead(getEndpoint().getCMUrl()), res -> null);
LOG.debug("Connection to {}: OK", getEndpoint().getCMUrl());
} catch (final Exception e) {
throw new HostUnavailableException(
String.format("Connection to %s: NOT AVAILABLE", getEndpoint().getCMUrl()), e);
}
}
// keep starting
super.doStart();
LOG.debug("CMProducer started");
}
@Override
public CMEndpoint getEndpoint() {
return (CMEndpoint) super.getEndpoint();
}
public CMConfiguration getConfiguration() {
return getEndpoint().getConfiguration();
}
public Validator getValidator() {
if (validator == null) {
validator = getEndpoint().getComponent().getValidator();
}
return validator;
}
public CMSender getSender() {
return sender;
}
public void setSender(CMSender sender) {
this.sender = sender;
}
}
|
CMProducer
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
|
{
"start": 8878,
"end": 18625
}
|
class ____
extends InputFormat<LongWritable, IntWritable> {
/** {@inheritDoc} */
public List<InputSplit> getSplits(JobContext context) {
//get the property values
final int startDigit = context.getConfiguration().getInt(
DIGIT_START_PROPERTY, 1);
final int nDigits = context.getConfiguration().getInt(
DIGIT_SIZE_PROPERTY, 100);
final int nMaps = context.getConfiguration().getInt(
DIGIT_PARTS_PROPERTY, 1);
//create splits
final List<InputSplit> splits = new ArrayList<InputSplit>(nMaps);
final int[] parts = partition(startDigit - 1, nDigits, nMaps);
for (int i = 0; i < parts.length; ++i) {
final int k = i < parts.length - 1 ? parts[i+1]: nDigits+startDigit-1;
splits.add(new BbpSplit(i, parts[i], k - parts[i]));
}
return splits;
}
/** {@inheritDoc} */
public RecordReader<LongWritable, IntWritable> createRecordReader(
InputSplit generic, TaskAttemptContext context) {
final BbpSplit split = (BbpSplit)generic;
//return a record reader
return new RecordReader<LongWritable, IntWritable>() {
boolean done = false;
public void initialize(InputSplit split, TaskAttemptContext context) {
}
public boolean nextKeyValue() {
//Each record only contains one key.
return !done ? done = true : false;
}
public LongWritable getCurrentKey() {
return new LongWritable(split.getOffset());
}
public IntWritable getCurrentValue() {
return new IntWritable((int)split.getLength());
}
public float getProgress() {
return done? 1f: 0f;
}
public void close() {
}
};
}
}
/** Create and setup a job */
private static Job createJob(String name, Configuration conf
) throws IOException {
final Job job = Job.getInstance(conf, NAME + "_" + name);
final Configuration jobconf = job.getConfiguration();
job.setJarByClass(BaileyBorweinPlouffe.class);
// setup mapper
job.setMapperClass(BbpMapper.class);
job.setMapOutputKeyClass(LongWritable.class);
job.setMapOutputValueClass(BytesWritable.class);
// setup reducer
job.setReducerClass(BbpReducer.class);
job.setOutputKeyClass(LongWritable.class);
job.setOutputValueClass(BytesWritable.class);
job.setNumReduceTasks(1);
// setup input
job.setInputFormatClass(BbpInputFormat.class);
// disable task timeout
jobconf.setLong(MRJobConfig.TASK_TIMEOUT, 0);
// do not use speculative execution
jobconf.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
jobconf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);
return job;
}
/** Run a map/reduce job to compute Pi. */
private static void compute(int startDigit, int nDigits, int nMaps,
String workingDir, Configuration conf, PrintStream out
) throws IOException {
final String name = startDigit + "_" + nDigits;
//setup wroking directory
out.println("Working Directory = " + workingDir);
out.println();
final FileSystem fs = FileSystem.get(conf);
final Path dir = fs.makeQualified(new Path(workingDir));
if (fs.exists(dir)) {
throw new IOException("Working directory " + dir
+ " already exists. Please remove it first.");
} else if (!fs.mkdirs(dir)) {
throw new IOException("Cannot create working directory " + dir);
}
out.println("Start Digit = " + startDigit);
out.println("Number of Digits = " + nDigits);
out.println("Number of Maps = " + nMaps);
// setup a job
final Job job = createJob(name, conf);
final Path hexfile = new Path(dir, "pi_" + name + ".hex");
FileOutputFormat.setOutputPath(job, new Path(dir, "out"));
// setup custom properties
job.getConfiguration().set(WORKING_DIR_PROPERTY, dir.toString());
job.getConfiguration().set(HEX_FILE_PROPERTY, hexfile.toString());
job.getConfiguration().setInt(DIGIT_START_PROPERTY, startDigit);
job.getConfiguration().setInt(DIGIT_SIZE_PROPERTY, nDigits);
job.getConfiguration().setInt(DIGIT_PARTS_PROPERTY, nMaps);
// start a map/reduce job
out.println("\nStarting Job ...");
final long startTime = Time.monotonicNow();
try {
if (!job.waitForCompletion(true)) {
out.println("Job failed.");
System.exit(1);
}
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
final double duration = (Time.monotonicNow() - startTime)/1000.0;
out.println("Duration is " + duration + " seconds.");
}
out.println("Output file: " + hexfile);
}
/**
* Parse arguments and then runs a map/reduce job.
* @return a non-zero value if there is an error. Otherwise, return 0.
*/
public int run(String[] args) throws IOException {
if (args.length != 4) {
System.err.println("Usage: bbp "
+ " <startDigit> <nDigits> <nMaps> <workingDir>");
ToolRunner.printGenericCommandUsage(System.err);
return -1;
}
final int startDigit = Integer.parseInt(args[0]);
final int nDigits = Integer.parseInt(args[1]);
final int nMaps = Integer.parseInt(args[2]);
final String workingDir = args[3];
if (startDigit <= 0) {
throw new IllegalArgumentException("startDigit = " + startDigit+" <= 0");
} else if (nDigits <= 0) {
throw new IllegalArgumentException("nDigits = " + nDigits + " <= 0");
} else if (nDigits % BBP_HEX_DIGITS != 0) {
throw new IllegalArgumentException("nDigits = " + nDigits
+ " is not a multiple of " + BBP_HEX_DIGITS);
} else if (nDigits - 1L + startDigit > IMPLEMENTATION_LIMIT + BBP_HEX_DIGITS) {
throw new UnsupportedOperationException("nDigits - 1 + startDigit = "
+ (nDigits - 1L + startDigit)
+ " > IMPLEMENTATION_LIMIT + BBP_HEX_DIGITS,"
+ ", where IMPLEMENTATION_LIMIT=" + IMPLEMENTATION_LIMIT
+ "and BBP_HEX_DIGITS=" + BBP_HEX_DIGITS);
} else if (nMaps <= 0) {
throw new IllegalArgumentException("nMaps = " + nMaps + " <= 0");
}
compute(startDigit, nDigits, nMaps, workingDir, getConf(), System.out);
return 0;
}
/** The main method for running it as a stand alone command. */
public static void main(String[] argv) throws Exception {
System.exit(ToolRunner.run(null, new BaileyBorweinPlouffe(), argv));
}
/////////////////////////////////////////////////////////////////////
// static fields and methods for Bailey-Borwein-Plouffe algorithm. //
/////////////////////////////////////////////////////////////////////
/** Limitation of the program.
* The program may return incorrect results if the limit is exceeded.
* The default value is 10^8.
* The program probably can handle some higher values such as 2^28.
*/
private static final long IMPLEMENTATION_LIMIT = 100000000;
private static final long ACCURACY_BIT = 32;
private static final long BBP_HEX_DIGITS = 4;
private static final long BBP_MULTIPLIER = 1 << (4 * BBP_HEX_DIGITS);
/**
* Compute the exact (d+1)th to (d+{@link #BBP_HEX_DIGITS})th
* hex digits of pi.
*/
static long hexDigits(final long d) {
if (d < 0) {
throw new IllegalArgumentException("d = " + d + " < 0");
} else if (d > IMPLEMENTATION_LIMIT) {
throw new IllegalArgumentException("d = " + d
+ " > IMPLEMENTATION_LIMIT = " + IMPLEMENTATION_LIMIT);
}
final double s1 = sum(1, d);
final double s4 = sum(4, d);
final double s5 = sum(5, d);
final double s6 = sum(6, d);
double pi = s1 + s1;
if (pi >= 1)
pi--;
pi *= 2;
if (pi >= 1)
pi--;
pi -= s4;
if (pi < 0)
pi++;
pi -= s4;
if (pi < 0)
pi++;
pi -= s5;
if (pi < 0)
pi++;
pi -= s6;
if (pi < 0)
pi++;
return (long) (pi * BBP_MULTIPLIER);
}
/**
* Approximate the fraction part of
* $16^d \sum_{k=0}^\infty \frac{16^{d-k}}{8k+j}$
* for d > 0 and j = 1, 4, 5, 6.
*/
private static double sum(final long j, final long d) {
long k = j == 1 ? 1 : 0;
double s = 0;
if (k <= d) {
s = 1.0 / ((d << 3) | j);
for (; k < d; k++) {
final long n = (k << 3) | j;
s += mod((d - k) << 2, n) * 1.0 / n;
if (s >= 1)
s--;
}
k++;
}
if (k >= 1L << (ACCURACY_BIT - 7))
return s;
for (;; k++) {
final long n = (k << 3) | j;
final long shift = (k - d) << 2;
if (ACCURACY_BIT <= shift || 1L << (ACCURACY_BIT - shift) < n) {
return s;
}
s += 1.0 / (n << shift);
if (s >= 1)
s--;
}
}
/** Compute $2^e \mod n$ for e > 0, n > 2 */
static long mod(final long e, final long n) {
long mask = (e & 0xFFFFFFFF00000000L) == 0 ? 0x00000000FFFFFFFFL
: 0xFFFFFFFF00000000L;
mask &= (e & 0xFFFF0000FFFF0000L & mask) == 0 ? 0x0000FFFF0000FFFFL
: 0xFFFF0000FFFF0000L;
mask &= (e & 0xFF00FF00FF00FF00L & mask) == 0 ? 0x00FF00FF00FF00FFL
: 0xFF00FF00FF00FF00L;
mask &= (e & 0xF0F0F0F0F0F0F0F0L & mask) == 0 ? 0x0F0F0F0F0F0F0F0FL
: 0xF0F0F0F0F0F0F0F0L;
mask &= (e & 0xCCCCCCCCCCCCCCCCL & mask) == 0 ? 0x3333333333333333L
: 0xCCCCCCCCCCCCCCCCL;
mask &= (e & 0xAAAAAAAAAAAAAAAAL & mask) == 0 ? 0x5555555555555555L
: 0xAAAAAAAAAAAAAAAAL;
long r = 2;
for (mask >>= 1; mask > 0; mask >>= 1) {
r *= r;
r %= n;
if ((e & mask) != 0) {
r += r;
if (r >= n)
r -= n;
}
}
return r;
}
/** Represent a number x in hex for 1 > x >= 0 */
private static
|
BbpInputFormat
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/SinksTest.java
|
{
"start": 8616,
"end": 8931
}
|
class ____ {
final Supplier<Sinks.Many<Integer>> supplier = () -> Sinks.many().multicast().directAllOrNothing();
@TestFactory
Stream<DynamicContainer> checkSemantics() {
return Stream.of(
expectMulticast(supplier, 0),
expectReplay(supplier, NONE)
);
}
}
@Nested
|
MulticastDirectAllOrNothing
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/logging/AbstractLoggingSystem.java
|
{
"start": 7860,
"end": 8617
}
|
class ____<T> {
private final Map<LogLevel, T> systemToNative;
private final Map<T, LogLevel> nativeToSystem;
public LogLevels() {
this.systemToNative = new EnumMap<>(LogLevel.class);
this.nativeToSystem = new HashMap<>();
}
public void map(LogLevel system, T nativeLevel) {
this.systemToNative.putIfAbsent(system, nativeLevel);
this.nativeToSystem.putIfAbsent(nativeLevel, system);
}
public @Nullable LogLevel convertNativeToSystem(T level) {
return this.nativeToSystem.get(level);
}
public @Nullable T convertSystemToNative(@Nullable LogLevel level) {
return this.systemToNative.get(level);
}
public Set<LogLevel> getSupported() {
return new LinkedHashSet<>(this.nativeToSystem.values());
}
}
}
|
LogLevels
|
java
|
elastic__elasticsearch
|
x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java
|
{
"start": 953,
"end": 2476
}
|
class ____ extends Plugin implements SearchPlugin {
public static final LicensedFeature.Momentary RANK_RRF_FEATURE = LicensedFeature.momentary(
null,
"rank-rrf",
License.OperationMode.ENTERPRISE
);
public static final LicensedFeature.Momentary LINEAR_RETRIEVER_FEATURE = LicensedFeature.momentary(
null,
"linear-retriever",
License.OperationMode.ENTERPRISE
);
public static final String NAME = "rrf";
@Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
return List.of(
new NamedWriteableRegistry.Entry(RankBuilder.class, NAME, RRFRankBuilder::new),
new NamedWriteableRegistry.Entry(RankShardResult.class, NAME, RRFRankShardResult::new),
new NamedWriteableRegistry.Entry(RankDoc.class, RRFRankDoc.NAME, RRFRankDoc::new),
new NamedWriteableRegistry.Entry(RankDoc.class, LinearRankDoc.NAME, LinearRankDoc::new)
);
}
@Override
public List<NamedXContentRegistry.Entry> getNamedXContent() {
return List.of(new NamedXContentRegistry.Entry(RankBuilder.class, new ParseField(NAME), RRFRankBuilder::fromXContent));
}
@Override
public List<RetrieverSpec<?>> getRetrievers() {
return List.of(
new RetrieverSpec<>(new ParseField(NAME), RRFRetrieverBuilder::fromXContent),
new RetrieverSpec<>(new ParseField(LinearRetrieverBuilder.NAME), LinearRetrieverBuilder::fromXContent)
);
}
}
|
RRFRankPlugin
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/plugins/DoNotMockEnforcer.java
|
{
"start": 3322,
"end": 3370
}
|
class ____.
*/
@NotExtensible
|
hierarchy
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/model/Encoding.java
|
{
"start": 1049,
"end": 3571
}
|
enum ____ {
FORM("form"),
SPACE_DELIMITED("spaceDelimited"),
PIPE_DELIMITED("pipeDelimited"),
DEEP_OBJECT("deepObject");
private final String value;
Style(String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
}
private String contentType;
private Map<String, Parameter> headers;
private Style style;
private Boolean explode;
private Boolean allowReserved;
public String getContentType() {
return contentType;
}
public Encoding setContentType(String contentType) {
this.contentType = contentType;
return this;
}
public Map<String, Parameter> getHeaders() {
return headers;
}
public Parameter getHeader(String name) {
return headers == null ? null : headers.get(name);
}
public Encoding setHeaders(Map<String, Parameter> headers) {
this.headers = headers;
return this;
}
public Encoding addHeader(String name, Parameter header) {
if (headers == null) {
headers = new LinkedHashMap<>();
}
headers.put(name, header);
return this;
}
public Encoding removeHeader(String name) {
if (headers != null) {
headers.remove(name);
}
return this;
}
public Style getStyle() {
return style;
}
public Encoding setStyle(Style style) {
this.style = style;
return this;
}
public Boolean getExplode() {
return explode;
}
public Encoding setExplode(Boolean explode) {
this.explode = explode;
return this;
}
public Boolean getAllowReserved() {
return allowReserved;
}
public Encoding setAllowReserved(Boolean allowReserved) {
this.allowReserved = allowReserved;
return this;
}
@Override
public Encoding clone() {
Encoding clone = super.clone();
clone.headers = clone(headers);
return clone;
}
@Override
public Map<String, Object> writeTo(Map<String, Object> encoding, Context context) {
write(encoding, "contentType", contentType);
write(encoding, "headers", headers, context);
write(encoding, "style", style);
write(encoding, "explode", explode);
write(encoding, "allowReserved", allowReserved);
writeExtensions(encoding);
return encoding;
}
}
|
Style
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java
|
{
"start": 4701,
"end": 19965
}
|
class ____ {
private static final DotName WITH_SPAN = DotName.createSimple(WithSpan.class.getName());
private static final DotName ADD_SPAN_ATTRIBUTES = DotName.createSimple(AddingSpanAttributes.class.getName());
private static final Predicate<AnnotationInstance> isAddSpanAttribute = new Predicate<>() {
@Override
public boolean test(AnnotationInstance annotationInstance) {
return annotationInstance.name().equals(ADD_SPAN_ATTRIBUTES);
}
};
private static final DotName WITH_SPAN_INTERCEPTOR = DotName.createSimple(WithSpanInterceptor.class.getName());
private static final DotName ADD_SPAN_ATTRIBUTES_INTERCEPTOR = DotName
.createSimple(AddingSpanAttributesInterceptor.class.getName());
@BuildStep(onlyIfNot = MetricsEnabled.class)
void registerForReflection(BuildProducer<ReflectiveMethodBuildItem> reflectiveItem) {
if (isClassPresentAtRuntime(
"io.opentelemetry.exporter.logging.LoggingMetricExporter")) {
reflectiveItem.produce(new ReflectiveMethodBuildItem(
"Used by OpenTelemetry Export Logging",
false,
"io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil",
"addMeterConfiguratorCondition"));
}
}
@BuildStep
AdditionalBeanBuildItem ensureProducerIsRetained() {
return AdditionalBeanBuildItem.builder()
.setUnremovable()
.addBeanClasses(
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.SimpleLogRecordProcessorCustomizer.class,
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.ResourceCustomizer.class,
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.SamplerCustomizer.class,
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.TracerProviderCustomizer.class,
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.MetricProviderCustomizer.class,
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.TextMapPropagatorCustomizers.class)
.build();
}
// Signal independent resource attributes
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
SyntheticBeanBuildItem setupDelayedAttribute(OpenTelemetryRecorder recorder, ApplicationInfoBuildItem appInfo) {
return SyntheticBeanBuildItem.configure(DelayedAttributes.class).types(Attributes.class)
.supplier(recorder.delayedAttributes(Version.getVersion(),
appInfo.getName(), appInfo.getVersion()))
.scope(Singleton.class)
.setRuntimeInit()
.done();
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void openTelemetryBean(OpenTelemetryRecorder recorder,
OTelBuildConfig oTelBuildConfig,
BuildProducer<SyntheticBeanBuildItem> syntheticProducer,
BuildProducer<OpenTelemetrySdkBuildItem> openTelemetrySdkBuildItemBuildProducer) {
syntheticProducer.produce(SyntheticBeanBuildItem.configure(OpenTelemetry.class)
.defaultBean()
.setRuntimeInit()
.unremovable()
.scope(Singleton.class)
.addInjectionPoint(
ParameterizedType.create(
DotName.createSimple(Instance.class),
new Type[] { ClassType.create(
DotName.createSimple(
AutoConfiguredOpenTelemetrySdkBuilderCustomizer.class.getName())) },
null))
.createWith(recorder.opentelemetryBean())
.destroyer(OpenTelemetryDestroyer.class)
.done());
// same as `TracerEnabled`
boolean tracingEnabled = oTelBuildConfig.traces().enabled()
.map(it -> it && oTelBuildConfig.enabled())
.orElseGet(oTelBuildConfig::enabled);
// same as `MetricProcessor.MetricEnabled`
boolean metricsEnabled = oTelBuildConfig.metrics().enabled()
.map(it -> it && oTelBuildConfig.enabled())
.orElseGet(oTelBuildConfig::enabled);
// same as `LogHandlerProcessor.LogsEnabled`
boolean loggingEnabled = oTelBuildConfig.logs().enabled()
.map(it -> it && oTelBuildConfig.enabled())
.orElseGet(oTelBuildConfig::enabled);
openTelemetrySdkBuildItemBuildProducer.produce(new OpenTelemetrySdkBuildItem(
tracingEnabled, metricsEnabled, loggingEnabled, recorder.isOtelSdkEnabled()));
}
@BuildStep
void handleServices(OTelBuildConfig config,
BuildProducer<ServiceProviderBuildItem> services,
BuildProducer<RemovedResourceBuildItem> removedResources,
BuildProducer<RuntimeInitializedClassBuildItem> runtimeReinitialized) throws IOException {
final List<String> spanExporterProviders = ServiceUtil.classNamesNamedIn(
Thread.currentThread().getContextClassLoader(),
SPI_ROOT + ConfigurableSpanExporterProvider.class.getName())
.stream()
.filter(p -> !OtlpSpanExporterProvider.class.getName().equals(p))
.collect(toList()); // filter out OtlpSpanExporterProvider since it depends on OkHttp
if (!spanExporterProviders.isEmpty()) {
services.produce(
new ServiceProviderBuildItem(ConfigurableSpanExporterProvider.class.getName(), spanExporterProviders));
}
// remove the service file that contains OtlpSpanExporterProvider
if (config.traces().exporter().stream().noneMatch(ExporterType.Constants.OTLP_VALUE::equals)) {
removedResources.produce(new RemovedResourceBuildItem(
ArtifactKey.fromString("io.opentelemetry:opentelemetry-exporter-otlp"),
Set.of("META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider")));
}
final List<String> metricExporterProviders = ServiceUtil.classNamesNamedIn(
Thread.currentThread().getContextClassLoader(),
SPI_ROOT + ConfigurableMetricExporterProvider.class.getName())
.stream()
.filter(p -> !OtlpMetricExporterProvider.class.getName().equals(p))
.collect(toList()); // filter out OtlpMetricExporterProvider since it depends on OkHttp
if (!metricExporterProviders.isEmpty()) {
services.produce(
new ServiceProviderBuildItem(ConfigurableMetricExporterProvider.class.getName(), metricExporterProviders));
}
if (config.metrics().exporter().stream().noneMatch(ExporterType.Constants.OTLP_VALUE::equals)) {
removedResources.produce(new RemovedResourceBuildItem(
ArtifactKey.fromString("io.opentelemetry:opentelemetry-exporter-otlp"),
Set.of("META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider")));
}
final List<String> logRecordExporterProviders = ServiceUtil.classNamesNamedIn(
Thread.currentThread().getContextClassLoader(),
SPI_ROOT + ConfigurableLogRecordExporterProvider.class.getName())
.stream()
.filter(p -> !OtlpLogRecordExporterProvider.class.getName().equals(p))
.collect(toList()); // filter out OtlpLogRecordExporterProvider since it depends on OkHttp
if (!logRecordExporterProviders.isEmpty()) {
services.produce(
new ServiceProviderBuildItem(ConfigurableLogRecordExporterProvider.class.getName(),
logRecordExporterProviders));
}
if (config.logs().exporter().stream().noneMatch(ExporterType.Constants.OTLP_VALUE::equals)) {
removedResources.produce(new RemovedResourceBuildItem(
ArtifactKey.fromString("io.opentelemetry:opentelemetry-exporter-otlp"),
Set.of("META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider")));
}
runtimeReinitialized.produce(
new RuntimeInitializedClassBuildItem("io.opentelemetry.sdk.autoconfigure.TracerProviderConfiguration"));
runtimeReinitialized.produce(
new RuntimeInitializedClassBuildItem("io.opentelemetry.sdk.autoconfigure.MeterProviderConfiguration"));
runtimeReinitialized.produce(
new RuntimeInitializedClassBuildItem("io.opentelemetry.sdk.autoconfigure.LoggerProviderConfiguration"));
runtimeReinitialized.produce(
new RuntimeInitializedClassBuildItem("io.quarkus.opentelemetry.runtime.logs.OpenTelemetryLogHandler"));
services.produce(ServiceProviderBuildItem.allProvidersFromClassPath(
ConfigurableSamplerProvider.class.getName()));
// The following are added but not officially supported, yet.
services.produce(ServiceProviderBuildItem.allProvidersFromClassPath(
AutoConfigurationCustomizerProvider.class.getName()));
services.produce(ServiceProviderBuildItem.allProvidersFromClassPath(
ResourceProvider.class.getName()));
services.produce(ServiceProviderBuildItem.allProvidersFromClassPath(
ConfigurablePropagatorProvider.class.getName()));
}
@BuildStep
void registerOpenTelemetryContextStorage(
BuildProducer<NativeImageResourceBuildItem> resource,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass) {
resource.produce(new NativeImageResourceBuildItem(
"META-INF/services/io.opentelemetry.context.ContextStorageProvider"));
reflectiveClass.produce(ReflectiveClassBuildItem.builder(QuarkusContextStorage.class)
.reason(getClass().getName())
.methods().fields().build());
}
@BuildStep
void registerWithSpan(
BuildProducer<InterceptorBindingRegistrarBuildItem> interceptorBindingRegistrar,
BuildProducer<AdditionalBeanBuildItem> additionalBeans) {
interceptorBindingRegistrar.produce(new InterceptorBindingRegistrarBuildItem(
new InterceptorBindingRegistrar() {
@Override
public List<InterceptorBinding> getAdditionalBindings() {
return List.of(
InterceptorBinding.of(WithSpan.class, Set.of("value", "kind")),
InterceptorBinding.of(AddingSpanAttributes.class, Set.of("value")));
}
}));
additionalBeans.produce(new AdditionalBeanBuildItem(
WithSpanInterceptor.class,
AddingSpanAttributesInterceptor.class));
}
@BuildStep
void transformWithSpan(BuildProducer<AnnotationsTransformerBuildItem> annotationsTransformer) {
annotationsTransformer.produce(new AnnotationsTransformerBuildItem(transformationContext -> {
AnnotationTarget target = transformationContext.getTarget();
Transformation transform = transformationContext.transform();
if (target.kind().equals(AnnotationTarget.Kind.CLASS)) {
if (target.asClass().name().equals(WITH_SPAN_INTERCEPTOR)) {
transform.add(WITH_SPAN);
} else if (target.asClass().name().equals(ADD_SPAN_ATTRIBUTES_INTERCEPTOR)) {
transform.add(ADD_SPAN_ATTRIBUTES);
}
} else if (target.kind() == AnnotationTarget.Kind.METHOD) {
MethodInfo methodInfo = target.asMethod();
// WITH_SPAN_INTERCEPTOR and ADD_SPAN_ATTRIBUTES must not be applied at the same time and the first has priority.
if (methodInfo.hasAnnotation(WITH_SPAN) && methodInfo.hasAnnotation(ADD_SPAN_ATTRIBUTES)) {
transform.remove(isAddSpanAttribute);
}
}
transform.done();
}));
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void createOpenTelemetry(
OpenTelemetryRecorder recorder,
CoreVertxBuildItem vertx,
LaunchModeBuildItem launchMode) {
if (launchMode.getLaunchMode() == LaunchMode.DEVELOPMENT || launchMode.getLaunchMode() == LaunchMode.TEST) {
recorder.resetGlobalOpenTelemetryForDevMode();
}
recorder.eagerlyCreateContextStorage();
recorder.storeVertxOnContextStorage(vertx.getVertx());
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void setupVertx(InstrumentationRecorder recorder, BeanContainerBuildItem beanContainerBuildItem,
Capabilities capabilities) {
boolean sqlClientAvailable = capabilities.isPresent(Capability.REACTIVE_DB2_CLIENT)
|| capabilities.isPresent(Capability.REACTIVE_MSSQL_CLIENT)
|| capabilities.isPresent(Capability.REACTIVE_MYSQL_CLIENT)
|| capabilities.isPresent(Capability.REACTIVE_ORACLE_CLIENT)
|| capabilities.isPresent(Capability.REACTIVE_PG_CLIENT);
boolean redisClientAvailable = capabilities.isPresent(Capability.REDIS_CLIENT);
recorder.setupVertxTracer(beanContainerBuildItem.getValue(), sqlClientAvailable, redisClientAvailable);
}
@BuildStep
void validateDataSourcesWithEnabledTelemetry(List<JdbcDataSourceBuildItem> jdbcDataSources,
BuildProducer<ValidationErrorBuildItem> validationErrors) {
for (JdbcDataSourceBuildItem dataSource : jdbcDataSources) {
final String dataSourceName = dataSource.getName();
// verify that no datasource is using OpenTelemetryDriver as that is not supported anymore
if (dataSourceUsesOTelJdbcDriver(dataSourceName)) {
validationErrors.produce(
new ValidationErrorBuildItem(
new ConfigurationException(
String.format(
"Data source '%s' is using unsupported JDBC driver '%s', please activate JDBC instrumentation by setting the 'quarkus.datasource.jdbc.telemetry' configuration property to 'true' instead",
dataSourceName, OPEN_TELEMETRY_DRIVER))));
}
}
}
private static boolean dataSourceUsesOTelJdbcDriver(String dataSourceName) {
List<String> driverPropertyKeys = DataSourceUtil.dataSourcePropertyKeys(dataSourceName, "jdbc.driver");
for (String driverPropertyKey : driverPropertyKeys) {
ConfigValue explicitlyConfiguredDriverValue = ConfigProvider.getConfig().getConfigValue(driverPropertyKey);
if (explicitlyConfiguredDriverValue.getValue() != null) {
return explicitlyConfiguredDriverValue.getValue().equals(OPEN_TELEMETRY_DRIVER);
}
}
return false;
}
}
|
OpenTelemetryProcessor
|
java
|
quarkusio__quarkus
|
test-framework/junit5-internal/src/main/java/io/quarkus/test/ProdModeTestBuildStep.java
|
{
"start": 197,
"end": 502
}
|
class ____ implements BuildStep {
private final Map<String, Object> testContext;
public ProdModeTestBuildStep(Map<String, Object> testContext) {
this.testContext = testContext;
}
public Map<String, Object> getTestContext() {
return testContext;
}
}
|
ProdModeTestBuildStep
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/config/spi/ConfigurationService.java
|
{
"start": 3075,
"end": 3307
}
|
interface ____<T> {
/**
* Convert an untyped Object reference to the Converter's type.
*
* @param value The untyped value
*
* @return The converted (typed) value.
*/
@NonNull T convert(Object value);
}
}
|
Converter
|
java
|
grpc__grpc-java
|
stub/src/test/java/io/grpc/stub/ClientCallsTest.java
|
{
"start": 34392,
"end": 35353
}
|
class ____ implements ClientInterceptor {
boolean onCloseCalled;
Executor savedExecutor;
@Override
public <ReqT,RespT> ClientCall<ReqT, RespT> interceptCall(
MethodDescriptor<ReqT, RespT> method, CallOptions callOptions, Channel next) {
return new SimpleForwardingClientCall<ReqT, RespT>(next.newCall(method, callOptions)) {
@Override public void start(ClientCall.Listener<RespT> listener, Metadata headers) {
super.start(new SimpleForwardingClientCallListener<RespT>(listener) {
@Override public void onClose(Status status, Metadata trailers) {
onCloseCalled = true;
savedExecutor = callOptions.getExecutor();
super.onClose(status, trailers);
}
}, headers);
}
@Override public void halfClose() {
super.halfClose();
Thread.currentThread().interrupt();
}
};
}
}
}
|
InterruptInterceptor
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootTestContextHierarchyTests.java
|
{
"start": 2092,
"end": 2176
}
|
class ____ {
ChildConfiguration(MyBean myBean) {
}
}
static
|
ChildConfiguration
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/StatePartitionStreamProvider.java
|
{
"start": 1307,
"end": 2249
}
|
class ____ {
/** A ready-made stream that contains data for one state partition */
private final InputStream stream;
/** Holds potential exception that happened when actually trying to create the stream */
private final IOException creationException;
public StatePartitionStreamProvider(IOException creationException) {
this.creationException = Preconditions.checkNotNull(creationException);
this.stream = null;
}
public StatePartitionStreamProvider(InputStream stream) {
this.stream = new NonClosingInputStreamDecorator(Preconditions.checkNotNull(stream));
this.creationException = null;
}
/** Returns a stream with the data of one state partition. */
public InputStream getStream() throws IOException {
if (creationException != null) {
throw new IOException(creationException);
}
return stream;
}
}
|
StatePartitionStreamProvider
|
java
|
junit-team__junit5
|
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/descriptor/ClassBasedTestDescriptor.java
|
{
"start": 24430,
"end": 25283
}
|
class ____ {
private final List<DiscoveryIssue> discoveryIssues = new ArrayList<>();
final Class<?> testClass;
final Set<TestTag> tags;
final Lifecycle lifecycle;
@Nullable
ExecutionMode defaultChildExecutionMode;
final ExclusiveResourceCollector exclusiveResourceCollector;
ClassInfo(Class<?> testClass, JupiterConfiguration configuration) {
this.testClass = testClass;
this.tags = getTags(testClass, //
() -> "class '%s'".formatted(testClass.getName()), //
() -> ClassSource.from(testClass), //
discoveryIssues::add);
this.lifecycle = getTestInstanceLifecycle(testClass, configuration);
this.defaultChildExecutionMode = (this.lifecycle == Lifecycle.PER_CLASS ? ExecutionMode.SAME_THREAD : null);
this.exclusiveResourceCollector = ExclusiveResourceCollector.from(testClass);
}
}
private static
|
ClassInfo
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/web/BasicAnnotationConfigWacTests.java
|
{
"start": 1082,
"end": 1520
}
|
class ____ extends AbstractBasicWacTests {
@Autowired
ServletContextAwareBean servletContextAwareBean;
@Test
void fooEnigmaAutowired() {
assertThat(foo).isEqualTo("enigma");
}
@Test
void servletContextAwareBeanProcessed() {
assertThat(servletContextAwareBean).isNotNull();
assertThat(servletContextAwareBean.getServletContext()).isNotNull();
}
@Configuration(proxyBeanMethods = false)
static
|
BasicAnnotationConfigWacTests
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/util/JpaMetamodelUnitTests.java
|
{
"start": 1583,
"end": 3618
}
|
class ____ {
@Mock Metamodel metamodel;
@Mock EntityType<?> type;
@Test
void skipsEntityTypesWithoutJavaTypeForIdentifierLookup() {
doReturn(Collections.singleton(type)).when(metamodel).getEntities();
assertThat(JpaMetamodel.of(metamodel).isSingleIdAttribute(Object.class, "id", Object.class)).isFalse();
}
@Test // DATAJPA-1446
void cacheIsEffectiveUnlessCleared() {
JpaMetamodel model = JpaMetamodel.of(metamodel);
assertThat(model).isEqualTo(JpaMetamodel.of(metamodel));
JpaMetamodel.clear();
assertThat(model).isNotEqualTo(JpaMetamodel.of(metamodel));
}
@Test // #2421
void doesNotConsiderNonNativeEmbeddablesJpaManaged() {
JpaMetamodel model = JpaMetamodel.of(metamodel);
ManagedType<?> entity = getEntity(Wrapper.class);
ManagedType<?> embeddable = getEmbeddable(ExplicitEmbeddable.class);
ManagedType<?> inner = getEmbeddable(Inner.class);
doReturn(new HashSet<>(Arrays.asList(entity, embeddable, inner))).when(metamodel).getManagedTypes();
doReturn(new HashSet<>(Arrays.asList(embeddable, inner))).when(metamodel).getEmbeddables();
assertThat(model.isMappedType(Wrapper.class)).isTrue();
assertThat(model.isMappedType(ExplicitEmbeddable.class)).isTrue();
assertThat(model.isMappedType(Inner.class)).isFalse();
}
private EmbeddableType<?> getEmbeddable(Class<?> type) {
EmbeddableType<?> managedType = getManagedType(type, EmbeddableType.class);
doReturn(PersistenceType.EMBEDDABLE).when(managedType).getPersistenceType();
return managedType;
}
private EntityType<?> getEntity(Class<?> type) {
EntityType<?> managedType = getManagedType(type, EntityType.class);
doReturn(PersistenceType.ENTITY).when(managedType).getPersistenceType();
return managedType;
}
private <T extends ManagedType<?>> T getManagedType(Class<?> type, Class<T> baseType) {
T managedType = mock(baseType);
doReturn(type).when(managedType).getJavaType();
doReturn(managedType).when(metamodel).managedType(type);
return managedType;
}
@Entity
static
|
JpaMetamodelUnitTests
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/offline/StartOfflineTest.java
|
{
"start": 1046,
"end": 3086
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(MyEntity.class)
.addAsResource("application-start-offline.properties", "application.properties"))
.setLogRecordPredicate(record -> GlobalTemporaryTableStrategy.class.getName().equals(record.getLoggerName())
|| record.getLoggerName().contains("JdbcEnvironmentInitiator"))
.assertLogRecords(records -> {
assertThat(records) // JdbcSettings.ALLOW_METADATA_ON_BOOT
.extracting(LogRecord::getMessage)
.doesNotContain("HHH000342: Could not obtain connection to query JDBC database metadata");
assertThat(records) // Local TemporaryTable Strategy
.extracting(LogRecord::getMessage).doesNotContain("Unable obtain JDBC Connection");
});
@Inject
EntityManagerFactory entityManagerFactory;
@Test
public void applicationStarts() {
assertThat(entityManagerFactory).isNotNull();
}
@Test
public void testVersionCheckShouldBeDisabledWhenOffline() {
SessionFactoryImplementor sfi = (SessionFactoryImplementor) entityManagerFactory.unwrap(SessionFactory.class);
ServiceRegistryImplementor registry = sfi.getServiceRegistry();
QuarkusRuntimeInitDialectFactory service = (QuarkusRuntimeInitDialectFactory) registry.getService(DialectFactory.class);
assertThat(service.isVersionCheckEnabled()).isFalse();
}
@Test
public void testUnitSchemaManagementStrategyIsNone() {
Object strategy = entityManagerFactory.unwrap(SessionFactoryImplementor.class)
.getServiceRegistry()
.getService(ConfigurationServiceImpl.class)
.getSettings()
.get(AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION);
assertThat(strategy).isEqualTo("none");
}
}
|
StartOfflineTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/java8stream/defaultimplementation/Source.java
|
{
"start": 254,
"end": 496
}
|
class ____ {
private Stream<SourceFoo> fooStream;
public Stream<SourceFoo> getFooStream() {
return fooStream;
}
public void setFooStream(Stream<SourceFoo> fooStream) {
this.fooStream = fooStream;
}
}
|
Source
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java
|
{
"start": 975,
"end": 4681
}
|
class ____ extends AbstractXContentTestCase<InferenceFieldMetadata> {
public void testSerialization() throws IOException {
final InferenceFieldMetadata before = createTestItem();
final BytesStreamOutput out = new BytesStreamOutput();
before.writeTo(out);
final StreamInput in = out.bytes().streamInput();
final InferenceFieldMetadata after = new InferenceFieldMetadata(in);
assertThat(after, equalTo(before));
}
@Override
protected InferenceFieldMetadata createTestInstance() {
return createTestItem();
}
@Override
protected InferenceFieldMetadata doParseInstance(XContentParser parser) throws IOException {
if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
parser.nextToken();
}
assertEquals(XContentParser.Token.FIELD_NAME, parser.currentToken());
InferenceFieldMetadata inferenceMetadata = InferenceFieldMetadata.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
return inferenceMetadata;
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// do not add elements at the top-level as any element at this level is parsed as a new inference field,
// and do not add additional elements to chunking maps as they will fail parsing with extra data
return field -> field.equals("") || field.contains(CHUNKING_SETTINGS_FIELD);
}
private static InferenceFieldMetadata createTestItem() {
String name = randomAlphaOfLengthBetween(3, 10);
String inferenceId = randomIdentifier();
String searchInferenceId = randomIdentifier();
String[] inputFields = generateRandomStringArray(5, 10, false, false);
Map<String, Object> chunkingSettings = generateRandomChunkingSettings();
return new InferenceFieldMetadata(name, inferenceId, searchInferenceId, inputFields, chunkingSettings);
}
public static Map<String, Object> generateRandomChunkingSettings() {
if (randomBoolean()) {
return null; // Defaults to model chunking settings
}
return randomBoolean() ? generateRandomWordBoundaryChunkingSettings() : generateRandomSentenceBoundaryChunkingSettings();
}
private static Map<String, Object> generateRandomWordBoundaryChunkingSettings() {
return Map.of("strategy", "word_boundary", "max_chunk_size", randomIntBetween(20, 100), "overlap", randomIntBetween(1, 50));
}
private static Map<String, Object> generateRandomSentenceBoundaryChunkingSettings() {
return Map.of(
"strategy",
"sentence_boundary",
"max_chunk_size",
randomIntBetween(20, 100),
"sentence_overlap",
randomIntBetween(0, 1)
);
}
public void testNullCtorArgsThrowException() {
assertThrows(
NullPointerException.class,
() -> new InferenceFieldMetadata(null, "inferenceId", "searchInferenceId", new String[0], Map.of())
);
assertThrows(
NullPointerException.class,
() -> new InferenceFieldMetadata("name", null, "searchInferenceId", new String[0], Map.of())
);
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null, new String[0], Map.of()));
assertThrows(
NullPointerException.class,
() -> new InferenceFieldMetadata("name", "inferenceId", "searchInferenceId", null, Map.of())
);
}
}
|
InferenceFieldMetadataTests
|
java
|
elastic__elasticsearch
|
modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java
|
{
"start": 2305,
"end": 22517
}
|
class ____ extends AbstractParentChildTestCase {
public void testSimpleChildrenAgg() {
long count = categoryToControl.values().stream().mapToLong(control -> control.commentIds.size()).sum();
assertNoFailuresAndResponse(
prepareSearch("test").setQuery(matchQuery("randomized", true)).addAggregation(children("to_comment", "comment")),
response -> {
SingleBucketAggregation childrenAgg = response.getAggregations().get("to_comment");
assertThat("Response: " + response + "\n", childrenAgg.getDocCount(), equalTo(count));
}
);
}
public void testChildrenAggs() {
assertNoFailuresAndResponse(
prepareSearch("test").setQuery(matchQuery("randomized", true))
.addAggregation(
terms("category").field("category")
.size(10000)
.subAggregation(
children("to_comment", "comment").subAggregation(
terms("commenters").field("commenter").size(10000).subAggregation(topHits("top_comments"))
)
)
),
response -> {
Terms categoryTerms = response.getAggregations().get("category");
assertThat(categoryTerms.getBuckets().size(), equalTo(categoryToControl.size()));
for (Map.Entry<String, Control> entry1 : categoryToControl.entrySet()) {
Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry1.getKey());
assertThat(categoryBucket.getKeyAsString(), equalTo(entry1.getKey()));
assertThat(categoryBucket.getDocCount(), equalTo((long) entry1.getValue().articleIds.size()));
SingleBucketAggregation childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size()));
assertThat(
((InternalAggregation) childrenBucket).getProperty("_count"),
equalTo((long) entry1.getValue().commentIds.size())
);
Terms commentersTerms = childrenBucket.getAggregations().get("commenters");
assertThat(((InternalAggregation) childrenBucket).getProperty("commenters"), sameInstance(commentersTerms));
assertThat(commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size()));
for (Map.Entry<String, Set<String>> entry2 : entry1.getValue().commenterToCommentId.entrySet()) {
Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey());
assertThat(commentBucket.getKeyAsString(), equalTo(entry2.getKey()));
assertThat(commentBucket.getDocCount(), equalTo((long) entry2.getValue().size()));
TopHits topHits = commentBucket.getAggregations().get("top_comments");
for (SearchHit searchHit : topHits.getHits().getHits()) {
assertThat(entry2.getValue().contains(searchHit.getId()), is(true));
}
}
}
}
);
}
public void testParentWithMultipleBuckets() {
assertNoFailuresAndResponse(
prepareSearch("test").setQuery(matchQuery("randomized", false))
.addAggregation(
terms("category").field("category")
.size(10000)
.subAggregation(children("to_comment", "comment").subAggregation(topHits("top_comments").sort("id", SortOrder.ASC)))
),
response -> {
Terms categoryTerms = response.getAggregations().get("category");
assertThat(categoryTerms.getBuckets().size(), equalTo(3));
for (Terms.Bucket bucket : categoryTerms.getBuckets()) {
logger.info("bucket={}", bucket.getKey());
SingleBucketAggregation childrenBucket = bucket.getAggregations().get("to_comment");
TopHits topHits = childrenBucket.getAggregations().get("top_comments");
logger.info("total_hits={}", topHits.getHits().getTotalHits().value());
for (SearchHit searchHit : topHits.getHits()) {
logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId());
}
}
Terms.Bucket categoryBucket = categoryTerms.getBucketByKey("a");
assertThat(categoryBucket.getKeyAsString(), equalTo("a"));
assertThat(categoryBucket.getDocCount(), equalTo(3L));
SingleBucketAggregation childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
assertThat(childrenBucket.getDocCount(), equalTo(2L));
TopHits topHits = childrenBucket.getAggregations().get("top_comments");
assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L));
assertThat(topHits.getHits().getAt(0).getId(), equalTo("e"));
assertThat(topHits.getHits().getAt(1).getId(), equalTo("f"));
categoryBucket = categoryTerms.getBucketByKey("b");
assertThat(categoryBucket.getKeyAsString(), equalTo("b"));
assertThat(categoryBucket.getDocCount(), equalTo(2L));
childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
assertThat(childrenBucket.getDocCount(), equalTo(1L));
topHits = childrenBucket.getAggregations().get("top_comments");
assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L));
assertThat(topHits.getHits().getAt(0).getId(), equalTo("f"));
categoryBucket = categoryTerms.getBucketByKey("c");
assertThat(categoryBucket.getKeyAsString(), equalTo("c"));
assertThat(categoryBucket.getDocCount(), equalTo(2L));
childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
assertThat(childrenBucket.getDocCount(), equalTo(1L));
topHits = childrenBucket.getAggregations().get("top_comments");
assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L));
assertThat(topHits.getHits().getAt(0).getId(), equalTo("f"));
}
);
}
public void testWithDeletes() throws Exception {
String indexName = "xyz";
assertAcked(
prepareCreate(indexName).setMapping(
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "name", "keyword")
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest(indexName, "parent", "1", null));
requests.add(createIndexRequest(indexName, "child", "2", "1", "count", 1));
requests.add(createIndexRequest(indexName, "child", "3", "1", "count", 1));
requests.add(createIndexRequest(indexName, "child", "4", "1", "count", 1));
requests.add(createIndexRequest(indexName, "child", "5", "1", "count", 1));
indexRandom(true, requests);
for (int i = 0; i < 10; i++) {
assertNoFailuresAndResponse(
prepareSearch(indexName).addAggregation(children("children", "child").subAggregation(sum("counts").field("count"))),
response -> {
SingleBucketAggregation children = response.getAggregations().get("children");
assertThat(children.getDocCount(), equalTo(4L));
Sum count = children.getAggregations().get("counts");
assertThat(count.value(), equalTo(4.));
}
);
String idToUpdate = Integer.toString(2 + randomInt(3));
/*
* The whole point of this test is to test these things with deleted
* docs in the index so we turn off detect_noop to make sure that
* the updates cause that.
*/
UpdateResponse updateResponse;
updateResponse = client().prepareUpdate(indexName, idToUpdate)
.setRouting("1")
.setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1)
.setDetectNoop(false)
.get();
assertThat(updateResponse.getVersion(), greaterThan(1L));
refresh();
}
}
public void testNonExistingChildType() throws Exception {
assertNoFailuresAndResponse(prepareSearch("test").addAggregation(children("non-existing", "xyz")), response -> {
SingleBucketAggregation children = response.getAggregations().get("non-existing");
assertThat(children.getName(), equalTo("non-existing"));
assertThat(children.getDocCount(), equalTo(0L));
});
}
public void testPostCollection() throws Exception {
String indexName = "prodcatalog";
String masterType = "masterprod";
String childType = "variantsku";
assertAcked(
prepareCreate(indexName).setSettings(indexSettings(1, 0))
.setMapping(
addFieldMappings(
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, masterType, childType),
"brand",
"text",
"name",
"keyword",
"material",
"text",
"color",
"keyword",
"size",
"keyword"
)
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest(indexName, masterType, "1", null, "brand", "Levis", "name", "Style 501", "material", "Denim"));
requests.add(createIndexRequest(indexName, childType, "3", "1", "color", "blue", "size", "32"));
requests.add(createIndexRequest(indexName, childType, "4", "1", "color", "blue", "size", "34"));
requests.add(createIndexRequest(indexName, childType, "5", "1", "color", "blue", "size", "36"));
requests.add(createIndexRequest(indexName, childType, "6", "1", "color", "black", "size", "38"));
requests.add(createIndexRequest(indexName, childType, "7", "1", "color", "black", "size", "40"));
requests.add(createIndexRequest(indexName, childType, "8", "1", "color", "gray", "size", "36"));
requests.add(
createIndexRequest(indexName, masterType, "2", null, "brand", "Wrangler", "name", "Regular Cut", "material", "Leather")
);
requests.add(createIndexRequest(indexName, childType, "9", "2", "color", "blue", "size", "32"));
requests.add(createIndexRequest(indexName, childType, "10", "2", "color", "blue", "size", "34"));
requests.add(createIndexRequest(indexName, childType, "12", "2", "color", "black", "size", "36"));
requests.add(createIndexRequest(indexName, childType, "13", "2", "color", "black", "size", "38"));
requests.add(createIndexRequest(indexName, childType, "14", "2", "color", "black", "size", "40"));
requests.add(createIndexRequest(indexName, childType, "15", "2", "color", "orange", "size", "36"));
requests.add(createIndexRequest(indexName, childType, "16", "2", "color", "green", "size", "44"));
indexRandom(true, requests);
assertNoFailuresAndResponse(
prepareSearch(indexName).setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None))
.addAggregation(
children("my-refinements", childType).subAggregation(terms("my-colors").field("color"))
.subAggregation(terms("my-sizes").field("size"))
),
response -> {
assertHitCount(response, 1L);
SingleBucketAggregation childrenAgg = response.getAggregations().get("my-refinements");
assertThat(childrenAgg.getDocCount(), equalTo(7L));
Terms termsAgg = childrenAgg.getAggregations().get("my-colors");
assertThat(termsAgg.getBuckets().size(), equalTo(4));
assertThat(termsAgg.getBucketByKey("black").getDocCount(), equalTo(3L));
assertThat(termsAgg.getBucketByKey("blue").getDocCount(), equalTo(2L));
assertThat(termsAgg.getBucketByKey("green").getDocCount(), equalTo(1L));
assertThat(termsAgg.getBucketByKey("orange").getDocCount(), equalTo(1L));
termsAgg = childrenAgg.getAggregations().get("my-sizes");
assertThat(termsAgg.getBuckets().size(), equalTo(6));
assertThat(termsAgg.getBucketByKey("36").getDocCount(), equalTo(2L));
assertThat(termsAgg.getBucketByKey("32").getDocCount(), equalTo(1L));
assertThat(termsAgg.getBucketByKey("34").getDocCount(), equalTo(1L));
assertThat(termsAgg.getBucketByKey("38").getDocCount(), equalTo(1L));
assertThat(termsAgg.getBucketByKey("40").getDocCount(), equalTo(1L));
assertThat(termsAgg.getBucketByKey("44").getDocCount(), equalTo(1L));
}
);
}
public void testHierarchicalChildrenAggs() {
String indexName = "geo";
String grandParentType = "continent";
String parentType = "country";
String childType = "city";
assertAcked(
prepareCreate(indexName).setMapping(
addFieldMappings(
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, grandParentType, parentType, parentType, childType),
"name",
"keyword"
)
)
);
createIndexRequest(indexName, grandParentType, "1", null, "name", "europe").get();
createIndexRequest(indexName, parentType, "2", "1", "name", "belgium").get();
createIndexRequest(indexName, childType, "3", "2", "name", "brussels").setRouting("1").get();
refresh();
assertNoFailuresAndResponse(
prepareSearch(indexName).setQuery(matchQuery("name", "europe"))
.addAggregation(
children(parentType, parentType).subAggregation(
children(childType, childType).subAggregation(terms("name").field("name"))
)
),
response -> {
assertHitCount(response, 1L);
SingleBucketAggregation children = response.getAggregations().get(parentType);
assertThat(children.getName(), equalTo(parentType));
assertThat(children.getDocCount(), equalTo(1L));
children = children.getAggregations().get(childType);
assertThat(children.getName(), equalTo(childType));
assertThat(children.getDocCount(), equalTo(1L));
Terms terms = children.getAggregations().get("name");
assertThat(terms.getBuckets().size(), equalTo(1));
assertThat(terms.getBuckets().get(0).getKey().toString(), equalTo("brussels"));
assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1L));
}
);
}
public void testPostCollectAllLeafReaders() throws Exception {
// The 'towns' and 'parent_names' aggs operate on parent docs and if child docs are in different segments we need
// to ensure those segments which child docs are also evaluated to in the post collect phase.
// Before we only evaluated segments that yielded matches in 'towns' and 'parent_names' aggs, which caused
// us to miss to evaluate child docs in segments we didn't have parent matches for.
assertAcked(
prepareCreate("index").setMapping(
addFieldMappings(
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parentType", "childType"),
"name",
"keyword",
"town",
"keyword",
"age",
"integer"
)
)
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("index", "parentType", "1", null, "name", "Bob", "town", "Memphis"));
requests.add(createIndexRequest("index", "parentType", "2", null, "name", "Alice", "town", "Chicago"));
requests.add(createIndexRequest("index", "parentType", "3", null, "name", "Bill", "town", "Chicago"));
requests.add(createIndexRequest("index", "childType", "4", "1", "name", "Jill", "age", 5));
requests.add(createIndexRequest("index", "childType", "5", "1", "name", "Joey", "age", 3));
requests.add(createIndexRequest("index", "childType", "6", "2", "name", "John", "age", 2));
requests.add(createIndexRequest("index", "childType", "7", "3", "name", "Betty", "age", 6));
requests.add(createIndexRequest("index", "childType", "8", "3", "name", "Dan", "age", 1));
indexRandom(true, requests);
assertNoFailuresAndResponse(
prepareSearch("index").setSize(0)
.addAggregation(
AggregationBuilders.terms("towns")
.field("town")
.subAggregation(
AggregationBuilders.terms("parent_names").field("name").subAggregation(children("child_docs", "childType"))
)
),
response -> {
Terms towns = response.getAggregations().get("towns");
assertThat(towns.getBuckets().size(), equalTo(2));
assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago"));
assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L));
Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names");
assertThat(parents.getBuckets().size(), equalTo(2));
assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice"));
assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L));
SingleBucketAggregation children = parents.getBuckets().get(0).getAggregations().get("child_docs");
assertThat(children.getDocCount(), equalTo(1L));
assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill"));
assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L));
children = parents.getBuckets().get(1).getAggregations().get("child_docs");
assertThat(children.getDocCount(), equalTo(2L));
assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis"));
assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L));
parents = towns.getBuckets().get(1).getAggregations().get("parent_names");
assertThat(parents.getBuckets().size(), equalTo(1));
assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob"));
assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L));
children = parents.getBuckets().get(0).getAggregations().get("child_docs");
assertThat(children.getDocCount(), equalTo(2L));
}
);
}
}
|
ChildrenIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/fleet/src/internalClusterTest/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsActionIT.java
|
{
"start": 1707,
"end": 16458
}
|
class ____ extends ESIntegTestCase {
public static final TimeValue TEN_SECONDS = TimeValue.timeValueSeconds(10);
public static final long[] EMPTY_ARRAY = new long[0];
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Stream.of(Fleet.class, LocalStateCompositeXPackPlugin.class, IndexLifecycle.class).collect(Collectors.toList());
}
public void testGetGlobalCheckpoints() throws Exception {
int shards = between(1, 5);
String indexName = "test_index";
indicesAdmin().prepareCreate(indexName)
.setSettings(
indexSettings(shards, 1)
// ESIntegTestCase randomizes durability settings. The global checkpoint only advances after a fsync, hence we
// must run with REQUEST durability
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)
)
.get();
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
false,
false,
EMPTY_ARRAY,
randomTimeValue()
);
final GetGlobalCheckpointsAction.Response response = client().execute(GetGlobalCheckpointsAction.INSTANCE, request).get();
long[] expected = new long[shards];
Arrays.fill(expected, -1);
assertArrayEquals(expected, response.globalCheckpoints());
final int totalDocuments = shards * 3;
for (int i = 0; i < totalDocuments; ++i) {
prepareIndex(indexName).setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get();
}
final GetGlobalCheckpointsAction.Request request2 = new GetGlobalCheckpointsAction.Request(
indexName,
false,
false,
EMPTY_ARRAY,
randomTimeValue()
);
final GetGlobalCheckpointsAction.Response response2 = client().execute(GetGlobalCheckpointsAction.INSTANCE, request2).get();
assertEquals(totalDocuments, Arrays.stream(response2.globalCheckpoints()).map(s -> s + 1).sum());
indicesAdmin().prepareRefresh(indexName).get();
final IndicesStatsResponse statsResponse = indicesAdmin().prepareStats(indexName).get();
long[] fromStats = Arrays.stream(statsResponse.getShards())
.filter(i -> i.getShardRouting().primary())
.sorted(Comparator.comparingInt(value -> value.getShardRouting().id()))
.mapToLong(s -> s.getSeqNoStats().getGlobalCheckpoint())
.toArray();
assertArrayEquals(fromStats, response2.globalCheckpoints());
}
public void testPollGlobalCheckpointAdvancement() throws Exception {
String indexName = "test_index";
indicesAdmin().prepareCreate(indexName)
.setSettings(indexSettings(1, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST))
.get();
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
false,
false,
EMPTY_ARRAY,
TEN_SECONDS
);
final GetGlobalCheckpointsAction.Response response = client().execute(GetGlobalCheckpointsAction.INSTANCE, request).get();
assertEquals(-1, response.globalCheckpoints()[0]);
final int totalDocuments = between(25, 50);
new Thread(() -> {
for (int i = 0; i < totalDocuments; ++i) {
prepareIndex(indexName).setId(Integer.toString(i)).setSource("{}", XContentType.JSON).execute();
}
}).start();
final GetGlobalCheckpointsAction.Request request2 = new GetGlobalCheckpointsAction.Request(
indexName,
true,
false,
new long[] { totalDocuments - 2 },
TimeValue.timeValueSeconds(30)
);
long start = System.nanoTime();
final GetGlobalCheckpointsAction.Response response2 = client().execute(GetGlobalCheckpointsAction.INSTANCE, request2).get();
long elapsed = TimeValue.timeValueNanos(System.nanoTime() - start).seconds();
assertThat(elapsed, lessThan(30L));
assertFalse(response.timedOut());
assertEquals(totalDocuments - 1, response2.globalCheckpoints()[0]);
}
public void testPollGlobalCheckpointAdvancementTimeout() {
String indexName = "test_index";
indicesAdmin().prepareCreate(indexName)
.setSettings(indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST))
.get();
final int totalDocuments = 30;
for (int i = 0; i < totalDocuments; ++i) {
prepareIndex(indexName).setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get();
}
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
true,
false,
new long[] { 29 },
TimeValue.timeValueMillis(between(1, 100))
);
long start = System.nanoTime();
GetGlobalCheckpointsAction.Response response = client().execute(GetGlobalCheckpointsAction.INSTANCE, request).actionGet();
long elapsed = TimeValue.timeValueNanos(System.nanoTime() - start).seconds();
assertThat(elapsed, lessThan(30L));
assertTrue(response.timedOut());
assertEquals(29L, response.globalCheckpoints()[0]);
}
public void testMustProvideCorrectNumberOfShards() {
String indexName = "test_index";
indicesAdmin().prepareCreate(indexName)
.setSettings(indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST))
.get();
final long[] incorrectArrayLength = new long[2];
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
true,
false,
incorrectArrayLength,
TEN_SECONDS
);
ElasticsearchStatusException exception = expectThrows(
ElasticsearchStatusException.class,
client().execute(GetGlobalCheckpointsAction.INSTANCE, request)
);
assertThat(exception.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(
exception.getMessage(),
equalTo("number of checkpoints must equal number of shards. [shard count: 1, checkpoint count: 2]")
);
}
public void testWaitForAdvanceOnlySupportsOneShard() {
String indexName = "test_index";
indicesAdmin().prepareCreate(indexName)
.setSettings(indexSettings(3, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST))
.get();
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
true,
false,
new long[3],
TEN_SECONDS
);
ElasticsearchStatusException exception = expectThrows(
ElasticsearchStatusException.class,
client().execute(GetGlobalCheckpointsAction.INSTANCE, request)
);
assertThat(exception.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(exception.getMessage(), equalTo("wait_for_advance only supports indices with one shard. [shard count: 3]"));
}
public void testIndexDoesNotExistNoWait() {
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
"non-existent",
false,
false,
EMPTY_ARRAY,
TEN_SECONDS
);
long start = System.nanoTime();
expectThrows(IndexNotFoundException.class, client().execute(GetGlobalCheckpointsAction.INSTANCE, request));
long elapsed = TimeValue.timeValueNanos(System.nanoTime() - start).seconds();
assertThat(elapsed, lessThanOrEqualTo(TEN_SECONDS.seconds()));
}
public void testWaitOnIndexTimeout() {
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
"non-existent",
true,
true,
EMPTY_ARRAY,
TimeValue.timeValueMillis(between(1, 100))
);
expectThrows(IndexNotFoundException.class, client().execute(GetGlobalCheckpointsAction.INSTANCE, request));
}
public void testWaitOnIndexCreated() throws Exception {
String indexName = "not-yet-existing";
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
true,
true,
EMPTY_ARRAY,
TEN_SECONDS
);
long start = System.nanoTime();
ActionFuture<GetGlobalCheckpointsAction.Response> future = client().execute(GetGlobalCheckpointsAction.INSTANCE, request);
Thread.sleep(randomIntBetween(10, 100));
indicesAdmin().prepareCreate(indexName)
.setSettings(indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST))
.get();
prepareIndex(indexName).setId(Integer.toString(0)).setSource("{}", XContentType.JSON).get();
GetGlobalCheckpointsAction.Response response = future.actionGet();
long elapsed = TimeValue.timeValueNanos(System.nanoTime() - start).seconds();
assertThat(elapsed, lessThanOrEqualTo(TEN_SECONDS.seconds()));
assertThat(response.globalCheckpoints()[0], equalTo(0L));
assertFalse(response.timedOut());
}
public void testPrimaryShardsNotReadyNoWait() {
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
"not-assigned",
false,
false,
EMPTY_ARRAY,
TEN_SECONDS
);
indicesAdmin().prepareCreate("not-assigned")
.setWaitForActiveShards(ActiveShardCount.NONE)
.setSettings(
indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)
.put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "node", "none")
)
.get();
UnavailableShardsException exception = expectThrows(
UnavailableShardsException.class,
client().execute(GetGlobalCheckpointsAction.INSTANCE, request)
);
assertEquals("Primary shards were not active [shards=1, active=0]", exception.getMessage());
}
public void testWaitOnPrimaryShardsReadyTimeout() {
TimeValue timeout = TimeValue.timeValueMillis(between(1, 100));
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
"not-assigned",
true,
true,
EMPTY_ARRAY,
timeout
);
indicesAdmin().prepareCreate("not-assigned")
.setWaitForActiveShards(ActiveShardCount.NONE)
.setSettings(
indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)
.put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "node", "none")
)
.get();
UnavailableShardsException exception = expectThrows(
UnavailableShardsException.class,
client().execute(GetGlobalCheckpointsAction.INSTANCE, request)
);
assertEquals("Primary shards were not active within timeout [timeout=" + timeout + ", shards=1, active=0]", exception.getMessage());
}
public void testWaitOnPrimaryShardsReady() throws Exception {
String indexName = "not-assigned";
final GetGlobalCheckpointsAction.Request request = new GetGlobalCheckpointsAction.Request(
indexName,
true,
true,
EMPTY_ARRAY,
TEN_SECONDS
);
indicesAdmin().prepareCreate(indexName)
.setWaitForActiveShards(ActiveShardCount.NONE)
.setSettings(
indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)
.put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "node", "none")
)
.get();
long start = System.nanoTime();
ActionFuture<GetGlobalCheckpointsAction.Response> future = client().execute(GetGlobalCheckpointsAction.INSTANCE, request);
Thread.sleep(randomIntBetween(10, 100));
updateIndexSettings(Settings.builder().put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "node", ""), indexName);
prepareIndex(indexName).setId(Integer.toString(0)).setSource("{}", XContentType.JSON).get();
GetGlobalCheckpointsAction.Response response = future.actionGet();
long elapsed = TimeValue.timeValueNanos(System.nanoTime() - start).seconds();
assertThat(elapsed, lessThanOrEqualTo(TEN_SECONDS.seconds()));
assertThat(response.globalCheckpoints()[0], equalTo(0L));
assertFalse(response.timedOut());
}
public void testWaitOnPrimaryShardThrottled() throws Exception {
updateClusterSettings(Settings.builder().put(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), 0));
String indexName = "throttled";
indicesAdmin().prepareCreate(indexName)
.setWaitForActiveShards(ActiveShardCount.NONE)
.setSettings(indexSettings(1, 0).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST))
.get();
long start = System.nanoTime();
var future = client().execute(
GetGlobalCheckpointsAction.INSTANCE,
new GetGlobalCheckpointsAction.Request(indexName, true, true, EMPTY_ARRAY, TEN_SECONDS)
);
Thread.sleep(randomIntBetween(10, 100));
updateClusterSettings(Settings.builder().putNull(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey()));
prepareIndex(indexName).setId(Integer.toString(0)).setSource("{}", XContentType.JSON).get();
var response = future.actionGet();
long elapsed = TimeValue.timeValueNanos(System.nanoTime() - start).seconds();
assertThat(elapsed, lessThanOrEqualTo(TEN_SECONDS.seconds()));
assertThat(response.globalCheckpoints()[0], equalTo(0L));
assertFalse(response.timedOut());
}
}
|
GetGlobalCheckpointsActionIT
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/testcontainers/otlp/OpenTelemetryMetricsContainerConnectionDetailsFactory.java
|
{
"start": 2084,
"end": 2521
}
|
class ____
extends ContainerConnectionDetails<Container<?>> implements OtlpMetricsConnectionDetails {
private OpenTelemetryMetricsContainerConnectionDetails(ContainerConnectionSource<Container<?>> source) {
super(source);
}
@Override
public String getUrl() {
return "http://%s:%d/v1/metrics".formatted(getContainer().getHost(), getContainer().getMappedPort(4318));
}
}
}
|
OpenTelemetryMetricsContainerConnectionDetails
|
java
|
dropwizard__dropwizard
|
dropwizard-benchmarks/src/main/java/io/dropwizard/benchmarks/util/DurationBenchmark.java
|
{
"start": 582,
"end": 1174
}
|
class ____ {
/**
* Don't trust the IDE, it's advisedly non-final to avoid constant folding
*/
private String duration = "12h";
@Benchmark
public Duration parseDuration() {
return Duration.parse(duration);
}
public static void main(String[] args) throws Exception {
new Runner(new OptionsBuilder()
.include(DurationBenchmark.class.getSimpleName())
.forks(1)
.warmupIterations(5)
.measurementIterations(5)
.build())
.run();
}
}
|
DurationBenchmark
|
java
|
junit-team__junit5
|
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/extension/TimeoutInvocationFactory.java
|
{
"start": 840,
"end": 2248
}
|
class ____ {
private final Store store;
TimeoutInvocationFactory(Store store) {
this.store = Preconditions.notNull(store, "store must not be null");
}
<T> Invocation<T> create(ThreadMode threadMode, TimeoutInvocationParameters<T> timeoutInvocationParameters) {
Preconditions.notNull(threadMode, "thread mode must not be null");
Preconditions.condition(threadMode != ThreadMode.INFERRED, "thread mode must not be INFERRED");
Preconditions.notNull(timeoutInvocationParameters, "timeout invocation parameters must not be null");
if (threadMode == ThreadMode.SEPARATE_THREAD) {
return new SeparateThreadTimeoutInvocation<>(timeoutInvocationParameters.getInvocation(),
timeoutInvocationParameters.getTimeoutDuration(), timeoutInvocationParameters.getDescriptionSupplier(),
timeoutInvocationParameters.getPreInterruptCallback());
}
return new SameThreadTimeoutInvocation<>(timeoutInvocationParameters.getInvocation(),
timeoutInvocationParameters.getTimeoutDuration(), getThreadExecutorForSameThreadInvocation(),
timeoutInvocationParameters.getDescriptionSupplier(),
timeoutInvocationParameters.getPreInterruptCallback());
}
private ScheduledExecutorService getThreadExecutorForSameThreadInvocation() {
return store.computeIfAbsent(SingleThreadExecutorResource.class).get();
}
@SuppressWarnings({ "deprecation", "try" })
private abstract static
|
TimeoutInvocationFactory
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/v2/ttl/AbstractTtlState.java
|
{
"start": 1113,
"end": 1653
}
|
class ____ TTL logic wrappers of state objects. state V2 does not support
* FULL_STATE_SCAN_SNAPSHOT and INCREMENTAL_CLEANUP, only supports ROCKSDB_COMPACTION_FILTER.
* UpdateType#OnReadAndWrite is also not supported in state V2.
*
* @param <K> The type of key the state is associated to
* @param <N> The type of the namespace
* @param <SV> The type of values kept internally in state without TTL
* @param <TTLSV> The type of values kept internally in state with TTL
* @param <S> Type of originally wrapped state object
*/
abstract
|
for
|
java
|
apache__avro
|
lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverterFactory.java
|
{
"start": 8762,
"end": 9280
}
|
class ____ extends AvroDatumConverter<IntWritable, Integer> {
private final Schema mSchema;
/** Constructor. */
public IntWritableConverter() {
mSchema = Schema.create(Schema.Type.INT);
}
/** {@inheritDoc} */
@Override
public Integer convert(IntWritable input) {
return input.get();
}
/** {@inheritDoc} */
@Override
public Schema getWriterSchema() {
return mSchema;
}
}
/** Converts LongWritables into Longs. */
public static
|
IntWritableConverter
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/integration/S3StreamUploadOperationAsyncIT.java
|
{
"start": 1568,
"end": 3323
}
|
class ____ extends Aws2S3Base {
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendIn() throws Exception {
result.expectedMessageCount(1000);
for (int i = 0; i < 1000; i++) {
final CompletableFuture<Object> future = template.asyncSendBody("direct:stream1", "Andrea\n");
assertDoesNotThrow(() -> future.get(5, TimeUnit.SECONDS));
}
MockEndpoint.assertIsSatisfied(context, 10, TimeUnit.SECONDS);
Exchange ex = template.request("direct:listObjects", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.listObjects);
}
});
List<S3Object> resp = ex.getMessage().getBody(List.class);
assertEquals(40, resp.size());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint1
= String.format(
"aws2-s3://%s?autoCreateBucket=true&streamingUploadMode=true&keyName=fileTest.txt&batchMessageNumber=25&namingStrategy=random&streamingUploadTimeout=10000",
name.get());
from("direct:stream1").to(awsEndpoint1).to("mock:result");
String awsEndpoint = String.format("aws2-s3://%s?autoCreateBucket=true",
name.get());
from("direct:listObjects").to(awsEndpoint);
}
};
}
}
|
S3StreamUploadOperationAsyncIT
|
java
|
alibaba__nacos
|
console/src/test/java/com/alibaba/nacos/console/handler/impl/noop/naming/InstanceNoopHandlerTest.java
|
{
"start": 919,
"end": 1621
}
|
class ____ {
InstanceNoopHandler instanceNoopHandler;
@BeforeEach
void setUp() {
instanceNoopHandler = new InstanceNoopHandler();
}
@AfterEach
void tearDown() {
}
@Test
void listInstances() {
assertThrows(NacosApiException.class, () -> instanceNoopHandler.listInstances("", "", "", "", 0, 0),
"Current functionMode is `config`, naming module is disabled.");
}
@Test
void updateInstance() {
assertThrows(NacosApiException.class, () -> instanceNoopHandler.updateInstance(null, null),
"Current functionMode is `config`, naming module is disabled.");
}
}
|
InstanceNoopHandlerTest
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/BucketsTest.java
|
{
"start": 21661,
"end": 23830
}
|
enum ____ {
CREATED,
INACTIVE
}
private List<Tuple2<EventType, String>> events = new ArrayList<>();
@Override
public void bucketCreated(Bucket<String, String> bucket) {
events.add(new Tuple2<>(EventType.CREATED, bucket.getBucketId()));
}
@Override
public void bucketInactive(Bucket<String, String> bucket) {
events.add(new Tuple2<>(EventType.INACTIVE, bucket.getBucketId()));
}
public List<Tuple2<EventType, String>> getEvents() {
return events;
}
}
@Test
void testFileLifeCycleListener() throws Exception {
File outDir = TempDirUtils.newFolder(tempFolder);
Path path = new Path(outDir.toURI());
OnProcessingTimePolicy<String, String> rollOnProcessingTimeCountingPolicy =
new OnProcessingTimePolicy<>(2L);
TestFileLifeCycleListener fileLifeCycleListener = new TestFileLifeCycleListener();
Buckets<String, String> buckets =
createBuckets(
path,
rollOnProcessingTimeCountingPolicy,
null,
fileLifeCycleListener,
0,
OutputFileConfig.builder().build());
buckets.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 2L));
buckets.onElement("test2", new TestUtils.MockSinkContext(null, 1L, 3L));
// Will close the part file writer of the bucket "test1". Now bucket "test1" have only
// one pending file while bucket "test2" has an on-writing in-progress file.
buckets.onProcessingTime(4);
buckets.onElement("test1", new TestUtils.MockSinkContext(null, 1L, 5L));
buckets.onElement("test2", new TestUtils.MockSinkContext(null, 1L, 6L));
assertThat(fileLifeCycleListener.files).hasSize(2);
assertThat(fileLifeCycleListener.files.get("test1"))
.containsExactly("part-0-0", "part-0-1");
assertThat(fileLifeCycleListener.files.get("test2")).containsExactly("part-0-1");
}
private static
|
EventType
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/sps/FileCollector.java
|
{
"start": 1184,
"end": 1497
}
|
interface ____ {
/**
* This method can be used to scan and collects the files under that
* directory and adds to the given BlockStorageMovementNeeded.
*
* @param path
* - file path id
*/
void scanAndCollectFiles(long path)
throws IOException, InterruptedException;
}
|
FileCollector
|
java
|
playframework__playframework
|
web/play-java-forms/src/main/java/play/data/validation/ValidationError.java
|
{
"start": 322,
"end": 2477
}
|
class ____ {
private String key;
private List<String> messages;
private List<Object> arguments;
/**
* Constructs a new {@code ValidationError}.
*
* @param key the error key
* @param message the error message
*/
public ValidationError(String key, String message) {
this(key, message, ImmutableList.of());
}
/**
* Constructs a new {@code ValidationError}.
*
* @param key the error key
* @param message the error message
* @param arguments the error message arguments
*/
public ValidationError(String key, String message, List<Object> arguments) {
this.key = key;
this.arguments = arguments;
this.messages = ImmutableList.of(message);
}
/**
* Constructs a new {@code ValidationError}.
*
* @param key the error key
* @param messages the list of error messages
* @param arguments the error message arguments
*/
public ValidationError(String key, List<String> messages, List<Object> arguments) {
this.key = key;
this.messages = messages;
this.arguments = arguments;
}
/**
* Returns the error key.
*
* @return the error key of the message.
*/
public String key() {
return key;
}
/**
* Returns the error message.
*
* @return the last message in the list of messages.
*/
public String message() {
return messages.get(messages.size() - 1);
}
/**
* Returns the error messages.
*
* @return a list of messages.
*/
public List<String> messages() {
return messages;
}
/**
* Returns the error arguments.
*
* @return a list of error arguments.
*/
public List<Object> arguments() {
return arguments;
}
/**
* Returns the formatted error message (message + arguments) in the given Messages.
*
* @param messagesObj the play.i18n.Messages object containing the language.
* @return the results of messagesObj.at(messages, arguments).
*/
public String format(Messages messagesObj) {
return messagesObj.at(messages, arguments);
}
public String toString() {
return "ValidationError(" + key + "," + messages + "," + arguments + ")";
}
}
|
ValidationError
|
java
|
quarkusio__quarkus
|
extensions/panache/mongodb-panache/deployment/src/test/java/io/quarkus/mongodb/panache/bug10812/Bug10812BookNotAnnotatedEntity.java
|
{
"start": 163,
"end": 471
}
|
class ____ extends PanacheMongoEntity {
@BsonProperty("bookTitle")
private String title;
public String getTitle() {
return title;
}
public Bug10812BookNotAnnotatedEntity setTitle(String title) {
this.title = title;
return this;
}
}
|
Bug10812BookNotAnnotatedEntity
|
java
|
quarkusio__quarkus
|
extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/RestClientMethodEnhancer.java
|
{
"start": 1005,
"end": 3146
}
|
class ____ implements BiFunction<String, ClassVisitor, ClassVisitor> {
private static final String CACHE_KEY_PARAMETER_POSITIONS_DESCRIPTOR = "L"
+ CacheKeyParameterPositions.class.getName().replace('.', '/') + ";";
private final String methodName;
private final short[] cacheKeyParameterPositions;
public RestClientMethodEnhancer(String methodName, short[] cacheKeyParameterPositions) {
this.methodName = methodName;
this.cacheKeyParameterPositions = cacheKeyParameterPositions;
}
@Override
public ClassVisitor apply(String className, ClassVisitor classVisitor) {
return new ClassVisitor(Gizmo.ASM_API_VERSION, classVisitor) {
@Override
public MethodVisitor visitMethod(int access, String name, String descriptor, String signature,
String[] exceptions) {
MethodVisitor superVisitor = super.visitMethod(access, name, descriptor, signature, exceptions);
if (!name.equals(methodName)) {
// This is not the method we want to enhance, let's skip the bytecode transformation.
return superVisitor;
} else {
return new MethodVisitor(Gizmo.ASM_API_VERSION, superVisitor) {
@Override
public void visitEnd() {
/*
* If the method parameters at positions 0 and 2 are annotated with @CacheKey, the following code
* will add the `@CacheKeyParameterPositions(value={0, 2})` annotation to the method.
*/
AnnotationVisitor annotation = super.visitAnnotation(CACHE_KEY_PARAMETER_POSITIONS_DESCRIPTOR,
true);
annotation.visit("value", cacheKeyParameterPositions);
annotation.visitEnd();
super.visitEnd();
}
};
}
}
};
}
}
|
RestClientMethodEnhancer
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/internal/Classes.java
|
{
"start": 2330,
"end": 3013
}
|
class ____ {
private static final Classes INSTANCE = new Classes();
/**
* Returns the singleton instance of this class.
*
* @return the singleton instance of this class.
*/
public static Classes instance() {
return INSTANCE;
}
private final Failures failures = Failures.instance();
private final ComparisonStrategy comparisonStrategy = StandardComparisonStrategy.instance();
/**
* Verifies that the actual {@code Class} is assignable from all the {@code others} classes.
*
* @param info contains information about the assertion.
* @param actual the "actual" {@code Class}.
* @param others the others {@code Class} who this actual
|
Classes
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_913/Issue913SetterMapperForCollectionsTest.java
|
{
"start": 1037,
"end": 14264
}
|
class ____ {
@RegisterExtension
GeneratedSource generatedSource = new GeneratedSource().addComparisonToFixtureFor(
DomainDtoWithNvmsNullMapper.class,
DomainDtoWithNvmsDefaultMapper.class,
DomainDtoWithPresenceCheckMapper.class,
DomainDtoWithNcvsAlwaysMapper.class
);
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the
* conversion from string to long that return null in the entire mapper, so also for the forged
* mapper. Note the default NVMS is RETURN_NULL.
*
* The generated code should not use a local variable for setting {@code longs}, but it should use a local
* variable for setting {@code strings} as a direct assignment and the copy constructor is used, in case the
* assignment is {@code null} then {@code null} should be set for {@code string}
*/
@ProcessorTest
public void shouldReturnNullForNvmsReturnNullForCreate() {
Dto dto = new Dto();
Domain domain = DomainDtoWithNvmsNullMapper.INSTANCE.create( dto );
doControlAsserts( domain );
assertThat( domain.getStrings() ).isNull();
assertThat( domain.getLongs() ).isNull();
}
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the
* conversion from string to long that return null in the entire mapper, so also for the forged
* mapper. Note the default NVMS is RETURN_NULL.
*/
@ProcessorTest
public void shouldReturnNullForNvmsReturnNullForUpdate() {
Dto dto = new Dto();
Domain domain = new Domain();
domain.setLongs( new HashSet<>() );
domain.setStrings( new HashSet<>() );
DomainDtoWithNvmsNullMapper.INSTANCE.update( dto, domain );
doControlAsserts( domain );
assertThat( domain.getStrings() ).isNull();
assertThat( domain.getLongs() ).isNull();
}
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the
* conversion from string to long that return null in the entire mapper, so also for the forged
* mapper. Note the default NVMS is RETURN_NULL.
*
* target (stringsInitialized is Not Null) and source (stringInitialized is Null) target should
* be explicitely set to null
*/
@ProcessorTest
public void shouldReturnNullForNvmsReturnNullForUpdateWithNonNullTargetAndNullSource() {
Dto dto = new Dto();
dto.setStringsInitialized( null );
Domain domain = new Domain();
domain.setLongs( new HashSet<>() );
domain.setStrings( new HashSet<>() );
DomainDtoWithNvmsNullMapper.INSTANCE.update( dto, domain );
assertThat( domain.getStringsInitialized() ).isNull();
assertThat( domain.getLongsInitialized() ).isNull();
assertThat( domain.getStringsWithDefault() ).containsOnly( "3" );
assertThat( domain.getStrings() ).isNull();
assertThat( domain.getLongs() ).isNull();
}
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the
* conversion from string to long that return null in the entire mapper, so also for the forged
* mapper. Note the default NVMS is RETURN_NULL.
*/
@ProcessorTest
public void shouldReturnNullForNvmsReturnNullForUpdateWithReturn() {
Dto dto = new Dto();
Domain domain1 = new Domain();
domain1.setLongs( new HashSet<>() );
domain1.setStrings( new HashSet<>() );
Domain domain2 = DomainDtoWithNvmsNullMapper.INSTANCE.updateWithReturn( dto, domain1 );
doControlAsserts( domain1, domain2 );
assertThat( domain1.getStrings() ).isNull();
assertThat( domain1.getLongs() ).isNull();
assertThat( domain2.getStrings() ).isNull();
assertThat( domain2.getLongs() ).isNull();
}
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the
* conversion from string to long that return default in the entire mapper, so also for the forged
* mapper. Note the default NVMS is RETURN_NULL.
*
* However, for plain mappings (strings to strings) the result will also be an empty collection.
*/
@ProcessorTest
public void shouldReturnDefaultForNvmsReturnDefaultForCreate() {
Dto dto = new Dto();
Domain domain = DomainDtoWithNvmsDefaultMapper.INSTANCE.create( dto );
doControlAsserts( domain );
assertThat( domain.getStrings() ).isEmpty();
assertThat( domain.getLongs() ).isEmpty();
}
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the conversion from
* string to long that return default in the entire mapper, so also for the forged mapper. Note the default NVMS is
* RETURN_NULL.
*
* However, for plain mappings (strings to strings) the result will also be an empty collection.
*/
@ProcessorTest
public void shouldReturnDefaultForNvmsReturnDefaultForUpdate() {
Dto dto = new Dto();
Domain domain = new Domain();
Set<Long> longIn = new HashSet<>();
longIn.add( 10L );
domain.setLongs( longIn );
domain.setStrings( new HashSet<>() );
DomainDtoWithNvmsDefaultMapper.INSTANCE.update( dto, domain );
doControlAsserts( domain );
assertThat( domain.getStrings() ).isEmpty();
assertThat( domain.getLongs() ).isEmpty();
assertThat( domain.getLongs() ).isSameAs( longIn ); // make sure add all is used.
}
/**
* The null value mapping strategy on type level (Mapper) should generate forged methods for the
* conversion from string to long that return default in the entire mapper, so also for the forged
* mapper. Note the default NVMS is
* RETURN_NULL.
*
* However, for plain mappings (strings to strings) the result will also be an empty collection.
*
*/
@ProcessorTest
public void shouldReturnDefaultForNvmsReturnDefaultForUpdateWithReturn() {
Dto dto = new Dto();
Domain domain1 = new Domain();
Set<Long> longIn = new HashSet<>();
longIn.add( 10L );
domain1.setLongs( longIn );
domain1.setStrings( new HashSet<>() );
domain1.getStrings().add( "30" );
Domain domain2 = DomainDtoWithNvmsDefaultMapper.INSTANCE.updateWithReturn( dto, domain1 );
assertThat( domain2 ).isSameAs( domain1 );
doControlAsserts( domain1, domain2 );
assertThat( domain1.getLongs() ).isEqualTo( domain2.getLongs() );
assertThat( domain1.getStrings() ).isEmpty();
assertThat( domain1.getLongs() ).isEmpty();
assertThat( domain2.getStrings() ).isEmpty();
assertThat( domain2.getLongs() ).isEmpty();
assertThat( domain1.getLongs() ).isSameAs( longIn ); // make sure that add all is used
assertThat( domain2.getLongs() ).isSameAs( longIn ); // make sure that add all is used
}
/**
* Test create method ICW presence checker. The presence checker is responsible for the null check.
*
*/
@ProcessorTest
public void shouldReturnNullForCreateWithPresenceChecker() {
DtoWithPresenceCheck dto = new DtoWithPresenceCheck();
Domain domain = DomainDtoWithPresenceCheckMapper.INSTANCE.create( dto );
doControlAsserts( domain );
assertThat( domain.getStrings() ).isNull();
assertThat( domain.getLongs() ).isNull();
}
/**
* Test update method ICW presence checker
*
* Similar as in regular mappings, the target property should be left as-is.
*
*/
@IssueKey( "#954")
@ProcessorTest
public void shouldReturnNullForUpdateWithPresenceChecker() {
DtoWithPresenceCheck dto = new DtoWithPresenceCheck();
Domain domain = new Domain();
domain.setLongs( new HashSet<>() );
domain.getLongs().add( 10L );
domain.setStrings( new HashSet<>() );
domain.getStrings().add( "30" );
DomainDtoWithPresenceCheckMapper.INSTANCE.update( dto, domain );
doControlAsserts( domain );
assertThat( domain.getStrings() ).containsExactly( "30" );
assertThat( domain.getLongs() ).containsExactly( 10L );
}
/**
* Test update with return method ICW presence checker
*
* Similar as in regular mappings, the target property should be left as-is.
*
*/
@IssueKey( "#954")
@ProcessorTest
public void shouldReturnNullForUpdateWithReturnWithPresenceChecker() {
DtoWithPresenceCheck dto = new DtoWithPresenceCheck();
Domain domain1 = new Domain();
domain1.setLongs( new HashSet<>() );
domain1.getLongs().add( 10L );
domain1.setStrings( new HashSet<>() );
domain1.getStrings().add( "30" );
Domain domain2 = DomainDtoWithPresenceCheckMapper.INSTANCE.updateWithReturn( dto, domain1 );
assertThat( domain2 ).isSameAs( domain1 );
doControlAsserts( domain1, domain2 );
assertThat( domain1.getLongs() ).isEqualTo( domain2.getLongs() );
assertThat( domain1.getStrings() ).containsExactly( "30" );
assertThat( domain1.getLongs() ).containsExactly( 10L );
assertThat( domain2.getStrings() ).containsExactly( "30" );
assertThat( domain2.getLongs() ).containsExactly( 10L );
}
/**
* Test create method ICW NullValueCheckStrategy.ALWAYS.
*
*/
@IssueKey( "#954")
@ProcessorTest
public void shouldReturnNullForCreateWithNcvsAlways() {
DtoWithPresenceCheck dto = new DtoWithPresenceCheck();
Domain domain = DomainDtoWithNcvsAlwaysMapper.INSTANCE.create( dto );
doControlAsserts( domain );
assertThat( domain.getStrings() ).isNull();
assertThat( domain.getLongs() ).isNull();
}
/**
* Test update method ICW presence checker
*
* Similar as in regular mappings, the target property should be left as-is.
*
*/
@IssueKey( "#954")
@ProcessorTest
public void shouldReturnNullForUpdateWithNcvsAlways() {
DtoWithPresenceCheck dto = new DtoWithPresenceCheck();
Domain domain = new Domain();
domain.setLongs( new HashSet<>() );
domain.getLongs().add( 10L );
domain.setStrings( new HashSet<>() );
domain.getStrings().add( "30" );
DomainDtoWithNcvsAlwaysMapper.INSTANCE.update( dto, domain );
doControlAsserts( domain );
assertThat( domain.getStrings() ).containsExactly( "30" );
assertThat( domain.getLongs() ).containsExactly( 10L );
}
/**
* Test update with return method ICW presence checker
*
* Similar as in regular mappings, the target property should be left as-is.
*
*/
@IssueKey( "#954")
@ProcessorTest
public void shouldReturnNullForUpdateWithReturnWithNcvsAlways() {
DtoWithPresenceCheck dto = new DtoWithPresenceCheck();
Domain domain1 = new Domain();
domain1.setLongs( new HashSet<>() );
domain1.getLongs().add( 10L );
domain1.setStrings( new HashSet<>() );
domain1.getStrings().add( "30" );
Domain domain2 = DomainDtoWithNcvsAlwaysMapper.INSTANCE.updateWithReturn( dto, domain1 );
assertThat( domain2 ).isSameAs( domain1 );
doControlAsserts( domain1, domain2 );
assertThat( domain1.getLongs() ).isEqualTo( domain2.getLongs() );
assertThat( domain1.getStrings() ).containsExactly( "30" );
assertThat( domain1.getLongs() ).containsExactly( 10L );
assertThat( domain2.getStrings() ).containsExactly( "30" );
assertThat( domain2.getLongs() ).containsExactly( 10L );
}
/**
* These assert check if non-null and default mapping is working as expected.
*/
private void doControlAsserts( Domain domain ) {
assertThat( domain.getStringsInitialized() ).containsOnly( "5" );
assertThat( domain.getLongsInitialized() ).containsOnly( 5L );
assertThat( domain.getStringsWithDefault() ).containsOnly( "3" );
}
/**
* These assert check if non-null and default mapping is working as expected.
*/
private void doControlAsserts( Domain domain1, Domain domain2) {
assertThat( domain1 ).isEqualTo( domain2 );
assertThat( domain1.getStringsInitialized() ).containsOnly( "5" );
assertThat( domain1.getLongsInitialized() ).containsOnly( 5L );
assertThat( domain1.getStringsWithDefault() ).containsOnly( "3" );
assertThat( domain2.getStringsInitialized() ).containsOnly( "5" );
assertThat( domain2.getLongsInitialized() ).containsOnly( 5L );
assertThat( domain2.getStringsWithDefault() ).containsOnly( "3" );
assertThat( domain1.getStringsInitialized() ).isEqualTo( domain2.getStringsInitialized() );
assertThat( domain1.getLongsInitialized() ).isEqualTo( domain2.getLongsInitialized() );
assertThat( domain1.getStringsWithDefault() ).isEqualTo( domain2.getStringsWithDefault() );
}
}
|
Issue913SetterMapperForCollectionsTest
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/dev/IsolatedRemoteDevModeMain.java
|
{
"start": 1921,
"end": 8682
}
|
class ____ implements BiConsumer<CuratedApplication, Map<String, Object>>, Closeable {
private static final Logger log = Logger.getLogger(IsolatedRemoteDevModeMain.class);
private volatile DevModeContext context;
private final List<HotReplacementSetup> hotReplacementSetups = new ArrayList<>();
private AtomicReference<Throwable> deploymentProblem = new AtomicReference<>();
static volatile RemoteDevClient remoteDevClient;
static volatile Closeable remoteDevClientSession;
private static volatile CuratedApplication curatedApplication;
private static volatile AugmentAction augmentAction;
private static volatile Map<String, String> currentHashes;
private static volatile Path appRoot;
private static volatile Map<DevModeContext.ModuleInfo, Set<String>> copiedStaticResources = new HashMap<>();
static RemoteDevClient createClient(CuratedApplication curatedApplication) {
ServiceLoader<RemoteDevClientProvider> providers = ServiceLoader.load(RemoteDevClientProvider.class,
curatedApplication.getOrCreateAugmentClassLoader());
RemoteDevClient client = null;
for (RemoteDevClientProvider provider : providers) {
Optional<RemoteDevClient> opt = provider.getClient();
if (opt.isPresent()) {
client = opt.get();
break;
}
}
if (client == null) {
client = new DefaultRemoteDevClient();
}
return client;
}
private synchronized JarResult generateApplication() {
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
//ok, we have resolved all the deps
try {
AugmentResult start = augmentAction.createProductionApplication();
if (!start.getJar().mutable()) {
throw new RuntimeException(
"remote-dev can only be used with mutable applications i.e. " +
"using the mutable-jar package type");
}
//now extract the artifacts, to mirror the remote side
DevModeTask.extractDevModeClasses(start.getJar().getPath().getParent(),
curatedApplication.getApplicationModel(), null);
return start.getJar();
} catch (Throwable t) {
deploymentProblem.set(t);
log.error("Failed to generate Quarkus application", t);
return null;
}
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
/**
 * Wires up runtime (hot-reload) compilation for the local side of remote dev mode.
 * <p>
 * Discovers all {@link CompilationProvider}s via the {@link ServiceLoader}, registers their
 * handled source paths on every module, and builds a {@link RuntimeUpdatesProcessor} that
 * regenerates the application on change (see {@code regenerateApplication}).
 *
 * @param context         the dev mode context describing the application modules
 * @param applicationRoot root path of the application being watched
 * @return the configured processor, or {@code null} when there are no modules or the
 *         compiler could not be created (runtime compilation is then unavailable)
 * @throws Exception if hot-replacement setup fails
 */
private RuntimeUpdatesProcessor setupRuntimeCompilation(DevModeContext context, Path applicationRoot)
        throws Exception {
    if (!context.getAllModules().isEmpty()) {
        // discover every available compilation provider (Java, Kotlin, ...) and let each
        // one advertise the source paths it can handle to every module
        ServiceLoader<CompilationProvider> serviceLoader = ServiceLoader.load(CompilationProvider.class);
        List<CompilationProvider> compilationProviders = new ArrayList<>();
        for (CompilationProvider provider : serviceLoader) {
            compilationProviders.add(provider);
            context.getAllModules().forEach(moduleInfo -> moduleInfo.addSourcePaths(provider.handledSourcePaths()));
        }
        QuarkusCompiler compiler;
        try {
            compiler = new QuarkusCompiler(curatedApplication, compilationProviders, context);
        } catch (Exception e) {
            // non-fatal: dev mode still runs, just without runtime compilation
            log.error("Failed to create compiler, runtime compilation will be unavailable", e);
            return null;
        }
        //this is never the remote side
        RuntimeUpdatesProcessor processor = new RuntimeUpdatesProcessor(applicationRoot, context, compiler,
                DevModeType.REMOTE_LOCAL_SIDE, this::regenerateApplication,
                new BiConsumer<DevModeContext.ModuleInfo, String>() {
                    @Override
                    public void accept(DevModeContext.ModuleInfo moduleInfo, String s) {
                        // remember which static resources were copied per module
                        copiedStaticResources.computeIfAbsent(moduleInfo, ss -> new HashSet<>()).add(s);
                    }
                }, new BiFunction<String, byte[], byte[]>() {
                    @Override
                    public byte[] apply(String s, byte[] bytes) {
                        // apply registered bytecode transformations to recompiled classes
                        return ClassTransformingBuildStep.transform(s, bytes);
                    }
                }, null, deploymentProblem);
        // hot replacement implementations are loaded from the base runtime class loader
        for (HotReplacementSetup service : ServiceLoader.load(HotReplacementSetup.class,
                curatedApplication.getOrCreateBaseRuntimeClassLoader())) {
            hotReplacementSetups.add(service);
            service.setupHotDeployment(processor);
            processor.addHotReplacementSetup(service);
        }
        for (DeploymentFailedStartHandler service : ServiceLoader.load(DeploymentFailedStartHandler.class,
                curatedApplication.getOrCreateAugmentClassLoader())) {
            processor.addDeploymentFailedStartHandler(new Runnable() {
                @Override
                public void run() {
                    // run the handler with the augmentation class loader as TCCL,
                    // restoring the previous loader afterwards
                    ClassLoader old = Thread.currentThread().getContextClassLoader();
                    try {
                        Thread.currentThread().setContextClassLoader(curatedApplication.getOrCreateAugmentClassLoader());
                        service.handleFailedInitialStart();
                    } finally {
                        Thread.currentThread().setContextClassLoader(old);
                    }
                }
            });
        }
        return processor;
    }
    return null;
}
/**
 * Change callback passed to the {@link RuntimeUpdatesProcessor}: the changed-class/file
 * arguments are deliberately ignored because this side always regenerates the whole
 * application rather than applying an incremental update.
 */
void regenerateApplication(Set<String> ignore, ClassScanResult ignore2) {
    generateApplication();
}
/**
 * Shuts down dev-mode services: the runtime updates processor, all registered
 * hot-replacement setups and the remote dev client session. The curated application
 * is always closed last, and the recorded deployment problem is cleared.
 */
public void close() {
    try {
        try {
            // guard: INSTANCE is never assigned when runtime compilation setup failed
            // or application generation aborted before it ran
            if (RuntimeUpdatesProcessor.INSTANCE != null) {
                RuntimeUpdatesProcessor.INSTANCE.close();
            }
        } catch (IOException e) {
            log.error("Failed to close compiler", e);
        } finally {
            RuntimeUpdatesProcessor.INSTANCE = null;
        }
        for (HotReplacementSetup i : hotReplacementSetups) {
            i.close();
        }
        if (remoteDevClientSession != null) {
            try {
                remoteDevClientSession.close();
            } catch (IOException e) {
                log.error("Failed to close client", e);
            }
        }
    } finally {
        // always release the curated application, even if an earlier close threw
        deploymentProblem.set(null);
        curatedApplication.close();
    }
}
//the main entry point, but loaded inside the augmentation
|
IsolatedRemoteDevModeMain
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/logging/logback/ExtractorTests.java
|
{
"start": 1049,
"end": 2493
}
|
class ____ {

    // -- shared fixture builders ---------------------------------------------

    private ThrowableProxyConverter createConverter() {
        ThrowableProxyConverter proxyConverter = new ThrowableProxyConverter();
        proxyConverter.start();
        return proxyConverter;
    }

    private ILoggingEvent createEvent() {
        LoggingEvent loggingEvent = new LoggingEvent();
        loggingEvent.setMessage("TestMessage");
        loggingEvent.setThrowableProxy(new ThrowableProxy(new RuntimeException("Boom!")));
        return loggingEvent;
    }

    // -- tests ----------------------------------------------------------------

    @Test
    void messageAndStackTraceWhenNoPrinterPrintsUsingLoggingSystem() {
        // no custom printer configured: falls back to the logging system's rendering
        Extractor noPrinter = new Extractor(null, createConverter());
        String rendered = noPrinter.messageAndStackTrace(createEvent());
        assertThat(rendered).startsWith("TestMessage\n\n").contains("java.lang.RuntimeException: Boom!");
    }

    @Test
    void messageAndStackTraceWhenNoPrinterPrintsUsingPrinter() {
        Extractor withPrinter = new Extractor(new SimpleStackTracePrinter(), createConverter());
        String rendered = withPrinter.messageAndStackTrace(createEvent());
        assertThat(rendered).isEqualTo("TestMessage\n\nstacktrace:RuntimeException");
    }

    @Test
    void stackTraceWhenNoPrinterPrintsUsingFallback() {
        Extractor noPrinter = new Extractor(null, createConverter());
        assertThat(noPrinter.stackTrace(createEvent())).contains("java.lang.RuntimeException: Boom!");
    }

    @Test
    void stackTraceWhenNoPrinterPrintsUsingPrinter() {
        Extractor withPrinter = new Extractor(new SimpleStackTracePrinter(), createConverter());
        assertThat(withPrinter.stackTrace(createEvent())).isEqualTo("stacktrace:RuntimeException");
    }

}
|
ExtractorTests
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/stackdriver/StackdriverProperties.java
|
{
"start": 1222,
"end": 3406
}
|
/**
 * Configuration properties for exporting metrics to Google Cloud Stackdriver
 * (Cloud Monitoring). Push-interval/batching settings are inherited from
 * {@code StepRegistryProperties}; this class adds only Stackdriver-specific knobs.
 */
class ____ extends StepRegistryProperties {
    /**
     * Identifier of the Google Cloud project to monitor.
     */
    private @Nullable String projectId;
    /**
     * Monitored resource type.
     */
    private String resourceType = "global";
    /**
     * Monitored resource's labels.
     */
    private @Nullable Map<String, String> resourceLabels;
    /**
     * Whether to use semantically correct metric types. When false, counter metrics are
     * published as the GAUGE MetricKind. When true, counter metrics are published as the
     * CUMULATIVE MetricKind.
     */
    private boolean useSemanticMetricTypes;
    /**
     * Prefix for metric type. Valid prefixes are described in the Google Cloud
     * documentation (https://cloud.google.com/monitoring/custom-metrics#identifier).
     */
    private String metricTypePrefix = "custom.googleapis.com/";
    /**
     * Whether it should be attempted to create a metric descriptor before writing a time
     * series.
     */
    private boolean autoCreateMetricDescriptors = true;
    // -- plain accessors below; no additional logic ------------------------------
    public @Nullable String getProjectId() {
        return this.projectId;
    }
    public void setProjectId(@Nullable String projectId) {
        this.projectId = projectId;
    }
    public String getResourceType() {
        return this.resourceType;
    }
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }
    public @Nullable Map<String, String> getResourceLabels() {
        return this.resourceLabels;
    }
    public void setResourceLabels(@Nullable Map<String, String> resourceLabels) {
        this.resourceLabels = resourceLabels;
    }
    public boolean isUseSemanticMetricTypes() {
        return this.useSemanticMetricTypes;
    }
    public void setUseSemanticMetricTypes(boolean useSemanticMetricTypes) {
        this.useSemanticMetricTypes = useSemanticMetricTypes;
    }
    public String getMetricTypePrefix() {
        return this.metricTypePrefix;
    }
    public void setMetricTypePrefix(String metricTypePrefix) {
        this.metricTypePrefix = metricTypePrefix;
    }
    public boolean isAutoCreateMetricDescriptors() {
        return this.autoCreateMetricDescriptors;
    }
    public void setAutoCreateMetricDescriptors(boolean autoCreateMetricDescriptors) {
        this.autoCreateMetricDescriptors = autoCreateMetricDescriptors;
    }
}
|
StackdriverProperties
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/EntityTrackingRevisionListener.java
|
{
"start": 401,
"end": 651
}
|
interface ____ extends RevisionListener {
/**
* Called after audited entity data has been persisted.
* @param entityClass Audited entity class.
* @param entityName Name of the audited entity. May be useful when Java
|
EntityTrackingRevisionListener
|
java
|
apache__camel
|
core/camel-main/src/test/java/org/apache/camel/main/PropertyBindingSupportRootArrayTest.java
|
{
"start": 1225,
"end": 2343
}
|
class ____ {

    /**
     * Binds an indexed property path ({@code bars[0]...}) onto a root-level list and
     * verifies the generated configurer is used (zero reflective invocations).
     */
    @Test
    public void testRootArray() {
        CamelContext context = new DefaultCamelContext();
        BeanIntrospection bi = PluginHelper.getBeanIntrospection(context);
        bi.setExtendedStatistics(true);
        context.start();
        try {
            MySecondFoo target = new MySecondFoo();

            PropertyBindingSupport.build()
                    .withCamelContext(context)
                    .withTarget(target)
                    .withProperty("bars[0]", "#class:" + MySecondBar.class.getName())
                    .withProperty("bars[0].names[0]", "a")
                    .withProperty("bars[0].names[1]", "b")
                    .withRemoveParameters(false).bind();

            assertEquals(1, target.getBars().size());
            assertEquals(2, target.getBars().get(0).getNames().size());
            assertEquals("a", target.getBars().get(0).getNames().get(0));
            assertEquals("b", target.getBars().get(0).getNames().get(1));

            // will auto detect generated configurer so no reflection in use
            assertEquals(0, bi.getInvokedCounter());
        } finally {
            // previously the context leaked when an assertion failed; always stop it
            context.stop();
        }
    }
}
|
PropertyBindingSupportRootArrayTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scheduling/config/LazyScheduledTasksBeanDefinitionParserTests.java
|
{
"start": 1615,
"end": 1722
}
|
/**
 * Minimal work item for the parser test; {@link #executed} records whether
 * {@link #doWork()} was ever invoked.
 */
class ____ {
    // volatile: presumably written by a scheduler thread and read by the test
    // thread — volatile guarantees visibility (TODO confirm against the test setup)
    volatile boolean executed = false;

    public void doWork() {
        executed = true;
    }
}
}
|
Task
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedThroughputLogger.java
|
{
"start": 1194,
"end": 2196
}
|
/**
 * JMX managed view of a {@link ThroughputLogger}: every attribute/operation simply
 * delegates to the wrapped processor.
 */
class ____ extends ManagedProcessor implements ManagedThroughputLoggerMBean {

    public ManagedThroughputLogger(CamelContext context, ThroughputLogger logger, ProcessorDefinition<?> definition) {
        super(context, logger, definition);
    }

    @Override
    public ThroughputLogger getProcessor() {
        // narrowing override: the processor handed to the constructor is always a ThroughputLogger
        return (ThroughputLogger) super.getProcessor();
    }

    @Override
    public void reset() {
        // reset both the generic processor statistics and the throughput counters
        super.reset();
        getProcessor().reset();
    }

    @Override
    public long getReceivedCounter() {
        return getProcessor().getReceivedCounter();
    }

    @Override
    public double getAverage() {
        return getProcessor().getAverage();
    }

    @Override
    public double getRate() {
        return getProcessor().getRate();
    }

    @Override
    public String getLastLogMessage() {
        return getProcessor().getLastLogMessage();
    }

    @Override
    public void resetThroughputLogger() {
        // resets only the throughput counters, unlike reset() which also clears
        // the inherited processor statistics
        getProcessor().reset();
    }
}
|
ManagedThroughputLogger
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/IndexedParameter.java
|
{
"start": 313,
"end": 5785
}
|
/**
 * Mutable description of a single indexed REST parameter, using a self-typed fluent
 * builder pattern: every setter returns {@code this} as {@code T} so subclasses keep
 * their own type through chained calls.
 *
 * @param <T> the concrete subclass type returned by the fluent setters
 */
class ____<T extends IndexedParameter<T>> {

    // appears unused within this class — kept for binary/backward compatibility
    private static final Object[] NO_ERROR_LOCATION_PARAMETERS = new Object[0];

    protected ClassInfo currentClassInfo;
    protected ClassInfo actualEndpointInfo;
    protected Map<String, String> existingConverters;
    protected AdditionalReaders additionalReaders;
    protected Map<DotName, AnnotationInstance> anns;
    protected Type paramType;
    protected String rawErrorLocation;
    protected Object[] errorLocationParameters;
    protected boolean hasRuntimeConverters;
    protected Set<String> pathParameters;
    protected String sourceName;
    protected boolean field;
    protected boolean suspended;
    protected boolean sse;
    protected String name;
    protected String defaultValue;
    protected ParameterType type;
    protected String elementType;
    protected boolean single;
    protected boolean optional;
    protected String separator;

    /**
     * Single, documented location for the self-type cast. Safe by the contract of the
     * recursive type bound {@code T extends IndexedParameter<T>}: concrete subclasses
     * instantiate {@code T} with themselves, so {@code this} is a {@code T}.
     */
    @SuppressWarnings("unchecked")
    private T self() {
        return (T) this;
    }

    /**
     * Whether the parameter value is obtained as a collection: multi-valued header,
     * matrix, form or query parameters.
     */
    public boolean isObtainedAsCollection() {
        return !single
                && (type == ParameterType.HEADER
                        || type == ParameterType.MATRIX
                        || type == ParameterType.FORM
                        || type == ParameterType.QUERY);
    }

    public ClassInfo getCurrentClassInfo() {
        return currentClassInfo;
    }

    public T setCurrentClassInfo(ClassInfo currentClassInfo) {
        this.currentClassInfo = currentClassInfo;
        return self();
    }

    public ClassInfo getActualEndpointInfo() {
        return actualEndpointInfo;
    }

    public T setActualEndpointInfo(ClassInfo actualEndpointInfo) {
        this.actualEndpointInfo = actualEndpointInfo;
        return self();
    }

    public Map<String, String> getExistingConverters() {
        return existingConverters;
    }

    public T setExistingConverters(Map<String, String> existingConverters) {
        this.existingConverters = existingConverters;
        return self();
    }

    public AdditionalReaders getAdditionalReaders() {
        return additionalReaders;
    }

    public T setAdditionalReaders(AdditionalReaders additionalReaders) {
        this.additionalReaders = additionalReaders;
        return self();
    }

    public Map<DotName, AnnotationInstance> getAnns() {
        return anns;
    }

    public T setAnns(Map<DotName, AnnotationInstance> anns) {
        this.anns = anns;
        return self();
    }

    public Type getParamType() {
        return paramType;
    }

    public T setParamType(Type paramType) {
        this.paramType = paramType;
        return self();
    }

    /** Renders the error location template with its recorded parameters. */
    public String getErrorLocation() {
        return String.format(rawErrorLocation, errorLocationParameters);
    }

    String getRawErrorLocation() {
        return rawErrorLocation;
    }

    Object[] getErrorLocationParameters() {
        return errorLocationParameters;
    }

    public T setErrorLocation(String rawErrorLocation, Object[] parameters) {
        this.rawErrorLocation = rawErrorLocation;
        this.errorLocationParameters = parameters;
        return self();
    }

    public boolean isHasRuntimeConverters() {
        return hasRuntimeConverters;
    }

    public T setHasRuntimeConverters(boolean hasRuntimeConverters) {
        this.hasRuntimeConverters = hasRuntimeConverters;
        return self();
    }

    public Set<String> getPathParameters() {
        return pathParameters;
    }

    public T setPathParameters(Set<String> pathParameters) {
        this.pathParameters = pathParameters;
        return self();
    }

    public String getSourceName() {
        return sourceName;
    }

    public T setSourceName(String sourceName) {
        this.sourceName = sourceName;
        return self();
    }

    public boolean isField() {
        return field;
    }

    public T setField(boolean field) {
        this.field = field;
        return self();
    }

    public boolean isSuspended() {
        return suspended;
    }

    public T setSuspended(boolean suspended) {
        this.suspended = suspended;
        return self();
    }

    public boolean isSse() {
        return sse;
    }

    public T setSse(boolean sse) {
        this.sse = sse;
        return self();
    }

    public String getName() {
        return name;
    }

    public T setName(String name) {
        this.name = name;
        return self();
    }

    public String getDefaultValue() {
        return defaultValue;
    }

    public T setDefaultValue(String defaultValue) {
        this.defaultValue = defaultValue;
        return self();
    }

    public ParameterType getType() {
        return type;
    }

    public T setType(ParameterType type) {
        this.type = type;
        return self();
    }

    public String getElementType() {
        return elementType;
    }

    public T setElementType(String elementType) {
        this.elementType = elementType;
        return self();
    }

    public boolean isSingle() {
        return single;
    }

    public T setSingle(boolean single) {
        this.single = single;
        return self();
    }

    public boolean isOptional() {
        return optional;
    }

    public T setOptional(boolean optional) {
        this.optional = optional;
        return self();
    }

    public String getSeparator() {
        return separator;
    }

    public T setSeparator(String separator) {
        this.separator = separator;
        return self();
    }
}
|
IndexedParameter
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/ha/HATest.java
|
{
"start": 1147,
"end": 15480
}
|
/**
 * High-availability (HA) failover tests: verticles deployed with {@code setHa(true)} must
 * fail over to surviving cluster nodes, honour quorum sizes and HA groups, and never fail
 * over to non-HA nodes or cleanly-closed nodes. Uses {@link FakeClusterManager} and
 * simulated node kills ({@code haManager().simulateKill()}).
 */
class ____ extends VertxTestBase {
  protected ClusterManager getClusterManager() {
    return new FakeClusterManager();
  }
  // extra vertx instances created per-test; closed (if non-null) in tearDown
  protected Vertx vertx1, vertx2, vertx3, vertx4 = null;
  @Override
  protected void tearDown() throws Exception {
    closeVertices(vertx1, vertx2, vertx3, vertx4);
    super.tearDown();
  }
  @Test
  public void testSimpleFailover() throws Exception {
    startNodes(2, new VertxOptions().setHAEnabled(true));
    DeploymentOptions options = new DeploymentOptions().setHa(true);
    JsonObject config = new JsonObject().put("foo", "bar");
    options.setConfig(config);
    CountDownLatch latch = new CountDownLatch(1);
    vertices[0].deployVerticle("java:" + HAVerticle1.class.getName(), options).onComplete(onSuccess(v -> {
      assertEquals(1, vertices[0].deploymentIDs().size());
      assertEquals(0, vertices[1].deploymentIDs().size());
      latch.countDown();
    }));
    awaitLatch(latch);
    // kill node 0 -> the deployment must reappear on node 1 with the same options
    kill(0);
    assertWaitUntil(() -> vertices[1].deploymentIDs().size() == 1);
    checkDeploymentExists(1, "java:" + HAVerticle1.class.getName(), options);
  }
  @Test
  public void testQuorum() throws Exception {
    vertx1 = startVertx(2);
    DeploymentOptions options = new DeploymentOptions().setHa(true);
    JsonObject config = new JsonObject().put("foo", "bar");
    options.setConfig(config);
    vertx1.deployVerticle("java:" + HAVerticle1.class.getName(), options).onComplete(onSuccess(id -> {
      assertTrue(vertx1.deploymentIDs().contains(id));
      testComplete();
    }));
    // Shouldn't deploy until a quorum is obtained
    assertWaitUntil(() -> vertx1.deploymentIDs().isEmpty());
    vertx2 = startVertx(2);
    // Now should be deployed
    await();
  }
  @Test
  public void testQuorumLost() throws Exception {
    vertx1 = startVertx(3);
    vertx2 = startVertx(3);
    vertx3 = startVertx(3);
    DeploymentOptions options = new DeploymentOptions().setHa(true);
    JsonObject config = new JsonObject().put("foo", "bar");
    options.setConfig(config);
    vertx1.deployVerticle("java:" + HAVerticle1.class.getName(), options).onComplete(onSuccess(id -> {
      assertTrue(vertx1.deploymentIDs().contains(id));
    }));
    vertx2.deployVerticle("java:" + HAVerticle2.class.getName(), options).onComplete(onSuccess(id -> {
      assertTrue(vertx2.deploymentIDs().contains(id));
    }));
    assertWaitUntil(() -> vertx1.deploymentIDs().size() == 1 && vertx2.deploymentIDs().size() == 1);
    // Now close vertx3 - quorum should then be lost and verticles undeployed
    CountDownLatch latch = new CountDownLatch(1);
    vertx3.close().onComplete(ar -> {
      latch.countDown();
    });
    awaitLatch(latch);
    assertWaitUntil(() -> vertx1.deploymentIDs().isEmpty() && vertx2.deploymentIDs().isEmpty());
    // Now re-instate the quorum
    vertx4 = startVertx(3);
    assertWaitUntil(() -> vertx1.deploymentIDs().size() == 1 && vertx2.deploymentIDs().size() == 1);
  }
  @Test
  public void testCleanCloseNoFailover() throws Exception {
    vertx1 = startVertx();
    vertx2 = startVertx();
    DeploymentOptions options = new DeploymentOptions().setHa(true);
    JsonObject config = new JsonObject().put("foo", "bar");
    options.setConfig(config);
    CountDownLatch deployLatch = new CountDownLatch(1);
    vertx2.deployVerticle("java:" + HAVerticle1.class.getName(), options).onComplete(onSuccess(id -> {
      deployLatch.countDown();
    }));
    awaitLatch(deployLatch);
    // a clean close is not a failure, so no failover must be triggered
    ((VertxImpl)vertx1).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      fail("Should not be called");
    });
    vertx2.close().onComplete(ar -> {
      vertx.setTimer(500, tid -> {
        // Wait a bit in case failover happens
        testComplete();
      });
    });
    await();
  }
  @Test
  public void testFailureInFailover() throws Exception {
    vertx1 = startVertx();
    vertx2 = startVertx();
    vertx3 = startVertx();
    CountDownLatch latch1 = new CountDownLatch(1);
    vertx1.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx1.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    awaitLatch(latch1);
    // force both surviving nodes to fail their first failover attempt
    ((VertxImpl)vertx2).haManager().failDuringFailover(true);
    ((VertxImpl)vertx3).haManager().failDuringFailover(true);
    CountDownLatch latch2 = new CountDownLatch(1);
    ((VertxImpl)vertx2).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      assertFalse(succeeded);
      latch2.countDown();
    });
    ((VertxImpl)vertx3).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      assertFalse(succeeded);
      latch2.countDown();
    });
    ((VertxImpl)vertx1).haManager().simulateKill();
    awaitLatch(latch2);
    // Now try again - this time failover should work
    assertTrue(vertx2.deploymentIDs().isEmpty());
    assertTrue(vertx3.deploymentIDs().isEmpty());
    ((VertxImpl)vertx2).haManager().failDuringFailover(false);
    CountDownLatch latch3 = new CountDownLatch(1);
    ((VertxImpl)vertx2).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      assertTrue(succeeded);
      latch3.countDown();
    });
    ((VertxImpl)vertx3).haManager().simulateKill();
    awaitLatch(latch3);
    assertWaitUntil(() -> vertx2.deploymentIDs().size() == 1);
  }
  @Test
  public void testHaGroups() throws Exception {
    // failover must stay within the node's HA group
    vertx1 = startVertx("group1", 1);
    vertx2 = startVertx("group1", 1);
    vertx3 = startVertx("group2", 1);
    vertx4 = startVertx("group2", 1);
    CountDownLatch latch1 = new CountDownLatch(2);
    vertx1.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx1.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    vertx3.deployVerticle("java:" + HAVerticle2.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx3.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    awaitLatch(latch1);
    CountDownLatch latch2 = new CountDownLatch(1);
    ((VertxImpl)vertx1).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      fail("Should not failover here 1");
    });
    ((VertxImpl)vertx2).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      fail("Should not failover here 2");
    });
    ((VertxImpl)vertx4).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      assertTrue(succeeded);
      latch2.countDown();
    });
    ((VertxImpl)vertx3).haManager().simulateKill();
    awaitLatch(latch2);
    assertTrue(vertx4.deploymentIDs().size() == 1);
    CountDownLatch latch3 = new CountDownLatch(1);
    ((VertxImpl)vertx2).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      assertTrue(succeeded);
      latch3.countDown();
    });
    ((VertxImpl)vertx4).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      fail("Should not failover here 4");
    });
    ((VertxImpl)vertx1).haManager().simulateKill();
    awaitLatch(latch3);
    assertTrue(vertx2.deploymentIDs().size() == 1);
  }
  @Test
  public void testNoFailoverToNonHANode() throws Exception {
    vertx1 = startVertx();
    // Create a non HA node
    vertx2 = startVertx(null, 0, false);
    CountDownLatch latch1 = new CountDownLatch(1);
    vertx1.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx1.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    awaitLatch(latch1);
    ((VertxImpl)vertx2).failoverCompleteHandler((nodeID, haInfo, succeeded) -> fail("Should not failover here 2"));
    ((VertxImpl)vertx1).failoverCompleteHandler((nodeID, haInfo, succeeded) -> fail("Should not failover here 1"));
    ((VertxImpl)vertx1).haManager().simulateKill();
    vertx2.close().onComplete(ar -> {
      vertx.setTimer(500, tid -> {
        // Wait a bit in case failover happens
        assertEquals("Verticle should still be deployed here 1", 1, vertx1.deploymentIDs().size());
        assertTrue("Verticle should not failover here 2", vertx2.deploymentIDs().isEmpty());
        testComplete();
      });
    });
    await();
  }
  @Test
  public void testNonHADeployments() throws Exception {
    vertx1 = startVertx();
    vertx2 = startVertx();
    // Deploy an HA and a non HA deployment
    CountDownLatch latch1 = new CountDownLatch(2);
    vertx2.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx2.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    vertx2.deployVerticle("java:" + HAVerticle2.class.getName(), new DeploymentOptions().setHa(false)).onComplete(onSuccess(id -> {
      assertTrue(vertx2.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    awaitLatch(latch1);
    CountDownLatch latch2 = new CountDownLatch(1);
    ((VertxImpl)vertx1).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
      assertTrue(succeeded);
      latch2.countDown();
    });
    ((VertxImpl)vertx2).haManager().simulateKill();
    awaitLatch(latch2);
    // only the HA deployment (HAVerticle1) fails over; the non-HA one is dropped
    assertTrue(vertx1.deploymentIDs().size() == 1);
    String depID = vertx1.deploymentIDs().iterator().next();
    assertTrue(((VertxInternal) vertx1).deploymentManager().deployment(depID).deployment().identifier().equals("java:" + HAVerticle1.class.getName()));
  }
  @Test
  public void testCloseRemovesFromCluster() throws Exception {
    vertx1 = startVertx();
    vertx2 = startVertx();
    vertx3 = startVertx();
    CountDownLatch latch1 = new CountDownLatch(1);
    vertx3.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx3.deploymentIDs().contains(id));
      latch1.countDown();
    }));
    awaitLatch(latch1);
    CountDownLatch latch2 = new CountDownLatch(1);
    // Close vertx2 - this should not then participate in failover
    vertx2.close().onComplete(ar -> {
      ((VertxImpl)vertx1).failoverCompleteHandler((nodeID, haInfo, succeeded) -> {
        assertTrue(succeeded);
        latch2.countDown();
      });
      ((VertxImpl) vertx3).haManager().simulateKill();
    });
    awaitLatch(latch2);
    assertTrue(vertx1.deploymentIDs().size() == 1);
    String depID = vertx1.deploymentIDs().iterator().next();
    assertTrue(((VertxInternal) vertx1).deploymentManager().deployment(depID).deployment().identifier().equals("java:" + HAVerticle1.class.getName()));
  }
  @Test
  public void testQuorumWithHaGroups() throws Exception {
    // quorum is counted per HA group, not cluster-wide
    vertx1 = startVertx("group1", 2);
    vertx2 = startVertx("group2", 2);
    vertx1.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx1.deploymentIDs().contains(id));
    }));
    // Wait a little while
    Thread.sleep(500);
    //Should not be deployed yet
    assertTrue(vertx1.deploymentIDs().isEmpty());
    vertx3 = startVertx("group1", 2);
    // Now should deploy
    assertWaitUntil(() -> vertx1.deploymentIDs().size() == 1);
    vertx2.deployVerticle("java:" + HAVerticle1.class.getName(), new DeploymentOptions().setHa(true)).onComplete(onSuccess(id -> {
      assertTrue(vertx2.deploymentIDs().contains(id));
    }));
    // Wait a little while
    Thread.sleep(500);
    //Should not be deployed yet
    assertTrue(vertx2.deploymentIDs().isEmpty());
    vertx4 = startVertx("group2", 2);
    // Now should deploy
    assertWaitUntil(() -> vertx2.deploymentIDs().size() == 1);
    // Now stop vertx4
    CountDownLatch latch = new CountDownLatch(1);
    vertx4.close().onComplete(ar -> {
      latch.countDown();
    });
    awaitLatch(latch);
    assertWaitUntil(() -> vertx2.deploymentIDs().isEmpty());
    assertTrue(vertx1.deploymentIDs().size() == 1);
    CountDownLatch latch2 = new CountDownLatch(1);
    vertx3.close().onComplete(ar -> {
      latch2.countDown();
    });
    awaitLatch(latch2);
    assertWaitUntil(() -> vertx1.deploymentIDs().isEmpty());
  }
  // -- helpers ---------------------------------------------------------------
  protected Vertx startVertx() throws Exception {
    return startVertx(null, 1);
  }
  protected Vertx startVertx(int quorumSize) throws Exception {
    return startVertx(null, quorumSize);
  }
  protected Vertx startVertx(String haGroup, int quorumSize) throws Exception {
    return startVertx(haGroup, quorumSize, true);
  }
  /** Starts a clustered Vert.x node, optionally HA-enabled with the given group/quorum. */
  protected Vertx startVertx(String haGroup, int quorumSize, boolean ha) throws Exception {
    VertxOptions options = new VertxOptions().setHAEnabled(ha);
    options.getEventBusOptions().setHost("localhost");
    if (ha) {
      options.setQuorumSize(quorumSize);
      if (haGroup != null) {
        options.setHAGroup(haGroup);
      }
    }
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<Vertx> vertxRef = new AtomicReference<>();
    Vertx.builder()
      .with(options)
      .withClusterManager(getClusterManager())
      .buildClustered()
      .onComplete(onSuccess(vertx -> {
        vertxRef.set(vertx);
        latch.countDown();
      }));
    latch.await(2, TimeUnit.MINUTES);
    return vertxRef.get();
  }
  /** Asserts that node {@code pos} has a deployment with the given identifier and options. */
  protected void checkDeploymentExists(int pos, String verticleName, DeploymentOptions options) {
    VertxImpl vi = (VertxImpl) vertices[pos];
    for (String deploymentID: vi.deploymentIDs()) {
      DeploymentContext dep = vi.deploymentManager().deployment(deploymentID);
      if (verticleName.equals(dep.deployment().identifier()) && options.toJson().equals(dep.deployment().options().toJson())) {
        return;
      }
    }
    fail("Can't find deployment for verticleName: " + verticleName + " on node " + pos);
  }
  /** Simulates an abrupt node death (not a clean close) on a worker thread. */
  protected void kill(int pos) {
    VertxImpl v = (VertxImpl) vertices[pos];
    v.executeBlocking(() -> {
      v.haManager().simulateKill();
      return null;
    }, false).onComplete(onSuccess(ar -> {
    }));
  }
  /** Closes the given (possibly null) instances, waiting for all to complete. */
  protected void closeVertices(Vertx... vertices) throws Exception {
    CountDownLatch latch = new CountDownLatch(vertices.length);
    for (int i = 0; i < vertices.length; i++) {
      if (vertices[i] != null) {
        vertices[i].close().onComplete(onSuccess(res -> {
          latch.countDown();
        }));
      } else {
        latch.countDown();
      }
    }
    awaitLatch(latch, 2, TimeUnit.MINUTES);
  }
}
|
HATest
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/test/java/org/hibernate/spatial/testing/domain/GeomEntity.java
|
{
"start": 907,
"end": 2150
}
|
/**
 * Simple spatial test entity holding a geometry decoded from WKT. Identity
 * (equals/hashCode) is based solely on the {@code id}, null-safely: previously a
 * transient entity (null id) made both methods throw {@link NullPointerException}.
 */
class ____ implements GeomEntityLike<Geometry> {

    @Id
    private Integer id;
    private String type;
    private Geometry geom;

    /**
     * Builds an entity from a test data element, decoding its WKT with the decoder
     * appropriate for the given dialect.
     */
    public static GeomEntity createFrom(TestDataElement element, Dialect dialect) throws WktDecodeException {
        WktDecoder decoder = getWktDecoder( dialect );
        Geometry geom = decoder.decode( element.wkt );
        GeomEntity result = new GeomEntity();
        result.setId( element.id );
        result.setGeom( geom );
        result.setType( element.type );
        return result;
    }

    @Override
    public Integer getId() {
        return id;
    }

    @Override
    public void setId(Integer id) {
        this.id = id;
    }

    @Override
    public String getType() {
        return type;
    }

    @Override
    public void setType(String type) {
        this.type = type;
    }

    @Override
    public Geometry getGeom() {
        return geom;
    }

    @Override
    public void setGeom(Geometry geom) {
        this.geom = geom;
    }

    /** Replaces the geometry with one decoded from the given WKT string. */
    public void setGeomFromWkt(String wkt) {
        this.geom = Wkt.fromWkt( wkt );
    }

    @Override
    public boolean equals(Object o) {
        if ( this == o ) {
            return true;
        }
        if ( o == null || getClass() != o.getClass() ) {
            return false;
        }
        GeomEntity geomEntity = (GeomEntity) o;
        // null-safe id comparison: two transient entities compare equal only on null ids
        return id == null ? geomEntity.id == null : id.equals( geomEntity.id );
    }

    @Override
    public int hashCode() {
        // avoid unboxing NPE for transient entities; non-null ids hash as before
        return id == null ? 0 : id;
    }
}
|
GeomEntity
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/RestClientRequestValidator.java
|
{
"start": 1052,
"end": 2716
}
|
interface ____ {

    /** Factory finder key used to look up the validator implementation. */
    String FACTORY = "rest-client-request-validator-factory";

    /**
     * Validation error
     *
     * @param statusCode to use a specific HTTP status code for this validation error
     * @param body       to use a specific message body for this validation error
     */
    record ValidationError(int statusCode, String body) {
    }

    /**
     * Validation context to use during validation
     *
     * @param consumes                content-type this service can consume
     * @param produces                content-type this service can produce
     * @param requiredBody            whether the message body is required
     * @param queryDefaultValues      default values for HTTP query parameters
     * @param queryAllowedValues      allowed values for HTTP query parameters
     * @param requiredQueryParameters names of HTTP query parameters that are required
     * @param requiredHeaders         names of HTTP headers parameters that are required
     */
    record ValidationContext(String consumes, String produces,
            boolean requiredBody,
            Map<String, String> queryDefaultValues,
            Map<String, String> queryAllowedValues,
            Set<String> requiredQueryParameters,
            Set<String> requiredHeaders) {
    }

    /**
     * Validates the incoming client request
     *
     * @param  exchange          the current exchange
     * @param  validationContent validation context
     * @return                   the validation error, or <tt>null</tt> if success
     */
    ValidationError validate(Exchange exchange, ValidationContext validationContent);
}
|
RestClientRequestValidator
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/ParamDeclarationWrongClassTest.java
|
{
"start": 374,
"end": 860
}
|
/**
 * Negative test: the template's type-safe parameter declaration references
 * {@code org.acme.Foo} — presumably not resolvable to the added {@code Foo} class
 * (TODO confirm its package) — so the build must abort with a
 * {@link TemplateException}.
 */
class ____ {

    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClass(Foo.class)
                    .addAsResource(new StringAsset("{@org.acme.Foo foo}"
                            + "{foo.name}"), "templates/foo.html"))
            // the expected outcome is a startup failure, not a passing test body
            .setExpectedException(TemplateException.class);

    @Test
    public void testValidation() {
        // never reached: the expected exception is thrown during application startup
        fail();
    }
}
|
ParamDeclarationWrongClassTest
|
java
|
apache__camel
|
core/camel-cloud/src/main/java/org/apache/camel/impl/cloud/DefaultServiceCallExpression.java
|
{
"start": 914,
"end": 1565
}
|
class ____ custom implementations of ServiceCall EIP components.
* <p/>
* Below are some examples how to call a service and what Camel endpoint URI is constructed based on the input:
*
* <pre>
serviceCall("myService") -> http://hostname:port
serviceCall("myService/foo") -> http://hostname:port/foo
serviceCall("http:myService/foo") -> http:hostname:port/foo
serviceCall("myService", "http:myService.host:myService.port/foo") -> http:hostname:port/foo
serviceCall("myService", "netty:tcp:myService?connectTimeout=1000") -> netty:tcp:hostname:port?connectTimeout=1000
* </pre>
*
* @deprecated since 4.7
*/
@Deprecated(since = "4.7")
public
|
for
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/registration/AssertingPartyMetadata.java
|
{
"start": 4986,
"end": 9878
}
|
interface ____<B extends Builder<B>> {
/**
* Set the asserting party's <a href=
* "https://www.oasis-open.org/committees/download.php/51890/SAML%20MD%20simplified%20overview.pdf#2.9%20EntityDescriptor">EntityID</a>.
* Equivalent to the value found in the asserting party's <EntityDescriptor
* EntityID="..."/>
* @param entityId the asserting party's EntityID
* @return the {@link B} for further configuration
*/
B entityId(String entityId);
/**
* Set the WantAuthnRequestsSigned setting, indicating the asserting party's
* preference that relying parties should sign the AuthnRequest before sending.
* @param wantAuthnRequestsSigned the WantAuthnRequestsSigned setting
* @return the {@link B} for further configuration
*/
B wantAuthnRequestsSigned(boolean wantAuthnRequestsSigned);
/**
* Apply this {@link Consumer} to the list of SigningMethod Algorithms
* @param signingMethodAlgorithmsConsumer a {@link Consumer} of the list of
* SigningMethod Algorithms
* @return this {@link B} for further configuration
* @since 5.5
*/
B signingAlgorithms(Consumer<List<String>> signingMethodAlgorithmsConsumer);
/**
* Apply this {@link Consumer} to the list of {@link Saml2X509Credential}s
* @param credentialsConsumer a {@link Consumer} of the {@link List} of
* {@link Saml2X509Credential}s
* @return the {@link RelyingPartyRegistration.Builder} for further configuration
* @since 5.4
*/
B verificationX509Credentials(Consumer<Collection<Saml2X509Credential>> credentialsConsumer);
/**
* Apply this {@link Consumer} to the list of {@link Saml2X509Credential}s
* @param credentialsConsumer a {@link Consumer} of the {@link List} of
* {@link Saml2X509Credential}s
* @return the {@link RelyingPartyRegistration.Builder} for further configuration
* @since 5.4
*/
B encryptionX509Credentials(Consumer<Collection<Saml2X509Credential>> credentialsConsumer);
/**
* Set the <a href=
* "https://www.oasis-open.org/committees/download.php/51890/SAML%20MD%20simplified%20overview.pdf#2.5%20Endpoint">SingleSignOnService</a>
* Location.
*
* <p>
* Equivalent to the value found in <SingleSignOnService Location="..."/> in
* the asserting party's <IDPSSODescriptor>.
* @param singleSignOnServiceLocation the SingleSignOnService Location
* @return the {@link B} for further configuration
*/
B singleSignOnServiceLocation(String singleSignOnServiceLocation);
/**
* Set the <a href=
* "https://www.oasis-open.org/committees/download.php/51890/SAML%20MD%20simplified%20overview.pdf#2.5%20Endpoint">SingleSignOnService</a>
* Binding.
*
* <p>
* Equivalent to the value found in <SingleSignOnService Binding="..."/> in
* the asserting party's <IDPSSODescriptor>.
* @param singleSignOnServiceBinding the SingleSignOnService Binding
* @return the {@link B} for further configuration
*/
B singleSignOnServiceBinding(Saml2MessageBinding singleSignOnServiceBinding);
/**
* Set the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Location</a>
*
* <p>
* Equivalent to the value found in <SingleLogoutService Location="..."/> in
* the asserting party's <IDPSSODescriptor>.
* @param singleLogoutServiceLocation the SingleLogoutService Location
* @return the {@link B} for further configuration
* @since 5.6
*/
B singleLogoutServiceLocation(String singleLogoutServiceLocation);
/**
* Set the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Response Location</a>
*
* <p>
* Equivalent to the value found in <SingleLogoutService
* ResponseLocation="..."/> in the asserting party's <IDPSSODescriptor>.
* @param singleLogoutServiceResponseLocation the SingleLogoutService Response
* Location
* @return the {@link B} for further configuration
* @since 5.6
*/
B singleLogoutServiceResponseLocation(String singleLogoutServiceResponseLocation);
/**
* Set the <a href=
* "https://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf#page=7">SingleLogoutService
* Binding</a>
*
* <p>
* Equivalent to the value found in <SingleLogoutService Binding="..."/> in
* the asserting party's <IDPSSODescriptor>.
* @param singleLogoutServiceBinding the SingleLogoutService Binding
* @return the {@link B} for further configuration
* @since 5.6
*/
B singleLogoutServiceBinding(Saml2MessageBinding singleLogoutServiceBinding);
/**
* Creates an immutable ProviderDetails object representing the configuration for
* an Identity Provider, IDP
* @return immutable ProviderDetails object
*/
AssertingPartyMetadata build();
}
}
|
Builder
|
java
|
apache__camel
|
components/camel-tracing/src/test/java/org/apache/camel/tracing/decorators/SqlSpanDecoratorTest.java
|
{
"start": 1225,
"end": 2108
}
|
class ____ {
private static final String SQL_STATEMENT = "select * from customer";
@Test
public void testPre() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Exchange exchange = Mockito.mock(Exchange.class);
Message message = Mockito.mock(Message.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("test");
Mockito.when(exchange.getIn()).thenReturn(message);
Mockito.when(message.getHeader(SqlSpanDecorator.CAMEL_SQL_QUERY, String.class)).thenReturn(SQL_STATEMENT);
SpanDecorator decorator = new SqlSpanDecorator();
MockSpanAdapter span = new MockSpanAdapter();
decorator.pre(span, exchange, endpoint);
assertEquals("sql", span.tags().get(TagConstants.DB_SYSTEM));
assertEquals(SQL_STATEMENT, span.tags().get(TagConstants.DB_STATEMENT));
}
}
|
SqlSpanDecoratorTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/web/annotation/DiscoveredWebOperation.java
|
{
"start": 1726,
"end": 3496
}
|
class ____ extends AbstractDiscoveredOperation implements WebOperation {
private static final boolean REACTIVE_STREAMS_PRESENT = ClassUtils.isPresent("org.reactivestreams.Publisher",
DiscoveredWebOperation.class.getClassLoader());
private final String id;
private final boolean blocking;
private final WebOperationRequestPredicate requestPredicate;
DiscoveredWebOperation(EndpointId endpointId, DiscoveredOperationMethod operationMethod, OperationInvoker invoker,
WebOperationRequestPredicate requestPredicate) {
super(operationMethod, invoker);
this.id = getId(endpointId, operationMethod);
this.blocking = getBlocking(operationMethod);
this.requestPredicate = requestPredicate;
}
private String getId(EndpointId endpointId, OperationMethod method) {
return endpointId + method.getParameters()
.stream()
.filter(this::hasSelector)
.map(this::dashName)
.collect(Collectors.joining());
}
private boolean hasSelector(OperationParameter parameter) {
return parameter.getAnnotation(Selector.class) != null;
}
private String dashName(OperationParameter parameter) {
return "-" + parameter.getName();
}
private boolean getBlocking(OperationMethod method) {
return !REACTIVE_STREAMS_PRESENT || !Publisher.class.isAssignableFrom(method.getMethod().getReturnType());
}
@Override
public String getId() {
return this.id;
}
@Override
public boolean isBlocking() {
return this.blocking;
}
@Override
public WebOperationRequestPredicate getRequestPredicate() {
return this.requestPredicate;
}
@Override
protected void appendFields(ToStringCreator creator) {
creator.append("id", this.id)
.append("blocking", this.blocking)
.append("requestPredicate", this.requestPredicate);
}
}
|
DiscoveredWebOperation
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/UberJarMergedResourceBuildItem.java
|
{
"start": 234,
"end": 544
}
|
class ____ extends MultiBuildItem {
private final String path;
public UberJarMergedResourceBuildItem(String path) {
this.path = Assert.checkNotEmptyParam("UberJarMergedResourceBuildItem.path", path);
}
public String getPath() {
return path;
}
}
|
UberJarMergedResourceBuildItem
|
java
|
netty__netty
|
codec-base/src/test/java/io/netty/handler/codec/LineBasedFrameDecoderTest.java
|
{
"start": 1269,
"end": 7633
}
|
class ____ {
@Test
public void testDecodeWithStrip() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(8192, true, false));
ch.writeInbound(copiedBuffer("first\r\nsecond\nthird", CharsetUtil.US_ASCII));
ByteBuf buf = ch.readInbound();
assertEquals("first", buf.toString(CharsetUtil.US_ASCII));
ByteBuf buf2 = ch.readInbound();
assertEquals("second", buf2.toString(CharsetUtil.US_ASCII));
assertNull(ch.readInbound());
ch.finish();
ReferenceCountUtil.release(ch.readInbound());
buf.release();
buf2.release();
}
@Test
public void testDecodeWithoutStrip() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(8192, false, false));
ch.writeInbound(copiedBuffer("first\r\nsecond\nthird", CharsetUtil.US_ASCII));
ByteBuf buf = ch.readInbound();
assertEquals("first\r\n", buf.toString(CharsetUtil.US_ASCII));
ByteBuf buf2 = ch.readInbound();
assertEquals("second\n", buf2.toString(CharsetUtil.US_ASCII));
assertNull(ch.readInbound());
ch.finish();
ReferenceCountUtil.release(ch.readInbound());
buf.release();
buf2.release();
}
@Test
public void testTooLongLine1() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(16, false, false));
try {
ch.writeInbound(copiedBuffer("12345678901234567890\r\nfirst\nsecond", CharsetUtil.US_ASCII));
fail();
} catch (Exception e) {
assertInstanceOf(TooLongFrameException.class, e);
}
ByteBuf buf = ch.readInbound();
ByteBuf buf2 = copiedBuffer("first\n", CharsetUtil.US_ASCII);
assertEquals(buf2, buf);
assertFalse(ch.finish());
buf.release();
buf2.release();
}
@Test
public void testTooLongLine2() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(16, false, false));
assertFalse(ch.writeInbound(copiedBuffer("12345678901234567", CharsetUtil.US_ASCII)));
try {
ch.writeInbound(copiedBuffer("890\r\nfirst\r\n", CharsetUtil.US_ASCII));
fail();
} catch (Exception e) {
assertInstanceOf(TooLongFrameException.class, e);
}
ByteBuf buf = ch.readInbound();
ByteBuf buf2 = copiedBuffer("first\r\n", CharsetUtil.US_ASCII);
assertEquals(buf2, buf);
assertFalse(ch.finish());
buf.release();
buf2.release();
}
@Test
public void testTooLongLineWithFailFast() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(16, false, true));
try {
ch.writeInbound(copiedBuffer("12345678901234567", CharsetUtil.US_ASCII));
fail();
} catch (Exception e) {
assertInstanceOf(TooLongFrameException.class, e);
}
assertFalse(ch.writeInbound(copiedBuffer("890", CharsetUtil.US_ASCII)));
assertTrue(ch.writeInbound(copiedBuffer("123\r\nfirst\r\n", CharsetUtil.US_ASCII)));
ByteBuf buf = ch.readInbound();
ByteBuf buf2 = copiedBuffer("first\r\n", CharsetUtil.US_ASCII);
assertEquals(buf2, buf);
assertFalse(ch.finish());
buf.release();
buf2.release();
}
@Test
public void testDecodeSplitsCorrectly() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(8192, false, false));
assertTrue(ch.writeInbound(copiedBuffer("line\r\n.\r\n", CharsetUtil.US_ASCII)));
ByteBuf buf = ch.readInbound();
assertEquals("line\r\n", buf.toString(CharsetUtil.US_ASCII));
ByteBuf buf2 = ch.readInbound();
assertEquals(".\r\n", buf2.toString(CharsetUtil.US_ASCII));
assertFalse(ch.finishAndReleaseAll());
buf.release();
buf2.release();
}
@Test
public void testFragmentedDecode() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(8192, false, false));
assertFalse(ch.writeInbound(copiedBuffer("huu", CharsetUtil.US_ASCII)));
assertNull(ch.readInbound());
assertFalse(ch.writeInbound(copiedBuffer("haa\r", CharsetUtil.US_ASCII)));
assertNull(ch.readInbound());
assertTrue(ch.writeInbound(copiedBuffer("\nhuuhaa\r\n", CharsetUtil.US_ASCII)));
ByteBuf buf = ch.readInbound();
assertEquals("huuhaa\r\n", buf.toString(CharsetUtil.US_ASCII));
ByteBuf buf2 = ch.readInbound();
assertEquals("huuhaa\r\n", buf2.toString(CharsetUtil.US_ASCII));
assertFalse(ch.finishAndReleaseAll());
buf.release();
buf2.release();
}
@Test
public void testEmptyLine() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(8192, true, false));
assertTrue(ch.writeInbound(copiedBuffer("\nabcna\r\n", CharsetUtil.US_ASCII)));
ByteBuf buf = ch.readInbound();
assertEquals("", buf.toString(CharsetUtil.US_ASCII));
ByteBuf buf2 = ch.readInbound();
assertEquals("abcna", buf2.toString(CharsetUtil.US_ASCII));
assertFalse(ch.finishAndReleaseAll());
buf.release();
buf2.release();
}
@Test
public void testNotFailFast() throws Exception {
EmbeddedChannel ch = new EmbeddedChannel(new LineBasedFrameDecoder(2, false, false));
assertFalse(ch.writeInbound(wrappedBuffer(new byte[] { 0, 1, 2 })));
assertFalse(ch.writeInbound(wrappedBuffer(new byte[]{ 3, 4 })));
try {
ch.writeInbound(wrappedBuffer(new byte[] { '\n' }));
fail();
} catch (TooLongFrameException expected) {
// Expected once we received a full frame.
}
assertFalse(ch.writeInbound(wrappedBuffer(new byte[] { '5' })));
assertTrue(ch.writeInbound(wrappedBuffer(new byte[] { '\n' })));
ByteBuf expected = wrappedBuffer(new byte[] { '5', '\n' });
ByteBuf buffer = ch.readInbound();
assertEquals(expected, buffer);
expected.release();
buffer.release();
assertFalse(ch.finish());
}
}
|
LineBasedFrameDecoderTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/files/Files_assertHasDigest_AlgorithmBytes_Test.java
|
{
"start": 2281,
"end": 6700
}
|
class ____ extends FilesBaseTest {
private final String algorithm = "MD5";
private final byte[] expected = new byte[0];
@Test
void should_fail_if_actual_is_null() {
// GIVEN
File actual = null;
// WHEN
var error = expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_with_should_exist_error_if_actual_does_not_exist() {
// GIVEN
File actual = new File("xyz");
// WHEN
expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
verify(failures).failure(INFO, shouldExist(actual));
}
@Test
void should_fail_if_actual_exists_but_is_not_file() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/tmp");
// WHEN
expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
verify(failures).failure(INFO, shouldBeFile(actual));
}
@DisabledOnOs(OS.WINDOWS)
@Test
void should_fail_if_actual_exists_but_is_not_readable() {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/Test.java");
actual.setReadable(false);
// WHEN
expectAssertionError(() -> underTest.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
verify(failures).failure(INFO, shouldBeReadable(actual));
}
@Test
void should_throw_error_if_digest_is_null() {
// GIVEN
MessageDigest digest = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> underTest.assertHasDigest(INFO, actual, digest, expected));
// THEN
then(npe).hasMessage("The message digest algorithm should not be null");
}
@Test
void should_throw_error_if_expected_is_null() {
// GIVEN
byte[] expected = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> underTest.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
then(npe).hasMessage("The binary representation of digest to compare to should not be null");
}
@Test
void should_throw_error_wrapping_caught_IOException() throws IOException {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/tmp.txt");
IOException cause = new IOException();
given(nioFilesWrapper.newInputStream(any())).willThrow(cause);
// WHEN
UncheckedIOException uioe = catchThrowableOfType(UncheckedIOException.class,
() -> underTest.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
then(uioe).hasCause(cause);
}
@Test
void should_throw_error_wrapping_caught_NoSuchAlgorithmException() {
// GIVEN
String unknownDigestAlgorithm = "UnknownDigestAlgorithm";
// WHEN
IllegalStateException ise = catchThrowableOfType(IllegalStateException.class,
() -> underTest.assertHasDigest(INFO, actual, unknownDigestAlgorithm,
expected));
// THEN
then(ise).hasMessage("Unable to find digest implementation for: <UnknownDigestAlgorithm>");
}
@Test
void should_fail_if_actual_does_not_have_expected_digest() throws IOException, NoSuchAlgorithmException {
// GIVEN
String algorithm = "MD5";
File actual = newFile(tempDir.getAbsolutePath() + "/tmp.txt");
writeByteArrayToFile(actual, "Bad Content".getBytes());
MessageDigest digest = MessageDigest.getInstance("MD5");
byte[] expected = digest.digest("Content".getBytes());
DigestDiff digestDiff = new DigestDiff(toHex(digest.digest(readAllBytes(actual.toPath()))), toHex(expected), digest);
// WHEN
expectAssertionError(() -> unMockedFiles.assertHasDigest(INFO, actual, algorithm, expected));
// THEN
verify(failures).failure(INFO, shouldHaveDigest(actual, digestDiff));
}
@Test
void should_pass_if_actual_has_expected_digest() throws Exception {
// GIVEN
String algorithm = "MD5";
byte[] data = "Content".getBytes();
File actual = newFile(tempDir.getAbsolutePath() + "/tmp.txt");
writeByteArrayToFile(actual, data);
byte[] expected = MessageDigest.getInstance(algorithm).digest(data);
// WHEN/THEN
unMockedFiles.assertHasDigest(INFO, actual, algorithm, expected);
}
}
|
Files_assertHasDigest_AlgorithmBytes_Test
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/console/ConsoleDetailsTests.java
|
{
"start": 4244,
"end": 4466
}
|
class ____ {
@Test
void empty() {
}
@Test
@DisplayName(".oO fancy display name Oo.")
void changeDisplayName() {
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@DisplayName("Skip")
static
|
BasicTestCase
|
java
|
quarkusio__quarkus
|
integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/Customer.java
|
{
"start": 439,
"end": 2334
}
|
class ____ extends AbstractEntity implements Persistable<Long> {
@Column(name = "first_name")
private String firstName;
@Column(name = "last_name")
private String lastName;
@Email
@Column(name = "email")
private String email;
@Column(name = "telephone")
private String telephone;
@OneToMany(mappedBy = "customer", fetch = FetchType.LAZY)
@JsonProperty(access = Access.WRITE_ONLY)
private Set<Cart> carts;
@Column(name = "enabled", nullable = false)
private Boolean enabled;
public Customer() {
}
public Customer(String firstName, String lastName, @Email String email,
String telephone, Set<Cart> carts, Boolean enabled) {
this.firstName = firstName;
this.lastName = lastName;
this.email = email;
this.telephone = telephone;
this.carts = carts;
this.enabled = enabled;
}
@Override
public boolean isNew() {
return id == null;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getTelephone() {
return telephone;
}
public void setTelephone(String telephone) {
this.telephone = telephone;
}
public Set<Cart> getCarts() {
return carts;
}
public void setCarts(Set<Cart> carts) {
this.carts = carts;
}
public Boolean getEnabled() {
return enabled;
}
public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}
}
|
Customer
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/PulsarEndpointBuilderFactory.java
|
{
"start": 59142,
"end": 65661
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final PulsarHeaderNameBuilder INSTANCE = new PulsarHeaderNameBuilder();
/**
* The properties attached to the message.
*
* The option is a: {@code Map<String, String>} type.
*
* Group: consumer
*
* @return the name of the header {@code properties}.
*/
public String properties() {
return "properties";
}
/**
* The producer name who produced the message.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code producer_name}.
*/
public String producerName() {
return "producer_name";
}
/**
* The sequence id associated with the message.
*
* The option is a: {@code long} type.
*
* Group: consumer
*
* @return the name of the header {@code sequence_id}.
*/
public String sequenceId() {
return "sequence_id";
}
/**
* The publish time of the message.
*
* The option is a: {@code long} type.
*
* Group: consumer
*
* @return the name of the header {@code publish_time}.
*/
public String publishTime() {
return "publish_time";
}
/**
* The unique message ID associated with the message.
*
* The option is a: {@code org.apache.pulsar.client.api.MessageId} type.
*
* Group: consumer
*
* @return the name of the header {@code message_id}.
*/
public String messageId() {
return "message_id";
}
/**
* The event time associated with the message.
*
* The option is a: {@code long} type.
*
* Group: consumer
*
* @return the name of the header {@code event_time}.
*/
public String eventTime() {
return "event_time";
}
/**
* The key of the message.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code key}.
*/
public String key() {
return "key";
}
/**
* The bytes in key.
*
* The option is a: {@code byte[]} type.
*
* Group: consumer
*
* @return the name of the header {@code key_bytes}.
*/
public String keyBytes() {
return "key_bytes";
}
/**
* The topic the message was published to.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code topic_name}.
*/
public String topicName() {
return "topic_name";
}
/**
* The message receipt.
*
* The option is a: {@code
* org.apache.camel.component.pulsar.PulsarMessageReceipt} type.
*
* Group: consumer
*
* @return the name of the header {@code message_receipt}.
*/
public String messageReceipt() {
return "message_receipt";
}
/**
* The key of the message for routing policy.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code PulsarProducerMessageKey}.
*/
public String pulsarProducerMessageKey() {
return "CamelPulsarProducerMessageKey";
}
/**
* The properties of the message to add.
*
* The option is a: {@code Map<String, String>} type.
*
* Group: producer
*
* @return the name of the header {@code
* PulsarProducerMessageProperties}.
*/
public String pulsarProducerMessageProperties() {
return "CamelPulsarProducerMessageProperties";
}
/**
* The event time of the message message.
*
* The option is a: {@code Long} type.
*
* Group: producer
*
* @return the name of the header {@code
* PulsarProducerMessageEventTime}.
*/
public String pulsarProducerMessageEventTime() {
return "CamelPulsarProducerMessageEventTime";
}
/**
* Deliver the message only at or after the specified absolute
* timestamp. The timestamp is milliseconds and based on UTC (eg:
* System.currentTimeMillis) Note: messages are only delivered with
* delay when a consumer is consuming through a Shared subscription.
* With other subscription types, the messages will still be delivered
* immediately.
*
* The option is a: {@code Long} type.
*
* Group: producer
*
* @return the name of the header {@code
* PulsarProducerMessageDeliverAt}.
*/
public String pulsarProducerMessageDeliverAt() {
return "CamelPulsarProducerMessageDeliverAt";
}
/**
* The message redelivery count, redelivery count maintain in pulsar
* broker.
*
* The option is a: {@code int} type.
*
* Group: consumer
*
* @return the name of the header {@code PulsarRedeliveryCount}.
*/
public String pulsarRedeliveryCount() {
return "CamelPulsarRedeliveryCount";
}
/**
* Deliver the message after a given delayed time (millis).
*
* The option is a: {@code Long} type.
*
* Group: producer
*
* @return the name of the header {@code
* PulsarProducerMessageDeliverAfter}.
*/
public String pulsarProducerMessageDeliverAfter() {
return "CamelPulsarProducerMessageDeliverAfter";
}
}
static PulsarEndpointBuilder endpointBuilder(String componentName, String path) {
|
PulsarHeaderNameBuilder
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/array/FieldIntArrayTest.java
|
{
"start": 170,
"end": 569
}
|
class ____ extends TestCase {
public void test_intArray() throws Exception {
Model model = JSON.parseObject("{\"value\":[1,2,3]}", Model.class);
assertNotNull(model.value);
assertEquals(3, model.value.length);
assertEquals(1, model.value[0]);
assertEquals(2, model.value[1]);
assertEquals(3, model.value[2]);
}
public static
|
FieldIntArrayTest
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/FlinkRexBuilder.java
|
{
"start": 1449,
"end": 7462
}
|
class ____ extends RexBuilder {
public FlinkRexBuilder(RelDataTypeFactory typeFactory) {
super(typeFactory);
}
/**
* Compared to the original method we adjust the nullability of the nested column based on the
* nullability of the enclosing type.
*
* <p>If the fields type is NOT NULL, but the enclosing ROW is nullable we still can produce
* nulls.
*/
@Override
public RexNode makeFieldAccess(RexNode expr, String fieldName, boolean caseSensitive) {
final RexNode field = super.makeFieldAccess(expr, fieldName, caseSensitive);
return makeFieldAccess(expr, field);
}
/**
* Compared to the original method we adjust the nullability of the nested column based on the
* nullability of the enclosing type.
*
* <p>If the fields type is NOT NULL, but the enclosing ROW is nullable we still can produce
* nulls.
*/
@Override
public RexNode makeFieldAccess(RexNode expr, int i) {
final RexNode field = super.makeFieldAccess(expr, i);
return makeFieldAccess(expr, field);
}
/**
* Creates a literal of the default value for the given type.
*
* <p>This value is:
*
* <ul>
* <li>0 for numeric types;
* <li>FALSE for BOOLEAN;
* <li>The epoch for TIMESTAMP and DATE;
* <li>Midnight for TIME;
* <li>The empty string for string types (CHAR, BINARY, VARCHAR, VARBINARY).
* </ul>
*
* <p>Uses '1970-01-01 00:00:00'(epoch 0 second) as zero value for TIMESTAMP_LTZ, the zero value
* '0000-00-00 00:00:00' in Calcite is an invalid time whose month and day is invalid, we
* workaround here. Stop overriding once CALCITE-4555 fixed.
*
* @param type Type
* @return Simple literal, or cast simple literal
*/
@Override
public RexLiteral makeZeroLiteral(RelDataType type) {
switch (type.getSqlTypeName()) {
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
return makeLiteral(new TimestampString(1970, 1, 1, 0, 0, 0), type);
default:
return super.makeZeroLiteral(type);
}
}
/**
* Adjust the nullability of the nested column based on the nullability of the enclosing type.
* However, if there is former nullability {@code CAST} present then it will be dropped and
* replaced with a new one (if needed). For instance if there is a table
*
* <pre>{@code
* CREATE TABLE MyTable (
* `field1` ROW<`data` ROW<`nested` ROW<`trId` STRING>>NOT NULL>
* WITH ('connector' = 'datagen')
* }</pre>
*
* <p>and then there is a SQL query
*
* <pre>{@code
* SELECT `field1`.`data`.`nested`.`trId` AS transactionId FROM MyTable
* }</pre>
*
* <p>The {@code SELECT} picks a nested field only. In this case it should go step by step
* checking each level.
*
* <ol>
* <li>Looking at {@code `field1`} type it is nullable, then no changes.
* <li>{@code `field1`.`data`} is {@code NOT NULL}, however keeping in mind that enclosing
* type @{code `field1`} is nullable then need to change nullability with {@code CAST}
* <li>{@code `field1`.`data`.`nested`} is nullable that means that in this case no need for
* extra {@code CAST} inserted in previous step, so it will be dropped.
* <li>{@code `field1`.`data`.`nested`.`trId`} is also nullable, so no changes.
* </ol>
*/
private RexNode makeFieldAccess(RexNode expr, RexNode field) {
final RexNode fieldWithRemovedCast = removeCastNullableFromFieldAccess(field);
final boolean nullabilityShouldChange =
field.getType().isNullable() != fieldWithRemovedCast.getType().isNullable()
|| expr.getType().isNullable() && !field.getType().isNullable();
if (nullabilityShouldChange) {
return makeCast(
typeFactory.createTypeWithNullability(field.getType(), true),
fieldWithRemovedCast,
true,
false);
}
return expr.getType().isNullable() && fieldWithRemovedCast.getType().isNullable()
? fieldWithRemovedCast
: field;
}
/**
* {@link FlinkRexBuilder#makeFieldAccess} will adjust nullability based on nullability of the
* enclosing type. However, it might be a deeply nested column and for every step {@link
* FlinkRexBuilder#makeFieldAccess} will try to insert a cast. This method will remove previous
* cast in order to keep only one.
*/
private RexNode removeCastNullableFromFieldAccess(RexNode rexFieldAccess) {
if (!(rexFieldAccess instanceof RexFieldAccess)) {
return rexFieldAccess;
}
RexNode rexNode = rexFieldAccess;
while (rexNode instanceof RexFieldAccess) {
rexNode = ((RexFieldAccess) rexNode).getReferenceExpr();
}
if (rexNode.getKind() != SqlKind.CAST) {
return rexFieldAccess;
}
RexShuttle visitor =
new RexShuttle() {
@Override
public RexNode visitCall(final RexCall call) {
if (call.getKind() == SqlKind.CAST
&& !call.operands.get(0).getType().isNullable()
&& call.getType().isNullable()
&& call.getOperands()
.get(0)
.getType()
.getFieldList()
.equals(call.getType().getFieldList())) {
return RexUtil.removeCast(call);
}
return call;
}
};
return RexUtil.apply(visitor, new RexNode[] {rexFieldAccess})[0];
}
}
|
FlinkRexBuilder
|
java
|
micronaut-projects__micronaut-core
|
context/src/main/java/io/micronaut/logging/impl/Log4jLoggingSystem.java
|
{
"start": 1135,
"end": 1797
}
|
class ____ implements LoggingSystem {
@Override
public void setLogLevel(String name, LogLevel level) {
if (name.equalsIgnoreCase("root")) {
Configurator.setRootLevel(toLevel(level));
} else {
Configurator.setLevel(name, toLevel(level));
}
}
/**
* @param logLevel The micronaut {@link LogLevel} to convert
* @return The converted log4j {@link Level}
*/
private static Level toLevel(LogLevel logLevel) {
if (logLevel == LogLevel.NOT_SPECIFIED) {
return null;
} else {
return Level.valueOf(logLevel.name());
}
}
}
|
Log4jLoggingSystem
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLBinaryExpr.java
|
{
"start": 927,
"end": 3370
}
|
class ____ extends SQLExprImpl implements SQLLiteralExpr, SQLValuableExpr {
private String text;
private transient Number val;
public SQLBinaryExpr() {
}
public SQLBinaryExpr(String value) {
super();
this.text = value;
}
public String getText() {
return text;
}
public Number getValue() {
if (text == null) {
return null;
}
if (val == null) {
long[] words = new long[text.length() / 64 + 1];
for (int i = text.length() - 1; i >= 0; --i) {
char ch = text.charAt(i);
if (ch == '1') {
int wordIndex = i >> 6;
words[wordIndex] |= (1L << (text.length() - 1 - i));
}
}
if (words.length == 1) {
val = words[0];
} else {
byte[] bytes = new byte[words.length * 8];
for (int i = 0; i < words.length; ++i) {
Utils.putLong(bytes, (words.length - 1 - i) * 8, words[i]);
}
val = new BigInteger(bytes);
}
}
return val;
}
public void setValue(String value) {
this.text = value;
}
public void accept0(SQLASTVisitor visitor) {
visitor.visit(this);
visitor.endVisit(this);
}
public void output(StringBuilder buf) {
buf.append("b'");
buf.append(text);
buf.append('\'');
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((text == null) ? 0 : text.hashCode());
return result;
}
public SQLBinaryExpr clone() {
return new SQLBinaryExpr(text);
}
@Override
public List<SQLObject> getChildren() {
return Collections.emptyList();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
SQLBinaryExpr other = (SQLBinaryExpr) obj;
if (text == null) {
if (other.text != null) {
return false;
}
} else if (!text.equals(other.text)) {
return false;
}
return true;
}
}
|
SQLBinaryExpr
|
java
|
google__guice
|
core/test/com/google/inject/internal/Collector.java
|
{
"start": 937,
"end": 1669
}
|
class ____ extends DefaultBindingTargetVisitor<Object, Object>
implements MultibindingsTargetVisitor<Object, Object> {
MapBinderBinding<? extends Object> mapbinding;
MultibinderBinding<? extends Object> setbinding;
OptionalBinderBinding<? extends Object> optionalbinding;
@Override
public Object visit(MapBinderBinding<? extends Object> mapbinding) {
this.mapbinding = mapbinding;
return null;
}
@Override
public Object visit(MultibinderBinding<? extends Object> multibinding) {
this.setbinding = multibinding;
return null;
}
@Override
public Object visit(OptionalBinderBinding<? extends Object> optionalbinding) {
this.optionalbinding = optionalbinding;
return null;
}
}
|
Collector
|
java
|
apache__camel
|
components/camel-mina/src/test/java/org/apache/camel/component/mina/MinaTransferExchangeOptionTest.java
|
{
"start": 1511,
"end": 5881
}
|
class ____ extends BaseMinaTest {
private static final Logger LOG = LoggerFactory.getLogger(MinaTransferExchangeOptionTest.class);
@Test
public void testMinaTransferExchangeOptionWithoutException() throws Exception {
Exchange exchange = sendExchange(false);
assertExchange(exchange, false);
}
@Test
public void testMinaTransferExchangeOptionWithException() throws Exception {
Exchange exchange = sendExchange(true);
assertExchange(exchange, true);
}
private Exchange sendExchange(boolean setException) throws Exception {
Endpoint endpoint = context.getEndpoint(
String.format("mina:tcp://localhost:%1$s?sync=true&encoding=UTF-8&transferExchange=true&objectCodecPattern=*",
getPort()));
Producer producer = endpoint.createProducer();
Exchange exchange = endpoint.createExchange();
Message message = exchange.getIn();
message.setBody("Hello!");
message.setHeader("cheese", "feta");
exchange.setProperty("ham", "old");
exchange.setProperty("setException", setException);
producer.start();
producer.process(exchange);
return exchange;
}
private void assertExchange(Exchange exchange, boolean hasException) {
if (!hasException) {
Message out = exchange.getMessage();
assertNotNull(out);
assertEquals("Goodbye!", out.getBody());
assertEquals("cheddar", out.getHeader("cheese"));
} else {
Message fault = exchange.getMessage();
assertNotNull(fault);
assertNotNull(fault.getBody());
assertTrue(fault.getBody() instanceof InterruptedException, "Should get the InterruptedException exception");
assertEquals("nihao", fault.getHeader("hello"));
}
// in should stay the same
Message in = exchange.getIn();
assertNotNull(in);
assertEquals("Hello!", in.getBody());
assertEquals("feta", in.getHeader("cheese"));
// however the shared properties have changed
assertEquals("fresh", exchange.getProperty("salami"));
assertNull(exchange.getProperty("Charset"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
fromF("mina:tcp://localhost:%1$s?sync=true&encoding=UTF-8&transferExchange=true&objectCodecPattern=*",
getPort())
.process(e -> {
LOG.debug("Enter Processor...");
assertNotNull(e.getIn().getBody());
LOG.debug("Enter Processor...1");
assertNotNull(e.getIn().getHeaders());
LOG.debug("Enter Processor...2");
assertNotNull(e.getProperties());
LOG.debug("Enter Processor...3");
assertEquals("Hello!", e.getIn().getBody());
LOG.debug("Enter Processor...4");
assertEquals("feta", e.getIn().getHeader("cheese"));
LOG.debug("Enter Processor...5");
assertEquals("old", e.getProperty("ham"));
LOG.debug("Enter Processor...6");
assertEquals(ExchangePattern.InOut, e.getPattern());
LOG.debug("Enter Processor...7");
Boolean setException = (Boolean) e.getProperty("setException");
if (setException) {
e.getOut().setBody(new InterruptedException());
e.getOut().setHeader("hello", "nihao");
} else {
e.getOut().setBody("Goodbye!");
e.getOut().setHeader("cheese", "cheddar");
}
e.setProperty("salami", "fresh");
e.setProperty("Charset", Charset.defaultCharset());
LOG.debug("Exit Processor...");
});
}
};
}
}
|
MinaTransferExchangeOptionTest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java
|
{
"start": 53915,
"end": 54352
}
|
interface ____ {",
" String value();",
"}");
Source scopedBinding =
CompilerTests.javaSource(
"test.ScopedBinding",
"package test;",
"",
"import javax.inject.Inject;",
"import javax.inject.Singleton;",
"",
"@CustomAnnotation(\"someValue\")",
"@CustomScope(\"someOtherValue\")",
"
|
CustomAnnotation
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingStatusMessageParameters.java
|
{
"start": 1285,
"end": 1615
}
|
class ____ extends JobMessageParameters {
public final TriggerIdPathParameter triggerIdPathParameter = new TriggerIdPathParameter();
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
return Arrays.asList(jobPathParameter, triggerIdPathParameter);
}
}
|
RescalingStatusMessageParameters
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AssertDelegateTarget.java
|
{
"start": 664,
"end": 868
}
|
interface ____ can be used to wrap your assertion within assertThat method for better readability.
* <p>
* Consider the following MyButton and MyButtonAssert classes:
* <pre><code class='java'> public
|
that
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/graphs/LoadEntityGraphWithCompositeKeyCollectionsTest.java
|
{
"start": 5423,
"end": 6713
}
|
class ____ {
@EmbeddedId
private ActivityExerciseId activityExerciseId;
@MapsId("exerciseId")
@ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST)
@JoinColumn(name = "exercise_id")
private Exercise exercise;
@OneToMany(mappedBy = "activityAnswerId.activity", cascade = CascadeType.ALL)
private Set<ActivityAnswer> answers = new HashSet<>();
@OneToMany(mappedBy = "activityDocumentId.activity", orphanRemoval = true, cascade = CascadeType.ALL)
private Set<ActivityDocument> documents = new HashSet<>();
public Activity() {
}
public Activity(Exercise exercise, ActivityExerciseId activityExerciseId) {
this.exercise = exercise;
this.activityExerciseId = activityExerciseId;
}
public Activity(Exercise exercise, String activityId) {
this( exercise, new ActivityExerciseId( exercise.getId(), activityId ) );
}
public Exercise getExercise() {
return exercise;
}
public Set<ActivityAnswer> getAnswers() {
return answers;
}
public Set<ActivityDocument> getDocuments() {
return documents;
}
public void setAnswers(Set<ActivityAnswer> answers) {
this.answers = answers;
}
public void setDocuments(Set<ActivityDocument> documents) {
this.documents = documents;
}
}
@Embeddable
public static
|
Activity
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/routingslip/RoutingSlipDataModificationTest.java
|
{
"start": 1207,
"end": 2792
}
|
class ____ extends ContextTestSupport {
protected static final String ANSWER = "answer";
protected static final String ROUTING_SLIP_HEADER = "routingSlipHeader";
protected final MyBean myBean = new MyBean();
@Test
public void testModificationOfDataAlongRoute() throws Exception {
MockEndpoint x = getMockEndpoint("mock:x");
MockEndpoint y = getMockEndpoint("mock:y");
x.expectedBodiesReceived(ANSWER);
y.expectedBodiesReceived(ANSWER + ANSWER);
sendBody();
assertMockEndpointsSatisfied();
}
protected void sendBody() {
template.sendBodyAndHeader("direct:a", ANSWER, ROUTING_SLIP_HEADER, "mock:x , bean:myBean?method=modifyData");
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
Object lookedUpBean = context.getRegistry().lookupByName("myBean");
assertSame(myBean, lookedUpBean, "Lookup of 'myBean' should return same object!");
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("myBean", myBean);
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: example
from("direct:a").routingSlip(header(ROUTING_SLIP_HEADER)).to("mock:y");
// END SNIPPET: example
}
};
}
public static
|
RoutingSlipDataModificationTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.