language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__spark | common/utils-java/src/test/java/org/apache/spark/util/PatternSparkLoggerSuite.java | {
"start": 981,
"end": 2997
} | class ____ extends SparkLoggerSuiteBase {
private static final SparkLogger LOGGER =
SparkLoggerFactory.getLogger(PatternSparkLoggerSuite.class);
private String toRegexPattern(Level level, String msg) {
return msg
.replace("<level>", level.toString())
.replace("<className>", className());
}
@Override
SparkLogger logger() {
return LOGGER;
}
@Override
String className() {
return PatternSparkLoggerSuite.class.getSimpleName();
}
@Override
String logFilePath() {
return "target/pattern.log";
}
@Override
String expectedPatternForBasicMsg(Level level) {
return toRegexPattern(level, ".*<level> <className>: This is a log message\n");
}
@Override
String expectedPatternForBasicMsgWithEscapeChar(Level level) {
return toRegexPattern(level,
".*<level> <className>: This is a log message\\nThis is a new line \\t other msg\\n");
}
@Override
String expectedPatternForBasicMsgWithException(Level level) {
return toRegexPattern(level, """
.*<level> <className>: This is a log message
[\\s\\S]*""");
}
@Override
String expectedPatternForMsgWithMDC(Level level) {
return toRegexPattern(level, ".*<level> <className>: Lost executor 1.\n");
}
@Override
String expectedPatternForMsgWithMDCs(Level level) {
return toRegexPattern(level,
".*<level> <className>: Lost executor 1, reason: the shuffle data is too large\n");
}
@Override
String expectedPatternForMsgWithMDCsAndException(Level level) {
return toRegexPattern(level,"""
.*<level> <className>: Lost executor 1, reason: the shuffle data is too large
[\\s\\S]*""");
}
@Override
String expectedPatternForMsgWithMDCValueIsNull(Level level) {
return toRegexPattern(level, ".*<level> <className>: Lost executor null.\n");
}
@Override
String expectedPatternForCustomLogKey(Level level) {
return toRegexPattern(level, ".*<level> <className>: Custom log message.\n");
}
}
| PatternSparkLoggerSuite |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/StateUtilTest.java | {
"start": 3558,
"end": 4553
} | class ____.apache.flink.runtime.state.KeyGroupsStateHandle. This can mostly happen when a different StateBackend from the one that was used for taking a checkpoint/savepoint is used when restoring.");
}
private static <T> Future<T> emptyFuture(boolean done, boolean canBeCancelled) {
return new Future<T>() {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return canBeCancelled;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return done;
}
@Override
public T get() {
throw new UnsupportedOperationException();
}
@Override
public T get(long timeout, TimeUnit unit) {
throw new UnsupportedOperationException();
}
};
}
private static | org |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostMethodSecurityConfigurationTests.java | {
"start": 73348,
"end": 74111
} | class ____ {
@Bean
@Role(BeanDefinition.ROLE_INFRASTRUCTURE)
static TargetVisitor skipValueTypes() {
return TargetVisitor.defaultsSkipValueTypes();
}
@Bean
FlightRepository flights() {
FlightRepository flights = new FlightRepository();
Flight one = new Flight("1", 35000d, 35);
one.board(new ArrayList<>(List.of("Marie Curie", "Kevin Mitnick", "Ada Lovelace")));
flights.save(one);
Flight two = new Flight("2", 32000d, 72);
two.board(new ArrayList<>(List.of("Albert Einstein")));
flights.save(two);
return flights;
}
@Bean
RoleHierarchy roleHierarchy() {
return RoleHierarchyImpl.withRolePrefix("").role("airplane:read").implies("seating:read").build();
}
}
@AuthorizeReturnObject
static | AuthorizeResultConfig |
java | apache__camel | components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/ConnectionId.java | {
"start": 883,
"end": 2764
} | class ____ {
private final String host;
private final int port;
private final String connectionId;
public ConnectionId(final String host, final int port, final String connectionId) {
Objects.requireNonNull(host);
if (port <= 0) {
throw new IllegalArgumentException("Port must be greater than 0");
}
this.host = host;
this.port = port;
this.connectionId = connectionId;
}
public String getHost() {
return this.host;
}
public int getPort() {
return this.port;
}
public String getConnectionId() {
return this.connectionId;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (this.connectionId == null ? 0 : this.connectionId.hashCode());
result = prime * result + (this.host == null ? 0 : this.host.hashCode());
result = prime * result + this.port;
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final ConnectionId other = (ConnectionId) obj;
if (this.connectionId == null) {
if (other.connectionId != null) {
return false;
}
} else if (!this.connectionId.equals(other.connectionId)) {
return false;
}
if (this.host == null) {
if (other.host != null) {
return false;
}
} else if (!this.host.equals(other.host)) {
return false;
}
if (this.port != other.port) {
return false;
}
return true;
}
}
| ConnectionId |
java | apache__camel | test-infra/camel-test-infra-hivemq/src/test/java/org/apache/camel/test/infra/hivemq/services/HiveMQServiceFactory.java | {
"start": 3531,
"end": 3640
} | class ____ extends LocalHiveMQInfraService implements HiveMQService {
}
public static | LocalHiveMQService |
java | apache__avro | lang/java/integration-test/test-custom-conversions/src/main/java/org/apache/avro/codegentest/CustomEnumConversion.java | {
"start": 1049,
"end": 1794
} | class ____ extends Conversion<CustomEnumType> {
@Override
public Class<CustomEnumType> getConvertedType() {
return CustomEnumType.class;
}
@Override
public Schema getRecommendedSchema() {
return new LogicalType("custom-enum").addToSchema(Schema.create(Schema.Type.ENUM));
}
@Override
public String getLogicalTypeName() {
return "custom-enum";
}
@Override
public CustomEnumType fromEnumSymbol(GenericEnumSymbol value, Schema schema, LogicalType type) {
return new CustomEnumType(value.toString());
}
@Override
public GenericEnumSymbol toEnumSymbol(CustomEnumType value, Schema schema, LogicalType type) {
return new GenericData.EnumSymbol(schema, value.getUnderlying());
}
}
| CustomEnumConversion |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/maps/Maps_assertEmpty_Test.java | {
"start": 1496,
"end": 2216
} | class ____ extends MapsBaseTest {
@Test
void should_pass_if_actual_is_empty() {
maps.assertEmpty(INFO, emptyMap());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> maps.assertEmpty(INFO, null))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_has_elements() {
AssertionInfo info = INFO;
Map<?, ?> actual = mapOf(entry("name", "Yoda"));
Throwable error = catchThrowable(() -> maps.assertEmpty(info, actual));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeEmpty(actual));
}
}
| Maps_assertEmpty_Test |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/EmbeddedTableAnnotation.java | {
"start": 412,
"end": 1294
} | class ____ implements EmbeddedTable {
private String value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public EmbeddedTableAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public EmbeddedTableAnnotation(
EmbeddedTable annotation,
ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public EmbeddedTableAnnotation(
Map<String, Object> attributeValues,
ModelsContext modelContext) {
this.value = (String) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return EmbeddedTable.class;
}
@Override
public String value() {
return value;
}
public void value(String value) {
this.value = value;
}
}
| EmbeddedTableAnnotation |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/jsontype/PolymorphicTypeValidator.java | {
"start": 2969,
"end": 5074
} | enum ____ {
/**
* Value that indicates that Class name or Class is allowed for use without further checking
*/
ALLOWED,
/**
* Value that indicates that Class name or Class is NOT allowed and no further checks are
* needed or allowed
*/
DENIED,
/**
* Value that indicates that Class name or Class validity cannot be confirmed by validator
* and further checks are needed.
*<p>
* Typically if validator cannot establish validity from Type Id or Class (name), eventual
* determination will be {@code DENIED}, for safety reasons.
*/
INDETERMINATE
}
/**
* Method called when a property with polymorphic value is encountered, and a
* {@code TypeResolverBuilder} is needed. Intent is to allow early determination
* of cases where subtyping is completely denied (for example for security reasons),
* or, conversely, allowed for allow subtypes (when base type guarantees that all subtypes
* are known to be safe). Check can be thought of as both optimization (for latter case)
* and eager-fail (for former case) to give better feedback.
*
* @param ctxt Context for resolution: typically will be {@code DeserializationContext}
* @param baseType Nominal base type used for polymorphic handling: subtypes MUST be instances
* of this type and assignment compatibility is verified by Jackson core
*
* @return Determination of general validity of all subtypes of given base type; if
* {@link Validity#ALLOWED} returned, all subtypes will automatically be accepted without
* further checks; is {@link Validity#DENIED} returned no subtyping allowed at all
* (caller will usually throw an exception); otherwise (return {@link Validity#INDETERMINATE})
* per sub-type validation calls are made for each new subclass encountered.
*/
public abstract Validity validateBaseType(DatabindContext ctxt, JavaType baseType);
/**
* Method called after intended | Validity |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/char2darray/Char2DArrayAssert_hasDimensions_Test.java | {
"start": 920,
"end": 1265
} | class ____ extends Char2DArrayAssertBaseTest {
@Override
protected Char2DArrayAssert invoke_api_method() {
return assertions.hasDimensions(1, 2);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasDimensions(getInfo(assertions), getActual(assertions), 1, 2);
}
}
| Char2DArrayAssert_hasDimensions_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/Country.java | {
"start": 535,
"end": 1224
} | class ____ {
private Long id;
private String code;
private String name;
private Continent continent;
@Id
@GeneratedValue
public Long getId() {
return id;
}
public void setId(final Long id) {
this.id = id;
}
public String getCode() {
return code;
}
public void setCode(final String code) {
this.code = code;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
@ManyToOne(fetch = FetchType.LAZY)
@Cache(usage = CacheConcurrencyStrategy.READ_ONLY)
public Continent getContinent() {
return continent;
}
public void setContinent(final Continent continent) {
this.continent = continent;
}
}
| Country |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/StaticListableBeanFactory.java | {
"start": 2831,
"end": 16922
} | class ____ implements ListableBeanFactory {
/** Map from bean name to bean instance. */
private final Map<String, Object> beans;
/**
* Create a regular {@code StaticListableBeanFactory}, to be populated
* with singleton bean instances through {@link #addBean} calls.
*/
public StaticListableBeanFactory() {
this.beans = new LinkedHashMap<>();
}
/**
* Create a {@code StaticListableBeanFactory} wrapping the given {@code Map}.
* <p>Note that the given {@code Map} may be pre-populated with beans;
* or new, still allowing for beans to be registered via {@link #addBean};
* or {@link java.util.Collections#emptyMap()} for a dummy factory which
* enforces operating against an empty set of beans.
* @param beans a {@code Map} for holding this factory's beans, with the
* bean name as key and the corresponding singleton object as value
* @since 4.3
*/
public StaticListableBeanFactory(Map<String, Object> beans) {
Assert.notNull(beans, "Beans Map must not be null");
this.beans = beans;
}
/**
* Add a new singleton bean.
* <p>Will overwrite any existing instance for the given name.
* @param name the name of the bean
* @param bean the bean instance
*/
public void addBean(String name, Object bean) {
this.beans.put(name, bean);
}
//---------------------------------------------------------------------
// Implementation of BeanFactory interface
//---------------------------------------------------------------------
@Override
public Object getBean(String name) throws BeansException {
return getBean(name, (Class<?>) null);
}
@SuppressWarnings("unchecked")
@Override
public <T> T getBean(String name, @Nullable Class<T> requiredType) throws BeansException {
String beanName = BeanFactoryUtils.transformedBeanName(name);
Object bean = obtainBean(beanName);
if (BeanFactoryUtils.isFactoryDereference(name)) {
if (!(bean instanceof FactoryBean)) {
throw new BeanIsNotAFactoryException(beanName, bean.getClass());
}
}
else if (bean instanceof FactoryBean<?> factoryBean) {
try {
Object exposedObject =
(factoryBean instanceof SmartFactoryBean<?> smartFactoryBean && requiredType != null ?
smartFactoryBean.getObject(requiredType) : factoryBean.getObject());
if (exposedObject == null) {
throw new BeanCreationException(beanName, "FactoryBean exposed null object");
}
bean = exposedObject;
}
catch (Exception ex) {
throw new BeanCreationException(beanName, "FactoryBean threw exception on object creation", ex);
}
}
if (requiredType != null && !requiredType.isInstance(bean)) {
throw new BeanNotOfRequiredTypeException(name, requiredType, bean.getClass());
}
return (T) bean;
}
@Override
public Object getBean(String name, @Nullable Object @Nullable ... args) throws BeansException {
if (!ObjectUtils.isEmpty(args)) {
throw new UnsupportedOperationException(
"StaticListableBeanFactory does not support explicit bean creation arguments");
}
return getBean(name);
}
private Object obtainBean(String beanName) {
Object bean = this.beans.get(beanName);
if (bean == null) {
throw new NoSuchBeanDefinitionException(beanName,
"Defined beans are [" + StringUtils.collectionToCommaDelimitedString(this.beans.keySet()) + "]");
}
return bean;
}
@Override
public <T> T getBean(Class<T> requiredType) throws BeansException {
String[] beanNames = getBeanNamesForType(requiredType);
if (beanNames.length == 1) {
return getBean(beanNames[0], requiredType);
}
else if (beanNames.length > 1) {
throw new NoUniqueBeanDefinitionException(requiredType, beanNames);
}
else {
throw new NoSuchBeanDefinitionException(requiredType);
}
}
@Override
public <T> T getBean(Class<T> requiredType, @Nullable Object @Nullable ... args) throws BeansException {
if (!ObjectUtils.isEmpty(args)) {
throw new UnsupportedOperationException(
"StaticListableBeanFactory does not support explicit bean creation arguments");
}
return getBean(requiredType);
}
@Override
public <T> ObjectProvider<T> getBeanProvider(Class<T> requiredType) throws BeansException {
return getBeanProvider(ResolvableType.forRawClass(requiredType), true);
}
@Override
public <T> ObjectProvider<T> getBeanProvider(ResolvableType requiredType) {
return getBeanProvider(requiredType, true);
}
@Override
public <T> ObjectProvider<T> getBeanProvider(ParameterizedTypeReference<T> requiredType) {
return getBeanProvider(ResolvableType.forType(requiredType), true);
}
@Override
public boolean containsBean(String name) {
return this.beans.containsKey(name);
}
@Override
public boolean isSingleton(String name) throws NoSuchBeanDefinitionException {
String beanName = BeanFactoryUtils.transformedBeanName(name);
Object bean = obtainBean(beanName);
if (bean instanceof FactoryBean<?> factoryBean && !BeanFactoryUtils.isFactoryDereference(name)) {
return factoryBean.isSingleton();
}
return true;
}
@Override
public boolean isPrototype(String name) throws NoSuchBeanDefinitionException {
String beanName = BeanFactoryUtils.transformedBeanName(name);
Object bean = obtainBean(beanName);
return (!BeanFactoryUtils.isFactoryDereference(name) &&
((bean instanceof SmartFactoryBean<?> smartFactoryBean && smartFactoryBean.isPrototype()) ||
(bean instanceof FactoryBean<?> factoryBean && !factoryBean.isSingleton())));
}
@Override
public boolean isTypeMatch(String name, ResolvableType typeToMatch) throws NoSuchBeanDefinitionException {
String beanName = BeanFactoryUtils.transformedBeanName(name);
Object bean = obtainBean(beanName);
if (bean instanceof FactoryBean<?> factoryBean && !BeanFactoryUtils.isFactoryDereference(name)) {
return isTypeMatch(factoryBean, typeToMatch.toClass());
}
return typeToMatch.isInstance(bean);
}
@Override
public boolean isTypeMatch(String name, Class<?> typeToMatch) throws NoSuchBeanDefinitionException {
String beanName = BeanFactoryUtils.transformedBeanName(name);
Object bean = obtainBean(beanName);
if (bean instanceof FactoryBean<?> factoryBean && !BeanFactoryUtils.isFactoryDereference(name)) {
return isTypeMatch(factoryBean, typeToMatch);
}
return typeToMatch.isInstance(bean);
}
private boolean isTypeMatch(FactoryBean<?> factoryBean, Class<?> typeToMatch) throws NoSuchBeanDefinitionException {
if (factoryBean instanceof SmartFactoryBean<?> smartFactoryBean) {
return smartFactoryBean.supportsType(typeToMatch);
}
Class<?> objectType = factoryBean.getObjectType();
return (objectType != null && typeToMatch.isAssignableFrom(objectType));
}
@Override
public @Nullable Class<?> getType(String name) throws NoSuchBeanDefinitionException {
return getType(name, true);
}
@Override
public @Nullable Class<?> getType(String name, boolean allowFactoryBeanInit) throws NoSuchBeanDefinitionException {
String beanName = BeanFactoryUtils.transformedBeanName(name);
Object bean = obtainBean(beanName);
if (bean instanceof FactoryBean<?> factoryBean && !BeanFactoryUtils.isFactoryDereference(name)) {
return factoryBean.getObjectType();
}
return bean.getClass();
}
@Override
public String[] getAliases(String name) {
return new String[0];
}
//---------------------------------------------------------------------
// Implementation of ListableBeanFactory interface
//---------------------------------------------------------------------
@Override
public boolean containsBeanDefinition(String name) {
return this.beans.containsKey(name);
}
@Override
public int getBeanDefinitionCount() {
return this.beans.size();
}
@Override
public String[] getBeanDefinitionNames() {
return StringUtils.toStringArray(this.beans.keySet());
}
@Override
public <T> ObjectProvider<T> getBeanProvider(Class<T> requiredType, boolean allowEagerInit) {
return getBeanProvider(ResolvableType.forRawClass(requiredType), allowEagerInit);
}
@SuppressWarnings("unchecked")
@Override
public <T> ObjectProvider<T> getBeanProvider(ResolvableType requiredType, boolean allowEagerInit) {
return new ObjectProvider<>() {
@Override
public T getObject() throws BeansException {
String[] beanNames = getBeanNamesForType(requiredType);
if (beanNames.length == 1) {
return (T) getBean(beanNames[0], requiredType.toClass());
}
else if (beanNames.length > 1) {
throw new NoUniqueBeanDefinitionException(requiredType, beanNames);
}
else {
throw new NoSuchBeanDefinitionException(requiredType);
}
}
@Override
public T getObject(@Nullable Object... args) throws BeansException {
String[] beanNames = getBeanNamesForType(requiredType);
if (beanNames.length == 1) {
return (T) getBean(beanNames[0], args);
}
else if (beanNames.length > 1) {
throw new NoUniqueBeanDefinitionException(requiredType, beanNames);
}
else {
throw new NoSuchBeanDefinitionException(requiredType);
}
}
@Override
public @Nullable T getIfAvailable() throws BeansException {
String[] beanNames = getBeanNamesForType(requiredType);
if (beanNames.length == 1) {
return (T) getBean(beanNames[0], requiredType.toClass());
}
else if (beanNames.length > 1) {
throw new NoUniqueBeanDefinitionException(requiredType, beanNames);
}
else {
return null;
}
}
@Override
public @Nullable T getIfUnique() throws BeansException {
String[] beanNames = getBeanNamesForType(requiredType);
if (beanNames.length == 1) {
return (T) getBean(beanNames[0], requiredType.toClass());
}
else {
return null;
}
}
@Override
public Stream<T> stream() {
return Arrays.stream(getBeanNamesForType(requiredType))
.map(name -> (T) getBean(name, requiredType.toClass()));
}
};
}
@Override
public String[] getBeanNamesForType(@Nullable ResolvableType type) {
return getBeanNamesForType(type, true, true);
}
@Override
public String[] getBeanNamesForType(@Nullable ResolvableType type,
boolean includeNonSingletons, boolean allowEagerInit) {
Class<?> resolved = (type != null ? type.resolve() : null);
boolean isFactoryType = (resolved != null && FactoryBean.class.isAssignableFrom(resolved));
List<String> matches = new ArrayList<>();
for (Map.Entry<String, Object> entry : this.beans.entrySet()) {
String beanName = entry.getKey();
Object beanInstance = entry.getValue();
if (beanInstance instanceof FactoryBean<?> factoryBean && !isFactoryType) {
if ((includeNonSingletons || factoryBean.isSingleton()) &&
(type == null || isTypeMatch(factoryBean, type.toClass()))) {
matches.add(beanName);
}
}
else {
if (type == null || type.isInstance(beanInstance)) {
matches.add(beanName);
}
}
}
return StringUtils.toStringArray(matches);
}
@Override
public String[] getBeanNamesForType(@Nullable Class<?> type) {
return getBeanNamesForType(ResolvableType.forClass(type));
}
@Override
public String[] getBeanNamesForType(@Nullable Class<?> type, boolean includeNonSingletons, boolean allowEagerInit) {
return getBeanNamesForType(ResolvableType.forClass(type), includeNonSingletons, allowEagerInit);
}
@Override
public <T> Map<String, T> getBeansOfType(@Nullable Class<T> type) throws BeansException {
return getBeansOfType(type, true, true);
}
@Override
@SuppressWarnings("unchecked")
public <T> Map<String, T> getBeansOfType(@Nullable Class<T> type, boolean includeNonSingletons, boolean allowEagerInit)
throws BeansException {
boolean isFactoryType = (type != null && FactoryBean.class.isAssignableFrom(type));
Map<String, T> matches = new LinkedHashMap<>();
for (Map.Entry<String, Object> entry : this.beans.entrySet()) {
String beanName = entry.getKey();
Object beanInstance = entry.getValue();
if (beanInstance instanceof FactoryBean<?> factoryBean && !isFactoryType) {
if ((includeNonSingletons || factoryBean.isSingleton()) &&
(type == null || isTypeMatch(factoryBean, type))) {
matches.put(beanName, getBean(beanName, type));
}
}
else {
if (type == null || type.isInstance(beanInstance)) {
if (isFactoryType) {
beanName = FACTORY_BEAN_PREFIX + beanName;
}
matches.put(beanName, (T) beanInstance);
}
}
}
return matches;
}
@Override
public String[] getBeanNamesForAnnotation(Class<? extends Annotation> annotationType) {
List<String> results = new ArrayList<>();
for (String beanName : this.beans.keySet()) {
if (findAnnotationOnBean(beanName, annotationType) != null) {
results.add(beanName);
}
}
return StringUtils.toStringArray(results);
}
@Override
public Map<String, Object> getBeansWithAnnotation(Class<? extends Annotation> annotationType)
throws BeansException {
Map<String, Object> results = new LinkedHashMap<>();
for (String beanName : this.beans.keySet()) {
if (findAnnotationOnBean(beanName, annotationType) != null) {
results.put(beanName, getBean(beanName));
}
}
return results;
}
@Override
public <A extends Annotation> @Nullable A findAnnotationOnBean(String beanName, Class<A> annotationType)
throws NoSuchBeanDefinitionException {
return findAnnotationOnBean(beanName, annotationType, true);
}
@Override
public <A extends Annotation> @Nullable A findAnnotationOnBean(
String beanName, Class<A> annotationType, boolean allowFactoryBeanInit)
throws NoSuchBeanDefinitionException {
Class<?> beanType = getType(beanName, allowFactoryBeanInit);
return (beanType != null ? AnnotatedElementUtils.findMergedAnnotation(beanType, annotationType) : null);
}
@Override
public <A extends Annotation> Set<A> findAllAnnotationsOnBean(
String beanName, Class<A> annotationType, boolean allowFactoryBeanInit) throws NoSuchBeanDefinitionException {
Class<?> beanType = getType(beanName, allowFactoryBeanInit);
return (beanType != null ?
AnnotatedElementUtils.findAllMergedAnnotations(beanType, annotationType) : Collections.emptySet());
}
}
| StaticListableBeanFactory |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/EnabledAndIgnoredSpringRunnerTests.java | {
"start": 2062,
"end": 3289
} | class ____ {
protected static final String NAME = "EnabledAndIgnoredSpringRunnerTests.profile_value.name";
protected static final String VALUE = "enigma";
protected static int numTestsExecuted = 0;
@BeforeClass
public static void setProfileValue() {
numTestsExecuted = 0;
System.setProperty(NAME, VALUE);
}
@AfterClass
public static void verifyNumTestsExecuted() {
assertThat(numTestsExecuted).as("Verifying the number of tests executed.").isEqualTo(3);
}
@Test
@IfProfileValue(name = NAME, value = VALUE + "X")
public void testIfProfileValueDisabled() {
numTestsExecuted++;
fail("The body of a disabled test should never be executed!");
}
@Test
@IfProfileValue(name = NAME, value = VALUE)
public void testIfProfileValueEnabledViaSingleValue() {
numTestsExecuted++;
}
@Test
@IfProfileValue(name = NAME, values = { "foo", VALUE, "bar" })
public void testIfProfileValueEnabledViaMultipleValues() {
numTestsExecuted++;
}
@Test
public void testIfProfileValueNotConfigured() {
numTestsExecuted++;
}
@Test
@Ignore
public void testJUnitIgnoreAnnotation() {
numTestsExecuted++;
fail("The body of an ignored test should never be executed!");
}
}
| EnabledAndIgnoredSpringRunnerTests |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/ArgumentCaptorTest.java | {
"start": 1068,
"end": 2285
} | class ____ {}
ArgumentCaptor<Foo> fooCaptor = ArgumentCaptor.forClass(Foo.class);
ArgumentCaptor<Bar> barCaptor = ArgumentCaptor.forClass(Bar.class);
assertThat(fooCaptor.getCaptorType()).isEqualTo(Foo.class);
assertThat(barCaptor.getCaptorType()).isEqualTo(Bar.class);
}
@Test
public void captor_calls_forClass_with_the_inferred_argument() throws Exception {
ArgumentCaptor<Map<String, Object>> captor = ArgumentCaptor.captor();
assertThat(captor.getCaptorType()).isEqualTo(Map.class);
}
@Test
public void captor_called_with_explicit_varargs_is_invalid() throws Exception {
// Test passing a single argument.
assertThatThrownBy(() -> ArgumentCaptor.captor(1234L))
.isInstanceOf(IllegalArgumentException.class);
// Test passing multiple arguments.
assertThatThrownBy(() -> ArgumentCaptor.captor("this shouldn't", "be here"))
.isInstanceOf(IllegalArgumentException.class);
// Test passing a totally null varargs array.
assertThatThrownBy(() -> ArgumentCaptor.<String>captor((String[]) null))
.isInstanceOf(IllegalArgumentException.class);
}
}
| Bar |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableWindowWithStartEndFlowableTest.java | {
"start": 1365,
"end": 23881
} | class ____ extends RxJavaTest {
private TestScheduler scheduler;
private Scheduler.Worker innerScheduler;
@Before
public void before() {
scheduler = new TestScheduler();
innerScheduler = scheduler.createWorker();
}
@Test
public void flowableBasedOpenerAndCloser() {
final List<String> list = new ArrayList<>();
final List<List<String>> lists = new ArrayList<>();
Flowable<String> source = Flowable.unsafeCreate(new Publisher<String>() {
@Override
public void subscribe(Subscriber<? super String> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
push(subscriber, "one", 10);
push(subscriber, "two", 60);
push(subscriber, "three", 110);
push(subscriber, "four", 160);
push(subscriber, "five", 210);
complete(subscriber, 500);
}
});
Flowable<Object> openings = Flowable.unsafeCreate(new Publisher<Object>() {
@Override
public void subscribe(Subscriber<? super Object> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
push(subscriber, new Object(), 50);
push(subscriber, new Object(), 200);
complete(subscriber, 250);
}
});
Function<Object, Flowable<Object>> closer = new Function<Object, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Object opening) {
return Flowable.unsafeCreate(new Publisher<Object>() {
@Override
public void subscribe(Subscriber<? super Object> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
push(subscriber, new Object(), 100);
complete(subscriber, 101);
}
});
}
};
Flowable<Flowable<String>> windowed = source.window(openings, closer);
windowed.subscribe(observeWindow(list, lists));
scheduler.advanceTimeTo(500, TimeUnit.MILLISECONDS);
assertEquals(2, lists.size());
assertEquals(lists.get(0), list("two", "three"));
assertEquals(lists.get(1), list("five"));
}
private List<String> list(String... args) {
List<String> list = new ArrayList<>();
for (String arg : args) {
list.add(arg);
}
return list;
}
private <T> void push(final Subscriber<T> subscriber, final T value, int delay) {
innerScheduler.schedule(new Runnable() {
@Override
public void run() {
subscriber.onNext(value);
}
}, delay, TimeUnit.MILLISECONDS);
}
private void complete(final Subscriber<?> subscriber, int delay) {
innerScheduler.schedule(new Runnable() {
@Override
public void run() {
subscriber.onComplete();
}
}, delay, TimeUnit.MILLISECONDS);
}
private Consumer<Flowable<String>> observeWindow(final List<String> list, final List<List<String>> lists) {
return new Consumer<Flowable<String>>() {
@Override
public void accept(Flowable<String> stringFlowable) {
stringFlowable.subscribe(new DefaultSubscriber<String>() {
@Override
public void onComplete() {
lists.add(new ArrayList<>(list));
list.clear();
}
@Override
public void onError(Throwable e) {
fail(e.getMessage());
}
@Override
public void onNext(String args) {
list.add(args);
}
});
}
};
}
@Test
public void noUnsubscribeAndNoLeak() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> open = PublishProcessor.create();
final PublishProcessor<Integer> close = PublishProcessor.create();
TestSubscriber<Flowable<Integer>> ts = new TestSubscriber<>();
source.window(open, new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t) {
return close;
}
})
.doOnNext(new Consumer<Flowable<Integer>>() {
@Override
public void accept(Flowable<Integer> w) throws Throwable {
w.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer()); // avoid abandonment
}
})
.subscribe(ts);
open.onNext(1);
source.onNext(1);
assertTrue(open.hasSubscribers());
assertTrue(close.hasSubscribers());
close.onNext(1);
assertFalse(close.hasSubscribers());
source.onComplete();
ts.assertComplete();
ts.assertNoErrors();
ts.assertValueCount(1);
assertFalse(ts.isCancelled());
assertFalse(open.hasSubscribers());
assertFalse(close.hasSubscribers());
}
    @Test
    public void unsubscribeAll() {
        // Cancelling the outer subscriber must stop new windows from opening;
        // an already-open window, however, keeps its close boundary subscribed.
        PublishProcessor<Integer> source = PublishProcessor.create();
        PublishProcessor<Integer> open = PublishProcessor.create();
        final PublishProcessor<Integer> close = PublishProcessor.create();
        TestSubscriber<Flowable<Integer>> ts = new TestSubscriber<>();
        source.window(open, new Function<Integer, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Integer t) {
                return close;
            }
        })
        .doOnNext(new Consumer<Flowable<Integer>>() {
            @Override
            public void accept(Flowable<Integer> w) throws Throwable {
                w.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer()); // avoid abandonment
            }
        })
        .subscribe(ts);
        open.onNext(1);
        assertTrue(open.hasSubscribers());
        assertTrue(close.hasSubscribers());
        ts.cancel();
        // Disposing the outer sequence stops the opening of new windows
        assertFalse(open.hasSubscribers());
        // FIXME subject has subscribers because of the open window
        assertTrue(close.hasSubscribers());
    }
    // Standard TestHelper check that the operator's Disposable contract holds.
    @Test
    public void dispose() {
        TestHelper.checkDisposed(Flowable.just(1).window(Flowable.just(2), Functions.justFunction(Flowable.never())));
    }
    @Test
    public void reentrant() {
        // Signals pp.onNext/onComplete from inside onNext(1) to verify the
        // operator tolerates reentrant emission without losing items.
        final FlowableProcessor<Integer> pp = PublishProcessor.<Integer>create();
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                if (t == 1) {
                    // Reentrant: emit 2 and complete while still delivering 1.
                    pp.onNext(2);
                    pp.onComplete();
                }
            }
        };
        // BehaviorProcessor.createDefault(1) opens a window immediately.
        pp.window(BehaviorProcessor.createDefault(1), Functions.justFunction(Flowable.never()))
        .flatMap(new Function<Flowable<Integer>, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
                return v;
            }
        })
        .subscribe(ts);
        pp.onNext(1);
        ts
        .awaitDone(1, TimeUnit.SECONDS)
        .assertResult(1, 2);
    }
@Test
public void boundarySelectorNormal() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> start = PublishProcessor.create();
final PublishProcessor<Integer> end = PublishProcessor.create();
TestSubscriber<Integer> ts = source.window(start, new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) throws Exception {
return end;
}
})
.flatMap(Functions.<Flowable<Integer>>identity())
.test();
start.onNext(0);
source.onNext(1);
source.onNext(2);
source.onNext(3);
source.onNext(4);
start.onNext(1);
source.onNext(5);
source.onNext(6);
end.onNext(1);
start.onNext(2);
TestHelper.emit(source, 7, 8);
ts.assertResult(1, 2, 3, 4, 5, 5, 6, 6, 7, 8);
}
@Test
public void startError() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> start = PublishProcessor.create();
final PublishProcessor<Integer> end = PublishProcessor.create();
TestSubscriber<Integer> ts = source.window(start, new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) throws Exception {
return end;
}
})
.flatMap(Functions.<Flowable<Integer>>identity())
.test();
start.onError(new TestException());
ts.assertFailure(TestException.class);
assertFalse("Source has observers!", source.hasSubscribers());
assertFalse("Start has observers!", start.hasSubscribers());
assertFalse("End has observers!", end.hasSubscribers());
}
    // An error on the window-close boundary must fail the sequence and detach
    // everything; @SuppressUndeliverable tolerates late undeliverable errors.
    @Test
    @SuppressUndeliverable
    public void endError() {
        PublishProcessor<Integer> source = PublishProcessor.create();
        PublishProcessor<Integer> start = PublishProcessor.create();
        final PublishProcessor<Integer> end = PublishProcessor.create();
        TestSubscriber<Integer> ts = source.window(start, new Function<Integer, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Integer v) throws Exception {
                return end;
            }
        })
        .flatMap(Functions.<Flowable<Integer>>identity())
        .test();
        start.onNext(1);
        end.onError(new TestException());
        ts.assertFailure(TestException.class);
        assertFalse("Source has observers!", source.hasSubscribers());
        assertFalse("Start has observers!", start.hasSubscribers());
        assertFalse("End has observers!", end.hasSubscribers());
    }
    // An error from the main source propagates through the windowed sequence.
    @Test
    public void mainError() {
        Flowable.<Integer>error(new TestException())
        .window(Flowable.never(), Functions.justFunction(Flowable.just(1)))
        .flatMap(Functions.<Flowable<Integer>>identity())
        .test()
        .assertFailure(TestException.class);
    }
    // NOTE(review): method name has a typo ("Ingores" -> "Ignores"); left as-is
    // to keep the public test identifier stable.
    // A close-boundary that keeps signaling after termination must not crash;
    // the extra onError is routed to the undeliverable-error plugin hook.
    @Test
    public void windowCloseIngoresCancel() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            BehaviorProcessor.createDefault(1)
            .window(BehaviorProcessor.createDefault(1), new Function<Integer, Publisher<Integer>>() {
                @Override
                public Publisher<Integer> apply(Integer f) throws Exception {
                    // Misbehaving close boundary: emits twice then errors
                    // despite the operator having consumed the first signal.
                    return new Flowable<Integer>() {
                        @Override
                        protected void subscribeActual(
                                Subscriber<? super Integer> s) {
                            s.onSubscribe(new BooleanSubscription());
                            s.onNext(1);
                            s.onNext(2);
                            s.onError(new TestException());
                        }
                    };
                }
            })
            .doOnNext(new Consumer<Flowable<Integer>>() {
                @Override
                public void accept(Flowable<Integer> w) throws Throwable {
                    w.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer()); // avoid abandonment
                }
            })
            .test()
            .assertValueCount(1)
            .assertNoErrors()
            .assertNotComplete();
            TestHelper.assertUndeliverable(errors, 0, TestException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
static Flowable<Integer> flowableDisposed(final AtomicBoolean ref) {
return Flowable.just(1).concatWith(Flowable.<Integer>never())
.doOnCancel(new Action() {
@Override
public void run() throws Exception {
ref.set(true);
}
});
}
    @Test
    public void mainAndBoundaryDisposeOnNoWindows() {
        // Cancelling the outer sequence must cancel the main source, the
        // open boundary and any active close boundary.
        AtomicBoolean mainDisposed = new AtomicBoolean();
        AtomicBoolean openDisposed = new AtomicBoolean();
        final AtomicBoolean closeDisposed = new AtomicBoolean();
        flowableDisposed(mainDisposed)
        .window(flowableDisposed(openDisposed), new Function<Integer, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Integer v) throws Exception {
                return flowableDisposed(closeDisposed);
            }
        })
        .doOnNext(new Consumer<Flowable<Integer>>() {
            @Override
            public void accept(Flowable<Integer> w) throws Throwable {
                w.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer()); // avoid abandonment
            }
        })
        .to(TestHelper.<Flowable<Integer>>testConsumer())
        .assertSubscribed()
        .assertNoErrors()
        .assertNotComplete()
        .cancel();
        // Every participant must have observed the cancellation.
        assertTrue(mainDisposed.get());
        assertTrue(openDisposed.get());
        assertTrue(closeDisposed.get());
    }
    @Test
    public void mainWindowMissingBackpressure() {
        // With zero requested from the downstream, a boundary signal that
        // would open a window must fail with MissingBackpressureException.
        PublishProcessor<Integer> source = PublishProcessor.create();
        PublishProcessor<Integer> boundary = PublishProcessor.create();
        TestSubscriber<Flowable<Integer>> ts = source.window(boundary, Functions.justFunction(Flowable.never()))
        .test(0L)
        ;
        ts.assertEmpty();
        boundary.onNext(1);
        ts.assertFailure(MissingBackpressureException.class);
        // The failure must also release both upstream subscriptions.
        assertFalse(source.hasSubscribers());
        assertFalse(boundary.hasSubscribers());
    }
    @Test
    public void cancellingWindowCancelsUpstream() {
        // Once the only window (take(1)) and its single item (inner take(1))
        // are consumed, the upstream processor must be unsubscribed.
        PublishProcessor<Integer> pp = PublishProcessor.create();
        TestSubscriber<Integer> ts = pp.window(Flowable.just(1).concatWith(Flowable.<Integer>never()), Functions.justFunction(Flowable.never()))
        .take(1)
        .flatMap(new Function<Flowable<Integer>, Publisher<Integer>>() {
            @Override
            public Publisher<Integer> apply(Flowable<Integer> w) throws Throwable {
                return w.take(1);
            }
        })
        .test();
        assertTrue(pp.hasSubscribers());
        pp.onNext(1);
        ts
        .assertResult(1);
        assertFalse("Processor still has subscribers!", pp.hasSubscribers());
    }
    @Test
    public void windowAbandonmentCancelsUpstream() {
        // If the downstream cancels without ever subscribing to the emitted
        // window (abandonment), the upstream must be cancelled and the
        // abandoned window must complete empty for late subscribers.
        PublishProcessor<Integer> pp = PublishProcessor.create();
        final AtomicReference<Flowable<Integer>> inner = new AtomicReference<>();
        TestSubscriber<Flowable<Integer>> ts = pp.window(Flowable.<Integer>just(1).concatWith(Flowable.<Integer>never()),
                Functions.justFunction(Flowable.never()))
        .doOnNext(new Consumer<Flowable<Integer>>() {
            @Override
            public void accept(Flowable<Integer> v) throws Throwable {
                // Capture the window without subscribing to it.
                inner.set(v);
            }
        })
        .test();
        assertTrue(pp.hasSubscribers());
        ts
        .assertValueCount(1)
        ;
        pp.onNext(1);
        assertTrue(pp.hasSubscribers());
        ts.cancel();
        ts
        .assertValueCount(1)
        .assertNoErrors()
        .assertNotComplete();
        assertFalse("Processor still has subscribers!", pp.hasSubscribers());
        // The abandoned window terminates empty when finally subscribed.
        inner.get().test().assertResult();
    }
    @Test
    public void closingIndicatorFunctionCrash() {
        // A crash inside the close-indicator selector must surface as the
        // sequence error and unsubscribe from both sources.
        PublishProcessor<Integer> source = PublishProcessor.create();
        PublishProcessor<Integer> boundary = PublishProcessor.create();
        TestSubscriber<Flowable<Integer>> ts = source.window(boundary, new Function<Integer, Publisher<Object>>() {
            @Override
            public Publisher<Object> apply(Integer end) throws Throwable {
                throw new TestException();
            }
        })
        .test()
        ;
        ts.assertEmpty();
        boundary.onNext(1);
        ts.assertFailure(TestException.class);
        assertFalse(source.hasSubscribers());
        assertFalse(boundary.hasSubscribers());
    }
    // Protocol check: a second onSubscribe from upstream must be rejected.
    @Test
    public void doubleOnSubscribe() {
        TestHelper.checkDoubleOnSubscribeFlowable(o -> o.window(Flowable.never(), v -> Flowable.never()));
    }
    @Test
    public void openError() throws Throwable {
        // Races an error on the open boundary against an error on the close
        // boundary; whichever loses must end up as an undeliverable error
        // rather than being silently dropped.
        TestHelper.withErrorTracking(errors -> {
            TestException ex1 = new TestException();
            TestException ex2 = new TestException();
            for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
                AtomicReference<Subscriber<? super Integer>> ref1 = new AtomicReference<>();
                AtomicReference<Subscriber<? super Integer>> ref2 = new AtomicReference<>();
                // Capture the operator's subscribers so the test can drive
                // the boundary signals manually.
                Flowable<Integer> f1 = Flowable.<Integer>fromPublisher(ref1::set);
                Flowable<Integer> f2 = Flowable.<Integer>fromPublisher(ref2::set);
                TestSubscriber<Flowable<Integer>> ts = BehaviorProcessor.createDefault(1)
                        .window(f1, v -> f2)
                        .doOnNext(w -> w.test())
                        .test();
                ref1.get().onSubscribe(new BooleanSubscription());
                ref1.get().onNext(1);
                ref2.get().onSubscribe(new BooleanSubscription());
                TestHelper.race(
                        () -> ref1.get().onError(ex1),
                        () -> ref2.get().onError(ex2)
                );
                ts.assertError(RuntimeException.class);
                if (!errors.isEmpty()) {
                    TestHelper.assertUndeliverable(errors, 0, TestException.class);
                }
                errors.clear();
            }
        });
    }
    @Test
    public void closeError() throws Throwable {
        // A second onError from the close boundary after termination must be
        // routed to the undeliverable-error hook, not delivered downstream.
        TestHelper.withErrorTracking(errors -> {
            AtomicReference<Subscriber<? super Integer>> ref1 = new AtomicReference<>();
            AtomicReference<Subscriber<? super Integer>> ref2 = new AtomicReference<>();
            Flowable<Integer> f1 = Flowable.<Integer>unsafeCreate(ref1::set);
            Flowable<Integer> f2 = Flowable.<Integer>unsafeCreate(ref2::set);
            TestSubscriber<Integer> ts = BehaviorProcessor.createDefault(1)
                    .window(f1, v -> f2)
                    .flatMap(v -> v)
                    .test();
            ref1.get().onSubscribe(new BooleanSubscription());
            ref1.get().onNext(1);
            ref2.get().onSubscribe(new BooleanSubscription());
            ref2.get().onError(new TestException());
            // Deliberate protocol violation: error after termination.
            ref2.get().onError(new TestException());
            ts.assertFailure(TestException.class);
            TestHelper.assertUndeliverable(errors, 0, TestException.class);
        });
    }
    // A main-source error arriving before any window opens still fails the sequence.
    @Test
    public void upstreamFailsBeforeFirstWindow() {
        Flowable.error(new TestException())
        .window(Flowable.never(), v -> Flowable.never())
        .test()
        .assertFailure(TestException.class);
    }
    @Test
    public void windowOpenMainCompletes() {
        // Completes the main source reentrantly from inside the first
        // window's doOnNext; the outer sequence must still complete.
        AtomicReference<Subscriber<? super Integer>> ref1 = new AtomicReference<>();
        PublishProcessor<Object> pp = PublishProcessor.create();
        Flowable<Integer> f1 = Flowable.<Integer>unsafeCreate(ref1::set);
        AtomicInteger counter = new AtomicInteger();
        TestSubscriber<Flowable<Object>> ts = pp
                .window(f1, v -> Flowable.never())
                .doOnNext(w -> {
                    if (counter.getAndIncrement() == 0) {
                        // Reentrant: open another window, emit, then complete.
                        ref1.get().onNext(2);
                        pp.onNext(1);
                        pp.onComplete();
                    }
                    w.test();
                })
                .test();
        ref1.get().onSubscribe(new BooleanSubscription());
        ref1.get().onNext(1);
        ts.assertComplete();
    }
    @Test
    public void windowOpenMainError() {
        // Same reentrant scenario as windowOpenMainCompletes, but the main
        // source errors instead; the error must reach the outer subscriber.
        AtomicReference<Subscriber<? super Integer>> ref1 = new AtomicReference<>();
        PublishProcessor<Object> pp = PublishProcessor.create();
        Flowable<Integer> f1 = Flowable.<Integer>unsafeCreate(ref1::set);
        AtomicInteger counter = new AtomicInteger();
        TestSubscriber<Flowable<Object>> ts = pp
                .window(f1, v -> Flowable.never())
                .doOnNext(w -> {
                    if (counter.getAndIncrement() == 0) {
                        // Reentrant: open another window, emit, then fail.
                        ref1.get().onNext(2);
                        pp.onNext(1);
                        pp.onError(new TestException());
                    }
                    w.test();
                })
                .test();
        ref1.get().onSubscribe(new BooleanSubscription());
        ref1.get().onNext(1);
        ts.assertError(TestException.class);
    }
    @Test
    public void windowOpenIgnoresDispose() {
        // After take(1) cancels the outer sequence, further open-boundary
        // signals must not produce additional windows.
        AtomicReference<Subscriber<? super Integer>> ref1 = new AtomicReference<>();
        PublishProcessor<Object> pp = PublishProcessor.create();
        Flowable<Integer> f1 = Flowable.<Integer>unsafeCreate(ref1::set);
        TestSubscriber<Flowable<Object>> ts = pp
                .window(f1, v -> Flowable.never())
                .take(1)
                .doOnNext(w -> {
                    w.test();
                })
                .test();
        ref1.get().onSubscribe(new BooleanSubscription());
        ref1.get().onNext(1);
        // Signal after cancellation: must be ignored.
        ref1.get().onNext(2);
        ts.assertValueCount(1);
    }
    // Reactive Streams rule 3.9: non-positive requests must be reported as errors.
    @Test
    public void badRequest() {
        TestHelper.assertBadRequestReported(Flowable.never().window(Flowable.never(), v -> Flowable.never()));
    }
    @Test
    public void mainIgnoresCancelBeforeOnError() throws Throwable {
        // The close-indicator error cancels the sequence first; the main
        // source's subsequent IOException must go to the undeliverable hook.
        TestHelper.withErrorTracking(errors -> {
            Flowable.fromPublisher(s -> {
                s.onSubscribe(new BooleanSubscription());
                s.onNext(1);
                // Emitted after the operator has already cancelled.
                s.onError(new IOException());
            })
            .window(BehaviorProcessor.createDefault(1), v -> Flowable.error(new TestException()))
            .doOnNext(w -> w.test())
            .test()
            .assertError(TestException.class);
            TestHelper.assertUndeliverable(errors, 0, IOException.class);
        });
    }
}
| FlowableWindowWithStartEndFlowableTest |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/ChangedClassesBuildItem.java | {
"start": 655,
"end": 1962
} | class ____ extends SimpleBuildItem {
private final Map<DotName, ClassInfo> changedClassesNewVersion;
private final Map<DotName, ClassInfo> changedClassesOldVersion;
private final Map<DotName, ClassInfo> deletedClasses;
private final Map<DotName, ClassInfo> addedClasses;
public ChangedClassesBuildItem(Map<DotName, ClassInfo> changedClassesNewVersion,
Map<DotName, ClassInfo> changedClassesOldVersion, Map<DotName, ClassInfo> deletedClasses,
Map<DotName, ClassInfo> addedClasses) {
this.changedClassesNewVersion = changedClassesNewVersion;
this.changedClassesOldVersion = changedClassesOldVersion;
this.deletedClasses = deletedClasses;
this.addedClasses = addedClasses;
}
public Map<DotName, ClassInfo> getChangedClassesNewVersion() {
return Collections.unmodifiableMap(changedClassesNewVersion);
}
public Map<DotName, ClassInfo> getChangedClassesOldVersion() {
return Collections.unmodifiableMap(changedClassesOldVersion);
}
public Map<DotName, ClassInfo> getDeletedClasses() {
return Collections.unmodifiableMap(deletedClasses);
}
public Map<DotName, ClassInfo> getAddedClasses() {
return Collections.unmodifiableMap(addedClasses);
}
}
| ChangedClassesBuildItem |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarnings.java | {
"start": 1171,
"end": 3445
} | class ____ extends FeatureInjector implements RestTestTransformByParentObject {
private static JsonNodeFactory jsonNodeFactory = JsonNodeFactory.withExactBigDecimals(false);
private final List<String> allowedWarnings;
private String testName;
private final boolean isRegex;
/**
* @param allowedWarnings The allowed warnings to inject
*/
public InjectAllowedWarnings(List<String> allowedWarnings) {
this(false, allowedWarnings);
}
/**
* @param isRegex true if should inject the regex variant of allowed warnings
* @param allowedWarnings The allowed warnings to inject
*/
public InjectAllowedWarnings(boolean isRegex, List<String> allowedWarnings) {
this(isRegex, allowedWarnings, null);
}
/**
* @param isRegex true if should inject the regex variant of allowed warnings
* @param allowedWarnings The allowed warnings to inject
* @param testName The testName to inject
*/
public InjectAllowedWarnings(boolean isRegex, List<String> allowedWarnings, String testName) {
this.isRegex = isRegex;
this.allowedWarnings = allowedWarnings;
this.testName = testName;
}
@Override
public void transformTest(ObjectNode doNodeParent) {
ObjectNode doNodeValue = (ObjectNode) doNodeParent.get(getKeyToFind());
ArrayNode arrayWarnings = (ArrayNode) doNodeValue.get(getSkipFeatureName());
if (arrayWarnings == null) {
arrayWarnings = new ArrayNode(jsonNodeFactory);
doNodeValue.set(getSkipFeatureName(), arrayWarnings);
}
this.allowedWarnings.forEach(arrayWarnings::add);
}
@Override
@Internal
public String getKeyToFind() {
return "do";
}
@Override
@Input
public String getSkipFeatureName() {
return isRegex ? "allowed_warnings_regex" : "allowed_warnings";
}
@Input
public List<String> getAllowedWarnings() {
return allowedWarnings;
}
@Override
public boolean shouldApply(RestTestContext testContext) {
return testName == null || testContext.testName().equals(testName);
}
@Input
@Optional
public String getTestName() {
return testName;
}
}
| InjectAllowedWarnings |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/api/records/TestURL.java | {
"start": 1940,
"end": 2377
} | class ____ implements RecordFactory {
private static final RecordFactoryForTest SELF =
new RecordFactoryForTest();
@SuppressWarnings("unchecked")
@Override
public <T> T newRecordInstance(Class<T> clazz) {
return (T) new URLForTest();
}
public static RecordFactory get() {
return SELF;
}
}
/** URL fake for this test; sidesteps proto-URL dependency. */
public static | RecordFactoryForTest |
java | quarkusio__quarkus | integration-tests/devmode/src/test/java/io/quarkus/test/devui/DevUIReactiveMessagingJsonRPCTest.java | {
"start": 394,
"end": 2951
} | class ____ extends DevUIJsonRPCTest {
@RegisterExtension
static final QuarkusDevModeTest config = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar.addClasses(MyProcessor.class, DummyConnector.class)
.addAsResource(
new StringAsset(
"mp.messaging.incoming.input.connector=dummy\n"
+ "mp.messaging.incoming.input.values=hallo"),
"application.properties"));
public DevUIReactiveMessagingJsonRPCTest() {
super("quarkus-messaging");
}
@Test
public void testProcessor() throws Exception {
JsonNode info = super.executeJsonRPCMethod("getInfo");
Assertions.assertNotNull(info);
Assertions.assertTrue(info.isArray());
Iterator<JsonNode> en = info.elements();
boolean consumerExists = false;
boolean publisherExists = false;
while (en.hasNext()) {
JsonNode channel = en.next();
JsonNode consumers = channel.get("consumers");
if (consumers != null) {
consumerExists = typeAndDescriptionExist(consumers, "CHANNEL",
"<code>io.quarkus.test.devui.MyProcessor#channel</code>");
}
JsonNode publishers = channel.get("publishers");
if (publishers != null) {
publisherExists = typeAndDescriptionExist(publishers, "PROCESSOR",
"<code>io.quarkus.test.devui.MyProcessor#process()</code>");
}
}
Assertions.assertTrue(consumerExists);
Assertions.assertTrue(publisherExists);
}
private boolean typeAndDescriptionExist(JsonNode a, String type, String description) {
if (a.isArray()) {
Iterator<JsonNode> en = a.elements();
while (en.hasNext()) {
JsonNode b = en.next();
if (isTypeAndDescription(b, type, description)) {
return true;
}
}
} else {
return isTypeAndDescription(a, type, description);
}
return false;
}
private boolean isTypeAndDescription(JsonNode b, String type, String description) {
String t = b.get("type").asText();
String d = b.get("description").asText();
if (t.equals(type) &&
d.equals(description)) {
return true;
}
return false;
}
}
| DevUIReactiveMessagingJsonRPCTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/UsingFairLockBenchmarkTest.java | {
"start": 1838,
"end": 3197
} | class ____ extends MockDriver {
public MockConnection createMockConnection(MockDriver driver, String url, Properties connectProperties) {
try {
Thread.sleep(1000 * 1);
} catch (InterruptedException e) {
e.printStackTrace();
}
return super.createMockConnection(driver, url, connectProperties);
}
}
@TearDown(Level.Trial)
public void tearDown() throws Exception {
dataSource.close();
assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size());
}
@Benchmark
public void test_activeTrace() throws Exception {
int count = 1000_00;
int i = 0;
try {
for (; i < count; ++i) {
Connection conn = dataSource.getConnection();
assertNotNull(conn);
conn.close();
assertTrue(conn.isClosed());
}
} catch (Exception e) {
e.printStackTrace();
} finally {
assertEquals(count, i);
}
}
public static void main(String[] args) throws RunnerException {
Options options = new OptionsBuilder()
.include(UsingFairLockBenchmarkTest.class.getSimpleName())
.build();
new Runner(options).run();
}
}
| SlowDriver |
java | apache__camel | components/camel-hashicorp-vault/src/test/java/org/apache/camel/component/hashicorp/vault/integration/operations/HashicorpProducerReadMultiVersionedSecretIT.java | {
"start": 1379,
"end": 3910
} | class ____ extends HashicorpVaultBase {
@EndpointInject("mock:result-write")
private MockEndpoint mockWrite;
@EndpointInject("mock:result-read")
private MockEndpoint mockRead;
@Test
public void createSecretTest() throws InterruptedException {
mockWrite.expectedMessageCount(2);
mockRead.expectedMessageCount(1);
Exchange exchange = template.request("direct:createSecret", new Processor() {
@Override
public void process(Exchange exchange) {
HashMap map = new HashMap();
map.put("integer", "30");
exchange.getIn().setBody(map);
}
});
exchange = template.request("direct:createSecret", new Processor() {
@Override
public void process(Exchange exchange) {
HashMap map = new HashMap();
map.put("integer", "31");
exchange.getIn().setBody(map);
}
});
exchange = template.request("direct:readSecret", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getMessage().setHeader(HashicorpVaultConstants.SECRET_PATH, "test");
exchange.getMessage().setHeader(HashicorpVaultConstants.SECRET_VERSION, "1");
}
});
MockEndpoint.assertIsSatisfied(context);
Exchange ret = mockRead.getExchanges().get(0);
assertNotNull(ret);
assertEquals("30", ((Map) ret.getMessage().getBody(Map.class).get("data")).get("integer"));
assertEquals(1, ((Map) ret.getMessage().getBody(Map.class).get("metadata")).get("version"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:createSecret")
.toF("hashicorp-vault://secret?operation=createSecret&token=RAW(%s)&host=%s&port=%s&scheme=http&secretPath=test",
service.token(), service.host(), service.port())
.to("mock:result-write");
from("direct:readSecret")
.toF("hashicorp-vault://secret?operation=getSecret&token=RAW(%s)&host=%s&port=%s&scheme=http",
service.token(), service.host(), service.port())
.to("mock:result-read");
}
};
}
}
| HashicorpProducerReadMultiVersionedSecretIT |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/StreamEndpointBuilderFactory.java | {
"start": 1592,
"end": 15136
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedStreamEndpointConsumerBuilder advanced() {
return (AdvancedStreamEndpointConsumerBuilder) this;
}
/**
* You can configure the encoding (is a charset name) to use text-based
* streams (for example, message body is a String object). If not
* provided, Camel uses the JVM default Charset.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param encoding the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder encoding(String encoding) {
doSetProperty("encoding", encoding);
return this;
}
/**
* When using the stream:file URI format, this option specifies the
* filename to stream to/from.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param fileName the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder fileName(String fileName) {
doSetProperty("fileName", fileName);
return this;
}
/**
* To use JVM file watcher to listen for file change events to support
* re-loading files that may be overwritten, somewhat like tail --retry.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param fileWatcher the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder fileWatcher(boolean fileWatcher) {
doSetProperty("fileWatcher", fileWatcher);
return this;
}
/**
* To use JVM file watcher to listen for file change events to support
* re-loading files that may be overwritten, somewhat like tail --retry.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param fileWatcher the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder fileWatcher(String fileWatcher) {
doSetProperty("fileWatcher", fileWatcher);
return this;
}
/**
* To group X number of lines in the consumer. For example to group 10
* lines and therefore only spit out an Exchange with 10 lines, instead
* of 1 Exchange per line.
*
* The option is a: <code>int</code> type.
*
* Group: consumer
*
* @param groupLines the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder groupLines(int groupLines) {
doSetProperty("groupLines", groupLines);
return this;
}
/**
* To group X number of lines in the consumer. For example to group 10
* lines and therefore only spit out an Exchange with 10 lines, instead
* of 1 Exchange per line.
*
* The option will be converted to a <code>int</code> type.
*
* Group: consumer
*
* @param groupLines the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder groupLines(String groupLines) {
doSetProperty("groupLines", groupLines);
return this;
}
/**
* Allows to use a custom GroupStrategy to control how to group lines.
*
* The option is a:
* <code>org.apache.camel.component.stream.GroupStrategy</code> type.
*
* Group: consumer
*
* @param groupStrategy the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder groupStrategy(org.apache.camel.component.stream.GroupStrategy groupStrategy) {
doSetProperty("groupStrategy", groupStrategy);
return this;
}
/**
* Allows to use a custom GroupStrategy to control how to group lines.
*
* The option will be converted to a
* <code>org.apache.camel.component.stream.GroupStrategy</code> type.
*
* Group: consumer
*
* @param groupStrategy the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder groupStrategy(String groupStrategy) {
doSetProperty("groupStrategy", groupStrategy);
return this;
}
/**
* When using stream:http format, this option specifies optional http
* headers, such as Accept: application/json. Multiple headers can be
* separated by comma. The format of headers can be either HEADER=VALUE
* or HEADER:VALUE. In accordance with the HTTP/1.1 specification,
* leading and/or trailing whitespace is ignored.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param httpHeaders the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder httpHeaders(String httpHeaders) {
doSetProperty("httpHeaders", httpHeaders);
return this;
}
/**
* When using stream:http format, this option specifies the http url to
* stream from.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param httpUrl the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder httpUrl(String httpUrl) {
doSetProperty("httpUrl", httpUrl);
return this;
}
/**
* Initial delay in milliseconds before showing the message prompt. This
* delay occurs only once. Can be used during system startup to avoid
* message prompts being written while other logging is done to the
* system out.
*
* The option is a: <code>long</code> type.
*
* Default: 2000
* Group: consumer
*
* @param initialPromptDelay the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder initialPromptDelay(long initialPromptDelay) {
doSetProperty("initialPromptDelay", initialPromptDelay);
return this;
}
/**
* Initial delay in milliseconds before showing the message prompt. This
* delay occurs only once. Can be used during system startup to avoid
* message prompts being written while other logging is done to the
* system out.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 2000
* Group: consumer
*
* @param initialPromptDelay the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder initialPromptDelay(String initialPromptDelay) {
doSetProperty("initialPromptDelay", initialPromptDelay);
return this;
}
/**
* Optional delay in milliseconds before showing the message prompt.
*
* The option is a: <code>long</code> type.
*
* Group: consumer
*
* @param promptDelay the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder promptDelay(long promptDelay) {
doSetProperty("promptDelay", promptDelay);
return this;
}
/**
* Optional delay in milliseconds before showing the message prompt.
*
* The option will be converted to a <code>long</code> type.
*
* Group: consumer
*
* @param promptDelay the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder promptDelay(String promptDelay) {
doSetProperty("promptDelay", promptDelay);
return this;
}
/**
* Message prompt to use when reading from stream:in; for example, you
* could set this to Enter a command:.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param promptMessage the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder promptMessage(String promptMessage) {
doSetProperty("promptMessage", promptMessage);
return this;
}
/**
* Whether to read the input stream in line mode (terminate by line
* breaks). Setting this to false, will instead read the entire stream
* until EOL.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param readLine the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder readLine(boolean readLine) {
doSetProperty("readLine", readLine);
return this;
}
/**
* Whether to read the input stream in line mode (terminate by line
* breaks). Setting this to false, will instead read the entire stream
* until EOL.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param readLine the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder readLine(String readLine) {
doSetProperty("readLine", readLine);
return this;
}
/**
* Will retry opening the stream if it's overwritten, somewhat like tail
* --retry If reading from files then you should also enable the
* fileWatcher option, to make it work reliable.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param retry the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder retry(boolean retry) {
doSetProperty("retry", retry);
return this;
}
/**
* Will retry opening the stream if it's overwritten, somewhat like tail
* --retry If reading from files then you should also enable the
* fileWatcher option, to make it work reliable.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param retry the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder retry(String retry) {
doSetProperty("retry", retry);
return this;
}
/**
* To be used for continuously reading a stream such as the unix tail
* command.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param scanStream the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder scanStream(boolean scanStream) {
doSetProperty("scanStream", scanStream);
return this;
}
/**
* To be used for continuously reading a stream such as the unix tail
* command.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param scanStream the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder scanStream(String scanStream) {
doSetProperty("scanStream", scanStream);
return this;
}
/**
* Delay in milliseconds between read attempts when using scanStream.
*
* The option is a: <code>long</code> type.
*
* Group: consumer
*
* @param scanStreamDelay the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder scanStreamDelay(long scanStreamDelay) {
doSetProperty("scanStreamDelay", scanStreamDelay);
return this;
}
/**
* Delay in milliseconds between read attempts when using scanStream.
*
* The option will be converted to a <code>long</code> type.
*
* Group: consumer
*
* @param scanStreamDelay the value to set
* @return the dsl builder
*/
default StreamEndpointConsumerBuilder scanStreamDelay(String scanStreamDelay) {
doSetProperty("scanStreamDelay", scanStreamDelay);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Stream component.
*/
public | StreamEndpointConsumerBuilder |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/DateUtil.java | {
"start": 624,
"end": 6412
} | class ____ extends RuntimeException {
public DateParseException() {
}
public DateParseException(final String s) {
super(s);
}
public DateParseException(final String s, final Throwable throwable) {
super(s, throwable);
}
public DateParseException(final Throwable throwable) {
super(throwable);
}
}
/**
* Date format pattern used to parse HTTP date headers in RFC 1123 format.
*/
public static final String PATTERN_RFC1123 = "EEE, dd MMM yyyy HH:mm:ss zzz";
/**
* Date format pattern used to parse HTTP date headers in RFC 1036 format.
*/
public static final String PATTERN_RFC1036 = "EEEE, dd-MMM-yy HH:mm:ss zzz";
/**
* Date format pattern used to parse HTTP date headers in ANSI C
* <code>asctime()</code> format.
*/
public static final String PATTERN_ASCTIME = "EEE MMM d HH:mm:ss yyyy";
private static final Collection DEFAULT_PATTERNS = Arrays.asList(
new String[] { PATTERN_ASCTIME, PATTERN_RFC1036, PATTERN_RFC1123 });
private static final Date DEFAULT_TWO_DIGIT_YEAR_START;
static {
Calendar calendar = Calendar.getInstance();
calendar.set(2000, Calendar.JANUARY, 1, 0, 0);
DEFAULT_TWO_DIGIT_YEAR_START = calendar.getTime();
}
private static final TimeZone GMT = TimeZone.getTimeZone("GMT");
/**
* Parses a date value. The formats used for parsing the date value are retrieved from
* the default http params.
*
* @param dateValue the date value to parse
* @return the parsed date
* @throws DateParseException if the value could not be parsed using any of the
* supported date formats
*/
public static Date parseDate(String dateValue) throws DateParseException {
return parseDate(dateValue, null, null);
}
/**
* Parses the date value using the given date formats.
*
* @param dateValue the date value to parse
* @param dateFormats the date formats to use
* @return the parsed date
* @throws DateParseException if none of the dataFormats could parse the dateValue
*/
public static Date parseDate(String dateValue, Collection dateFormats)
throws DateParseException {
return parseDate(dateValue, dateFormats, null);
}
/**
* Parses the date value using the given date formats.
*
* @param dateValue the date value to parse
* @param dateFormats the date formats to use
* @param startDate During parsing, two digit years will be placed in the range
* <code>startDate</code> to <code>startDate + 100 years</code>. This value may
* be <code>null</code>. When <code>null</code> is given as a parameter, year
* <code>2000</code> will be used.
* @return the parsed date
* @throws DateParseException if none of the dataFormats could parse the dateValue
*/
public static Date parseDate(
String dateValue,
Collection dateFormats,
Date startDate) throws DateParseException {
if (dateValue == null) {
throw new IllegalArgumentException("Date was null");
}
if (dateFormats == null) {
dateFormats = DEFAULT_PATTERNS;
}
if (startDate == null) {
startDate = DEFAULT_TWO_DIGIT_YEAR_START;
}
// trim single quotes around date if present
// see issue #5279
if (dateValue.length() > 1
&& dateValue.startsWith("'")
&& dateValue.endsWith("'")) {
dateValue = dateValue.substring(1, dateValue.length() - 1);
}
SimpleDateFormat dateParser = null;
Iterator formatIter = dateFormats.iterator();
while (formatIter.hasNext()) {
String format = (String) formatIter.next();
if (dateParser == null) {
dateParser = new SimpleDateFormat(format, Locale.US);
dateParser.setTimeZone(TimeZone.getTimeZone("GMT"));
dateParser.set2DigitYearStart(startDate);
} else {
dateParser.applyPattern(format);
}
try {
return dateParser.parse(dateValue);
} catch (ParseException pe) {
// ignore this exception, we will try the next format
}
}
// we were unable to parse the date
throw new DateParseException("Unable to parse " + dateValue);
}
/**
* Formats the given date according to the RFC 1123 pattern.
*
* @param date The date to format.
* @return An RFC 1123 formatted date string.
* @see #PATTERN_RFC1123
*/
public static String formatDate(Date date) {
return formatDate(date, PATTERN_RFC1123);
}
/**
* Formats the given date according to the specified pattern. The pattern
* must conform to that used by the {@link SimpleDateFormat simple date
* format} class.
*
* @param date The date to format.
* @param pattern The pattern to use for formatting the date.
* @return A formatted date string.
* @throws IllegalArgumentException If the given date pattern is invalid.
* @see SimpleDateFormat
*/
public static String formatDate(Date date, String pattern) {
if (date == null)
throw new IllegalArgumentException("Date was null");
if (pattern == null)
throw new IllegalArgumentException("Pattern was null");
SimpleDateFormat formatter = new SimpleDateFormat(pattern, Locale.US);
formatter.setTimeZone(GMT);
return formatter.format(date);
}
/**
* This | DateParseException |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/producer/internals/BuiltInPartitioner.java | {
"start": 1443,
"end": 1630
} | class ____ track of various bookkeeping information required for adaptive sticky partitioning
* (described in detail in KIP-794). There is one partitioner object per topic.
*/
public | keeps |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/ext/MultipleExternalIds291Test.java | {
"start": 1374,
"end": 2796
} | class ____ extends Container {
public String type;
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
final ObjectMapper MAPPER = newJsonMapper();
// [databind#291]
@Test
public void testMultipleValuesSingleExtId() throws Exception
{
// first with ext-id before values
_testMultipleValuesSingleExtId(
"{'type' : '1',\n"
+"'field1' : { 'a' : 'AAA' },\n"
+"'field2' : { 'c' : 'CCC' }\n"
+"}"
);
// then after
_testMultipleValuesSingleExtId(
"{\n"
+"'field1' : { 'a' : 'AAA' },\n"
+"'field2' : { 'c' : 'CCC' },\n"
+"'type' : '1'\n"
+"}"
);
// and then in-between
_testMultipleValuesSingleExtId(
"{\n"
+"'field1' : { 'a' : 'AAA' },\n"
+"'type' : '1',\n"
+"'field2' : { 'c' : 'CCC' }\n"
+"}"
);
}
private void _testMultipleValuesSingleExtId(String json) throws Exception
{
json = a2q(json);
// First, with base class, no type id field separately
{
Container c = MAPPER.readValue(json, Container.class);
assertNotNull(c);
assertTrue(c.field1 instanceof A);
assertEquals("AAA", ((A) c.field1).a);
assertTrue(c.field2 instanceof C);
assertEquals("CCC", ((C) c.field2).c);
}
// then with sub- | ContainerWithExtra |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/converter/json/GsonFactoryBeanTests.java | {
"start": 5251,
"end": 5457
} | class ____ {
private byte[] bytes;
@SuppressWarnings("unused")
public byte[] getBytes() {
return this.bytes;
}
public void setBytes(byte[] bytes) {
this.bytes = bytes;
}
}
}
| ByteArrayBean |
java | google__dagger | javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveProvidesParameterizedTypeTest.java | {
"start": 1265,
"end": 4748
} | class ____ {
@Parameters(name = "{0}")
public static Collection<Object[]> parameters() {
return Arrays.asList(new Object[][] {{ "implementation" }, { "api" }});
}
@Rule public TemporaryFolder folder = new TemporaryFolder();
private final String transitiveDependencyType;
public TransitiveProvidesParameterizedTypeTest(String transitiveDependencyType) {
this.transitiveDependencyType = transitiveDependencyType;
}
@Test
public void testQualifierOnProvidesMethodParameter() throws IOException {
BuildResult result;
switch (transitiveDependencyType) {
case "implementation":
result = setupRunner().buildAndFail();
assertThat(result.getOutput()).contains("Task :app:compileJava FAILED");
assertThat(result.getOutput())
.contains(
"error: ComponentProcessingStep was unable to process 'app.MyComponent' because"
+ " 'library2.TransitiveType<java.lang.String>' could not be resolved."
+ "\n "
+ "\n Dependency trace:"
+ "\n => element (INTERFACE): library1.MyModule"
+ "\n => element (METHOD):"
+ " provideInt(library2.TransitiveType<java.lang.String>)"
+ "\n => type (EXECUTABLE method):"
+ " (library2.TransitiveType<java.lang.String>)java.lang.String"
+ "\n => type (ERROR parameter type):"
+ " library2.TransitiveType<java.lang.String>");
break;
case "api":
result = setupRunner().build();
assertThat(result.task(":app:assemble").getOutcome()).isEqualTo(SUCCESS);
assertThat(result.getOutput())
.contains("Binding: library2.TransitiveType<java.lang.String>");
break;
}
}
private GradleRunner setupRunner() throws IOException {
File projectDir = folder.getRoot();
GradleModule.create(projectDir)
.addSettingsFile(
"include 'app'",
"include 'library1'",
"include 'library2'",
"include 'spi-plugin'")
.addBuildFile(
"buildscript {",
" ext {",
String.format("dagger_version = \"%s\"", System.getProperty("dagger_version")),
" }",
"}",
"",
"allprojects {",
" repositories {",
" mavenCentral()",
" mavenLocal()",
" }",
"}");
GradleModule.create(projectDir, "app")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'application'",
"}",
"tasks.withType(JavaCompile) {",
" options.compilerArgs += '-Adagger.experimentalDaggerErrorMessages=ENABLED'",
"}",
"dependencies {",
" implementation project(':library1')",
" annotationProcessor project(':spi-plugin')",
" implementation \"com.google.dagger:dagger:$dagger_version\"",
" annotationProcessor \"com.google.dagger:dagger-compiler:$dagger_version\"",
"}")
.addSrcFile(
"MyComponent.java",
"package app;",
"",
"import dagger.Component;",
"import library1.MyModule;",
"",
"@Component(modules = MyModule.class)",
"public | TransitiveProvidesParameterizedTypeTest |
java | apache__camel | components/camel-debezium/camel-debezium-oracle/src/test/java/org/apache/camel/component/debezium/DebeziumOracleComponentTest.java | {
"start": 1331,
"end": 4845
} | class ____ {
@Test
void testIfConnectorEndpointCreatedWithConfig() throws Exception {
final Map<String, Object> params = new HashMap<>();
params.put("offsetStorageFileName", "/offset_test_file");
params.put("databaseHostname", "localhost");
params.put("databaseUser", "dbz");
params.put("databasePassword", "pwd");
params.put("topicPrefix", "test");
params.put("databaseServerId", 1234);
params.put("schemaHistoryInternalFileFilename", "/db_history_file_test");
final String remaining = "test_name";
final String uri = "debezium?name=test_name&offsetStorageFileName=/test&"
+ "topicPrefix=localhost&databaseServerId=1234&databaseUser=dbz&databasePassword=pwd&"
+ "databaseServerName=test&schemaHistoryInternalFileFilename=/test";
try (final DebeziumComponent debeziumComponent = new DebeziumOracleComponent(new DefaultCamelContext())) {
debeziumComponent.start();
final DebeziumEndpoint debeziumEndpoint = debeziumComponent.createEndpoint(uri, remaining, params);
assertNotNull(debeziumEndpoint);
// test for config
final OracleConnectorEmbeddedDebeziumConfiguration configuration
= (OracleConnectorEmbeddedDebeziumConfiguration) debeziumEndpoint.getConfiguration();
assertEquals("test_name", configuration.getName());
assertEquals("/offset_test_file", configuration.getOffsetStorageFileName());
assertEquals("localhost", configuration.getDatabaseHostname());
assertEquals("dbz", configuration.getDatabaseUser());
assertEquals("pwd", configuration.getDatabasePassword());
assertEquals("test", configuration.getTopicPrefix());
assertEquals("/db_history_file_test", configuration.getSchemaHistoryInternalFileFilename());
}
}
@Test
void testIfCreatesComponentWithExternalConfiguration() throws Exception {
final OracleConnectorEmbeddedDebeziumConfiguration configuration
= new OracleConnectorEmbeddedDebeziumConfiguration();
configuration.setName("test_config");
configuration.setDatabaseUser("test_db");
configuration.setDatabasePassword("pwd");
configuration.setOffsetStorageFileName("/offset/file");
configuration.setTopicPrefix("test");
final String uri = "debezium:dummy";
try (final DebeziumComponent debeziumComponent = new DebeziumOracleComponent(new DefaultCamelContext())) {
debeziumComponent.start();
// set configurations
debeziumComponent.setConfiguration(configuration);
final DebeziumEndpoint debeziumEndpoint = debeziumComponent.createEndpoint(uri, null, Collections.emptyMap());
assertNotNull(debeziumEndpoint);
// assert configurations
final OracleConnectorEmbeddedDebeziumConfiguration actualConfigurations
= (OracleConnectorEmbeddedDebeziumConfiguration) debeziumEndpoint.getConfiguration();
assertNotNull(actualConfigurations);
assertEquals(configuration.getName(), actualConfigurations.getName());
assertEquals(configuration.getDatabaseUser(), actualConfigurations.getDatabaseUser());
assertEquals(configuration.getConnectorClass(), actualConfigurations.getConnectorClass());
}
}
}
| DebeziumOracleComponentTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 63562,
"end": 64074
} | class ____ {
private final @Nullable Object method(boolean b) {
if (b) {
return null;
} else {
return null;
}
}
}
""")
.doTest();
}
@Test
public void memberSelectReturnType() {
createRefactoringTestHelper()
.addInputLines(
"in/Test.java",
"""
import org.checkerframework.checker.nullness.qual.Nullable;
| T |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java | {
"start": 881,
"end": 1182
} | class ____ {
public static final String DEPTH = "mapreduce.pentomino.depth";
public static final String WIDTH = "mapreduce.pentomino.width";
public static final String HEIGHT = "mapreduce.pentomino.height";
public static final String CLASS = "mapreduce.pentomino.class";
/**
* This | Pentomino |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorLiveITCase.java | {
"start": 2878,
"end": 8820
} | class ____ extends TestLogger {
private static final Logger LOG = LoggerFactory.getLogger(AccumulatorLiveITCase.class);
@Parameterized.Parameter public boolean testBatchJob;
@ClassRule
public static final TestExecutorResource<ScheduledExecutorService> EXECUTOR_RESOURCE =
TestingUtils.defaultExecutorResource();
// name of user accumulator
private static final String ACCUMULATOR_NAME = "test";
private static final Duration HEARTBEAT_INTERVAL = Duration.ofMillis(50L);
// number of heartbeat intervals to check
private static final int NUM_ITERATIONS = 5;
private static final List<Integer> inputData = new ArrayList<>(NUM_ITERATIONS);
static {
// generate test data
for (int i = 0; i < NUM_ITERATIONS; i++) {
inputData.add(i);
}
}
@Parameterized.Parameters(name = "testBatchJob: {0}")
public static Object[] parameters() {
return new Object[] {true, false};
}
@ClassRule
public static final MiniClusterWithClientResource MINI_CLUSTER_RESOURCE =
new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(getConfiguration())
.setNumberTaskManagers(1)
.setNumberSlotsPerTaskManager(1)
.build());
private static Configuration getConfiguration() {
Configuration config = new Configuration();
config.set(RpcOptions.ASK_TIMEOUT_DURATION, TestingUtils.DEFAULT_ASK_TIMEOUT);
config.set(HeartbeatManagerOptions.HEARTBEAT_INTERVAL, HEARTBEAT_INTERVAL);
return config;
}
@Before
public void resetLatches() throws InterruptedException {
NotifyingMapper.reset();
}
@Test
public void testJob() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
RuntimeExecutionMode runtimeExecutionMode =
testBatchJob ? RuntimeExecutionMode.BATCH : RuntimeExecutionMode.STREAMING;
env.setRuntimeMode(runtimeExecutionMode);
env.setParallelism(1);
DataStream<Integer> input = env.fromData(inputData);
input.flatMap(new NotifyingMapper())
.writeUsingOutputFormat(new DummyOutputFormat())
.disableChaining();
JobGraph jobGraph = env.getStreamGraph().getJobGraph();
submitJobAndVerifyResults(jobGraph);
}
private static void submitJobAndVerifyResults(JobGraph jobGraph) throws Exception {
Deadline deadline = Deadline.now().plus(Duration.ofSeconds(30));
final ClusterClient<?> client = MINI_CLUSTER_RESOURCE.getClusterClient();
final CheckedThread submissionThread =
new CheckedThread() {
@Override
public void go() throws Exception {
submitJobAndWaitForResult(client, jobGraph, getClass().getClassLoader());
}
};
submissionThread.start();
try {
NotifyingMapper.notifyLatch.await();
// verify using the ClusterClient
verifyResults(jobGraph, deadline, client);
// verify using the MiniClusterJobClient
verifyResults(jobGraph, deadline, null);
NotifyingMapper.shutdownLatch.trigger();
} finally {
NotifyingMapper.shutdownLatch.trigger();
// wait for the job to have terminated
submissionThread.sync();
}
}
private static void verifyResults(JobGraph jobGraph, Deadline deadline, ClusterClient<?> client)
throws InterruptedException,
java.util.concurrent.ExecutionException,
java.util.concurrent.TimeoutException {
FutureUtils.retrySuccessfulWithDelay(
() -> {
try {
if (client != null) {
return CompletableFuture.completedFuture(
client.getAccumulators(jobGraph.getJobID()).get());
} else {
final MiniClusterJobClient miniClusterJobClient =
new MiniClusterJobClient(
jobGraph.getJobID(),
MINI_CLUSTER_RESOURCE.getMiniCluster(),
ClassLoader.getSystemClassLoader(),
MiniClusterJobClient.JobFinalizationBehavior
.NOTHING);
return CompletableFuture.completedFuture(
miniClusterJobClient.getAccumulators().get());
}
} catch (Exception e) {
return FutureUtils.completedExceptionally(e);
}
},
Duration.ofMillis(20),
deadline,
accumulators ->
accumulators.size() == 1
&& accumulators.containsKey(ACCUMULATOR_NAME)
&& (int) accumulators.get(ACCUMULATOR_NAME)
== NUM_ITERATIONS,
new ScheduledExecutorServiceAdapter(EXECUTOR_RESOURCE.getExecutor()))
.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
}
/** UDF that notifies when it changes the accumulator values. */
private static | AccumulatorLiveITCase |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/litemode/LifecycleBean.java | {
"start": 825,
"end": 1160
} | class ____ {
private final String name;
private boolean initialized = false;
public LifecycleBean(String name) {
this.name = name;
}
public String getName() {
return this.name;
}
@PostConstruct
public void init() {
initialized = true;
}
public boolean isInitialized() {
return this.initialized;
}
}
| LifecycleBean |
java | google__gson | gson/src/test/java/com/google/gson/functional/UncategorizedTest.java | {
"start": 1388,
"end": 2730
} | class ____ {
private Gson gson = null;
@Before
public void setUp() throws Exception {
gson = new Gson();
}
@Test
public void testInvalidJsonDeserializationFails() throws Exception {
assertThrows(
JsonParseException.class, () -> gson.fromJson("adfasdf1112,,,\":", BagOfPrimitives.class));
assertThrows(
JsonParseException.class,
() -> gson.fromJson("{adfasdf1112,,,\":}", BagOfPrimitives.class));
}
@Test
public void testObjectEqualButNotSameSerialization() {
ClassOverridingEquals objA = new ClassOverridingEquals();
ClassOverridingEquals objB = new ClassOverridingEquals();
objB.ref = objA;
String json = gson.toJson(objB);
assertThat(json).isEqualTo(objB.getExpectedJson());
}
@Test
public void testStaticFieldsAreNotSerialized() {
BagOfPrimitives target = new BagOfPrimitives();
assertThat(gson.toJson(target)).doesNotContain("DEFAULT_VALUE");
}
@Test
public void testGsonInstanceReusableForSerializationAndDeserialization() {
BagOfPrimitives bag = new BagOfPrimitives();
String json = gson.toJson(bag);
BagOfPrimitives deserialized = gson.fromJson(json, BagOfPrimitives.class);
assertThat(deserialized).isEqualTo(bag);
}
/**
* This test ensures that a custom deserializer is able to return a derived | UncategorizedTest |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authorization/method/AuthorizeReturnObjectMethodInterceptor.java | {
"start": 3549,
"end": 3945
} | class ____ extends StaticMethodMatcherPointcut {
private final Predicate<Class<?>> returnTypeMatches;
MethodReturnTypePointcut(Predicate<Class<?>> returnTypeMatches) {
this.returnTypeMatches = returnTypeMatches;
}
@Override
public boolean matches(Method method, Class<?> targetClass) {
return this.returnTypeMatches.test(method.getReturnType());
}
}
}
| MethodReturnTypePointcut |
java | mockito__mockito | mockito-integration-tests/inline-mocks-tests/src/test/java/org/mockitoinline/StaticMockRuleTest.java | {
"start": 386,
"end": 821
} | class ____ {
@Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
@Mock private MockedStatic<Dummy> dummy;
@Test
public void testStaticMockSimple() {
assertNull(Dummy.foo());
}
@Test
public void testStaticMockWithVerification() {
dummy.when(Dummy::foo).thenReturn("bar");
assertEquals("bar", Dummy.foo());
dummy.verify(Dummy::foo);
}
static | StaticMockRuleTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java | {
"start": 1587,
"end": 6752
} | class ____ extends AcknowledgedRequest<Request> {
public static final ParseField VOCABULARY = new ParseField("vocabulary");
public static final ParseField MERGES = new ParseField("merges");
public static final ParseField SCORES = new ParseField("scores");
private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("put_trained_model_vocabulary", Builder::new);
static {
PARSER.declareStringArray(Builder::setVocabulary, VOCABULARY);
PARSER.declareStringArray(Builder::setMerges, MERGES);
PARSER.declareDoubleArray(Builder::setScores, SCORES);
}
public static Request parseRequest(String modelId, XContentParser parser) {
return PARSER.apply(parser, null).build(modelId, false);
}
private final String modelId;
private final List<String> vocabulary;
private final List<String> merges;
private final List<Double> scores;
/**
* An internal flag for indicating whether the vocabulary can be overwritten
*/
private final boolean allowOverwriting;
public Request(
String modelId,
List<String> vocabulary,
@Nullable List<String> merges,
@Nullable List<Double> scores,
boolean allowOverwriting
) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID);
this.vocabulary = ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY);
this.merges = Optional.ofNullable(merges).orElse(List.of());
this.scores = Optional.ofNullable(scores).orElse(List.of());
this.allowOverwriting = allowOverwriting;
}
public Request(StreamInput in) throws IOException {
super(in);
this.modelId = in.readString();
this.vocabulary = in.readStringCollectionAsList();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) {
this.merges = in.readStringCollectionAsList();
} else {
this.merges = List.of();
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
this.scores = in.readCollectionAsList(StreamInput::readDouble);
} else {
this.scores = List.of();
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
this.allowOverwriting = in.readBoolean();
} else {
this.allowOverwriting = false;
}
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (vocabulary.isEmpty()) {
validationException = addValidationError("[vocabulary] must not be empty", validationException);
} else {
if (scores.isEmpty() == false && scores.size() != vocabulary.size()) {
validationException = addValidationError("[scores] must have same length as [vocabulary]", validationException);
}
}
return validationException;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(modelId, request.modelId)
&& Objects.equals(vocabulary, request.vocabulary)
&& Objects.equals(scores, request.scores)
&& Objects.equals(merges, request.merges)
&& allowOverwriting == request.allowOverwriting;
}
@Override
public int hashCode() {
return Objects.hash(modelId, vocabulary, merges, scores, allowOverwriting);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(modelId);
out.writeStringCollection(vocabulary);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) {
out.writeStringCollection(merges);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
out.writeCollection(scores, StreamOutput::writeDouble);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
out.writeBoolean(allowOverwriting);
}
}
public String getModelId() {
return modelId;
}
public List<String> getVocabulary() {
return vocabulary;
}
public List<String> getMerges() {
return merges;
}
public List<Double> getScores() {
return scores;
}
public boolean isOverwritingAllowed() {
return allowOverwriting;
}
public static | Request |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batchfetch/BatchFetchReferencedColumnNameTest.java | {
"start": 3180,
"end": 3700
} | class ____ {
@Id
@Column(name = "PARENT_ID")
private Long id;
@OneToMany(fetch = FetchType.EAGER, cascade = { CascadeType.ALL })
@JoinColumn(name = "PARENT_ID", referencedColumnName = "PARENT_ID")
@OrderBy("createdOn desc")
private List<Child> children;
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
this.children = children;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
}
| Parent |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java | {
"start": 49369,
"end": 59464
} | class ____ implements Runnable {
@Override
@SuppressWarnings("unchecked")
public void run() {
int lastHeartbeatID = 0;
boolean missedHearbeat = false;
while (!isStopped) {
// Send heartbeat
try {
NodeHeartbeatResponse response = null;
Set<NodeLabel> nodeLabelsForHeartbeat =
nodeLabelsHandler.getNodeLabelsForHeartbeat();
Set<NodeAttribute> nodeAttributesForHeartbeat =
nodeAttributesHandler.getNodeAttributesForHeartbeat();
NodeStatus nodeStatus = getNodeStatus(lastHeartbeatID);
NodeHeartbeatRequest request =
NodeHeartbeatRequest.newInstance(nodeStatus,
NodeStatusUpdaterImpl.this.context
.getContainerTokenSecretManager().getCurrentKey(),
NodeStatusUpdaterImpl.this.context
.getNMTokenSecretManager().getCurrentKey(),
nodeLabelsForHeartbeat,
nodeAttributesForHeartbeat,
NodeStatusUpdaterImpl.this.context
.getRegisteringCollectors());
if (logAggregationEnabled) {
// pull log aggregation status for application running in this NM
List<LogAggregationReport> logAggregationReports =
getLogAggregationReportsForApps(context
.getLogAggregationStatusForApps());
if (logAggregationReports != null
&& !logAggregationReports.isEmpty()) {
request.setLogAggregationReportsForApps(logAggregationReports);
}
}
request.setTokenSequenceNo(
NodeStatusUpdaterImpl.this.tokenSequenceNo);
response = resourceTracker.nodeHeartbeat(request);
//get next heartbeat interval from response
nextHeartBeatInterval = response.getNextHeartBeatInterval();
updateMasterKeys(response);
if (!handleShutdownOrResyncCommand(response)) {
nodeLabelsHandler.verifyRMHeartbeatResponseForNodeLabels(
response);
nodeAttributesHandler
.verifyRMHeartbeatResponseForNodeAttributes(response);
// Explicitly put this method after checking the resync
// response. We
// don't want to remove the completed containers before resync
// because these completed containers will be reported back to RM
// when NM re-registers with RM.
// Only remove the cleanedup containers that are acked
removeOrTrackCompletedContainersFromContext(response
.getContainersToBeRemovedFromNM());
// If the last heartbeat was missed, it is possible that the
// RM saw this one as a duplicate and did not process it.
// If so, we can fail to notify the RM of these completed containers
// on the next heartbeat if we clear pendingCompletedContainers.
// If it wasn't a duplicate, the only impact is we might notify
// the RM twice, which it can handle.
if (!missedHearbeat) {
pendingCompletedContainers.clear();
} else {
LOG.info("skipped clearing pending completed containers due to " +
"missed heartbeat");
missedHearbeat = false;
}
logAggregationReportForAppsTempList.clear();
lastHeartbeatID = response.getResponseId();
List<ContainerId> containersToCleanup = response
.getContainersToCleanup();
if (!containersToCleanup.isEmpty()) {
dispatcher.getEventHandler().handle(
new CMgrCompletedContainersEvent(containersToCleanup,
CMgrCompletedContainersEvent.Reason
.BY_RESOURCEMANAGER));
}
List<ApplicationId> appsToCleanup =
response.getApplicationsToCleanup();
//Only start tracking for keepAlive on FINISH_APP
trackAppsForKeepAlive(appsToCleanup);
if (!appsToCleanup.isEmpty()) {
dispatcher.getEventHandler().handle(
new CMgrCompletedAppsEvent(appsToCleanup,
CMgrCompletedAppsEvent.Reason.BY_RESOURCEMANAGER));
}
Map<ApplicationId, ByteBuffer> systemCredentials =
YarnServerBuilderUtils.convertFromProtoFormat(
response.getSystemCredentialsForApps());
if (systemCredentials != null && !systemCredentials.isEmpty()) {
((NMContext) context).setSystemCrendentialsForApps(
parseCredentials(systemCredentials));
context.getContainerManager().handleCredentialUpdate();
}
List<org.apache.hadoop.yarn.api.records.Container>
containersToUpdate = response.getContainersToUpdate();
if (!containersToUpdate.isEmpty()) {
dispatcher.getEventHandler().handle(
new CMgrUpdateContainersEvent(containersToUpdate));
}
// SignalContainer request originally comes from end users via
// ClientRMProtocol's SignalContainer. Forward the request to
// ContainerManager which will dispatch the event to
// ContainerLauncher.
List<SignalContainerRequest> containersToSignal = response
.getContainersToSignalList();
if (!containersToSignal.isEmpty()) {
dispatcher.getEventHandler().handle(
new CMgrSignalContainersEvent(containersToSignal));
}
// Update QueuingLimits if ContainerManager supports queuing
ContainerQueuingLimit queuingLimit =
response.getContainerQueuingLimit();
if (queuingLimit != null) {
context.getContainerManager().updateQueuingLimit(queuingLimit);
}
}
// Handling node resource update case.
Resource newResource = response.getResource();
if (newResource != null) {
updateNMResource(newResource);
LOG.debug("Node's resource is updated to {}", newResource);
if (!totalResource.equals(newResource)) {
LOG.info("Node's resource is updated to {}", newResource);
}
}
if (timelineServiceV2Enabled) {
updateTimelineCollectorData(response);
}
NodeStatusUpdaterImpl.this.tokenSequenceNo =
response.getTokenSequenceNo();
} catch (ConnectException e) {
//catch and throw the exception if tried MAX wait time to connect RM
dispatcher.getEventHandler().handle(
new NodeManagerEvent(NodeManagerEventType.SHUTDOWN));
// failed to connect to RM.
failedToConnect = true;
throw new YarnRuntimeException(e);
} catch (Exception e) {
// TODO Better error handling. Thread can die with the rest of the
// NM still running.
LOG.error("Caught exception in status-updater", e);
missedHearbeat = true;
} finally {
synchronized (heartbeatMonitor) {
nextHeartBeatInterval = nextHeartBeatInterval <= 0 ?
YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS :
nextHeartBeatInterval;
try {
heartbeatMonitor.wait(nextHeartBeatInterval);
} catch (InterruptedException e) {
// Do Nothing
}
}
}
}
}
private void updateTimelineCollectorData(
NodeHeartbeatResponse response) {
Map<ApplicationId, AppCollectorData> incomingCollectorsMap =
response.getAppCollectors();
if (incomingCollectorsMap == null) {
LOG.debug("No collectors to update RM");
return;
}
Map<ApplicationId, AppCollectorData> knownCollectors =
context.getKnownCollectors();
for (Map.Entry<ApplicationId, AppCollectorData> entry
: incomingCollectorsMap.entrySet()) {
ApplicationId appId = entry.getKey();
AppCollectorData collectorData = entry.getValue();
// Only handle applications running on local node.
Application application = context.getApplications().get(appId);
if (application != null) {
// Update collector data if the newly received data happens after
// the known data (updates the known data).
AppCollectorData existingData = knownCollectors.get(appId);
if (AppCollectorData.happensBefore(existingData, collectorData)) {
LOG.debug("Sync a new collector address: {} for application: {}"
+ " from RM.", collectorData.getCollectorAddr(), appId);
// Update information for clients.
NMTimelinePublisher nmTimelinePublisher =
context.getNMTimelinePublisher();
if (nmTimelinePublisher != null) {
nmTimelinePublisher.setTimelineServiceAddress(
application.getAppId(), collectorData.getCollectorAddr());
}
// Update information for the node manager itself.
knownCollectors.put(appId, collectorData);
}
}
// Remove the registering collector data
context.getRegisteringCollectors().remove(entry.getKey());
}
}
private void updateMasterKeys(NodeHeartbeatResponse response) {
// See if the master-key has rolled over
MasterKey updatedMasterKey = response.getContainerTokenMasterKey();
if (updatedMasterKey != null) {
// Will be non-null only on roll-over on RM side
context.getContainerTokenSecretManager().setMasterKey(updatedMasterKey);
}
updatedMasterKey = response.getNMTokenMasterKey();
if (updatedMasterKey != null) {
context.getNMTokenSecretManager().setMasterKey(updatedMasterKey);
}
}
}
}
| StatusUpdaterRunnable |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/EqualsHashCodeTest.java | {
"start": 2738,
"end": 2942
} | class ____ {
public int hashCode() {
return 42;
}
}
""")
.addSourceLines(
"Test.java",
"""
| Super |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/ScheduledAnnotationBeanPostProcessorTests.java | {
"start": 39560,
"end": 39642
} | class ____ {
@Scheduled
void invalid() {
}
}
static | EmptyAnnotationTestBean |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostMethodSecurityConfigurationTests.java | {
"start": 78174,
"end": 78434
} | class ____ implements FactoryBean<Object> {
@Override
public Object getObject() throws Exception {
return new Object();
}
@Override
public Class<?> getObjectType() {
return Object.class;
}
}
}
@Configuration
static | MyFactoryBean |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/queue/QueueUtil.java | {
"start": 1341,
"end": 2493
} | class ____ {
static ResultFuture<Integer> putSuccessfully(
StreamElementQueue<Integer> queue, StreamElement streamElement) {
Optional<ResultFuture<Integer>> resultFuture = queue.tryPut(streamElement);
assertThat(resultFuture).isPresent();
return resultFuture.get();
}
static void putUnsuccessfully(StreamElementQueue<Integer> queue, StreamElement streamElement) {
Optional<ResultFuture<Integer>> resultFuture = queue.tryPut(streamElement);
assertThat(resultFuture).isNotPresent();
}
/**
* Pops all completed elements from the head of this queue.
*
* @return Completed elements or empty list if none exists.
*/
static List<StreamElement> popCompleted(StreamElementQueue<Integer> queue) {
final List<StreamElement> completed = new ArrayList<>();
TimestampedCollector<Integer> collector =
new TimestampedCollector<>(new CollectorOutput<>(completed));
while (queue.hasCompletedElements()) {
queue.emitCompletedElement(collector);
}
collector.close();
return completed;
}
}
| QueueUtil |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/aot/hint/predicate/ProxyHintsPredicates.java | {
"start": 1130,
"end": 2484
} | class ____ {
ProxyHintsPredicates() {
}
/**
* Return a predicate that checks whether a {@link org.springframework.aot.hint.JdkProxyHint}
* is registered for the given interfaces.
* <p>Note that the order in which interfaces are given matters.
* @param interfaces the proxied interfaces
* @return the {@link RuntimeHints} predicate
* @see java.lang.reflect.Proxy
*/
public Predicate<RuntimeHints> forInterfaces(Class<?>... interfaces) {
Assert.notEmpty(interfaces, "'interfaces' should not be empty");
return forInterfaces(Arrays.stream(interfaces).map(TypeReference::of).toArray(TypeReference[]::new));
}
/**
* Return a predicate that checks whether a {@link org.springframework.aot.hint.JdkProxyHint}
* is registered for the given interfaces.
* <p>Note that the order in which interfaces are given matters.
* @param interfaces the proxied interfaces as type references
* @return the {@link RuntimeHints} predicate
* @see java.lang.reflect.Proxy
*/
public Predicate<RuntimeHints> forInterfaces(TypeReference... interfaces) {
Assert.notEmpty(interfaces, "'interfaces' should not be empty");
List<TypeReference> interfaceList = Arrays.asList(interfaces);
return hints -> hints.proxies().jdkProxyHints().anyMatch(proxyHint ->
proxyHint.getProxiedInterfaces().equals(interfaceList));
}
}
| ProxyHintsPredicates |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/booleanarray/BooleanArrayAssert_usingDefaultComparator_Test.java | {
"start": 1176,
"end": 1815
} | class ____ extends BooleanArrayAssertBaseTest {
private BooleanArrays arraysBefore;
@BeforeEach
void before() {
arraysBefore = getArrays(assertions);
}
@Override
protected BooleanArrayAssert invoke_api_method() {
return assertions.usingComparator(alwaysEqual())
.usingDefaultComparator();
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions).getComparator()).isNull();
assertThat(getObjects(assertions)).isSameAs(Objects.instance());
assertThat(getArrays(assertions)).isSameAs(arraysBefore);
}
}
| BooleanArrayAssert_usingDefaultComparator_Test |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/builder/RouteConfigurationBuilder.java | {
"start": 1297,
"end": 4637
} | class ____ extends RouteBuilder implements RouteConfigurationsBuilder {
private final AtomicBoolean initializedConfiguration = new AtomicBoolean();
private RouteConfigurationsDefinition routeConfigurationCollection = new RouteConfigurationsDefinition();
@Override
public void configure() throws Exception {
// noop
}
@Override
public abstract void configuration() throws Exception;
public RouteConfigurationsDefinition getRouteConfigurationCollection() {
return routeConfigurationCollection;
}
public void setRouteConfigurationCollection(RouteConfigurationsDefinition routeConfigurationCollection) {
this.routeConfigurationCollection = routeConfigurationCollection;
}
/**
* Creates a new route configuration
*
* @return the builder
*/
public RouteConfigurationDefinition routeConfiguration() {
return routeConfiguration(null);
}
/**
* Creates a new route configuration
*
* @return the builder
*/
public RouteConfigurationDefinition routeConfiguration(String id) {
getRouteConfigurationCollection().setCamelContext(getCamelContext());
RouteConfigurationDefinition answer = getRouteConfigurationCollection().routeConfiguration(id);
configureRouteConfiguration(answer);
return answer;
}
@Override
public void addRouteConfigurationsToCamelContext(CamelContext context) throws Exception {
setCamelContext(context);
routeConfigurationCollection.setCamelContext(context);
if (initializedConfiguration.compareAndSet(false, true)) {
configuration();
}
populateRoutesConfiguration();
}
@Override
public void updateRouteConfigurationsToCamelContext(CamelContext context) throws Exception {
setCamelContext(context);
routeConfigurationCollection.setCamelContext(context);
if (initializedConfiguration.compareAndSet(false, true)) {
configuration();
}
List<RouteConfigurationDefinition> list = getRouteConfigurationCollection().getRouteConfigurations();
if (!list.isEmpty()) {
// remove existing before updating
for (RouteConfigurationDefinition def : list) {
context.getCamelContextExtension().getContextPlugin(Model.class).removeRouteConfiguration(def);
}
populateRoutesConfiguration();
}
}
@Override
protected void initializeCamelContext(CamelContext camelContext) {
super.initializeCamelContext(camelContext);
getRouteConfigurationCollection().setCamelContext(camelContext);
}
protected void populateRoutesConfiguration() throws Exception {
CamelContext camelContext = getContext();
if (camelContext == null) {
throw new IllegalArgumentException("CamelContext has not been injected!");
}
getRouteConfigurationCollection().setCamelContext(camelContext);
if (getResource() != null) {
getRouteConfigurationCollection().setResource(getResource());
}
camelContext.getCamelContextExtension().getContextPlugin(Model.class)
.addRouteConfigurations(getRouteConfigurationCollection().getRouteConfigurations());
}
}
| RouteConfigurationBuilder |
java | junit-team__junit5 | junit-platform-console/src/main/java/org/junit/platform/console/command/BaseCommand.java | {
"start": 791,
"end": 3271
} | class ____<T> implements Callable<T> {
@Spec
CommandSpec commandSpec;
@Mixin
AnsiColorOptionMixin ansiColorOption;
@Option(names = "--disable-banner", description = "Disable print out of the welcome message.")
private boolean disableBanner;
@SuppressWarnings("unused")
@Option(names = { "-h", "--help" }, usageHelp = true, description = "Display help information.")
private boolean helpRequested;
@SuppressWarnings("unused")
@Option(names = "--version", versionHelp = true, description = "Display version information.")
private boolean versionRequested;
void execute(String... args) {
toCommandLine().execute(args);
}
void parseArgs(String... args) {
toCommandLine().parseArgs(args);
}
private CommandLine toCommandLine() {
return BaseCommand.initialize(new CommandLine(this));
}
static CommandLine initialize(CommandLine commandLine) {
CommandLine.IParameterExceptionHandler defaultParameterExceptionHandler = commandLine.getParameterExceptionHandler();
return commandLine //
.setParameterExceptionHandler((ex, args) -> {
defaultParameterExceptionHandler.handleParseException(ex, args);
return ExitCode.ANY_ERROR;
}) //
.setExecutionExceptionHandler((ex, cmd, __) -> {
commandLine.getErr().println(cmd.getColorScheme().richStackTraceString(ex));
commandLine.getErr().println();
commandLine.getErr().flush();
cmd.usage(commandLine.getOut());
return ExitCode.ANY_ERROR;
}) //
.setCaseInsensitiveEnumValuesAllowed(true) //
.setAtFileCommentChar(null);
}
@Override
public final T call() {
PrintWriter out = getOut();
if (!disableBanner) {
displayBanner(out);
}
try {
return execute(out);
}
catch (PreconditionViolationException e) {
throw new ParameterException(commandSpec.commandLine(), e.getMessage(), e.getCause());
}
}
private PrintWriter getOut() {
return commandSpec.commandLine().getOut();
}
private void displayBanner(PrintWriter out) {
out.println();
CommandLine.Help.ColorScheme colorScheme = getColorScheme();
if (colorScheme.ansi().enabled()) {
out.print("💚 ");
}
out.println(colorScheme.string(
"@|italic Thanks for using JUnit!|@ Support its development at @|underline https://junit.org/sponsoring|@"));
out.println();
out.flush();
}
protected final CommandLine.Help.ColorScheme getColorScheme() {
return commandSpec.commandLine().getColorScheme();
}
protected abstract T execute(PrintWriter out);
}
| BaseCommand |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationTypeMappingsTests.java | {
"start": 23233,
"end": 23298
} | interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@A
@B
@ | B |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/TestContextManagerSuppressedExceptionsTests.java | {
"start": 3562,
"end": 3768
} | class ____ implements TestExecutionListener {
@Override
public void afterTestExecution(TestContext testContext) {
fail("afterTestExecution-1");
}
}
private static | FailingAfterTestExecutionListener1 |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/ParallelFlatMapTest.java | {
"start": 849,
"end": 1827
} | class ____ {
@Test
public void parallelism() {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel(3);
ParallelFlatMap<Integer, Integer> test = new ParallelFlatMap<>(source,
i -> Flux.range(1, i), false, 12,
Queues.small(), 123, Queues.small());
assertThat(test.parallelism())
.isEqualTo(3)
.isEqualTo(source.parallelism());
}
@Test
public void scanOperator() throws Exception {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel(3);
ParallelFlatMap<Integer, Integer> test = new ParallelFlatMap<>(source,
i -> Flux.range(1, i), true, 12,
Queues.small(), 123, Queues.small());
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(source);
assertThat(test.scan(Scannable.Attr.PREFETCH))
.isEqualTo(123)
.isNotEqualTo(source.getPrefetch());
assertThat(test.scan(Scannable.Attr.DELAY_ERROR)).isTrue();
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
}
| ParallelFlatMapTest |
java | apache__kafka | transaction-coordinator/src/test/java/org/apache/kafka/coordinator/transaction/ProducerIdManagerTest.java | {
"start": 2572,
"end": 9684
} | class ____ extends RPCProducerIdManager {
private final Queue<Errors> errorQueue;
private final boolean isErroneousBlock;
private final AtomicBoolean capturedFailure = new AtomicBoolean(false);
private final ExecutorService brokerToControllerRequestExecutor = Executors.newSingleThreadExecutor();
private final int idLen;
private Long idStart;
MockProducerIdManager(int brokerId,
long idStart,
int idLen,
Queue<Errors> errorQueue,
boolean isErroneousBlock,
Time time) {
super(brokerId, time, () -> 1L, brokerToController);
this.idStart = idStart;
this.idLen = idLen;
this.errorQueue = errorQueue;
this.isErroneousBlock = isErroneousBlock;
}
@Override
protected void sendRequest() {
brokerToControllerRequestExecutor.submit(() -> {
Errors error = errorQueue.poll();
if (error == null || error == Errors.NONE) {
handleAllocateProducerIdsResponse(new AllocateProducerIdsResponse(
new AllocateProducerIdsResponseData()
.setProducerIdStart(idStart)
.setProducerIdLen(idLen)
));
if (!isErroneousBlock) {
idStart += idLen;
}
} else {
handleAllocateProducerIdsResponse(new AllocateProducerIdsResponse(
new AllocateProducerIdsResponseData().setErrorCode(error.code())
));
}
}, 0);
}
@Override
protected void handleAllocateProducerIdsResponse(AllocateProducerIdsResponse response) {
super.handleAllocateProducerIdsResponse(response);
capturedFailure.set(nextProducerIdBlock.get() == null);
}
}
@ParameterizedTest
@ValueSource(ints = {1, 2, 10, 100})
public void testConcurrentGeneratePidRequests(int idBlockLen) throws InterruptedException {
// Send concurrent generateProducerId requests. Ensure that the generated producer id is unique.
// For each block (total 3 blocks), only "idBlockLen" number of requests should go through.
// All other requests should fail immediately.
var numThreads = 5;
var latch = new CountDownLatch(idBlockLen * 3);
var manager = new MockProducerIdManager(0, 0, idBlockLen,
new ConcurrentLinkedQueue<>(), false, Time.SYSTEM);
var requestHandlerThreadPool = Executors.newFixedThreadPool(numThreads);
Map<Long, Integer> pidMap = new ConcurrentHashMap<>();
for (int i = 0; i < numThreads; i++) {
requestHandlerThreadPool.submit(() -> {
while (latch.getCount() > 0) {
long result;
try {
result = manager.generateProducerId();
synchronized (pidMap) {
if (latch.getCount() != 0) {
pidMap.merge(result, 1, Integer::sum);
latch.countDown();
}
}
} catch (Exception e) {
assertEquals(CoordinatorLoadInProgressException.class, e.getClass());
}
assertDoesNotThrow(() -> Thread.sleep(100));
}
});
}
assertTrue(latch.await(12000, TimeUnit.MILLISECONDS));
requestHandlerThreadPool.shutdown();
assertEquals(idBlockLen * 3, pidMap.size());
pidMap.forEach((pid, count) -> {
assertEquals(1, count);
assertTrue(pid < (3L * idBlockLen) + numThreads, "Unexpected pid " + pid + "; non-contiguous blocks generated or did not fully exhaust blocks.");
});
}
@ParameterizedTest
@EnumSource(value = Errors.class, names = {"UNKNOWN_SERVER_ERROR", "INVALID_REQUEST"})
public void testUnrecoverableErrors(Errors error) throws Exception {
var time = new MockTime();
var manager = new MockProducerIdManager(0, 0, 1, queue(Errors.NONE, error), false, time);
verifyNewBlockAndProducerId(manager, new ProducerIdsBlock(0, 0, 1), 0);
verifyFailureWithoutGenerateProducerId(manager);
time.sleep(RETRY_BACKOFF_MS);
verifyNewBlockAndProducerId(manager, new ProducerIdsBlock(0, 1, 1), 1);
}
@Test
public void testInvalidRanges() throws InterruptedException {
var manager = new MockProducerIdManager(0, -1, 10, new ConcurrentLinkedQueue<>(), true, Time.SYSTEM);
verifyFailure(manager);
manager = new MockProducerIdManager(0, 0, -1, new ConcurrentLinkedQueue<>(), true, Time.SYSTEM);
verifyFailure(manager);
manager = new MockProducerIdManager(0, Long.MAX_VALUE - 1, 10, new ConcurrentLinkedQueue<>(), true, Time.SYSTEM);
verifyFailure(manager);
}
@Test
public void testRetryBackoff() throws Exception {
var time = new MockTime();
var manager = new MockProducerIdManager(0, 0, 1, queue(Errors.UNKNOWN_SERVER_ERROR), false, time);
verifyFailure(manager);
assertThrows(CoordinatorLoadInProgressException.class, manager::generateProducerId);
time.sleep(RETRY_BACKOFF_MS);
verifyNewBlockAndProducerId(manager, new ProducerIdsBlock(0, 0, 1), 0);
}
private Queue<Errors> queue(Errors... errors) {
Queue<Errors> queue = new ConcurrentLinkedQueue<>();
Collections.addAll(queue, errors);
return queue;
}
private void verifyFailure(MockProducerIdManager manager) throws InterruptedException {
assertThrows(CoordinatorLoadInProgressException.class, manager::generateProducerId);
verifyFailureWithoutGenerateProducerId(manager);
}
private void verifyFailureWithoutGenerateProducerId(MockProducerIdManager manager) throws InterruptedException {
TestUtils.waitForCondition(() -> {
synchronized (manager) {
return manager.capturedFailure.get();
}
}, "Expected failure");
manager.capturedFailure.set(false);
}
private void verifyNewBlockAndProducerId(MockProducerIdManager manager,
ProducerIdsBlock expectedBlock,
long expectedPid
) throws Exception {
assertThrows(CoordinatorLoadInProgressException.class, manager::generateProducerId);
TestUtils.waitForCondition(() -> {
ProducerIdsBlock nextBlock = manager.nextProducerIdBlock.get();
return nextBlock != null && nextBlock.equals(expectedBlock);
}, "failed to generate block");
assertEquals(expectedPid, manager.generateProducerId());
}
} | MockProducerIdManager |
java | quarkusio__quarkus | test-framework/junit5-component/src/test/java/io/quarkus/test/component/declarative/InterceptorMethodsTest.java | {
"start": 2037,
"end": 2368
} | class ____ {
@Inject
Bean<TheComponent> bean;
String ping() {
return "true";
}
@NoClassInterceptors
public Bean<?> getBean() {
return bean;
}
}
@Target({ TYPE, METHOD })
@Retention(RUNTIME)
@InterceptorBinding
public @ | TheComponent |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/type/ImageArchive.java | {
"start": 8612,
"end": 10285
} | class ____ {
private final Image image;
private ImageConfig config;
private @Nullable Instant createDate;
private @Nullable ImageReference tag;
private final List<Layer> newLayers = new ArrayList<>();
private Update(Image image) {
this.image = image;
this.config = image.getConfig();
}
private ImageArchive applyTo(IOConsumer<Update> update) throws IOException {
update.accept(this);
Instant createDate = (this.createDate != null) ? this.createDate : WINDOWS_EPOCH_PLUS_SECOND;
return new ImageArchive(SharedJsonMapper.get(), this.config, createDate, this.tag, this.image.getOs(),
this.image.getArchitecture(), this.image.getVariant(), this.image.getLayers(),
Collections.unmodifiableList(this.newLayers));
}
/**
* Apply updates to the {@link ImageConfig}.
* @param update consumer to apply updates
*/
public void withUpdatedConfig(Consumer<ImageConfig.Update> update) {
this.config = this.config.copy(update);
}
/**
* Add a new layer to the image archive.
* @param layer the layer to add
*/
public void withNewLayer(Layer layer) {
Assert.notNull(layer, "'layer' must not be null");
this.newLayers.add(layer);
}
/**
* Set the create date for the image archive.
* @param createDate the create date
*/
public void withCreateDate(Instant createDate) {
Assert.notNull(createDate, "'createDate' must not be null");
this.createDate = createDate;
}
/**
* Set the tag for the image archive.
* @param tag the tag
*/
public void withTag(ImageReference tag) {
Assert.notNull(tag, "'tag' must not be null");
this.tag = tag.inTaggedForm();
}
}
}
| Update |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-web-secure/src/main/java/smoketest/web/secure/SampleWebSecureApplication.java | {
"start": 979,
"end": 1365
} | class ____ implements WebMvcConfigurer {
@Override
public void addViewControllers(ViewControllerRegistry registry) {
registry.addViewController("/").setViewName("home");
registry.addViewController("/login").setViewName("login");
}
public static void main(String[] args) {
new SpringApplicationBuilder(SampleWebSecureApplication.class).run(args);
}
}
| SampleWebSecureApplication |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/CacheRegionStatisticsTest.java | {
"start": 2797,
"end": 2992
} | class ____ {
@Id
private String name;
@ElementCollection
private Set<String> nickNames = new HashSet<>();
public Dog(String name) {
this.name = name;
}
public Dog() {
}
}
}
| Dog |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/MappingConstants.java | {
"start": 1625,
"end": 1852
} | enum ____ strategy that adds a suffix to the source enum.
*
* @since 1.4
*/
public static final String SUFFIX_TRANSFORMATION = "suffix";
/**
* In an {@link EnumMapping} this represent the | transformation |
java | micronaut-projects__micronaut-core | benchmarks/src/jmh/java/io/micronaut/http/server/stack/ControllersBenchmark.java | {
"start": 4738,
"end": 15298
} | enum ____ {
TFB_LIKE {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/bytes");
request.headers().add(HttpHeaderNames.ACCEPT, "text/plain,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("text/plain", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = "Hello, World!";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
TFB_STRING {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/string");
request.headers().add(HttpHeaderNames.ACCEPT, "text/plain,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("text/plain", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = "Hello, World!";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
TFB_LIKE_BEANS1 {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/beans1");
request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = """
[{"id":1,"message":"A"},{"id":2,"message":"B"},{"id":3,"message":"C"}]""";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
TFB_LIKE_BEANS2 {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/beans2");
request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = """
[{"id":1,"randomNumber":123},{"id":2,"randomNumber":456},{"id":3,"randomNumber":789}]""";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
TFB_LIKE_ASYNC_BEANS1 {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/async/beans1");
request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = """
[{"id":1,"message":"A"},{"id":2,"message":"B"},{"id":3,"message":"C"}]""";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
TFB_LIKE_ASYNC_BEANS2 {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/async/beans2");
request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = """
[{"id":1,"randomNumber":123},{"id":2,"randomNumber":456},{"id":3,"randomNumber":789}]""";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
// Type pollution because of the Reactor
// TFB_LIKE_REACTIVE_BEANS1 {
// @Override
// FullHttpRequest request() {
// FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/reactive/beans1");
// request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
// return request;
// }
//
// @Override
// void verifyResponse(FullHttpResponse response) {
// Assertions.assertEquals(HttpResponseStatus.OK, response.status());
// Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
// String expectedResponseBody = """
//[{"id":1,"message":"A"},{"id":2,"message":"B"},{"id":3,"message":"C"}]""";
// Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
// Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
// }
// },
// TFB_LIKE_REACTIVE_BEANS2 {
// @Override
// FullHttpRequest request() {
// FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/reactive/beans2");
// request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
// return request;
// }
//
// @Override
// void verifyResponse(FullHttpResponse response) {
// Assertions.assertEquals(HttpResponseStatus.OK, response.status());
// Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
// String expectedResponseBody = """
//[{"id":1,"randomNumber":123},{"id":2,"randomNumber":456},{"id":3,"randomNumber":789}]""";
// Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
// Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
// }
// },
TFB_LIKE_MAP {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/tfblike/map");
request.headers().add(HttpHeaderNames.ACCEPT, "application/json");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("application/json", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = """
{"message":"Hello, World!"}""";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
},
MISSING_QUERY_PARAMETER {
@Override
FullHttpRequest request() {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/ctrl/text-echo/foo");
request.headers().add(HttpHeaderNames.ACCEPT, "text/plain");
return request;
}
@Override
void verifyResponse(FullHttpResponse response) {
Assertions.assertEquals(HttpResponseStatus.OK, response.status());
Assertions.assertEquals("text/plain", response.headers().get(HttpHeaderNames.CONTENT_TYPE));
String expectedResponseBody = "foo";
Assertions.assertEquals(expectedResponseBody, response.content().toString(StandardCharsets.UTF_8));
Assertions.assertEquals(expectedResponseBody.length(), response.headers().getInt(HttpHeaderNames.CONTENT_LENGTH));
}
};
abstract FullHttpRequest request();
abstract void verifyResponse(FullHttpResponse response);
}
@Controller("/tfblike")
@Requires(property = "spec.name", value = "ControllersBenchmark")
static | Request |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/error/ShouldBeIn_create_Test.java | {
"start": 1375,
"end": 2741
} | class ____ {
@Test
void should_create_error_message() {
// GIVEN
ErrorMessageFactory factory = shouldBeIn("Yoda", array("Luke", "Leia"));
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" \"Yoda\"%n" +
"to be in:%n" +
" [\"Luke\", \"Leia\"]%n"));
}
@Test
void should_create_error_message_with_custom_comparison_strategy() {
// GIVEN
ErrorMessageFactory factory = shouldBeIn("Yoda", array("Luke", "Leia"),
new ComparatorBasedComparisonStrategy(CaseInsensitiveStringComparator.INSTANCE));
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" \"Yoda\"%n" +
"to be in:%n" +
" [\"Luke\", \"Leia\"]%n" +
"when comparing values using CaseInsensitiveStringComparator"));
}
}
| ShouldBeIn_create_Test |
java | apache__dubbo | dubbo-compatible/src/test/java/org/apache/dubbo/common/extension/ExtensionTest.java | {
"start": 1383,
"end": 3133
} | class ____ {
@Test
void testExtensionFactory() {
try {
ExtensionInjector myfactory =
ExtensionLoader.getExtensionLoader(ExtensionInjector.class).getExtension("myfactory");
Assertions.assertTrue(myfactory instanceof ExtensionInjector);
Assertions.assertTrue(myfactory instanceof ExtensionFactory);
Assertions.assertTrue(myfactory instanceof com.alibaba.dubbo.common.extension.ExtensionFactory);
Assertions.assertTrue(myfactory instanceof MyExtensionFactory);
ExtensionInjector spring =
ExtensionLoader.getExtensionLoader(ExtensionInjector.class).getExtension("spring");
Assertions.assertTrue(spring instanceof ExtensionInjector);
Assertions.assertFalse(spring instanceof ExtensionFactory);
Assertions.assertFalse(spring instanceof com.alibaba.dubbo.common.extension.ExtensionFactory);
} catch (IllegalArgumentException expected) {
fail();
}
}
private <T> ExtensionLoader<T> getExtensionLoader(Class<T> type) {
return ApplicationModel.defaultModel().getExtensionDirector().getExtensionLoader(type);
}
@Test
void testLoadActivateExtension() {
// test default
URL url = URL.valueOf("test://localhost/test").addParameter(GROUP_KEY, "old_group");
List<ActivateExt1> list =
getExtensionLoader(ActivateExt1.class).getActivateExtension(url, new String[] {}, "old_group");
Assertions.assertEquals(2, list.size());
Assertions.assertTrue(list.get(0).getClass() == OldActivateExt1Impl2.class
|| list.get(0).getClass() == OldActivateExt1Impl3.class);
}
}
| ExtensionTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/recovery/TaskManagerRunnerITCase.java | {
"start": 2043,
"end": 5576
} | class ____ extends TestLogger {
@ClassRule public static final TemporaryFolder TEMPORARY_FOLDER = new TemporaryFolder();
@Test
public void testDeterministicWorkingDirIsNotDeletedInCaseOfProcessFailure() throws Exception {
final File workingDirBase = TEMPORARY_FOLDER.newFolder();
final ResourceID resourceId = ResourceID.generate();
final Configuration configuration = new Configuration();
configuration.set(
ClusterOptions.PROCESS_WORKING_DIR_BASE, workingDirBase.getAbsolutePath());
configuration.set(TaskManagerOptions.TASK_MANAGER_RESOURCE_ID, resourceId.toString());
configuration.set(JobManagerOptions.ADDRESS, "localhost");
configuration.set(RpcOptions.LOOKUP_TIMEOUT_DURATION, Duration.ZERO);
final File workingDirectory =
ClusterEntrypointUtils.generateTaskManagerWorkingDirectoryFile(
configuration, resourceId);
final TestProcessBuilder.TestProcess taskManagerProcess =
new TestProcessBuilder(TaskExecutorProcessEntryPoint.class.getName())
.addConfigAsMainClassArgs(configuration)
.start();
boolean success = false;
try {
CommonTestUtils.waitUntilCondition(workingDirectory::exists);
taskManagerProcess.getProcess().destroy();
taskManagerProcess.getProcess().waitFor();
assertTrue(workingDirectory.exists());
success = true;
} finally {
if (!success) {
AbstractTaskManagerProcessFailureRecoveryTest.printProcessLog(
"TaskManager", taskManagerProcess);
}
}
}
@Test
public void testNondeterministicWorkingDirIsDeletedInCaseOfProcessFailure() throws Exception {
final File workingDirBase = TEMPORARY_FOLDER.newFolder();
final Configuration configuration = new Configuration();
configuration.set(
ClusterOptions.PROCESS_WORKING_DIR_BASE, workingDirBase.getAbsolutePath());
configuration.set(JobManagerOptions.ADDRESS, "localhost");
configuration.set(RpcOptions.LOOKUP_TIMEOUT_DURATION, Duration.ZERO);
final TestProcessBuilder.TestProcess taskManagerProcess =
new TestProcessBuilder(TaskExecutorProcessEntryPoint.class.getName())
.addConfigAsMainClassArgs(configuration)
.start();
boolean success = false;
try {
CommonTestUtils.waitUntilCondition(
() -> {
try (Stream<Path> files = Files.list(workingDirBase.toPath())) {
return files.findAny().isPresent();
}
});
final File workingDirectory =
Iterables.getOnlyElement(
Files.list(workingDirBase.toPath())
.collect(Collectors.toList()))
.toFile();
taskManagerProcess.getProcess().destroy();
taskManagerProcess.getProcess().waitFor();
assertFalse(workingDirectory.exists());
success = true;
} finally {
if (!success) {
AbstractTaskManagerProcessFailureRecoveryTest.printProcessLog(
"TaskManager", taskManagerProcess);
}
}
}
}
| TaskManagerRunnerITCase |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/security/AuthenticatedMethodTest.java | {
"start": 505,
"end": 1929
} | interface ____ extends PanacheEntityResource<Item, Long> {
@Authenticated
boolean delete(Long id);
}
@Test
void test() {
// list method is not protected so we should get an HTTP 200 even if no user is specified
given().accept("application/json")
.when()
.get("/items")
.then()
.statusCode(200)
.body("$", hasSize(2));
// delete method is protected so we should get an HTTP 401 when no user is specified
given().accept("application/json")
.when()
.delete("/items/1")
.then()
.statusCode(401);
// delete method is protected so we should get an HTTP 401 when a wrong username and password is specified
given().auth().preemptive()
.basic("foo", "foo2")
.accept("application/json")
.when()
.delete("/items/1")
.then()
.statusCode(401);
// delete method is protected so we should get an HTTP 204 when the proper username and password are specified
given().auth().preemptive()
.basic("foo", "foo")
.accept("application/json")
.when()
.delete("/items/1")
.then()
.statusCode(204);
}
}
| ItemsResource |
java | quarkusio__quarkus | extensions/spring-data-jpa/deployment/src/test/java/io/quarkus/spring/data/devmode/BookRepository.java | {
"start": 128,
"end": 240
} | interface ____ extends Repository<Book, Integer> {
List<Book> findAll();
// <placeholder>
}
| BookRepository |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java | {
"start": 2569,
"end": 15323
} | class ____ extends ESTestCase {
private MockLicenseState xPackLicenseState;
private FileOperatorUsersStore fileOperatorUsersStore;
private DefaultOperatorOnlyRegistry operatorOnlyRegistry;
private OperatorPrivilegesService operatorPrivilegesService;
@Before
public void init() {
xPackLicenseState = mock(MockLicenseState.class);
fileOperatorUsersStore = mock(FileOperatorUsersStore.class);
operatorOnlyRegistry = mock(DefaultOperatorOnlyRegistry.class);
operatorPrivilegesService = new DefaultOperatorPrivilegesService(xPackLicenseState, fileOperatorUsersStore, operatorOnlyRegistry);
}
public void testWillMarkThreadContextForAllLicenses() {
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(randomBoolean());
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
final Authentication authentication = AuthenticationTestHelper.builder().realm().build(false);
operatorPrivilegesService.maybeMarkOperatorUser(authentication, threadContext);
verify(fileOperatorUsersStore, times(1)).isOperatorUser(authentication);
assertThat(threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY), notNullValue());
}
public void testWillNotCheckWhenLicenseDoesNotSupport() {
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(false);
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
final ElasticsearchSecurityException e = operatorPrivilegesService.check(
AuthenticationTestHelper.builder().build(),
"cluster:action",
mock(TransportRequest.class),
threadContext
);
assertNull(e);
verifyNoMoreInteractions(operatorOnlyRegistry);
}
public void testMarkOperatorUser() {
final Settings settings = Settings.builder().put("xpack.security.operator_privileges.enabled", true).build();
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(true);
final User operatorUser = new User("operator_user");
final Authentication operatorAuth = AuthenticationTestHelper.builder().user(operatorUser).build(false);
final Authentication nonOperatorAuth = AuthenticationTestHelper.builder().user(new User("non_operator_user")).build();
when(fileOperatorUsersStore.isOperatorUser(operatorAuth)).thenReturn(true);
when(fileOperatorUsersStore.isOperatorUser(nonOperatorAuth)).thenReturn(false);
ThreadContext threadContext = new ThreadContext(settings);
// Will mark for the operator user
final Logger logger = LogManager.getLogger(OperatorPrivileges.class);
Loggers.setLevel(logger, Level.DEBUG);
try (var mockLog = MockLog.capture(OperatorPrivileges.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"marking",
logger.getName(),
Level.DEBUG,
"Marking user [" + operatorUser + "] as an operator"
)
);
operatorPrivilegesService.maybeMarkOperatorUser(operatorAuth, threadContext);
assertEquals(
AuthenticationField.PRIVILEGE_CATEGORY_VALUE_OPERATOR,
threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY)
);
mockLog.assertAllExpectationsMatched();
} finally {
Loggers.setLevel(logger, (Level) null);
}
// Will mark empty for non-operator user
threadContext = new ThreadContext(settings);
operatorPrivilegesService.maybeMarkOperatorUser(nonOperatorAuth, threadContext);
assertThat(
threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY),
equalTo(AuthenticationField.PRIVILEGE_CATEGORY_VALUE_EMPTY)
);
// Will mark empty for run_as user
final Authentication runAsAuth = AuthenticationTestHelper.builder().user(operatorUser).runAs().user(operatorUser).build();
Mockito.reset(fileOperatorUsersStore);
when(fileOperatorUsersStore.isOperatorUser(runAsAuth)).thenReturn(true);
threadContext = new ThreadContext(settings);
operatorPrivilegesService.maybeMarkOperatorUser(runAsAuth, threadContext);
assertThat(
threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY),
equalTo(AuthenticationField.PRIVILEGE_CATEGORY_VALUE_EMPTY)
);
verify(fileOperatorUsersStore, never()).isOperatorUser(any());
// Will mark for internal users
final Authentication internalAuth = AuthenticationTestHelper.builder().internal().build();
threadContext = new ThreadContext(settings);
operatorPrivilegesService.maybeMarkOperatorUser(internalAuth, threadContext);
assertEquals(
AuthenticationField.PRIVILEGE_CATEGORY_VALUE_OPERATOR,
threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY)
);
// Will skip if header already exist
threadContext = new ThreadContext(settings);
final String value = randomAlphaOfLength(20);
threadContext.putHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY, value);
operatorPrivilegesService.maybeMarkOperatorUser(nonOperatorAuth, threadContext);
assertThat(threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY), equalTo(value));
verify(fileOperatorUsersStore, never()).isOperatorUser(any());
}
public void testCheck() {
final Settings settings = Settings.builder().put("xpack.security.operator_privileges.enabled", true).build();
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(true);
final String operatorAction = "cluster:operator_only/action";
final String nonOperatorAction = "cluster:non_operator/action";
final String message = "[" + operatorAction + "]";
when(operatorOnlyRegistry.check(eq(operatorAction), any())).thenReturn(() -> message);
when(operatorOnlyRegistry.check(eq(nonOperatorAction), any())).thenReturn(null);
ThreadContext threadContext = new ThreadContext(settings);
final Authentication authentication = randomValueOtherThanMany(
authc -> Authentication.AuthenticationType.INTERNAL == authc.getAuthenticationType(),
() -> AuthenticationTestHelper.builder().build()
);
if (randomBoolean()) {
threadContext.putHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY, AuthenticationField.PRIVILEGE_CATEGORY_VALUE_OPERATOR);
assertNull(operatorPrivilegesService.check(authentication, operatorAction, mock(TransportRequest.class), threadContext));
} else {
final ElasticsearchSecurityException e = operatorPrivilegesService.check(
authentication,
operatorAction,
mock(TransportRequest.class),
threadContext
);
assertNotNull(e);
assertThat(e.getMessage(), containsString("Operator privileges are required for " + message));
}
assertNull(operatorPrivilegesService.check(authentication, nonOperatorAction, mock(TransportRequest.class), threadContext));
}
public void testCheckWillPassForInternalUsersBecauseTheyHaveOperatorPrivileges() {
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
threadContext.putHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY, AuthenticationField.PRIVILEGE_CATEGORY_VALUE_OPERATOR);
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(true);
final Authentication internalAuth = AuthenticationTestHelper.builder().internal().build();
assertNull(
operatorPrivilegesService.check(internalAuth, randomAlphaOfLengthBetween(20, 30), mock(TransportRequest.class), threadContext)
);
verify(operatorOnlyRegistry, never()).check(anyString(), any());
}
public void testMaybeInterceptRequest() {
final boolean licensed = randomBoolean();
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(licensed);
final Logger logger = LogManager.getLogger(OperatorPrivileges.class);
Loggers.setLevel(logger, Level.DEBUG);
try (var mockLog = MockLog.capture(OperatorPrivileges.class)) {
final RestoreSnapshotRequest restoreSnapshotRequest = mock(RestoreSnapshotRequest.class);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"intercepting",
logger.getName(),
Level.DEBUG,
"Intercepting [" + restoreSnapshotRequest + "] for operator privileges"
)
);
operatorPrivilegesService.maybeInterceptRequest(new ThreadContext(Settings.EMPTY), restoreSnapshotRequest);
verify(restoreSnapshotRequest).skipOperatorOnlyState(licensed);
mockLog.assertAllExpectationsMatched();
} finally {
Loggers.setLevel(logger, (Level) null);
}
}
public void testMaybeInterceptRequestWillNotInterceptRequestsOtherThanRestoreSnapshotRequest() {
final TransportRequest transportRequest = mock(TransportRequest.class);
operatorPrivilegesService.maybeInterceptRequest(new ThreadContext(Settings.EMPTY), transportRequest);
verifyNoMoreInteractions(xPackLicenseState);
}
public void testNoOpService() {
final Authentication authentication = AuthenticationTestHelper.builder().build();
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
NOOP_OPERATOR_PRIVILEGES_SERVICE.maybeMarkOperatorUser(authentication, threadContext);
assertNull(threadContext.getHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY));
final TransportRequest request = mock(TransportRequest.class);
assertNull(
NOOP_OPERATOR_PRIVILEGES_SERVICE.check(
AuthenticationTestHelper.builder().build(),
randomAlphaOfLengthBetween(10, 20),
request,
threadContext
)
);
verifyNoMoreInteractions(request);
}
public void testNoOpServiceMaybeInterceptRequest() {
final RestoreSnapshotRequest restoreSnapshotRequest = mock(RestoreSnapshotRequest.class);
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
NOOP_OPERATOR_PRIVILEGES_SERVICE.maybeInterceptRequest(threadContext, restoreSnapshotRequest);
verify(restoreSnapshotRequest).skipOperatorOnlyState(false);
// The test just makes sure that other requests are also accepted without any error
NOOP_OPERATOR_PRIVILEGES_SERVICE.maybeInterceptRequest(threadContext, mock(TransportRequest.class));
}
public void testCheckRest() {
final Settings settings = Settings.builder().put("xpack.security.operator_privileges.enabled", true).build();
when(xPackLicenseState.isAllowed(Security.OPERATOR_PRIVILEGES_FEATURE)).thenReturn(true);
RestHandler restHandler = mock(RestHandler.class);
RestRequest restRequest = mock(RestRequest.class);
RestChannel restChannel = mock(RestChannel.class);
ThreadContext threadContext = new ThreadContext(settings);
// not an operator
doThrow(new ElasticsearchSecurityException("violation!")).when(operatorOnlyRegistry).checkRest(restHandler, restRequest);
final ElasticsearchException ex = expectThrows(
ElasticsearchException.class,
() -> operatorPrivilegesService.checkRest(restHandler, restRequest, restChannel, threadContext)
);
assertThat(ex, instanceOf(ElasticsearchSecurityException.class));
assertThat(ex, throwableWithMessage("violation!"));
verify(restRequest, never()).markAsOperatorRequest();
Mockito.clearInvocations(operatorOnlyRegistry);
Mockito.clearInvocations(restRequest);
// is an operator
threadContext.putHeader(AuthenticationField.PRIVILEGE_CATEGORY_KEY, AuthenticationField.PRIVILEGE_CATEGORY_VALUE_OPERATOR);
verifyNoInteractions(operatorOnlyRegistry);
assertTrue(operatorPrivilegesService.checkRest(restHandler, restRequest, restChannel, threadContext));
verify(restRequest, times(1)).markAsOperatorRequest();
Mockito.clearInvocations(operatorOnlyRegistry);
Mockito.clearInvocations(restRequest);
}
}
| DefaultOperatorPrivilegesTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/strategy/PartialFinishedInputConsumableDecider.java | {
"start": 2810,
"end": 3337
} | class ____ implements InputConsumableDecider.Factory {
public static final Factory INSTANCE = new Factory();
private Factory() {}
@Override
public InputConsumableDecider createInstance(
SchedulingTopology schedulingTopology,
Function<ExecutionVertexID, Boolean> scheduledVertexRetriever,
Function<ExecutionVertexID, ExecutionState> executionStateRetriever) {
return new PartialFinishedInputConsumableDecider();
}
}
}
| Factory |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend2.java | {
"start": 2110,
"end": 13904
} | class ____ {
{
DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
}
static final int numBlocks = 5;
private byte[] fileContents = null;
final int numDatanodes = 6;
final int numberOfFiles = 50;
final int numThreads = 10;
final int numAppendsPerThread = 20;
Workload[] workload = null;
final ArrayList<Path> testFiles = new ArrayList<Path>();
volatile static boolean globalStatus = true;
/**
* Creates one file, writes a few bytes to it and then closed it.
* Reopens the same file for appending, write all blocks and then close.
* Verify that all data exists in file.
* @throws IOException an exception might be thrown
*/
@Test
public void testSimpleAppend() throws IOException {
final Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY, 50);
fileContents = AppendTestUtil.initBuffer(AppendTestUtil.FILE_SIZE);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
FileSystem fs = cluster.getFileSystem();
try {
{ // test appending to a file.
// create a new file.
Path file1 = new Path("/simpleAppend.dat");
FSDataOutputStream stm = AppendTestUtil.createFile(fs, file1, 1);
System.out.println("Created file simpleAppend.dat");
// write to file
int mid = 186; // io.bytes.per.checksum bytes
System.out.println("Writing " + mid + " bytes to file " + file1);
stm.write(fileContents, 0, mid);
stm.close();
System.out.println("Wrote and Closed first part of file.");
// write to file
int mid2 = 607; // io.bytes.per.checksum bytes
System.out.println("Writing " + mid + " bytes to file " + file1);
stm = fs.append(file1);
stm.write(fileContents, mid, mid2-mid);
stm.close();
System.out.println("Wrote and Closed second part of file.");
// write the remainder of the file
stm = fs.append(file1);
// ensure getPos is set to reflect existing size of the file
assertTrue(stm.getPos() > 0);
System.out.println("Writing " + (AppendTestUtil.FILE_SIZE - mid2) +
" bytes to file " + file1);
stm.write(fileContents, mid2, AppendTestUtil.FILE_SIZE - mid2);
System.out.println("Written second part of file");
stm.close();
System.out.println("Wrote and Closed second part of file.");
// verify that entire file is good
AppendTestUtil.checkFullFile(fs, file1, AppendTestUtil.FILE_SIZE,
fileContents, "Read 2");
}
{ // test appending to an non-existing file.
FSDataOutputStream out = null;
try {
out = fs.append(new Path("/non-existing.dat"));
fail("Expected to have FileNotFoundException");
}
catch(java.io.FileNotFoundException fnfe) {
System.out.println("Good: got " + fnfe);
fnfe.printStackTrace(System.out);
}
finally {
IOUtils.closeStream(out);
}
}
{ // test append permission.
//set root to all writable
Path root = new Path("/");
fs.setPermission(root, new FsPermission((short)0777));
fs.close();
// login as a different user
final UserGroupInformation superuser =
UserGroupInformation.getCurrentUser();
String username = "testappenduser";
String group = "testappendgroup";
assertFalse(superuser.getShortUserName().equals(username));
assertFalse(Arrays.asList(superuser.getGroupNames()).contains(group));
UserGroupInformation appenduser =
UserGroupInformation.createUserForTesting(username, new String[]{group});
fs = DFSTestUtil.getFileSystemAs(appenduser, conf);
// create a file
Path dir = new Path(root, getClass().getSimpleName());
Path foo = new Path(dir, "foo.dat");
FSDataOutputStream out = null;
int offset = 0;
try {
out = fs.create(foo);
int len = 10 + AppendTestUtil.nextInt(100);
out.write(fileContents, offset, len);
offset += len;
}
finally {
IOUtils.closeStream(out);
}
// change dir and foo to minimal permissions.
fs.setPermission(dir, new FsPermission((short)0100));
fs.setPermission(foo, new FsPermission((short)0200));
// try append, should success
out = null;
try {
out = fs.append(foo);
int len = 10 + AppendTestUtil.nextInt(100);
out.write(fileContents, offset, len);
offset += len;
}
finally {
IOUtils.closeStream(out);
}
// change dir and foo to all but no write on foo.
fs.setPermission(foo, new FsPermission((short)0577));
fs.setPermission(dir, new FsPermission((short)0777));
// try append, should fail
out = null;
try {
out = fs.append(foo);
fail("Expected to have AccessControlException");
}
catch(AccessControlException ace) {
System.out.println("Good: got " + ace);
ace.printStackTrace(System.out);
}
finally {
IOUtils.closeStream(out);
}
}
} catch (IOException e) {
System.out.println("Exception :" + e);
throw e;
} catch (Throwable e) {
System.out.println("Throwable :" + e);
e.printStackTrace();
throw new IOException("Throwable : " + e);
} finally {
fs.close();
cluster.shutdown();
}
}
/**
* Creates one file, writes a few bytes to it and then closed it.
* Reopens the same file for appending using append2 API, write all blocks and
* then close. Verify that all data exists in file.
*/
@Test
public void testSimpleAppend2() throws Exception {
final Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY, 50);
fileContents = AppendTestUtil.initBuffer(AppendTestUtil.FILE_SIZE);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
DistributedFileSystem fs = cluster.getFileSystem();
try {
{ // test appending to a file.
// create a new file.
Path file1 = new Path("/simpleAppend.dat");
FSDataOutputStream stm = AppendTestUtil.createFile(fs, file1, 1);
System.out.println("Created file simpleAppend.dat");
// write to file
int mid = 186; // io.bytes.per.checksum bytes
System.out.println("Writing " + mid + " bytes to file " + file1);
stm.write(fileContents, 0, mid);
stm.close();
System.out.println("Wrote and Closed first part of file.");
// write to file
int mid2 = 607; // io.bytes.per.checksum bytes
System.out.println("Writing " + mid + " bytes to file " + file1);
stm = fs.append(file1,
EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null);
stm.write(fileContents, mid, mid2-mid);
stm.close();
System.out.println("Wrote and Closed second part of file.");
// write the remainder of the file
stm = fs.append(file1,
EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null);
// ensure getPos is set to reflect existing size of the file
assertTrue(stm.getPos() > 0);
System.out.println("Writing " + (AppendTestUtil.FILE_SIZE - mid2) +
" bytes to file " + file1);
stm.write(fileContents, mid2, AppendTestUtil.FILE_SIZE - mid2);
System.out.println("Written second part of file");
stm.close();
System.out.println("Wrote and Closed second part of file.");
// verify that entire file is good
AppendTestUtil.checkFullFile(fs, file1, AppendTestUtil.FILE_SIZE,
fileContents, "Read 2");
// also make sure there three different blocks for the file
List<LocatedBlock> blocks = fs.getClient().getLocatedBlocks(
file1.toString(), 0L).getLocatedBlocks();
assertEquals(12, blocks.size()); // the block size is 1024
assertEquals(mid, blocks.get(0).getBlockSize());
assertEquals(mid2 - mid, blocks.get(1).getBlockSize());
for (int i = 2; i < 11; i++) {
assertEquals(AppendTestUtil.BLOCK_SIZE, blocks.get(i).getBlockSize());
}
assertEquals((AppendTestUtil.FILE_SIZE - mid2)
% AppendTestUtil.BLOCK_SIZE, blocks.get(11).getBlockSize());
}
{ // test appending to an non-existing file.
FSDataOutputStream out = null;
try {
out = fs.append(new Path("/non-existing.dat"),
EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null);
fail("Expected to have FileNotFoundException");
} catch(java.io.FileNotFoundException fnfe) {
System.out.println("Good: got " + fnfe);
fnfe.printStackTrace(System.out);
} finally {
IOUtils.closeStream(out);
}
}
{ // test append permission.
// set root to all writable
Path root = new Path("/");
fs.setPermission(root, new FsPermission((short)0777));
fs.close();
// login as a different user
final UserGroupInformation superuser =
UserGroupInformation.getCurrentUser();
String username = "testappenduser";
String group = "testappendgroup";
assertFalse(superuser.getShortUserName().equals(username));
assertFalse(Arrays.asList(superuser.getGroupNames()).contains(group));
UserGroupInformation appenduser = UserGroupInformation
.createUserForTesting(username, new String[] { group });
fs = (DistributedFileSystem) DFSTestUtil.getFileSystemAs(appenduser,
conf);
// create a file
Path dir = new Path(root, getClass().getSimpleName());
Path foo = new Path(dir, "foo.dat");
FSDataOutputStream out = null;
int offset = 0;
try {
out = fs.create(foo);
int len = 10 + AppendTestUtil.nextInt(100);
out.write(fileContents, offset, len);
offset += len;
} finally {
IOUtils.closeStream(out);
}
// change dir and foo to minimal permissions.
fs.setPermission(dir, new FsPermission((short)0100));
fs.setPermission(foo, new FsPermission((short)0200));
// try append, should success
out = null;
try {
out = fs.append(foo,
EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null);
int len = 10 + AppendTestUtil.nextInt(100);
out.write(fileContents, offset, len);
offset += len;
} finally {
IOUtils.closeStream(out);
}
// change dir and foo to all but no write on foo.
fs.setPermission(foo, new FsPermission((short)0577));
fs.setPermission(dir, new FsPermission((short)0777));
// try append, should fail
out = null;
try {
out = fs.append(foo,
EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null);
fail("Expected to have AccessControlException");
} catch(AccessControlException ace) {
System.out.println("Good: got " + ace);
ace.printStackTrace(System.out);
} finally {
IOUtils.closeStream(out);
}
}
} finally {
fs.close();
cluster.shutdown();
}
}
//
// an object that does a bunch of appends to files
//
| TestFileAppend2 |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/testing/utilities/testpropertyvalues/MyEnvironmentTests.java | {
"start": 914,
"end": 1178
} | class ____ {
@Test
void testPropertySources() {
MockEnvironment environment = new MockEnvironment();
TestPropertyValues.of("org=Spring", "name=Boot").applyTo(environment);
assertThat(environment.getProperty("name")).isEqualTo("Boot");
}
}
| MyEnvironmentTests |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/RingHashLoadBalancer.java | {
"start": 2706,
"end": 13402
} | class ____ extends MultiChildLoadBalancer {
private static final Status RPC_HASH_NOT_FOUND =
Status.INTERNAL.withDescription("RPC hash not found. Probably a bug because xds resolver"
+ " config selector always generates a hash.");
private static final XxHash64 hashFunc = XxHash64.INSTANCE;
private final LoadBalancer.Factory lazyLbFactory =
new LazyLoadBalancer.Factory(pickFirstLbProvider);
private final XdsLogger logger;
private final SynchronizationContext syncContext;
private final ThreadSafeRandom random;
private List<RingEntry> ring;
@Nullable private Metadata.Key<String> requestHashHeaderKey;
RingHashLoadBalancer(Helper helper) {
this(helper, ThreadSafeRandomImpl.instance);
}
@VisibleForTesting
RingHashLoadBalancer(Helper helper, ThreadSafeRandom random) {
super(helper);
syncContext = checkNotNull(helper.getSynchronizationContext(), "syncContext");
logger = XdsLogger.withLogId(InternalLogId.allocate("ring_hash_lb", helper.getAuthority()));
logger.log(XdsLogLevel.INFO, "Created");
this.random = checkNotNull(random, "random");
}
@Override
public Status acceptResolvedAddresses(ResolvedAddresses resolvedAddresses) {
logger.log(XdsLogLevel.DEBUG, "Received resolution result: {0}", resolvedAddresses);
List<EquivalentAddressGroup> addrList = resolvedAddresses.getAddresses();
Status addressValidityStatus = validateAddrList(addrList);
if (!addressValidityStatus.isOk()) {
return addressValidityStatus;
}
// Now do the ringhash specific logic with weights and building the ring
RingHashConfig config = (RingHashConfig) resolvedAddresses.getLoadBalancingPolicyConfig();
if (config == null) {
throw new IllegalArgumentException("Missing RingHash configuration");
}
requestHashHeaderKey =
config.requestHashHeader.isEmpty()
? null
: Metadata.Key.of(config.requestHashHeader, Metadata.ASCII_STRING_MARSHALLER);
Map<EquivalentAddressGroup, Long> serverWeights = new HashMap<>();
long totalWeight = 0L;
for (EquivalentAddressGroup eag : addrList) {
Long weight = eag.getAttributes().get(XdsAttributes.ATTR_SERVER_WEIGHT);
// Support two ways of server weighing: either multiple instances of the same address
// or each address contains a per-address weight attribute. If a weight is not provided,
// each occurrence of the address will be counted a weight value of one.
if (weight == null) {
weight = 1L;
}
totalWeight += weight;
EquivalentAddressGroup addrKey = stripAttrs(eag);
if (serverWeights.containsKey(addrKey)) {
serverWeights.put(addrKey, serverWeights.get(addrKey) + weight);
} else {
serverWeights.put(addrKey, weight);
}
}
// Calculate scale
long minWeight = Collections.min(serverWeights.values());
double normalizedMinWeight = (double) minWeight / totalWeight;
// Scale up the number of hashes per host such that the least-weighted host gets a whole
// number of hashes on the the ring. Other hosts might not end up with whole numbers, and
// that's fine (the ring-building algorithm can handle this). This preserves the original
// implementation's behavior: when weights aren't provided, all hosts should get an equal
// number of hashes. In the case where this number exceeds the max_ring_size, it's scaled
// back down to fit.
double scale = Math.min(
Math.ceil(normalizedMinWeight * config.minRingSize) / normalizedMinWeight,
(double) config.maxRingSize);
// Build the ring
ring = buildRing(serverWeights, totalWeight, scale);
return super.acceptResolvedAddresses(resolvedAddresses);
}
/**
* Updates the overall balancing state by aggregating the connectivity states of all subchannels.
*
* <p>Aggregation rules (in order of dominance):
* <ol>
* <li>If there is at least one subchannel in READY state, overall state is READY</li>
* <li>If there are <em>2 or more</em> subchannels in TRANSIENT_FAILURE, overall state is
* TRANSIENT_FAILURE (to allow timely failover to another policy)</li>
* <li>If there is at least one subchannel in CONNECTING state, overall state is
* CONNECTING</li>
* <li> If there is one subchannel in TRANSIENT_FAILURE state and there is
* more than one subchannel, report CONNECTING </li>
* <li>If there is at least one subchannel in IDLE state, overall state is IDLE</li>
* <li>Otherwise, overall state is TRANSIENT_FAILURE</li>
* </ol>
*/
@Override
protected void updateOverallBalancingState() {
checkState(!getChildLbStates().isEmpty(), "no subchannel has been created");
if (this.currentConnectivityState == SHUTDOWN) {
// Ignore changes that happen after shutdown is called
logger.log(XdsLogLevel.DEBUG, "UpdateOverallBalancingState called after shutdown");
return;
}
// Calculate the current overall state to report
int numIdle = 0;
int numReady = 0;
int numConnecting = 0;
int numTF = 0;
forloop:
for (ChildLbState childLbState : getChildLbStates()) {
ConnectivityState state = childLbState.getCurrentState();
switch (state) {
case READY:
numReady++;
break forloop;
case CONNECTING:
numConnecting++;
break;
case IDLE:
numIdle++;
break;
case TRANSIENT_FAILURE:
numTF++;
break;
default:
// ignore it
}
}
ConnectivityState overallState;
if (numReady > 0) {
overallState = READY;
} else if (numTF >= 2) {
overallState = TRANSIENT_FAILURE;
} else if (numConnecting > 0) {
overallState = CONNECTING;
} else if (numTF == 1 && getChildLbStates().size() > 1) {
overallState = CONNECTING;
} else if (numIdle > 0) {
overallState = IDLE;
} else {
overallState = TRANSIENT_FAILURE;
}
RingHashPicker picker =
new RingHashPicker(syncContext, ring, getChildLbStates(), requestHashHeaderKey, random);
getHelper().updateBalancingState(overallState, picker);
this.currentConnectivityState = overallState;
}
@Override
protected ChildLbState createChildLbState(Object key) {
return new ChildLbState(key, lazyLbFactory);
}
private Status validateAddrList(List<EquivalentAddressGroup> addrList) {
if (addrList.isEmpty()) {
Status unavailableStatus = Status.UNAVAILABLE.withDescription("Ring hash lb error: EDS "
+ "resolution was successful, but returned server addresses are empty.");
handleNameResolutionError(unavailableStatus);
return unavailableStatus;
}
String dupAddrString = validateNoDuplicateAddresses(addrList);
if (dupAddrString != null) {
Status unavailableStatus = Status.UNAVAILABLE.withDescription("Ring hash lb error: EDS "
+ "resolution was successful, but there were duplicate addresses: " + dupAddrString);
handleNameResolutionError(unavailableStatus);
return unavailableStatus;
}
long totalWeight = 0;
for (EquivalentAddressGroup eag : addrList) {
Long weight = eag.getAttributes().get(XdsAttributes.ATTR_SERVER_WEIGHT);
if (weight == null) {
weight = 1L;
}
if (weight < 0) {
Status unavailableStatus = Status.UNAVAILABLE.withDescription(
String.format("Ring hash lb error: EDS resolution was successful, but returned a "
+ "negative weight for %s.", stripAttrs(eag)));
handleNameResolutionError(unavailableStatus);
return unavailableStatus;
}
if (weight > UnsignedInteger.MAX_VALUE.longValue()) {
Status unavailableStatus = Status.UNAVAILABLE.withDescription(
String.format("Ring hash lb error: EDS resolution was successful, but returned a weight"
+ " too large to fit in an unsigned int for %s.", stripAttrs(eag)));
handleNameResolutionError(unavailableStatus);
return unavailableStatus;
}
totalWeight += weight;
}
if (totalWeight > UnsignedInteger.MAX_VALUE.longValue()) {
Status unavailableStatus = Status.UNAVAILABLE.withDescription(
String.format(
"Ring hash lb error: EDS resolution was successful, but returned a sum of weights too"
+ " large to fit in an unsigned int (%d).", totalWeight));
handleNameResolutionError(unavailableStatus);
return unavailableStatus;
}
return Status.OK;
}
@Nullable
private String validateNoDuplicateAddresses(List<EquivalentAddressGroup> addrList) {
Set<SocketAddress> addresses = new HashSet<>();
Multiset<String> dups = HashMultiset.create();
for (EquivalentAddressGroup eag : addrList) {
for (SocketAddress address : eag.getAddresses()) {
if (!addresses.add(address)) {
dups.add(address.toString());
}
}
}
if (!dups.isEmpty()) {
return dups.entrySet().stream()
.map((dup) ->
String.format("Address: %s, count: %d", dup.getElement(), dup.getCount() + 1))
.collect(Collectors.joining("; "));
}
return null;
}
private static List<RingEntry> buildRing(
Map<EquivalentAddressGroup, Long> serverWeights, long totalWeight, double scale) {
List<RingEntry> ring = new ArrayList<>();
double currentHashes = 0.0;
double targetHashes = 0.0;
for (Map.Entry<EquivalentAddressGroup, Long> entry : serverWeights.entrySet()) {
Endpoint endpoint = new Endpoint(entry.getKey());
double normalizedWeight = (double) entry.getValue() / totalWeight;
// Per GRFC A61 use the first address for the hash
StringBuilder sb = new StringBuilder(entry.getKey().getAddresses().get(0).toString());
sb.append('_');
int lengthWithoutCounter = sb.length();
targetHashes += scale * normalizedWeight;
long i = 0L;
while (currentHashes < targetHashes) {
sb.append(i);
long hash = hashFunc.hashAsciiString(sb.toString());
ring.add(new RingEntry(hash, endpoint));
i++;
currentHashes++;
sb.setLength(lengthWithoutCounter);
}
}
Collections.sort(ring);
return Collections.unmodifiableList(ring);
}
@SuppressWarnings("ReferenceEquality")
public static EquivalentAddressGroup stripAttrs(EquivalentAddressGroup eag) {
if (eag.getAttributes() == Attributes.EMPTY) {
return eag;
}
return new EquivalentAddressGroup(eag.getAddresses());
}
private static final | RingHashLoadBalancer |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/schedulers/ScheduledDirectPeriodicTaskTest.java | {
"start": 952,
"end": 1712
} | class ____ extends RxJavaTest {
@Test
public void runnableThrows() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
ScheduledDirectPeriodicTask task = new ScheduledDirectPeriodicTask(new Runnable() {
@Override
public void run() {
throw new TestException();
}
}, true);
try {
task.run();
fail("Should have thrown!");
} catch (TestException expected) {
// expected
}
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
}
| ScheduledDirectPeriodicTaskTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/boolean2darrays/Boolean2DArrays_assertNumberOfRows_Test.java | {
"start": 806,
"end": 1090
} | class ____ extends Boolean2DArraysBaseTest {
@Test
void should_delegate_to_Arrays2D() {
// WHEN
boolean2dArrays.assertNumberOfRows(info, actual, 2);
// THEN
verify(arrays2d).assertNumberOfRows(info, failures, actual, 2);
}
}
| Boolean2DArrays_assertNumberOfRows_Test |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/autoconfigure/DataSourcePoolMetadataProvidersConfiguration.java | {
"start": 2262,
"end": 2914
} | class ____ {
@Bean
DataSourcePoolMetadataProvider tomcatPoolDataSourceMetadataProvider() {
return (dataSource) -> {
org.apache.tomcat.jdbc.pool.DataSource tomcatDataSource = DataSourceUnwrapper.unwrap(dataSource,
ConnectionPoolMBean.class, org.apache.tomcat.jdbc.pool.DataSource.class);
if (tomcatDataSource != null) {
return new TomcatDataSourcePoolMetadata(tomcatDataSource);
}
return null;
};
}
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(HikariDataSource.class)
@ImportRuntimeHints(HikariDataSourcePoolMetadataRuntimeHints.class)
static | TomcatDataSourcePoolMetadataProviderConfiguration |
java | apache__flink | flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/runtime/StateBootstrapTransformationWithID.java | {
"start": 1139,
"end": 1409
} | class ____ represents a newly bootstrapped operator state within savepoints.
* It wraps the target {@link OperatorID} for the bootstrapped operator, as well as the {@link
* StateBootstrapTransformation} that defines how the state is bootstrapped.
*/
@Internal
public | that |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hhh12076/SettlementExtension.java | {
"start": 182,
"end": 1277
} | class ____ {
private Long _id;
private Integer _version;
private Date _creationDate;
private Date _modifiedDate;
private Integer _orderIndex;
private Settlement _settlement;
public SettlementExtension() {
}
public Claim getClaim() {
return _settlement.getClaim();
}
public Long getId() {
return _id;
}
protected void setId(Long id) {
_id = id;
}
public Integer getVersion() {
return _version;
}
public void setVersion(Integer version) {
_version = version;
}
public Date getCreationDate() {
return _creationDate;
}
public void setCreationDate(Date creationDate) {
_creationDate = creationDate;
}
public Date getModifiedDate() {
return _modifiedDate;
}
public void setModifiedDate(Date modifiedDate) {
_modifiedDate = modifiedDate;
}
public Settlement getSettlement() {
return _settlement;
}
public void setSettlement(Settlement settlement) {
_settlement = settlement;
}
public Integer getOrderIndex() {
return _orderIndex;
}
public void setOrderIndex(Integer orderIndex) {
_orderIndex = orderIndex;
}
}
| SettlementExtension |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/PropertySource.java | {
"start": 3852,
"end": 7244
} | class ____ {
private static final Pattern PREFIX_PATTERN = Pattern.compile(
// just lookahead for AsyncLogger
"(^log4j2?[-._/]?|^org\\.apache\\.logging\\.log4j\\.)|(?=AsyncLogger(Config)?\\.)",
Pattern.CASE_INSENSITIVE);
private static final Pattern PROPERTY_TOKENIZER = Pattern.compile("([A-Z]*[a-z0-9]+|[A-Z0-9]+)[-._/]?");
private static final Map<CharSequence, List<CharSequence>> CACHE = new ConcurrentHashMap<>();
static {
// Add legacy properties without Log4j prefix
CACHE.put("disableThreadContext", Arrays.asList("disable", "thread", "context"));
CACHE.put("disableThreadContextStack", Arrays.asList("disable", "thread", "context", "stack"));
CACHE.put("disableThreadContextMap", Arrays.asList("disable", "thread", "context", "map"));
CACHE.put("isThreadContextMapInheritable", Arrays.asList("is", "thread", "context", "map", "inheritable"));
}
/**
* Converts a property name string into a list of tokens. This will strip a prefix of {@code log4j},
* {@code log4j2}, {@code Log4j}, or {@code org.apache.logging.log4j}, along with separators of
* dash {@code -}, dot {@code .}, underscore {@code _}, and slash {@code /}. Tokens can also be separated
* by camel case conventions without needing a separator character in between.
*
* @param value property name
* @return the property broken into lower case tokens
*/
// https://errorprone.info/bugpattern/CollectionUndefinedEquality
@SuppressWarnings("CollectionUndefinedEquality")
public static List<CharSequence> tokenize(final CharSequence value) {
// `value` should be a `String`
if (CACHE.containsKey(value.toString())) {
return CACHE.get(value.toString());
}
final List<CharSequence> tokens = new ArrayList<>();
int start = 0;
final Matcher prefixMatcher = PREFIX_PATTERN.matcher(value);
if (prefixMatcher.find(start)) {
start = prefixMatcher.end();
final Matcher matcher = PROPERTY_TOKENIZER.matcher(value);
while (matcher.find(start)) {
tokens.add(toRootLowerCase(matcher.group(1)));
start = matcher.end();
}
}
CACHE.put(value, tokens);
return tokens;
}
/**
* Joins a list of strings using camelCaseConventions.
*
* @param tokens tokens to convert
* @return tokensAsCamelCase
*/
public static CharSequence joinAsCamelCase(final Iterable<? extends CharSequence> tokens) {
final StringBuilder sb = new StringBuilder();
boolean first = true;
for (final CharSequence token : tokens) {
if (first) {
sb.append(token);
} else {
sb.append(Character.toUpperCase(token.charAt(0)));
if (token.length() > 1) {
sb.append(token.subSequence(1, token.length()));
}
}
first = false;
}
return sb.toString();
}
private Util() {}
}
}
| Util |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxSampleFirst.java | {
"start": 1624,
"end": 2401
} | class ____<T, U> extends InternalFluxOperator<T, T> {
final Function<? super T, ? extends Publisher<U>> throttler;
FluxSampleFirst(Flux<? extends T> source,
Function<? super T, ? extends Publisher<U>> throttler) {
super(source);
this.throttler = Objects.requireNonNull(throttler, "throttler");
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
SampleFirstMain<T, U> main = new SampleFirstMain<>(actual, throttler);
actual.onSubscribe(main);
return main;
}
@Override
public int getPrefetch() {
return Integer.MAX_VALUE;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
static final | FluxSampleFirst |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1269/model/VehicleTypeInfo.java | {
"start": 238,
"end": 696
} | class ____ {
private final String type;
private final String name;
private final Integer doors;
public VehicleTypeInfo(String type, String name, Integer doors) {
this.type = type;
this.name = name;
this.doors = doors;
}
public String getType() {
return type;
}
public String getName() {
return name;
}
public Integer getDoors() {
return doors;
}
}
| VehicleTypeInfo |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ComponentScanAnnotationIntegrationTests.java | {
"start": 16047,
"end": 16221
} | interface ____ {
}
@Configuration
@ComponentScan("example.scannable_implicitbasepackage")
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@ | MetaConfiguration1 |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/BeanReference.java | {
"start": 988,
"end": 1230
} | interface ____ by any kind of bean reference
* holder, such as {@link RuntimeBeanReference RuntimeBeanReference} and
* {@link RuntimeBeanNameReference RuntimeBeanNameReference}.
*
* @author Juergen Hoeller
* @since 2.0
*/
public | implemented |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/NoticeTask.java | {
"start": 1700,
"end": 8144
} | class ____ extends DefaultTask {
@InputFile
@PathSensitive(PathSensitivity.RELATIVE)
private File inputFile;
@OutputFile
private File outputFile;
private FileTree sources;
/**
* Directories to include notices from
*/
@Internal
abstract ListProperty<File> getLicenseDirs();
private final FileOperations fileOperations;
@Inject
public NoticeTask(BuildLayout buildLayout, ProjectLayout projectLayout, FileOperations fileOperations) {
this.fileOperations = fileOperations;
setDescription("Create a notice file from dependencies");
// Default licenses directory is ${projectDir}/licenses (if it exists)
getLicenseDirs().add(projectLayout.getProjectDirectory().dir("licenses").getAsFile());
inputFile = new File(buildLayout.getRootDirectory(), "NOTICE.txt");
outputFile = projectLayout.getBuildDirectory().dir("notices/" + getName()).get().file("NOTICE.txt").getAsFile();
}
/**
* Add notices from the specified directory.
*/
public void licensesDir(File licensesDir) {
getLicenseDirs().add(licensesDir);
}
public void source(Object source) {
if (sources == null) {
sources = fileOperations.fileTree(source);
} else {
sources = sources.plus(fileOperations.fileTree(source));
}
}
public void source(SourceDirectorySet source) {
if (sources == null) {
sources = source;
} else {
sources = sources.plus(source);
}
}
@TaskAction
public void generateNotice() throws IOException {
StringBuilder output = new StringBuilder();
output.append(readFileToString(inputFile, "UTF-8"));
output.append("\n\n");
// This is a map rather than a set so that the sort order is the 3rd
// party component names, unaffected by the full path to the various files
final Map<String, File> seen = new TreeMap<String, File>();
FileCollection noticeFiles = getNoticeFiles();
if (noticeFiles != null) {
for (File file : getNoticeFiles()) {
String name = file.getName().replaceFirst("-NOTICE\\.txt$", "");
if (seen.containsKey(name)) {
File prevFile = seen.get(name);
String previousFileText = readFileToString(prevFile, "UTF-8");
if (previousFileText.equals(readFileToString(file, "UTF-8")) == false) {
throw new RuntimeException(
"Two different notices exist for dependency '" + name + "': " + prevFile + " and " + file
);
}
} else {
seen.put(name, file);
}
}
}
// Add all LICENSE and NOTICE files in licenses directory
seen.forEach((name, file) -> {
appendFile(file, name, "NOTICE", output);
appendFile(new File(file.getParentFile(), name + "-LICENSE.txt"), name, "LICENSE", output);
});
// Find any source files with "@notice" annotated license header
for (File sourceFile : sources.getFiles()) {
boolean isPackageInfo = sourceFile.getName().equals("package-info.java");
boolean foundNotice = false;
boolean inNotice = false;
StringBuilder header = new StringBuilder();
String packageDeclaration = null;
for (String line : FileUtils.readLines(sourceFile, "UTF-8")) {
if (isPackageInfo && packageDeclaration == null && line.startsWith("package")) {
packageDeclaration = line;
}
if (foundNotice == false) {
foundNotice = line.contains("@notice");
inNotice = true;
} else {
if (line.contains("*/")) {
inNotice = false;
if (isPackageInfo == false) {
break;
}
} else if (inNotice) {
header.append(StringGroovyMethods.stripMargin(line, "*"));
header.append("\n");
}
}
}
if (foundNotice) {
appendText(header.toString(), isPackageInfo ? packageDeclaration : sourceFile.getName(), "", output);
}
}
FileUtils.write(outputFile, output.toString(), "UTF-8");
}
@InputFiles
@Optional
@PathSensitive(PathSensitivity.RELATIVE)
public FileCollection getNoticeFiles() {
FileTree tree = null;
for (File dir : existingLicenseDirs()) {
if (tree == null) {
tree = fileOperations.fileTree(dir);
} else {
tree = tree.plus(fileOperations.fileTree(dir));
}
}
return tree == null ? null : tree.matching(patternFilterable -> patternFilterable.include("**/*-NOTICE.txt"));
}
private List<File> existingLicenseDirs() {
return getLicenseDirs().get().stream().filter(d -> d.exists()).collect(Collectors.toList());
}
@InputFiles
@Optional
@PathSensitive(PathSensitivity.RELATIVE)
public FileCollection getSources() {
return sources;
}
public static void appendFile(File file, String name, String type, StringBuilder output) {
String text = FileUtils.read(file, "UTF-8");
if (text.trim().isEmpty()) {
return;
}
appendText(text, name, type, output);
}
public static void appendText(String text, final String name, final String type, StringBuilder output) {
output.append("================================================================================\n");
output.append(name + " " + type + "\n");
output.append("================================================================================\n");
output.append(text);
output.append("\n\n");
}
public File getInputFile() {
return inputFile;
}
public void setInputFile(File inputFile) {
this.inputFile = inputFile;
}
public File getOutputFile() {
return outputFile;
}
public void setOutputFile(File outputFile) {
this.outputFile = outputFile;
}
}
| NoticeTask |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateMetadata.java | {
"start": 1465,
"end": 4689
} | class ____ implements Metadata.ProjectCustom {
public static final String TYPE = "index_template";
private static final ParseField INDEX_TEMPLATE = new ParseField("index_template");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<ComposableIndexTemplateMetadata, Void> PARSER = new ConstructingObjectParser<>(
TYPE,
false,
a -> new ComposableIndexTemplateMetadata((Map<String, ComposableIndexTemplate>) a[0])
);
static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> {
Map<String, ComposableIndexTemplate> templates = new HashMap<>();
while (p.nextToken() != XContentParser.Token.END_OBJECT) {
String name = p.currentName();
templates.put(name, ComposableIndexTemplate.parse(p));
}
return templates;
}, INDEX_TEMPLATE);
}
private final Map<String, ComposableIndexTemplate> indexTemplates;
public ComposableIndexTemplateMetadata(Map<String, ComposableIndexTemplate> templates) {
this.indexTemplates = templates;
}
public ComposableIndexTemplateMetadata(StreamInput in) throws IOException {
this.indexTemplates = in.readMap(ComposableIndexTemplate::new);
}
public static ComposableIndexTemplateMetadata fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public Map<String, ComposableIndexTemplate> indexTemplates() {
return indexTemplates;
}
@Override
public EnumSet<Metadata.XContentContext> context() {
return Metadata.ALL_CONTEXTS;
}
@Override
public Diff<Metadata.ProjectCustom> diff(Metadata.ProjectCustom before) {
return new ComposableIndexTemplateMetadataDiff((ComposableIndexTemplateMetadata) before, this);
}
public static NamedDiff<Metadata.ProjectCustom> readDiffFrom(StreamInput in) throws IOException {
return new ComposableIndexTemplateMetadataDiff(in);
}
@Override
public String getWriteableName() {
return TYPE;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeMap(this.indexTemplates, StreamOutput::writeWriteable);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
return ChunkedToXContentHelper.xContentObjectFields(INDEX_TEMPLATE.getPreferredName(), indexTemplates);
}
@Override
public int hashCode() {
return Objects.hash(this.indexTemplates);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
ComposableIndexTemplateMetadata other = (ComposableIndexTemplateMetadata) obj;
return Objects.equals(this.indexTemplates, other.indexTemplates);
}
@Override
public String toString() {
return Strings.toString(this);
}
static | ComposableIndexTemplateMetadata |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/protocol/types/Schema.java | {
"start": 1081,
"end": 9191
} | class ____ extends DocumentedType {
private static final String STRUCT_TYPE_NAME = "STRUCT";
private static final Object[] NO_VALUES = new Object[0];
private final BoundField[] fields;
private final Map<String, BoundField> fieldsByName;
private final boolean tolerateMissingFieldsWithDefaults;
private final Struct cachedStruct;
/**
* Construct the schema with a given list of its field values
*
* @param fs the fields of this schema
*
* @throws SchemaException If the given list have duplicate fields
*/
public Schema(Field... fs) {
this(false, fs);
}
/**
* Construct the schema with a given list of its field values and the ability to tolerate
* missing optional fields with defaults at the end of the schema definition.
*
* @param tolerateMissingFieldsWithDefaults whether to accept records with missing optional
* fields the end of the schema
* @param fs the fields of this schema
*
* @throws SchemaException If the given list have duplicate fields
*/
@SuppressWarnings("this-escape")
public Schema(boolean tolerateMissingFieldsWithDefaults, Field... fs) {
this.fields = new BoundField[fs.length];
this.fieldsByName = new HashMap<>();
this.tolerateMissingFieldsWithDefaults = tolerateMissingFieldsWithDefaults;
for (int i = 0; i < this.fields.length; i++) {
Field def = fs[i];
if (fieldsByName.containsKey(def.name))
throw new SchemaException("Schema contains a duplicate field: " + def.name);
this.fields[i] = new BoundField(def, this, i);
this.fieldsByName.put(def.name, this.fields[i]);
}
//6 schemas have no fields at the time of this writing (3 versions each of list_groups and api_versions)
//for such schemas there's no point in even creating a unique Struct object when deserializing.
this.cachedStruct = this.fields.length > 0 ? null : new Struct(this, NO_VALUES);
}
/**
* Write a struct to the buffer
*/
@Override
public void write(ByteBuffer buffer, Object o) {
Struct r = (Struct) o;
for (BoundField field : fields) {
try {
Object value = field.def.type.validate(r.get(field));
field.def.type.write(buffer, value);
} catch (Exception e) {
throw new SchemaException("Error writing field '" + field.def.name + "': " +
(e.getMessage() == null ? e.getClass().getName() : e.getMessage()));
}
}
}
/**
* Read a struct from the buffer. If this schema is configured to tolerate missing
* optional fields at the end of the buffer, these fields are replaced with their default
* values; otherwise, if the schema does not tolerate missing fields, or if missing fields
* don't have a default value, a {@code SchemaException} is thrown to signify that mandatory
* fields are missing.
*/
@Override
public Struct read(ByteBuffer buffer) {
if (cachedStruct != null) {
return cachedStruct;
}
Object[] objects = new Object[fields.length];
for (int i = 0; i < fields.length; i++) {
try {
if (tolerateMissingFieldsWithDefaults) {
if (buffer.hasRemaining()) {
objects[i] = fields[i].def.type.read(buffer);
} else if (fields[i].def.hasDefaultValue) {
objects[i] = fields[i].def.defaultValue;
} else {
throw new SchemaException("Missing value for field '" + fields[i].def.name +
"' which has no default value.");
}
} else {
objects[i] = fields[i].def.type.read(buffer);
}
} catch (Exception e) {
throw new SchemaException("Error reading field '" + fields[i].def.name + "': " +
(e.getMessage() == null ? e.getClass().getName() : e.getMessage()));
}
}
return new Struct(this, objects);
}
/**
* The size of the given record
*/
@Override
public int sizeOf(Object o) {
int size = 0;
Struct r = (Struct) o;
for (BoundField field : fields) {
try {
size += field.def.type.sizeOf(r.get(field));
} catch (Exception e) {
throw new SchemaException("Error computing size for field '" + field.def.name + "': " +
(e.getMessage() == null ? e.getClass().getName() : e.getMessage()));
}
}
return size;
}
/**
* The number of fields in this schema
*/
public int numFields() {
return this.fields.length;
}
/**
* Get a field by its slot in the record array
*
* @param slot The slot at which this field sits
* @return The field
*/
public BoundField get(int slot) {
return this.fields[slot];
}
/**
* Get a field by its name
*
* @param name The name of the field
* @return The field
*/
public BoundField get(String name) {
return this.fieldsByName.get(name);
}
/**
* Get all the fields in this schema
*/
public BoundField[] fields() {
return this.fields;
}
protected boolean tolerateMissingFieldsWithDefaults() {
return this.tolerateMissingFieldsWithDefaults;
}
@Override
public String leftBracket() {
return "{";
}
@Override
public String rightBracket() {
return "}";
}
/**
* Display a string representation of the schema
*/
@Override
public String toString() {
StringBuilder b = new StringBuilder();
b.append('{');
for (int i = 0; i < this.fields.length; i++) {
b.append(this.fields[i].toString());
if (i < this.fields.length - 1)
b.append(',');
}
b.append("}");
return b.toString();
}
@Override
public Struct validate(Object item) {
try {
Struct struct = (Struct) item;
for (BoundField field : fields) {
try {
field.def.type.validate(struct.get(field));
} catch (SchemaException e) {
throw new SchemaException("Invalid value for field '" + field.def.name + "': " + e.getMessage());
}
}
return struct;
} catch (ClassCastException e) {
throw new SchemaException("Not a Struct.");
}
}
@Override
public String typeName() {
return STRUCT_TYPE_NAME;
}
@Override
public String documentation() {
return "A struct is named by a string with a capitalized first letter and consists of one or more fields. " +
"It represents a composite object encoded as the serialization of each field in the order they are defined." +
"In protocol documentation a struct containing multiple fields is enclosed by " +
leftBracket() + " and " + rightBracket() + ".";
}
public void walk(Visitor visitor) {
Objects.requireNonNull(visitor, "visitor must be non-null");
handleNode(this, visitor);
}
private static void handleNode(Type node, Visitor visitor) {
if (node instanceof Schema) {
Schema schema = (Schema) node;
visitor.visit(schema);
for (BoundField f : schema.fields())
handleNode(f.def.type, visitor);
} else if (node.isArray()) {
visitor.visit(node);
handleNode(node.arrayElementType().get(), visitor);
} else {
visitor.visit(node);
}
}
/**
* Override one or more of the visit methods with the desired logic.
*/
public abstract static | Schema |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeSerializerFactory.java | {
"start": 971,
"end": 1252
} | interface ____<T> {
void writeParametersToConfig(Configuration config);
void readParametersFromConfig(Configuration config, ClassLoader cl)
throws ClassNotFoundException;
TypeSerializer<T> getSerializer();
Class<T> getDataType();
}
| TypeSerializerFactory |
java | apache__camel | components/camel-aws/camel-aws2-translate/src/test/java/org/apache/camel/component/aws2/translate/Translate2ClientFactoryTest.java | {
"start": 1398,
"end": 3064
} | class ____ {
@Test
public void getStandardTranslateClientDefault() {
Translate2Configuration translate2Configuration = new Translate2Configuration();
Translate2InternalClient translateClient = Translate2ClientFactory.getTranslateClient(translate2Configuration);
assertTrue(translateClient instanceof Translate2ClientStandardImpl);
}
@Test
public void getStandardTranslateClient() {
Translate2Configuration translate2Configuration = new Translate2Configuration();
translate2Configuration.setUseDefaultCredentialsProvider(false);
Translate2InternalClient translateClient = Translate2ClientFactory.getTranslateClient(translate2Configuration);
assertTrue(translateClient instanceof Translate2ClientStandardImpl);
}
@Test
public void getTranslateOptimizedIAMClient() {
Translate2Configuration translate2Configuration = new Translate2Configuration();
translate2Configuration.setUseDefaultCredentialsProvider(true);
Translate2InternalClient translateClient = Translate2ClientFactory.getTranslateClient(translate2Configuration);
assertTrue(translateClient instanceof Translate2ClientIAMOptimized);
}
@Test
public void getTranslateSessionTokenClient() {
Translate2Configuration translate2Configuration = new Translate2Configuration();
translate2Configuration.setUseSessionCredentials(true);
Translate2InternalClient translateClient = Translate2ClientFactory.getTranslateClient(translate2Configuration);
assertTrue(translateClient instanceof Translate2ClientSessionTokenImpl);
}
}
| Translate2ClientFactoryTest |
java | quarkusio__quarkus | integration-tests/devtools/src/test/java/io/quarkus/devtools/codestarts/quarkus/AmazonLambdaCodestartTest.java | {
"start": 376,
"end": 1183
} | class ____ {
@RegisterExtension
public static QuarkusCodestartTest codestartTest = QuarkusCodestartTest.builder()
.codestarts("amazon-lambda")
.languages(JAVA)
.build();
@Test
void testContent() throws Throwable {
codestartTest.checkGeneratedSource("org.acme.lambda.GreetingLambda");
codestartTest.checkGeneratedSource("org.acme.lambda.Person");
codestartTest.checkGeneratedTestSource("org.acme.lambda.LambdaHandlerTest");
codestartTest.checkGeneratedTestSource("org.acme.lambda.LambdaHandlerTestIT");
}
@Test
@EnabledIfSystemProperty(named = "build-projects", matches = "true")
void buildAllProjectsForLocalUse() throws Throwable {
codestartTest.buildAllProjects();
}
}
| AmazonLambdaCodestartTest |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java | {
"start": 1729,
"end": 13489
} | class ____ extends ESIntegTestCase {
private static final String NUMERIC_VALUE = "number";
private static final String MONOTONIC_VALUE = "monotonic";
private static final String KEYWORD_VALUE = "kind";
private static final String LOWER_KEYWORD = "lower";
private static final String UPPER_KEYWORD = "upper";
private static final double PROBABILITY = 0.25;
private static int numDocs;
private static double varMonotonic = 0.0;
private static double varNumeric = 0.0;
private static final int NUM_SAMPLE_RUNS = 25;
@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex("idx");
numDocs = randomIntBetween(5000, 10000);
List<IndexRequestBuilder> builders = new ArrayList<>();
double avgMonotonic = 0.0;
double avgNumeric = 0.0;
for (int i = 0; i < numDocs; i++) {
final String keywordValue;
final double numericValue;
final double monotonicValue = randomDouble() + i;
if (i % 2 == 0) {
keywordValue = LOWER_KEYWORD;
numericValue = randomDoubleBetween(0.0, 3.0, false);
} else {
keywordValue = UPPER_KEYWORD;
numericValue = randomDoubleBetween(5.0, 9.0, false);
}
builders.add(
prepareIndex("idx").setSource(
jsonBuilder().startObject()
.field(KEYWORD_VALUE, keywordValue)
.field(MONOTONIC_VALUE, monotonicValue)
.field(NUMERIC_VALUE, numericValue)
.endObject()
)
);
final double oldAvgMonotonic = avgMonotonic;
final double oldAvgNumeric = avgNumeric;
avgMonotonic = (i * avgMonotonic + monotonicValue) / (i + 1);
avgNumeric = (i * avgNumeric + numericValue) / (i + 1);
final double avgMonotonicDiff = avgMonotonic - oldAvgMonotonic;
final double avgNumericDiff = avgNumeric - oldAvgNumeric;
final double resMonotonic = monotonicValue - avgMonotonic;
final double resNumeric = numericValue - avgNumeric;
varMonotonic = (i * (varMonotonic + Math.pow(avgMonotonicDiff, 2.0)) + Math.pow(resMonotonic, 2.0)) / (i + 1);
varNumeric = (i * (varNumeric + Math.pow(avgNumericDiff, 2.0)) + Math.pow(resNumeric, 2.0)) / (i + 1);
}
indexRandom(true, builders);
ensureSearchable();
// Force merge to ensure segment consistency as any segment merging can change which particular documents
// are sampled
assertNoFailures(indicesAdmin().prepareForceMerge("idx").setMaxNumSegments(1).get());
}
public void testRandomSamplerConsistentSeed() {
double[] sampleMonotonicValue = new double[1];
double[] sampleNumericValue = new double[1];
long[] sampledDocCount = new long[1];
double tolerance = 1e-14;
// initialize the values
assertResponse(
prepareSearch("idx").setPreference("shard:0")
.addAggregation(
new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY)
.setSeed(0)
.subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE))
.subAggregation(avg("mean_numeric").field(NUMERIC_VALUE))
.setShardSeed(42)
),
response -> {
InternalRandomSampler sampler = response.getAggregations().get("sampler");
sampleMonotonicValue[0] = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue();
sampleNumericValue[0] = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue();
sampledDocCount[0] = sampler.getDocCount();
}
);
assertResponses(response -> {
InternalRandomSampler sampler = response.getAggregations().get("sampler");
double monotonicValue = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue();
double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue();
long docCount = sampler.getDocCount();
assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance);
assertEquals(numericValue, sampleNumericValue[0], tolerance);
assertEquals(docCount, sampledDocCount[0]);
},
IntStream.rangeClosed(0, NUM_SAMPLE_RUNS - 1)
.mapToObj(
num -> prepareSearch("idx").setPreference("shard:0")
.addAggregation(
new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY)
.setSeed(0)
.subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE))
.subAggregation(avg("mean_numeric").field(NUMERIC_VALUE))
.setShardSeed(42)
)
)
.toArray(SearchRequestBuilder[]::new)
);
}
public void testRandomSampler() {
double[] sampleMonotonicValue = new double[1];
double[] sampleNumericValue = new double[1];
double[] sampledDocCount = new double[1];
for (int i = 0; i < NUM_SAMPLE_RUNS; i++) {
assertResponse(
prepareSearch("idx").addAggregation(
new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY)
.subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE))
.subAggregation(avg("mean_numeric").field(NUMERIC_VALUE))
),
response -> {
InternalRandomSampler sampler = response.getAggregations().get("sampler");
sampleMonotonicValue[0] += ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue();
sampleNumericValue[0] += ((Avg) sampler.getAggregations().get("mean_numeric")).getValue();
sampledDocCount[0] += sampler.getDocCount();
}
);
}
sampledDocCount[0] /= NUM_SAMPLE_RUNS;
sampleMonotonicValue[0] /= NUM_SAMPLE_RUNS;
sampleNumericValue[0] /= NUM_SAMPLE_RUNS;
double expectedDocCount = PROBABILITY * numDocs;
// We're taking the mean of NUM_SAMPLE_RUNS for which each run has standard deviation
// sqrt(PROBABILITY * numDocs) so the 6 sigma error, for which we expect 1 failure in
// 500M runs, is 6 * sqrt(PROBABILITY * numDocs / NUM_SAMPLE_RUNS).
double maxCountError = 6.0 * Math.sqrt(PROBABILITY * numDocs / NUM_SAMPLE_RUNS);
assertThat(Math.abs(sampledDocCount[0] - expectedDocCount), lessThan(maxCountError));
assertResponse(
prepareSearch("idx").addAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE))
.addAggregation(avg("mean_numeric").field(NUMERIC_VALUE)),
response -> {
double trueMonotonic = ((Avg) response.getAggregations().get("mean_monotonic")).getValue();
double trueNumeric = ((Avg) response.getAggregations().get("mean_numeric")).getValue();
double maxMonotonicError = 6.0 * Math.sqrt(varMonotonic / (numDocs * PROBABILITY * NUM_SAMPLE_RUNS));
double maxNumericError = 6.0 * Math.sqrt(varNumeric / (numDocs * PROBABILITY * NUM_SAMPLE_RUNS));
assertThat(Math.abs(sampleMonotonicValue[0] - trueMonotonic), lessThan(maxMonotonicError));
assertThat(Math.abs(sampleNumericValue[0] - trueNumeric), lessThan(maxNumericError));
}
);
}
public void testRandomSamplerHistogram() {
Map<String, Double> sampleMonotonicValue = new HashMap<>();
Map<String, Double> sampleNumericValue = new HashMap<>();
Map<String, Double> sampledDocCount = new HashMap<>();
for (int i = 0; i < NUM_SAMPLE_RUNS; i++) {
assertResponse(
prepareSearch("idx").addAggregation(
new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY)
.subAggregation(
histogram("histo").field(NUMERIC_VALUE)
.interval(5.0)
.subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE))
.subAggregation(avg("mean_numeric").field(NUMERIC_VALUE))
)
),
response -> {
InternalRandomSampler sampler = response.getAggregations().get("sampler");
Histogram histo = sampler.getAggregations().get("histo");
for (Histogram.Bucket bucket : histo.getBuckets()) {
sampleMonotonicValue.compute(
bucket.getKeyAsString(),
(k, v) -> ((Avg) bucket.getAggregations().get("mean_monotonic")).getValue() + (v == null ? 0 : v)
);
sampleNumericValue.compute(
bucket.getKeyAsString(),
(k, v) -> ((Avg) bucket.getAggregations().get("mean_numeric")).getValue() + (v == null ? 0 : v)
);
sampledDocCount.compute(bucket.getKeyAsString(), (k, v) -> bucket.getDocCount() + (v == null ? 0 : v));
}
}
);
}
for (String key : sampledDocCount.keySet()) {
sampledDocCount.put(key, sampledDocCount.get(key) / NUM_SAMPLE_RUNS);
sampleNumericValue.put(key, sampleNumericValue.get(key) / NUM_SAMPLE_RUNS);
sampleMonotonicValue.put(key, sampleMonotonicValue.get(key) / NUM_SAMPLE_RUNS);
}
assertResponse(
prepareSearch("idx").addAggregation(
histogram("histo").field(NUMERIC_VALUE)
.interval(5.0)
.subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE))
.subAggregation(avg("mean_numeric").field(NUMERIC_VALUE))
),
response -> {
Histogram histogram = response.getAggregations().get("histo");
for (Histogram.Bucket bucket : histogram.getBuckets()) {
long numDocs = bucket.getDocCount();
// Note the true count is estimated by dividing the bucket sample doc count by PROBABILITY.
double maxCountError = 6.0 * Math.sqrt(numDocs / NUM_SAMPLE_RUNS / (0.5 * PROBABILITY));
assertThat(Math.abs(sampledDocCount.get(bucket.getKeyAsString()) - numDocs), lessThan(maxCountError));
double trueMonotonic = ((Avg) bucket.getAggregations().get("mean_monotonic")).getValue();
double trueNumeric = ((Avg) bucket.getAggregations().get("mean_numeric")).getValue();
double maxMonotonicError = 6.0 * Math.sqrt(varMonotonic / (numDocs * 0.5 * PROBABILITY * NUM_SAMPLE_RUNS));
double maxNumericError = 6.0 * Math.sqrt(varNumeric / (numDocs * 0.5 * PROBABILITY * NUM_SAMPLE_RUNS));
assertThat(Math.abs(sampleMonotonicValue.get(bucket.getKeyAsString()) - trueMonotonic), lessThan(maxMonotonicError));
assertThat(Math.abs(sampleNumericValue.get(bucket.getKeyAsString()) - trueNumeric), lessThan(maxNumericError));
}
}
);
}
}
| RandomSamplerIT |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/path/JSONPath_paths_test1.java | {
"start": 216,
"end": 711
} | class ____ extends TestCase {
public void test_map() throws Exception {
List<Object> list = new ArrayList<Object>();
list.add(1001);
list.add("wenshao");
Map<String, Object> paths = JSONPath.paths(list);
Assert.assertEquals(3, paths.size());
Assert.assertSame(list, paths.get("/"));
Assert.assertEquals(1001, paths.get("/0"));
Assert.assertEquals("wenshao", paths.get("/1"));
}
}
| JSONPath_paths_test1 |
java | google__dagger | javatests/dagger/functional/subcomponent/SingletonType.java | {
"start": 722,
"end": 775
} | class ____ {
@Inject SingletonType() {}
}
| SingletonType |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/ast/ClassElement.java | {
"start": 27710,
"end": 29155
} | class ____
*/
@Experimental
@NonNull
static ClassElement of(@NonNull Type type) {
Objects.requireNonNull(type, "Type cannot be null");
if (type instanceof Class<?> aClass) {
return new ReflectClassElement(aClass);
} else if (type instanceof TypeVariable<?> typeVariable) {
return new ReflectGenericPlaceholderElement(typeVariable, 0);
} else if (type instanceof WildcardType wildcardType) {
return new ReflectWildcardElement(wildcardType);
} else if (type instanceof ParameterizedType pType) {
if (pType.getOwnerType() != null) {
throw new UnsupportedOperationException("Owner types are not supported");
}
return new ReflectClassElement(ReflectTypeElement.getErasure(type)) {
@NonNull
@Override
public List<? extends ClassElement> getBoundGenericTypes() {
return Arrays.stream(pType.getActualTypeArguments())
.map(ClassElement::of)
.toList();
}
};
} else if (type instanceof GenericArrayType genericArrayType) {
return of(genericArrayType.getGenericComponentType()).toArray();
} else {
throw new IllegalArgumentException("Bad type: " + type.getClass().getName());
}
}
/**
* Create a | element |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/cors/CORSFluentApiSameOriginWithoutOriginConfigTest.java | {
"start": 414,
"end": 1275
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(BeanRegisteringRoute.class, CorsProgrammaticConfig.class));
@Test
void corsSameOriginRequest() {
String origin = "http://localhost:8081";
given().header("Origin", origin)
.get("/test").then()
.statusCode(200)
.header("Access-Control-Allow-Origin", origin);
}
@Test
void corsInvalidSameOriginRequest() {
String origin = "http://externalhost:8081";
given().header("Origin", origin)
.get("/test").then()
.statusCode(403)
.header("Access-Control-Allow-Origin", nullValue());
}
public static | CORSFluentApiSameOriginWithoutOriginConfigTest |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/AfterEachCallback.java | {
"start": 2702,
"end": 3036
} | interface ____ extends Extension {
/**
* Callback that is invoked <em>after</em> an individual test and any
* user-defined teardown methods for that test have been executed.
*
* @param context the current extension context; never {@code null}
*/
void afterEach(ExtensionContext context) throws Exception;
}
| AfterEachCallback |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java | {
"start": 1005,
"end": 2618
} | class ____ extends TransportMasterNodeReadAction<GetLicenseRequest, GetLicenseResponse> {
private final LicenseService licenseService;
@Inject
public TransportGetLicenseAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
LicenseService licenseService
) {
super(
GetLicenseAction.NAME,
transportService,
clusterService,
threadPool,
actionFilters,
GetLicenseRequest::new,
GetLicenseResponse::new,
threadPool.executor(ThreadPool.Names.MANAGEMENT)
);
this.licenseService = licenseService;
}
@Override
protected ClusterBlockException checkBlock(GetLicenseRequest request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
}
@Override
protected void masterOperation(
Task task,
final GetLicenseRequest request,
ClusterState state,
final ActionListener<GetLicenseResponse> listener
) throws ElasticsearchException {
assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT);
if (licenseService instanceof ClusterStateLicenseService clusterStateLicenseService) {
listener.onResponse(new GetLicenseResponse(clusterStateLicenseService.getLicense(state.metadata())));
} else {
listener.onResponse(new GetLicenseResponse(licenseService.getLicense()));
}
}
}
| TransportGetLicenseAction |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/AnyGetterOrdering4388Test.java | {
"start": 2779,
"end": 2959
} | class ____ {
public int c = 3, a = 1, b = 2;
@JsonAnyGetter
public Map<String, Object> map = new LinkedHashMap<>();
}
static | AlphabeticOrderOnClassBean |
java | elastic__elasticsearch | x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/Version.java | {
"start": 602,
"end": 1471
} | class ____ implements ToXContentFragment, BytesRefProducer, Comparable<Version> {
protected String version;
protected BytesRef bytes;
public Version(String version) {
this.version = version;
this.bytes = VersionEncoder.encodeVersion(version).bytesRef;
}
public Version(BytesRef bytes) {
this.version = VersionEncoder.decodeVersion(bytes).utf8ToString();
this.bytes = bytes;
}
@Override
public String toString() {
return version;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.value(toString());
}
@Override
public BytesRef toBytesRef() {
return bytes;
}
@Override
public int compareTo(Version o) {
return toBytesRef().compareTo(o.toBytesRef());
}
}
| Version |
java | processing__processing4 | app/src/processing/app/contrib/ContribProgress.java | {
"start": 1540,
"end": 3383
} | class ____ {
static private final int UNKNOWN = -1;
final private JProgressBar progressBar;
private boolean canceled = false;
private Exception exception;
public ContribProgress(JProgressBar progressBar) {
this.progressBar = progressBar;
}
public void startTask(String name) {
startTask(name, UNKNOWN);
}
public void startTask(String name, int maxValue) {
if (progressBar != null) {
progressBar.setString(name);
progressBar.setIndeterminate(maxValue == UNKNOWN);
progressBar.setMaximum(maxValue);
}
}
public void setProgress(int value) {
if (progressBar != null) {
progressBar.setValue(value);
}
}
public final void finished() {
try {
EventQueue.invokeAndWait(this::finishedAction);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
} else {
cause.printStackTrace();
}
}
}
public void finishedAction() { }
public final void cancel() {
canceled = true;
try {
EventQueue.invokeAndWait(this::cancelAction);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
} else {
cause.printStackTrace();
}
}
}
public void cancelAction() { }
public boolean notCanceled() {
return !canceled;
}
public void setException(Exception e) {
exception = e;
}
public Exception getException() {
return exception;
}
public boolean isException() {
return exception != null;
}
}
| ContribProgress |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/TreeSetTest.java | {
"start": 258,
"end": 615
} | class ____ extends TestCase {
public void test_null() throws Exception {
VO vo = new VO();
vo.setValue(new TreeSet());
Assert.assertEquals("{\"@type\":\"com.alibaba.json.bvt.serializer.TreeSetTest$VO\",\"value\":TreeSet[]}", JSON.toJSONString(vo, SerializerFeature.WriteClassName));
}
public static | TreeSetTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java | {
"start": 74878,
"end": 78017
} | class ____ implements AsyncCallback<AllocateResponse> {
private final SubClusterId subClusterId;
private final boolean isUAM;
HeartbeatCallBack(SubClusterId subClusterId, boolean isUAM) {
this.subClusterId = subClusterId;
this.isUAM = isUAM;
}
@Override
public void callback(AllocateResponse response) {
org.apache.hadoop.yarn.api.records.Token amrmToken =
response.getAMRMToken();
synchronized (asyncResponseSink) {
List<AllocateResponse> responses;
if (asyncResponseSink.containsKey(subClusterId)) {
responses = asyncResponseSink.get(subClusterId);
} else {
responses = new ArrayList<>();
asyncResponseSink.put(subClusterId, responses);
}
responses.add(response);
if (this.isUAM) {
// Do not further propagate the new amrmToken for UAM
response.setAMRMToken(null);
}
// Notify main thread about the response arrival
asyncResponseSink.notifyAll();
}
lastSCResponse.put(subClusterId, response);
lastSCResponseTime.put(subClusterId, clock.getTime());
// Notify policy of allocate response
try {
policyInterpreter.notifyOfResponse(subClusterId, response);
} catch (YarnException e) {
LOG.warn("notifyOfResponse for policy failed for sub-cluster {}.", subClusterId, e);
}
// Save the new AMRMToken for the UAM if present
// Do this last because it can be slow...
if (this.isUAM && amrmToken != null) {
Token<AMRMTokenIdentifier> newToken = ConverterUtils
.convertFromYarn(amrmToken, (Text) null);
// Do not further propagate the new amrmToken for UAM
response.setAMRMToken(null);
// Update the token in registry or NMSS
if (registryClient != null) {
if (registryClient.writeAMRMTokenForUAM(attemptId.getApplicationId(),
subClusterId.getId(), newToken)) {
try {
AMRMTokenIdentifier identifier = new AMRMTokenIdentifier();
identifier.readFields(new DataInputStream(
new ByteArrayInputStream(newToken.getIdentifier())));
LOG.info("Received new UAM amrmToken with keyId {} and service {} from {} for {}, " +
"written to Registry", identifier.getKeyId(), newToken.getService(),
subClusterId, attemptId);
} catch (IOException e) {
}
}
} else if (getNMStateStore() != null) {
try {
getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
NMSS_SECONDARY_SC_PREFIX + subClusterId.getId(),
newToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
LOG.error("Error storing UAM token as AMRMProxy "
+ "context entry in NMSS for {}.", attemptId, e);
}
}
}
}
}
/**
* Private structure for encapsulating SubClusterId and
* FinishApplicationMasterResponse instances.
*/
private static | HeartbeatCallBack |
java | grpc__grpc-java | util/src/main/java/io/grpc/util/OutlierDetectionLoadBalancer.java | {
"start": 2868,
"end": 7603
} | class ____ extends LoadBalancer {
@VisibleForTesting
final EndpointTrackerMap endpointTrackerMap;
@VisibleForTesting
final Map<SocketAddress, EndpointTracker> addressMap = new HashMap<>();
private final SynchronizationContext syncContext;
private final Helper childHelper;
private final GracefulSwitchLoadBalancer switchLb;
private Ticker ticker;
private final ScheduledExecutorService timeService;
private ScheduledHandle detectionTimerHandle;
private Long detectionTimerStartNanos;
private final ChannelLogger logger;
private static final Attributes.Key<EndpointTracker> ENDPOINT_TRACKER_KEY
= Attributes.Key.create("endpointTrackerKey");
/**
* Creates a new instance of {@link OutlierDetectionLoadBalancer}.
*/
public OutlierDetectionLoadBalancer(Helper helper, Ticker ticker) {
logger = helper.getChannelLogger();
childHelper = new ChildHelper(checkNotNull(helper, "helper"));
switchLb = new GracefulSwitchLoadBalancer(childHelper);
endpointTrackerMap = new EndpointTrackerMap();
this.syncContext = checkNotNull(helper.getSynchronizationContext(), "syncContext");
this.timeService = checkNotNull(helper.getScheduledExecutorService(), "timeService");
this.ticker = ticker;
logger.log(ChannelLogLevel.DEBUG, "OutlierDetection lb created.");
}
@Override
public Status acceptResolvedAddresses(ResolvedAddresses resolvedAddresses) {
logger.log(ChannelLogLevel.DEBUG, "Received resolution result: {0}", resolvedAddresses);
OutlierDetectionLoadBalancerConfig config
= (OutlierDetectionLoadBalancerConfig) resolvedAddresses.getLoadBalancingPolicyConfig();
// The map should only retain entries for endpoints in this latest update.
Set<Set<SocketAddress>> endpoints = new HashSet<>();
Map<SocketAddress, Set<SocketAddress>> addressEndpointMap = new HashMap<>();
for (EquivalentAddressGroup addressGroup : resolvedAddresses.getAddresses()) {
Set<SocketAddress> endpoint = ImmutableSet.copyOf(addressGroup.getAddresses());
endpoints.add(endpoint);
for (SocketAddress address : addressGroup.getAddresses()) {
if (addressEndpointMap.containsKey(address)) {
logger.log(ChannelLogLevel.WARNING,
"Unexpected duplicated address {0} belongs to multiple endpoints", address);
}
addressEndpointMap.put(address, endpoint);
}
}
endpointTrackerMap.keySet().retainAll(endpoints);
endpointTrackerMap.updateTrackerConfigs(config);
// Add any new ones.
endpointTrackerMap.putNewTrackers(config, endpoints);
// Update address -> tracker map.
addressMap.clear();
for (Map.Entry<SocketAddress, Set<SocketAddress>> e : addressEndpointMap.entrySet()) {
addressMap.put(e.getKey(), endpointTrackerMap.get(e.getValue()));
}
// If outlier detection is actually configured, start a timer that will periodically try to
// detect outliers.
if (config.outlierDetectionEnabled()) {
long initialDelayNanos;
if (detectionTimerStartNanos == null) {
// On the first go we use the configured interval.
initialDelayNanos = config.intervalNanos;
} else {
// If a timer has started earlier we cancel it and use the difference between the start
// time and now as the interval.
initialDelayNanos = Math.max(0L,
config.intervalNanos - (ticker.read() - detectionTimerStartNanos));
}
// If a timer has been previously created we need to cancel it and reset all the call counters
// for a fresh start.
if (detectionTimerHandle != null) {
detectionTimerHandle.cancel();
endpointTrackerMap.resetCallCounters();
}
detectionTimerHandle = syncContext.scheduleWithFixedDelay(new DetectionTimer(config, logger),
initialDelayNanos, config.intervalNanos, NANOSECONDS, timeService);
} else if (detectionTimerHandle != null) {
// Outlier detection is not configured, but we have a lingering timer. Let's cancel it and
// uneject any addresses we may have ejected.
detectionTimerHandle.cancel();
detectionTimerStartNanos = null;
endpointTrackerMap.cancelTracking();
}
return switchLb.acceptResolvedAddresses(
resolvedAddresses.toBuilder().setLoadBalancingPolicyConfig(config.childConfig).build());
}
@Override
public void handleNameResolutionError(Status error) {
switchLb.handleNameResolutionError(error);
}
@Override
public void shutdown() {
switchLb.shutdown();
}
/**
* This timer will be invoked periodically, according to configuration, and it will look for any
* outlier subchannels.
*/
final | OutlierDetectionLoadBalancer |
java | google__dagger | javatests/dagger/functional/generictypes/GenericTypesComponentTest.java | {
"start": 1224,
"end": 1309
} | interface ____ extends GenericTypesInterface<Integer, String> {}
| GenericTypesComponent |
java | google__guava | guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java | {
"start": 37808,
"end": 38416
} | class ____ extends DefaultValueChecker {
@SuppressWarnings("unused") // called by NullPointerTester
@Keep
public void checkArray(TypeToken<? extends List<? super Number>> type, String s) {
calledWith(type, s);
}
void check() {
runTester();
TypeToken<?> defaultType = (TypeToken<?>) getDefaultParameterValue(0);
assertTrue(new TypeToken<List<? super Number>>() {}.isSupertypeOf(defaultType));
}
}
public void testGenericTypeTokenDefaultValue() {
new GenericTypeTokenDefaultValueChecker().check();
}
private static | GenericTypeTokenDefaultValueChecker |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/generic/GenericBridgeMethodMatchingTests.java | {
"start": 2711,
"end": 2935
} | class ____ implements DerivedInterface<String> {
@Override
public void genericDerivedInterfaceMethod(String t) {
}
@Override
public void genericBaseInterfaceMethod(String t) {
}
}
@Aspect
| DerivedStringParameterizedClass |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/StringValueSerializerTest.java | {
"start": 1083,
"end": 2235
} | class ____ extends SerializerTestBase<StringValue> {
@Override
protected TypeSerializer<StringValue> createSerializer() {
return new StringValueSerializer();
}
@Override
protected int getLength() {
return -1;
}
@Override
protected Class<StringValue> getTypeClass() {
return StringValue.class;
}
@Override
protected StringValue[] getTestData() {
return new StringValue[] {
new StringValue("a"),
new StringValue(""),
new StringValue("bcd"),
new StringValue("jbmbmner8 jhk hj \n \t üäßß@µ"),
new StringValue(""),
new StringValue("non-empty"),
new StringValue(
"http://some-uri.com/that/is/a/common/prefix/to/all(((cmNH`0R)H<tnLa:/;Q,igWY2EdwW^W7T3H6NMRoqR[O2TqQ@SbGKc(:0XOXq-5]ndm-R8?=,o?AW+9Pi_v4eON=Mpje7N4n*-nhFWKn>Sn0cGMlnDquY@-F:QY@-UZ.-//*OL*8\\SIpiZa)tefalZ99-P_-WFIaKPeGbkQ^iRgd,YYkn7:jBAW::PqAYtgl73dTaJ2CIT:11HJ70<ATOXZ]c6b_7EgQU,@uq+SMa=7Z]kg/OZ>TGduw>D7Lu[nEj_l=Ucwo5BQtBESh/4V>N9nj/pDLw[NM)a=ac6R-(FM2U+dwROMUH;);Y=")
};
}
}
| StringValueSerializerTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/connection/IdleConnectionWatcher.java | {
"start": 1296,
"end": 1414
} | class ____ {
private final Logger log = LoggerFactory.getLogger(getClass());
public static | IdleConnectionWatcher |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.