language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/FastJsonSerializeIterableTest.java
|
{
"start": 246,
"end": 339
}
|
class ____ {
@Test
public void testWithIterable() {
|
FastJsonSerializeIterableTest
|
java
|
playframework__playframework
|
core/play/src/main/java/play/BuiltInComponentsFromContext.java
|
{
"start": 366,
"end": 692
}
|
class ____ extends ContextBasedBuiltInComponents {
private final ApplicationLoader.Context context;
public BuiltInComponentsFromContext(ApplicationLoader.Context context) {
this.context = context;
}
@Override
public ApplicationLoader.Context context() {
return this.context;
}
}
|
BuiltInComponentsFromContext
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/utils/JacksonUtils.java
|
{
"start": 4908,
"end": 8608
}
|
class ____ object
* @param <T> General type
* @return object
* @throws NacosDeserializationException if deserialize failed
*/
public static <T> T toObj(String json, Class<T> cls) {
try {
return mapper.readValue(json, cls);
} catch (IOException e) {
throw new NacosDeserializationException(cls, e);
}
}
/**
* Json string deserialize to Object.
*
* @param json json string
* @param type {@link Type} of object
* @param <T> General type
* @return object
* @throws NacosDeserializationException if deserialize failed
*/
public static <T> T toObj(String json, Type type) {
try {
return mapper.readValue(json, mapper.constructType(type));
} catch (IOException e) {
throw new NacosDeserializationException(e);
}
}
/**
* Json string deserialize to Object.
*
* @param json json string
* @param typeReference {@link TypeReference} of object
* @param <T> General type
* @return object
* @throws NacosDeserializationException if deserialize failed
*/
public static <T> T toObj(String json, TypeReference<T> typeReference) {
try {
return mapper.readValue(json, typeReference);
} catch (IOException e) {
throw new NacosDeserializationException(typeReference.getClass(), e);
}
}
/**
* Json string deserialize to Object.
*
* @param inputStream json string input stream
* @param type {@link Type} of object
* @param <T> General type
* @return object
* @throws NacosDeserializationException if deserialize failed
*/
public static <T> T toObj(InputStream inputStream, Type type) {
try {
return mapper.readValue(inputStream, mapper.constructType(type));
} catch (IOException e) {
throw new NacosDeserializationException(type, e);
}
}
/**
* Json string deserialize to Jackson {@link JsonNode}.
*
* @param json json string
* @return {@link JsonNode}
* @throws NacosDeserializationException if deserialize failed
*/
public static JsonNode toObj(String json) {
try {
return mapper.readTree(json);
} catch (IOException e) {
throw new NacosDeserializationException(e);
}
}
/**
* Register sub type for child class.
*
* @param clz child class
* @param type type name of child class
*/
public static void registerSubtype(Class<?> clz, String type) {
mapper.registerSubtypes(new NamedType(clz, type));
}
/**
* Create a new empty Jackson {@link ObjectNode}.
*
* @return {@link ObjectNode}
*/
public static ObjectNode createEmptyJsonNode() {
return new ObjectNode(mapper.getNodeFactory());
}
/**
* Create a new empty Jackson {@link ArrayNode}.
*
* @return {@link ArrayNode}
*/
public static ArrayNode createEmptyArrayNode() {
return new ArrayNode(mapper.getNodeFactory());
}
/**
* Parse object to Jackson {@link JsonNode}.
*
* @param obj object
* @return {@link JsonNode}
*/
public static JsonNode transferToJsonNode(Object obj) {
return mapper.valueToTree(obj);
}
/**
* construct java type -> Jackson Java Type.
*
* @param type java type
* @return JavaType {@link JavaType}
*/
public static JavaType constructJavaType(Type type) {
return mapper.constructType(type);
}
}
|
of
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/main/java/io/micronaut/annotation/processing/visitor/AbstractJavaMemberElement.java
|
{
"start": 1149,
"end": 2323
}
|
class ____ extends AbstractTypeAwareJavaElement implements MemberElement {
/**
* The constructor.
* @param nativeElement The {@link Element}
* @param annotationMetadataFactory The annotation metadata factory
* @param visitorContext The Java visitor context
*/
AbstractJavaMemberElement(JavaNativeElement nativeElement, ElementAnnotationMetadataFactory annotationMetadataFactory, JavaVisitorContext visitorContext) {
super(nativeElement, annotationMetadataFactory, visitorContext);
}
@Override
public MemberElement withAnnotationMetadata(AnnotationMetadata annotationMetadata) {
return (MemberElement) super.withAnnotationMetadata(annotationMetadata);
}
@Override
protected boolean hasNullMarked() {
if (hasStereotype(NullMarked.class)) {
return true;
}
ClassElement owningType = getOwningType();
if (owningType instanceof AbstractTypeAwareJavaElement typeAwareJavaElement) {
return typeAwareJavaElement.hasNullMarked();
}
return owningType.hasStereotype(NullMarked.class);
}
}
|
AbstractJavaMemberElement
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java
|
{
"start": 790,
"end": 3388
}
|
class ____ extends ESTestCase {
public void testConcurrent() throws InterruptedException {
final AtomicInteger count = new AtomicInteger(0);
final CountDown countDown = new CountDown(scaledRandomIntBetween(10, 1000));
Thread[] threads = new Thread[between(3, 10)];
final CountDownLatch latch = new CountDownLatch(1 + threads.length);
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread() {
@Override
public void run() {
latch.countDown();
try {
latch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
while (true) {
if (frequently()) {
if (countDown.isCountedDown()) {
break;
}
}
if (countDown.countDown()) {
count.incrementAndGet();
break;
}
}
}
};
threads[i].start();
}
latch.countDown();
Thread.yield();
if (rarely()) {
if (countDown.fastForward()) {
count.incrementAndGet();
}
assertThat(countDown.isCountedDown(), equalTo(true));
assertThat(countDown.fastForward(), equalTo(false));
}
for (Thread thread : threads) {
thread.join();
}
assertThat(countDown.isCountedDown(), equalTo(true));
assertThat(count.get(), Matchers.equalTo(1));
}
public void testSingleThreaded() {
int atLeast = scaledRandomIntBetween(10, 1000);
final CountDown countDown = new CountDown(atLeast);
while (countDown.isCountedDown() == false) {
atLeast--;
if (countDown.countDown()) {
assertThat(atLeast, equalTo(0));
assertThat(countDown.isCountedDown(), equalTo(true));
assertThat(countDown.fastForward(), equalTo(false));
break;
}
if (rarely()) {
assertThat(countDown.fastForward(), equalTo(true));
assertThat(countDown.isCountedDown(), equalTo(true));
assertThat(countDown.fastForward(), equalTo(false));
}
assertThat(atLeast, greaterThan(0));
}
}
}
|
CountDownTests
|
java
|
apache__spark
|
core/src/main/java/org/apache/spark/SparkFirehoseListener.java
|
{
"start": 1059,
"end": 1553
}
|
class ____ order to ensure that we don't forget to update it when adding
* new methods to SparkListener: forgetting to add a method will result in a compilation error (if
* this was a concrete Scala class, default implementations of new event handlers would be inherited
* from the SparkListener trait).
*
* Please note until Spark 3.1.0 this was missing the DevelopApi annotation, this needs to be
* taken into account if changing this API before a major release.
*/
@DeveloperApi
public
|
in
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/spark/parser/SparkSelectParser.java
|
{
"start": 235,
"end": 522
}
|
class ____ extends HiveSelectParser {
public SparkSelectParser(SQLExprParser exprParser) {
super(exprParser);
}
public SparkSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
super(exprParser, selectListCache);
}
}
|
SparkSelectParser
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java
|
{
"start": 28042,
"end": 28432
}
|
enum ____ implements Function<io.reactivex.rxjava3.core.Maybe<?>, Flux<?>> {
INSTANCE;
@Override
public Flux<?> apply(io.reactivex.rxjava3.core.Maybe<?> source) {
return Flux.from(source.toFlowable());
}
}
/**
* An adapter {@link Function} to adopt a {@link Observable} to {@link Single}.
*/
public
|
RxJava3MaybeToFluxAdapter
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/BoxComponentBuilderFactory.java
|
{
"start": 13830,
"end": 17376
}
|
class ____
extends AbstractComponentBuilder<BoxComponent>
implements BoxComponentBuilder {
@Override
protected BoxComponent buildConcreteComponent() {
return new BoxComponent();
}
private org.apache.camel.component.box.BoxConfiguration getOrCreateConfiguration(BoxComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.box.BoxConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "clientId": getOrCreateConfiguration((BoxComponent) component).setClientId((java.lang.String) value); return true;
case "configuration": ((BoxComponent) component).setConfiguration((org.apache.camel.component.box.BoxConfiguration) value); return true;
case "enterpriseId": getOrCreateConfiguration((BoxComponent) component).setEnterpriseId((java.lang.String) value); return true;
case "userId": getOrCreateConfiguration((BoxComponent) component).setUserId((java.lang.String) value); return true;
case "bridgeErrorHandler": ((BoxComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((BoxComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((BoxComponent) component).setAutowiredEnabled((boolean) value); return true;
case "httpParams": getOrCreateConfiguration((BoxComponent) component).setHttpParams((java.util.Map) value); return true;
case "authenticationType": getOrCreateConfiguration((BoxComponent) component).setAuthenticationType((java.lang.String) value); return true;
case "accessTokenCache": getOrCreateConfiguration((BoxComponent) component).setAccessTokenCache((com.box.sdk.IAccessTokenCache) value); return true;
case "clientSecret": getOrCreateConfiguration((BoxComponent) component).setClientSecret((java.lang.String) value); return true;
case "encryptionAlgorithm": getOrCreateConfiguration((BoxComponent) component).setEncryptionAlgorithm((com.box.sdk.EncryptionAlgorithm) value); return true;
case "maxCacheEntries": getOrCreateConfiguration((BoxComponent) component).setMaxCacheEntries((int) value); return true;
case "privateKeyFile": getOrCreateConfiguration((BoxComponent) component).setPrivateKeyFile((java.lang.String) value); return true;
case "privateKeyPassword": getOrCreateConfiguration((BoxComponent) component).setPrivateKeyPassword((java.lang.String) value); return true;
case "publicKeyId": getOrCreateConfiguration((BoxComponent) component).setPublicKeyId((java.lang.String) value); return true;
case "sslContextParameters": getOrCreateConfiguration((BoxComponent) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
case "userName": getOrCreateConfiguration((BoxComponent) component).setUserName((java.lang.String) value); return true;
case "userPassword": getOrCreateConfiguration((BoxComponent) component).setUserPassword((java.lang.String) value); return true;
default: return false;
}
}
}
}
|
BoxComponentBuilderImpl
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/sample/AbstractAnnotatedAuditable.java
|
{
"start": 1238,
"end": 1885
}
|
class ____ {
private @Id @GeneratedValue Long id;
private @CreatedBy @ManyToOne AuditableUser createdBy;
private @CreatedDate @Temporal(TemporalType.TIMESTAMP) Date createAt;
private @ManyToOne AuditableUser lastModifiedBy;
private @Temporal(TemporalType.TIMESTAMP) Date lastModifiedAt;
public Long getId() {
return id;
}
public AuditableUser getCreatedBy() {
return createdBy;
}
public Date getCreateAt() {
return createAt;
}
@LastModifiedBy
public AuditableUser getLastModifiedBy() {
return lastModifiedBy;
}
@LastModifiedDate
public Date getLastModifiedAt() {
return lastModifiedAt;
}
}
|
AbstractAnnotatedAuditable
|
java
|
apache__camel
|
components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcConsumerStrategy.java
|
{
"start": 917,
"end": 1198
}
|
enum ____ {
/**
* Collect all streaming elements in a single request and process them as a list in the route.
*/
AGGREGATION,
/**
* Process each streaming element of a request independently.
*/
PROPAGATION,
DELEGATION;
}
|
GrpcConsumerStrategy
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/suite/engine/SuiteLauncherDiscoveryRequestBuilderTests.java
|
{
"start": 25418,
"end": 25911
}
|
class ____ implements ConfigurationParameters {
private final Map<String, String> map;
ParentConfigurationParameters(String key, String value) {
this.map = Map.of(key, value);
}
@Override
public Optional<String> get(String key) {
return Optional.ofNullable(map.get(key));
}
@Override
public Optional<Boolean> getBoolean(String key) {
return Optional.empty();
}
@Override
public Set<String> keySet() {
return Set.of();
}
}
}
|
ParentConfigurationParameters
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/beans/factory/support/QualifierAnnotationAutowireContextTests.java
|
{
"start": 1855,
"end": 29735
}
|
class ____ {
private static final String JUERGEN = "juergen";
private static final String MARK = "mark";
@Test
void autowiredFieldWithSingleNonQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredMethodParameterWithSingleNonQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredConstructorArgumentWithSingleNonQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(UnsatisfiedDependencyException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> assertThat(ex.getBeanName()).isEqualTo("autowired"));
}
@Test
void autowiredFieldWithSingleQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
person.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition("autowired", new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldTestBean bean = (QualifiedFieldTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterWithSingleQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
person.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterWithStaticallyQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(QualifiedPerson.class, cavs, null);
context.registerBeanDefinition(JUERGEN,
ScopedProxyUtils.createScopedProxy(new BeanDefinitionHolder(person, JUERGEN), context, true).getBeanDefinition());
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterWithStaticallyQualifiedCandidateAmongOthers() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(QualifiedPerson.class, cavs, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(DefaultValueQualifiedPerson.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredConstructorArgumentWithSingleQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs = new ConstructorArgumentValues();
cavs.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person = new RootBeanDefinition(Person.class, cavs, null);
person.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
context.registerBeanDefinition(JUERGEN, person);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedConstructorArgumentTestBean bean =
(QualifiedConstructorArgumentTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldWithMultipleNonQualifiedCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredMethodParameterWithMultipleNonQualifiedCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredConstructorArgumentWithMultipleNonQualifiedCandidates() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(UnsatisfiedDependencyException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> assertThat(ex.getBeanName()).isEqualTo("autowired"));
}
@Test
void autowiredFieldResolvesQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldTestBean bean = (QualifiedFieldTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldResolvesMetaQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(MetaQualifiedFieldTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
MetaQualifiedFieldTestBean bean = (MetaQualifiedFieldTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredMethodParameterResolvesQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedMethodParameterTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedMethodParameterTestBean bean =
(QualifiedMethodParameterTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredConstructorArgumentResolvesQualifiedCandidate() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
person1.addQualifier(new AutowireCandidateQualifier(TestQualifier.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedConstructorArgumentTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedConstructorArgumentTestBean bean =
(QualifiedConstructorArgumentTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldResolvesQualifiedCandidateWithDefaultValueAndNoValueOnBeanDefinition() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
// qualifier added, but includes no value
person1.addQualifier(new AutowireCandidateQualifier(TestQualifierWithDefaultValue.class));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldWithDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithDefaultValueTestBean bean =
(QualifiedFieldWithDefaultValueTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldDoesNotResolveCandidateWithDefaultValueAndConflictingValueOnBeanDefinition() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
// qualifier added, and non-default value specified
person1.addQualifier(new AutowireCandidateQualifier(TestQualifierWithDefaultValue.class, "not the default"));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldWithDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
assertThatExceptionOfType(BeanCreationException.class)
.isThrownBy(context::refresh)
.satisfies(ex -> {
assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
assertThat(ex.getBeanName()).isEqualTo("autowired");
});
}
@Test
void autowiredFieldResolvesWithDefaultValueAndExplicitDefaultValueOnBeanDefinition() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
// qualifier added, and value matches the default
person1.addQualifier(new AutowireCandidateQualifier(TestQualifierWithDefaultValue.class, "default"));
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldWithDefaultValueTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithDefaultValueTestBean bean =
(QualifiedFieldWithDefaultValueTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(JUERGEN);
}
@Test
void autowiredFieldResolvesWithMultipleQualifierValues() {
GenericApplicationContext context = new GenericApplicationContext();
ConstructorArgumentValues cavs1 = new ConstructorArgumentValues();
cavs1.addGenericArgumentValue(JUERGEN);
RootBeanDefinition person1 = new RootBeanDefinition(Person.class, cavs1, null);
AutowireCandidateQualifier qualifier = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier.setAttribute("number", 456);
person1.addQualifier(qualifier);
ConstructorArgumentValues cavs2 = new ConstructorArgumentValues();
cavs2.addGenericArgumentValue(MARK);
RootBeanDefinition person2 = new RootBeanDefinition(Person.class, cavs2, null);
AutowireCandidateQualifier qualifier2 = new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
qualifier2.setAttribute("number", 123);
person2.addQualifier(qualifier2);
context.registerBeanDefinition(JUERGEN, person1);
context.registerBeanDefinition(MARK, person2);
context.registerBeanDefinition("autowired",
new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
AnnotationConfigUtils.registerAnnotationConfigProcessors(context);
context.refresh();
QualifiedFieldWithMultipleAttributesTestBean bean =
(QualifiedFieldWithMultipleAttributesTestBean) context.getBean("autowired");
assertThat(bean.getPerson().getName()).isEqualTo(MARK);
}
@Test
void autowiredFieldDoesNotResolveWithMultipleQualifierValuesAndConflictingDefaultValue() {
    GenericApplicationContext ctx = new GenericApplicationContext();

    // Candidate "juergen": number = 456, "value" left at the annotation default.
    ConstructorArgumentValues juergenArgs = new ConstructorArgumentValues();
    juergenArgs.addGenericArgumentValue(JUERGEN);
    RootBeanDefinition juergenDef = new RootBeanDefinition(Person.class, juergenArgs, null);
    AutowireCandidateQualifier juergenQualifier =
            new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
    juergenQualifier.setAttribute("number", 456);
    juergenDef.addQualifier(juergenQualifier);

    // Candidate "mark": matching number, but a "value" that conflicts with the
    // annotation's default — so neither candidate satisfies the injection point.
    ConstructorArgumentValues markArgs = new ConstructorArgumentValues();
    markArgs.addGenericArgumentValue(MARK);
    RootBeanDefinition markDef = new RootBeanDefinition(Person.class, markArgs, null);
    AutowireCandidateQualifier markQualifier =
            new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
    markQualifier.setAttribute("number", 123);
    markQualifier.setAttribute("value", "not the default");
    markDef.addQualifier(markQualifier);

    ctx.registerBeanDefinition(JUERGEN, juergenDef);
    ctx.registerBeanDefinition(MARK, markDef);
    ctx.registerBeanDefinition("autowired",
            new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
    AnnotationConfigUtils.registerAnnotationConfigProcessors(ctx);

    // Refresh must fail: no candidate matches all qualifier attributes.
    assertThatExceptionOfType(BeanCreationException.class)
            .isThrownBy(ctx::refresh)
            .satisfies(ex -> {
                assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
                assertThat(ex.getBeanName()).isEqualTo("autowired");
            });
}
@Test
void autowiredFieldResolvesWithMultipleQualifierValuesAndExplicitDefaultValue() {
    GenericApplicationContext ctx = new GenericApplicationContext();

    // Candidate "juergen": non-matching number attribute.
    ConstructorArgumentValues juergenArgs = new ConstructorArgumentValues();
    juergenArgs.addGenericArgumentValue(JUERGEN);
    RootBeanDefinition juergenDef = new RootBeanDefinition(Person.class, juergenArgs, null);
    AutowireCandidateQualifier juergenQualifier =
            new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
    juergenQualifier.setAttribute("number", 456);
    juergenDef.addQualifier(juergenQualifier);

    // Candidate "mark": matching number, and "value" explicitly set to the
    // annotation's default — still a match.
    ConstructorArgumentValues markArgs = new ConstructorArgumentValues();
    markArgs.addGenericArgumentValue(MARK);
    RootBeanDefinition markDef = new RootBeanDefinition(Person.class, markArgs, null);
    AutowireCandidateQualifier markQualifier =
            new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
    markQualifier.setAttribute("number", 123);
    markQualifier.setAttribute("value", "default");
    markDef.addQualifier(markQualifier);

    ctx.registerBeanDefinition(JUERGEN, juergenDef);
    ctx.registerBeanDefinition(MARK, markDef);
    ctx.registerBeanDefinition("autowired",
            new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
    AnnotationConfigUtils.registerAnnotationConfigProcessors(ctx);
    ctx.refresh();

    QualifiedFieldWithMultipleAttributesTestBean bean =
            (QualifiedFieldWithMultipleAttributesTestBean) ctx.getBean("autowired");
    assertThat(bean.getPerson().getName()).isEqualTo(MARK);
}
@Test
void autowiredFieldDoesNotResolveWithMultipleQualifierValuesAndMultipleMatchingCandidates() {
    GenericApplicationContext ctx = new GenericApplicationContext();

    // Candidate "juergen": number = 123, "value" at the annotation default.
    ConstructorArgumentValues juergenArgs = new ConstructorArgumentValues();
    juergenArgs.addGenericArgumentValue(JUERGEN);
    RootBeanDefinition juergenDef = new RootBeanDefinition(Person.class, juergenArgs, null);
    AutowireCandidateQualifier juergenQualifier =
            new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
    juergenQualifier.setAttribute("number", 123);
    juergenDef.addQualifier(juergenQualifier);

    // Candidate "mark": number = 123 as well, "value" explicitly the default —
    // both candidates now match, making resolution ambiguous.
    ConstructorArgumentValues markArgs = new ConstructorArgumentValues();
    markArgs.addGenericArgumentValue(MARK);
    RootBeanDefinition markDef = new RootBeanDefinition(Person.class, markArgs, null);
    AutowireCandidateQualifier markQualifier =
            new AutowireCandidateQualifier(TestQualifierWithMultipleAttributes.class);
    markQualifier.setAttribute("number", 123);
    markQualifier.setAttribute("value", "default");
    markDef.addQualifier(markQualifier);

    ctx.registerBeanDefinition(JUERGEN, juergenDef);
    ctx.registerBeanDefinition(MARK, markDef);
    ctx.registerBeanDefinition("autowired",
            new RootBeanDefinition(QualifiedFieldWithMultipleAttributesTestBean.class));
    AnnotationConfigUtils.registerAnnotationConfigProcessors(ctx);

    // Ambiguity surfaces as a creation failure for the autowired bean.
    assertThatExceptionOfType(BeanCreationException.class)
            .isThrownBy(ctx::refresh)
            .satisfies(ex -> {
                assertThat(ex.getRootCause()).isInstanceOf(NoSuchBeanDefinitionException.class);
                assertThat(ex.getBeanName()).isEqualTo("autowired");
            });
}
@Test
void autowiredFieldResolvesWithBaseQualifierAndDefaultValue() {
    GenericApplicationContext ctx = new GenericApplicationContext();

    // Candidate "juergen": no qualifier metadata at all.
    ConstructorArgumentValues juergenArgs = new ConstructorArgumentValues();
    juergenArgs.addGenericArgumentValue(JUERGEN);
    RootBeanDefinition juergenDef = new RootBeanDefinition(Person.class, juergenArgs, null);

    // Candidate "mark": carries the plain @Qualifier marker, which the
    // field's base-qualifier-with-default-value injection point matches.
    ConstructorArgumentValues markArgs = new ConstructorArgumentValues();
    markArgs.addGenericArgumentValue(MARK);
    RootBeanDefinition markDef = new RootBeanDefinition(Person.class, markArgs, null);
    markDef.addQualifier(new AutowireCandidateQualifier(Qualifier.class));

    ctx.registerBeanDefinition(JUERGEN, juergenDef);
    ctx.registerBeanDefinition(MARK, markDef);
    ctx.registerBeanDefinition("autowired",
            new RootBeanDefinition(QualifiedFieldWithBaseQualifierDefaultValueTestBean.class));
    AnnotationConfigUtils.registerAnnotationConfigProcessors(ctx);
    ctx.refresh();

    QualifiedFieldWithBaseQualifierDefaultValueTestBean bean =
            (QualifiedFieldWithBaseQualifierDefaultValueTestBean) ctx.getBean("autowired");
    assertThat(bean.getPerson().getName()).isEqualTo(MARK);
}
@Test
void autowiredFieldResolvesWithBaseQualifierAndNonDefaultValue() {
    GenericApplicationContext ctx = new GenericApplicationContext();

    // Candidate "juergen1": qualified with the value the constructor arg asks for.
    ConstructorArgumentValues realArgs = new ConstructorArgumentValues();
    realArgs.addGenericArgumentValue("the real juergen");
    RootBeanDefinition realDef = new RootBeanDefinition(Person.class, realArgs, null);
    realDef.addQualifier(new AutowireCandidateQualifier(Qualifier.class, "juergen"));

    // Candidate "juergen2": qualified with a different value — must be rejected.
    ConstructorArgumentValues imposterArgs = new ConstructorArgumentValues();
    imposterArgs.addGenericArgumentValue("juergen imposter");
    RootBeanDefinition imposterDef = new RootBeanDefinition(Person.class, imposterArgs, null);
    imposterDef.addQualifier(new AutowireCandidateQualifier(Qualifier.class, "not really juergen"));

    ctx.registerBeanDefinition("juergen1", realDef);
    ctx.registerBeanDefinition("juergen2", imposterDef);
    ctx.registerBeanDefinition("autowired",
            new RootBeanDefinition(QualifiedConstructorArgumentWithBaseQualifierNonDefaultValueTestBean.class));
    AnnotationConfigUtils.registerAnnotationConfigProcessors(ctx);
    ctx.refresh();

    QualifiedConstructorArgumentWithBaseQualifierNonDefaultValueTestBean bean =
            (QualifiedConstructorArgumentWithBaseQualifierNonDefaultValueTestBean) ctx.getBean("autowired");
    assertThat(bean.getPerson().getName()).isEqualTo("the real juergen");
}
@Test
void autowiredFieldDoesNotResolveWithBaseQualifierAndNonDefaultValueAndMultipleMatchingCandidates() {
    GenericApplicationContext ctx = new GenericApplicationContext();

    // Two candidates carrying the SAME qualifier value "juergen" — the
    // constructor-argument injection point cannot choose between them.
    ConstructorArgumentValues realArgs = new ConstructorArgumentValues();
    realArgs.addGenericArgumentValue("the real juergen");
    RootBeanDefinition realDef = new RootBeanDefinition(Person.class, realArgs, null);
    realDef.addQualifier(new AutowireCandidateQualifier(Qualifier.class, "juergen"));

    ConstructorArgumentValues imposterArgs = new ConstructorArgumentValues();
    imposterArgs.addGenericArgumentValue("juergen imposter");
    RootBeanDefinition imposterDef = new RootBeanDefinition(Person.class, imposterArgs, null);
    imposterDef.addQualifier(new AutowireCandidateQualifier(Qualifier.class, "juergen"));

    ctx.registerBeanDefinition("juergen1", realDef);
    ctx.registerBeanDefinition("juergen2", imposterDef);
    ctx.registerBeanDefinition("autowired",
            new RootBeanDefinition(QualifiedConstructorArgumentWithBaseQualifierNonDefaultValueTestBean.class));
    AnnotationConfigUtils.registerAnnotationConfigProcessors(ctx);

    // Constructor injection failures surface as UnsatisfiedDependencyException.
    assertThatExceptionOfType(UnsatisfiedDependencyException.class)
            .isThrownBy(ctx::refresh)
            .satisfies(ex -> assertThat(ex.getBeanName()).isEqualTo("autowired"));
}
private static
|
QualifierAnnotationAutowireContextTests
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/aggregator/SpringAggregateExpressionTimeoutFallbackTest.java
|
{
"start": 1081,
"end": 1432
}
|
class ____ extends AggregateExpressionTimeoutFallbackTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this,
"org/apache/camel/spring/processor/aggregator/SpringAggregateExpressionTimeoutFallbackTest.xml");
}
}
|
SpringAggregateExpressionTimeoutFallbackTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java
|
{
"start": 5841,
"end": 6040
}
|
class ____ {
public static final int DEPTH_LIMIT = 20;
}
private static Builder builder(Mapper in) {
return ((FlattenedFieldMapper) in).builder;
}
public static
|
Defaults
|
java
|
spring-projects__spring-boot
|
module/spring-boot-quartz/src/test/java/org/springframework/boot/quartz/autoconfigure/QuartzAutoConfigurationTests.java
|
{
"start": 19412,
"end": 19799
}
|
class ____ extends BaseQuartzConfiguration {
@Bean
JobDetail fooJob() {
return JobBuilder.newJob().ofType(FooJob.class).withIdentity("fooJob").storeDurably().build();
}
@Bean
JobDetail barJob() {
return JobBuilder.newJob().ofType(FooJob.class).withIdentity("barJob").storeDurably().build();
}
}
@Configuration(proxyBeanMethods = false)
static
|
QuartzJobsConfiguration
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-jaxrs/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/filter/ContainerResponseFilterAdapter.java
|
{
"start": 1485,
"end": 1859
}
|
class ____ implements RestExtensionAdapter<ContainerResponseFilter> {
@Override
public boolean accept(Object extension) {
return extension instanceof ContainerResponseFilter;
}
@Override
public RestFilter adapt(ContainerResponseFilter extension) {
return new Filter(extension);
}
private static final
|
ContainerResponseFilterAdapter
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/IntegerPatternConverter.java
|
{
"start": 1175,
"end": 2573
}
|
class ____ extends AbstractPatternConverter implements ArrayPatternConverter {
/**
* Singleton.
*/
private static final IntegerPatternConverter INSTANCE = new IntegerPatternConverter();
/**
* Private constructor.
*/
private IntegerPatternConverter() {
super("Integer", "integer");
}
/**
* Obtains an instance of pattern converter.
*
* @param options options, may be null.
* @return instance of pattern converter.
*/
public static IntegerPatternConverter newInstance(final String[] options) {
return INSTANCE;
}
@Override
public void format(final StringBuilder toAppendTo, final Object... objects) {
for (int i = 0; i < objects.length; i++) {
if (objects[i] instanceof Integer) {
format(objects[i], toAppendTo);
break;
} else if (objects[i] instanceof NotANumber) {
toAppendTo.append(NotANumber.VALUE);
break;
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void format(final Object obj, final StringBuilder toAppendTo) {
if (obj instanceof Integer) {
toAppendTo.append(((Integer) obj).intValue());
} else if (obj instanceof Date) {
toAppendTo.append(((Date) obj).getTime());
}
}
}
|
IntegerPatternConverter
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionRequestSerializingTests.java
|
{
"start": 713,
"end": 1969
}
|
class ____ extends AbstractWireSerializingTestCase<PutSynonymRuleAction.Request> {
@Override
protected Writeable.Reader<PutSynonymRuleAction.Request> instanceReader() {
return PutSynonymRuleAction.Request::new;
}
@Override
protected PutSynonymRuleAction.Request createTestInstance() {
return new PutSynonymRuleAction.Request(randomIdentifier(), SynonymsTestUtils.randomSynonymRule(), randomBoolean());
}
@Override
protected PutSynonymRuleAction.Request mutateInstance(PutSynonymRuleAction.Request instance) throws IOException {
String synonymsSetId = instance.synonymsSetId();
SynonymRule synonymRule = instance.synonymRule();
boolean refresh = instance.refresh();
switch (between(0, 2)) {
case 0 -> synonymsSetId = randomValueOtherThan(synonymsSetId, () -> randomIdentifier());
case 1 -> synonymRule = randomValueOtherThan(synonymRule, SynonymsTestUtils::randomSynonymRule);
case 2 -> refresh = refresh == false;
default -> throw new AssertionError("Illegal randomisation branch");
}
return new PutSynonymRuleAction.Request(synonymsSetId, synonymRule, refresh);
}
}
|
PutSynonymRuleActionRequestSerializingTests
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/dynamic/support/ParametrizedTypeInformationUnitTests.java
|
{
"start": 4004,
"end": 4087
}
|
interface ____ extends List<Set<Number>> {
}
private static
|
ListOfSetOfNumber
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/RealmInfoIT.java
|
{
"start": 622,
"end": 1286
}
|
class ____ extends SecurityRealmSmokeTestCase {
public void testThatAllRealmTypesAreEnabled() throws IOException {
final Request request = new Request("GET", "_xpack/usage");
final Response response = client().performRequest(request);
Map<String, Object> usage = entityAsMap(response);
Map<String, Object> realms = ObjectPath.evaluate(usage, "security.realms");
realms.forEach((type, config) -> {
assertThat(config, Matchers.instanceOf(Map.class));
assertThat("Realm type [" + type + "] is not enabled", ((Map<?, ?>) config).get("enabled"), Matchers.equalTo(true));
});
}
}
|
RealmInfoIT
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/messages/webmonitor/RequestJobsWithIDsOverview.java
|
{
"start": 1064,
"end": 2227
}
|
class ____ implements InfoMessage {
private static final long serialVersionUID = 3052933564788843275L;
// ------------------------------------------------------------------------
private static final RequestJobsWithIDsOverview INSTANCE = new RequestJobsWithIDsOverview();
public static RequestJobsWithIDsOverview getInstance() {
return INSTANCE;
}
// ------------------------------------------------------------------------
@Override
public int hashCode() {
return RequestJobsWithIDsOverview.class.hashCode();
}
@Override
public boolean equals(Object obj) {
return obj != null && obj.getClass() == RequestJobsWithIDsOverview.class;
}
@Override
public String toString() {
return RequestJobsWithIDsOverview.class.getSimpleName();
}
// ------------------------------------------------------------------------
/** No external instantiation */
private RequestJobsWithIDsOverview() {}
/** Preserve the singleton property by returning the singleton instance */
private Object readResolve() {
return INSTANCE;
}
}
|
RequestJobsWithIDsOverview
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/logging/AbstractLoggingSystem.java
|
{
"start": 1346,
"end": 7860
}
|
class ____ extends LoggingSystem {
protected static final Comparator<LoggerConfiguration> CONFIGURATION_COMPARATOR = new LoggerConfigurationComparator(
ROOT_LOGGER_NAME);
private final ClassLoader classLoader;
public AbstractLoggingSystem(ClassLoader classLoader) {
this.classLoader = classLoader;
}
@Override
public void beforeInitialize() {
}
@Override
public void initialize(LoggingInitializationContext initializationContext, @Nullable String configLocation,
@Nullable LogFile logFile) {
if (StringUtils.hasLength(configLocation)) {
initializeWithSpecificConfig(initializationContext, configLocation, logFile);
return;
}
initializeWithConventions(initializationContext, logFile);
}
private void initializeWithSpecificConfig(LoggingInitializationContext initializationContext, String configLocation,
@Nullable LogFile logFile) {
configLocation = SystemPropertyUtils.resolvePlaceholders(configLocation);
loadConfiguration(initializationContext, configLocation, logFile);
}
private void initializeWithConventions(LoggingInitializationContext initializationContext,
@Nullable LogFile logFile) {
String config = getSelfInitializationConfig();
if (config != null && logFile == null) {
// self initialization has occurred, reinitialize in case of property changes
reinitialize(initializationContext);
return;
}
if (config == null) {
config = getSpringInitializationConfig();
}
if (config != null) {
loadConfiguration(initializationContext, config, logFile);
return;
}
loadDefaults(initializationContext, logFile);
}
/**
* Return any self initialization config that has been applied. By default this method
* checks {@link #getStandardConfigLocations()} and assumes that any file that exists
* will have been applied.
* @return the self initialization config or {@code null}
*/
protected @Nullable String getSelfInitializationConfig() {
return findConfig(getStandardConfigLocations());
}
/**
* Return any spring specific initialization config that should be applied. By default
* this method checks {@link #getSpringConfigLocations()}.
* @return the spring initialization config or {@code null}
*/
protected @Nullable String getSpringInitializationConfig() {
return findConfig(getSpringConfigLocations());
}
private @Nullable String findConfig(String[] locations) {
for (String location : locations) {
ClassPathResource resource = new ClassPathResource(location, this.classLoader);
if (resource.exists()) {
return "classpath:" + location;
}
}
return null;
}
/**
* Return the standard config locations for this system.
* @return the standard config locations
* @see #getSelfInitializationConfig()
*/
protected abstract String[] getStandardConfigLocations();
/**
* Return the spring config locations for this system. By default this method returns
* a set of locations based on {@link #getStandardConfigLocations()}.
* @return the spring config locations
* @see #getSpringInitializationConfig()
*/
protected String[] getSpringConfigLocations() {
String[] locations = getStandardConfigLocations();
for (int i = 0; i < locations.length; i++) {
String extension = StringUtils.getFilenameExtension(locations[i]);
int extensionLength = (extension != null) ? (extension.length() + 1) : 0;
locations[i] = locations[i].substring(0, locations[i].length() - extensionLength) + "-spring." + extension;
}
return locations;
}
/**
* Load sensible defaults for the logging system.
* @param initializationContext the logging initialization context
* @param logFile the file to load or {@code null} if no log file is to be written
*/
protected abstract void loadDefaults(LoggingInitializationContext initializationContext, @Nullable LogFile logFile);
/**
* Load a specific configuration.
* @param initializationContext the logging initialization context
* @param location the location of the configuration to load (never {@code null})
* @param logFile the file to load or {@code null} if no log file is to be written
*/
protected abstract void loadConfiguration(LoggingInitializationContext initializationContext, String location,
@Nullable LogFile logFile);
/**
* Reinitialize the logging system if required. Called when
* {@link #getSelfInitializationConfig()} is used and the log file hasn't changed. May
* be used to reload configuration (for example to pick up additional System
* properties).
* @param initializationContext the logging initialization context
*/
protected void reinitialize(LoggingInitializationContext initializationContext) {
}
protected final ClassLoader getClassLoader() {
return this.classLoader;
}
protected final String getPackagedConfigFile(String fileName) {
String defaultPath = ClassUtils.getPackageName(getClass());
defaultPath = defaultPath.replace('.', '/');
defaultPath = defaultPath + "/" + fileName;
defaultPath = "classpath:" + defaultPath;
return defaultPath;
}
protected final void applySystemProperties(Environment environment, @Nullable LogFile logFile) {
new LoggingSystemProperties(environment, getDefaultValueResolver(environment), null).apply(logFile);
}
/**
* Return the default value resolver to use when resolving system properties.
* @param environment the environment
* @return the default value resolver
* @since 3.2.0
*/
protected Function<@Nullable String, @Nullable String> getDefaultValueResolver(Environment environment) {
String defaultLogCorrelationPattern = getDefaultLogCorrelationPattern();
return (name) -> {
String applicationPropertyName = LoggingSystemProperty.CORRELATION_PATTERN.getApplicationPropertyName();
Assert.state(applicationPropertyName != null, "applicationPropertyName must not be null");
if (StringUtils.hasLength(defaultLogCorrelationPattern) && applicationPropertyName.equals(name)
&& environment.getProperty(LoggingSystem.EXPECT_CORRELATION_ID_PROPERTY, Boolean.class, false)) {
return defaultLogCorrelationPattern;
}
return null;
};
}
/**
* Return the default log correlation pattern or {@code null} if log correlation
* patterns are not supported.
* @return the default log correlation pattern
* @since 3.2.0
*/
protected @Nullable String getDefaultLogCorrelationPattern() {
return null;
}
/**
* Maintains a mapping between native levels and {@link LogLevel}.
*
* @param <T> the native level type
*/
protected static
|
AbstractLoggingSystem
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestMappingHandlerAdapterIntegrationTests.java
|
{
"start": 4831,
"end": 19673
}
|
class ____ {
private final Object handler = new Handler();
private final MockHttpServletRequest request = new MockHttpServletRequest();
private final MockHttpServletResponse response = new MockHttpServletResponse();
private RequestMappingHandlerAdapter handlerAdapter;
@BeforeEach
void setup() throws Exception {
ConfigurableWebBindingInitializer bindingInitializer = new ConfigurableWebBindingInitializer();
bindingInitializer.setValidator(new StubValidator());
List<HandlerMethodArgumentResolver> customResolvers = new ArrayList<>();
customResolvers.add(new ServletWebArgumentResolverAdapter(new ColorArgumentResolver()));
customResolvers.add(new CustomPrincipalArgumentResolver());
GenericWebApplicationContext context = new GenericWebApplicationContext();
context.refresh();
handlerAdapter = new RequestMappingHandlerAdapter();
handlerAdapter.setWebBindingInitializer(bindingInitializer);
handlerAdapter.setCustomArgumentResolvers(customResolvers);
handlerAdapter.setApplicationContext(context);
handlerAdapter.setBeanFactory(context.getBeanFactory());
handlerAdapter.afterPropertiesSet();
request.setMethod("POST");
// Expose request to the current thread (for SpEL expressions)
RequestContextHolder.setRequestAttributes(new ServletWebRequest(request));
}
@AfterEach
void teardown() {
RequestContextHolder.resetRequestAttributes();
}
@Test
void handle() throws Exception {
Class<?>[] parameterTypes = new Class<?>[] {int.class, String.class, String.class, String.class, Map.class,
Date.class, Map.class, String.class, String.class, TestBean.class, Errors.class, TestBean.class,
Color.class, HttpServletRequest.class, HttpServletResponse.class, TestBean.class, TestBean.class,
User.class, OtherUser.class, Principal.class, Model.class,
SemanticApiVersionParser.Version.class, UriComponentsBuilder.class};
String datePattern = "yyyy.MM.dd";
String formattedDate = "2011.03.16";
Date date = new GregorianCalendar(2011, Calendar.MARCH, 16).getTime();
TestBean sessionAttribute = new TestBean();
TestBean requestAttribute = new TestBean();
request.addHeader("Content-Type", "text/plain; charset=utf-8");
request.addHeader("header", "headerValue");
request.addHeader("anotherHeader", "anotherHeaderValue");
request.addParameter("datePattern", datePattern);
request.addParameter("dateParam", formattedDate);
request.addParameter("paramByConvention", "paramByConventionValue");
request.addParameter("age", "25");
request.setCookies(new Cookie("cookie", "99"));
request.setContent("Hello World".getBytes(StandardCharsets.UTF_8));
request.setUserPrincipal(new User());
request.setContextPath("/contextPath");
request.setServletPath("/main");
System.setProperty("systemHeader", "systemHeaderValue");
Map<String, String> uriTemplateVars = new HashMap<>();
uriTemplateVars.put("pathvar", "pathvarValue");
request.setAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE, uriTemplateVars);
request.getSession().setAttribute("sessionAttribute", sessionAttribute);
request.setAttribute("requestAttribute", requestAttribute);
SemanticApiVersionParser.Version version = new SemanticApiVersionParser().parseVersion("1.2");
request.setAttribute(HandlerMapping.API_VERSION_ATTRIBUTE, version);
HandlerMethod handlerMethod = handlerMethod("handle", parameterTypes);
ModelAndView mav = handlerAdapter.handle(request, response, handlerMethod);
ModelMap model = mav.getModelMap();
assertThat(mav.getViewName()).isEqualTo("viewName");
assertThat(model.get("cookie")).isEqualTo(99);
assertThat(model.get("pathvar")).isEqualTo("pathvarValue");
assertThat(model.get("header")).isEqualTo("headerValue");
assertThat(model.get("dateParam")).isEqualTo(date);
Map<?, ?> map = (Map<?, ?>) model.get("headerMap");
assertThat(map.get("header")).isEqualTo("headerValue");
assertThat(map.get("anotherHeader")).isEqualTo("anotherHeaderValue");
assertThat(model.get("systemHeader")).isEqualTo("systemHeaderValue");
map = (Map<?, ?>) model.get("paramMap");
assertThat(map.get("dateParam")).isEqualTo(formattedDate);
assertThat(map.get("paramByConvention")).isEqualTo("paramByConventionValue");
assertThat(model.get("value")).isEqualTo("/contextPath");
TestBean modelAttr = (TestBean) model.get("modelAttr");
assertThat(modelAttr.getAge()).isEqualTo(25);
assertThat(modelAttr.getName()).isEqualTo("Set by model method [modelAttr]");
assertThat(request.getSession().getAttribute("modelAttr")).isSameAs(modelAttr);
BindingResult bindingResult = (BindingResult) model.get(BindingResult.MODEL_KEY_PREFIX + "modelAttr");
assertThat(bindingResult.getTarget()).isSameAs(modelAttr);
assertThat(bindingResult.getErrorCount()).isEqualTo(1);
String conventionAttrName = "testBean";
TestBean modelAttrByConvention = (TestBean) model.get(conventionAttrName);
assertThat(modelAttrByConvention.getAge()).isEqualTo(25);
assertThat(modelAttrByConvention.getName()).isEqualTo("Set by model method [modelAttrByConvention]");
assertThat(request.getSession().getAttribute(conventionAttrName)).isSameAs(modelAttrByConvention);
bindingResult = (BindingResult) model.get(BindingResult.MODEL_KEY_PREFIX + conventionAttrName);
assertThat(bindingResult.getTarget()).isSameAs(modelAttrByConvention);
assertThat(model.get("customArg")).isInstanceOf(Color.class);
assertThat(model.get("user").getClass()).isEqualTo(User.class);
assertThat(model.get("otherUser").getClass()).isEqualTo(OtherUser.class);
assertThat(((Principal) model.get("customUser")).getName()).isEqualTo("Custom User");
assertThat(model.get("sessionAttribute")).isSameAs(sessionAttribute);
assertThat(model.get("requestAttribute")).isSameAs(requestAttribute);
assertThat(model.get("version")).isSameAs(version);
assertThat(model.get("url")).isEqualTo(URI.create("http://localhost/contextPath/main/path"));
}
@Test
void handleInInterface() throws Exception {
Class<?>[] parameterTypes = new Class<?>[] {int.class, String.class, String.class, String.class, Map.class,
Date.class, Map.class, String.class, String.class, TestBean.class, Errors.class, TestBean.class,
Color.class, HttpServletRequest.class, HttpServletResponse.class, TestBean.class, TestBean.class,
User.class, OtherUser.class, Model.class, UriComponentsBuilder.class};
String datePattern = "yyyy.MM.dd";
String formattedDate = "2011.03.16";
Date date = new GregorianCalendar(2011, Calendar.MARCH, 16).getTime();
TestBean sessionAttribute = new TestBean();
TestBean requestAttribute = new TestBean();
request.addHeader("Content-Type", "text/plain; charset=utf-8");
request.addHeader("header", "headerValue");
request.addHeader("anotherHeader", "anotherHeaderValue");
request.addParameter("datePattern", datePattern);
request.addParameter("dateParam", formattedDate);
request.addParameter("paramByConvention", "paramByConventionValue");
request.addParameter("age", "25");
request.setCookies(new Cookie("cookie", "99"));
request.setContent("Hello World".getBytes(StandardCharsets.UTF_8));
request.setUserPrincipal(new User());
request.setContextPath("/contextPath");
request.setServletPath("/main");
System.setProperty("systemHeader", "systemHeaderValue");
Map<String, String> uriTemplateVars = new HashMap<>();
uriTemplateVars.put("pathvar", "pathvarValue");
request.setAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE, uriTemplateVars);
request.getSession().setAttribute("sessionAttribute", sessionAttribute);
request.setAttribute("requestAttribute", requestAttribute);
HandlerMethod handlerMethod = handlerMethod("handleInInterface", parameterTypes);
ModelAndView mav = handlerAdapter.handle(request, response, handlerMethod);
ModelMap model = mav.getModelMap();
assertThat(mav.getViewName()).isEqualTo("viewName");
assertThat(model.get("cookie")).isEqualTo(99);
assertThat(model.get("pathvar")).isEqualTo("pathvarValue");
assertThat(model.get("header")).isEqualTo("headerValue");
assertThat(model.get("dateParam")).isEqualTo(date);
Map<?, ?> map = (Map<?, ?>) model.get("headerMap");
assertThat(map.get("header")).isEqualTo("headerValue");
assertThat(map.get("anotherHeader")).isEqualTo("anotherHeaderValue");
assertThat(model.get("systemHeader")).isEqualTo("systemHeaderValue");
map = (Map<?, ?>) model.get("paramMap");
assertThat(map.get("dateParam")).isEqualTo(formattedDate);
assertThat(map.get("paramByConvention")).isEqualTo("paramByConventionValue");
assertThat(model.get("value")).isEqualTo("/contextPath");
TestBean modelAttr = (TestBean) model.get("modelAttr");
assertThat(modelAttr.getAge()).isEqualTo(25);
assertThat(modelAttr.getName()).isEqualTo("Set by model method [modelAttr]");
assertThat(request.getSession().getAttribute("modelAttr")).isSameAs(modelAttr);
BindingResult bindingResult = (BindingResult) model.get(BindingResult.MODEL_KEY_PREFIX + "modelAttr");
assertThat(bindingResult.getTarget()).isSameAs(modelAttr);
assertThat(bindingResult.getErrorCount()).isEqualTo(1);
String conventionAttrName = "testBean";
TestBean modelAttrByConvention = (TestBean) model.get(conventionAttrName);
assertThat(modelAttrByConvention.getAge()).isEqualTo(25);
assertThat(modelAttrByConvention.getName()).isEqualTo("Set by model method [modelAttrByConvention]");
assertThat(request.getSession().getAttribute(conventionAttrName)).isSameAs(modelAttrByConvention);
bindingResult = (BindingResult) model.get(BindingResult.MODEL_KEY_PREFIX + conventionAttrName);
assertThat(bindingResult.getTarget()).isSameAs(modelAttrByConvention);
assertThat(model.get("customArg")).isInstanceOf(Color.class);
assertThat(model.get("user").getClass()).isEqualTo(User.class);
assertThat(model.get("otherUser").getClass()).isEqualTo(OtherUser.class);
assertThat(model.get("sessionAttribute")).isSameAs(sessionAttribute);
assertThat(model.get("requestAttribute")).isSameAs(requestAttribute);
assertThat(model.get("url")).isEqualTo(URI.create("http://localhost/contextPath/main/path"));
}
@Test
void handleRequestBody() throws Exception {
Class<?>[] parameterTypes = new Class<?>[] {byte[].class};
request.setMethod("POST");
request.addHeader("Content-Type", "text/plain; charset=utf-8");
request.setContent("Hello Server".getBytes(StandardCharsets.UTF_8));
HandlerMethod handlerMethod = handlerMethod("handleRequestBody", parameterTypes);
ModelAndView mav = handlerAdapter.handle(request, response, handlerMethod);
assertThat(mav).isNull();
assertThat(new String(response.getContentAsByteArray(), StandardCharsets.UTF_8)).isEqualTo("Handled requestBody=[Hello Server]");
assertThat(response.getStatus()).isEqualTo(HttpStatus.ACCEPTED.value());
}
@Test
void handleAndValidateRequestBody() throws Exception {
Class<?>[] parameterTypes = new Class<?>[] {TestBean.class, Errors.class};
request.addHeader("Content-Type", "text/plain; charset=utf-8");
request.setContent("Hello Server".getBytes(StandardCharsets.UTF_8));
HandlerMethod handlerMethod = handlerMethod("handleAndValidateRequestBody", parameterTypes);
ModelAndView mav = handlerAdapter.handle(request, response, handlerMethod);
assertThat(mav).isNull();
assertThat(new String(response.getContentAsByteArray(), StandardCharsets.UTF_8)).isEqualTo("Error count [1]");
assertThat(response.getStatus()).isEqualTo(HttpStatus.ACCEPTED.value());
}
@Test
void handleHttpEntity() throws Exception {
Class<?>[] parameterTypes = new Class<?>[] {HttpEntity.class};
request.addHeader("Content-Type", "text/plain; charset=utf-8");
request.setContent("Hello Server".getBytes(StandardCharsets.UTF_8));
HandlerMethod handlerMethod = handlerMethod("handleHttpEntity", parameterTypes);
ModelAndView mav = handlerAdapter.handle(request, response, handlerMethod);
assertThat(mav).isNull();
assertThat(response.getStatus()).isEqualTo(HttpStatus.ACCEPTED.value());
assertThat(new String(response.getContentAsByteArray(), StandardCharsets.UTF_8)).isEqualTo("Handled requestBody=[Hello Server]");
assertThat(response.getHeader("header")).isEqualTo("headerValue");
// set because of @SessionAttributes
assertThat(response.getHeader("Cache-Control")).isEqualTo("no-store");
}
// SPR-13867
@Test
void handleHttpEntityWithCacheControl() throws Exception {
Class<?>[] parameterTypes = new Class<?>[] {HttpEntity.class};
request.addHeader("Content-Type", "text/plain; charset=utf-8");
request.setContent("Hello Server".getBytes(StandardCharsets.UTF_8));
HandlerMethod handlerMethod = handlerMethod("handleHttpEntityWithCacheControl", parameterTypes);
ModelAndView mav = handlerAdapter.handle(request, response, handlerMethod);
assertThat(mav).isNull();
assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
assertThat(new String(response.getContentAsByteArray(), StandardCharsets.UTF_8)).isEqualTo("Handled requestBody=[Hello Server]");
assertThat(response.getHeaderValues("Cache-Control")).containsExactly("max-age=3600");
}
@Test
void handleRequestPart() throws Exception {
MockMultipartHttpServletRequest multipartRequest = new MockMultipartHttpServletRequest();
multipartRequest.addFile(new MockMultipartFile("requestPart", "", "text/plain", "content".getBytes(StandardCharsets.UTF_8)));
HandlerMethod handlerMethod = handlerMethod("handleRequestPart", String.class, Model.class);
ModelAndView mav = handlerAdapter.handle(multipartRequest, response, handlerMethod);
assertThat(mav).isNotNull();
assertThat(mav.getModelMap().get("requestPart")).isEqualTo("content");
}
@Test
void handleAndValidateRequestPart() throws Exception {
MockMultipartHttpServletRequest multipartRequest = new MockMultipartHttpServletRequest();
multipartRequest.addFile(new MockMultipartFile("requestPart", "", "text/plain", "content".getBytes(StandardCharsets.UTF_8)));
HandlerMethod handlerMethod = handlerMethod("handleAndValidateRequestPart", String.class, Errors.class, Model.class);
ModelAndView mav = handlerAdapter.handle(multipartRequest, response, handlerMethod);
assertThat(mav).isNotNull();
assertThat(mav.getModelMap().get("error count")).isEqualTo(1);
}
@Test
void handleAndCompleteSession() throws Exception {
HandlerMethod handlerMethod = handlerMethod("handleAndCompleteSession", SessionStatus.class);
handlerAdapter.handle(request, response, handlerMethod);
assertThat(request.getSession().getAttributeNames().hasMoreElements()).isFalse();
}
private HandlerMethod handlerMethod(String methodName, Class<?>... paramTypes) throws Exception {
Method method = handler.getClass().getDeclaredMethod(methodName, paramTypes);
return new InvocableHandlerMethod(handler, method);
}
private
|
RequestMappingHandlerAdapterIntegrationTests
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/async/code/javaguide/async/JavaStream.java
|
{
"start": 1216,
"end": 1801
}
|
class ____ extends MockJavaAction {
Controller1(JavaHandlerComponents javaHandlerComponents) {
super(javaHandlerComponents);
}
// #by-default
public Result index() {
return ok("Hello World");
}
// #by-default
}
@Test
public void byDefaultWithHttpEntity() {
assertThat(
contentAsString(
call(
new ControllerWithHttpEntity(instanceOf(JavaHandlerComponents.class)),
fakeRequest(),
mat)))
.isEqualTo("Hello World");
}
public static
|
Controller1
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/kotlin/KspComponentProcessorTest.java
|
{
"start": 19889,
"end": 21510
}
|
class ____ implements MyComponent {",
" private final MyModule myModule;",
"",
" private final MyComponentImpl myComponentImpl = this;",
"",
" MyComponentImpl(MyModule myModuleParam) {",
" this.myModule = myModuleParam;",
"",
" }",
"",
" @Override",
" public void injectFoo(Foo foo) {",
" injectFoo2(foo);",
" }",
"",
" @CanIgnoreReturnValue",
" private Foo injectFoo2(Foo instance) {",
" Foo_MembersInjector.injectBar("
+ "instance, MyModule_ProvideBarFactory.provideBar(myModule));",
" return instance;",
" }",
" }",
"}"));
});
}
@Test
public void membersInjectionTest() throws Exception {
Source componentSrc =
CompilerTests.kotlinSource(
"MyComponent.kt",
"package test",
"",
"import dagger.Component",
"import dagger.Module",
"import dagger.Provides",
"import javax.inject.Inject",
"import javax.inject.Named",
"import javax.inject.Provider",
"",
"@Component(modules = [MyModule::class])",
"
|
MyComponentImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyLoadingByEnhancerSetterTest.java
|
{
"start": 2053,
"end": 2847
}
|
class ____ is being compared to the persistent map (by the generated code) -- it shouldn't
item = s.find( ItemField.class, "F" );
} );
scope.inTransaction( s -> {
mergedItem = (Item) s.merge( item );
} );
assertEquals( 2, mergedItem.getParameters().size() );
}
@Test
// failure doesn't occur with HHH-16572 change @FailureExpected( jiraKey = "HHH-10747" )
public void testProperty(SessionFactoryScope scope) {
scope.inTransaction( s -> {
ItemProperty input = new ItemProperty();
input.setName( "P" );
Map<String, String> parameters = new HashMap<>();
parameters.put( "ccc", "CCC" );
parameters.put( "ddd", "DDD" );
input.setParameters( parameters );
s.persist( input );
} );
scope.inTransaction( s -> {
// A parameters map is created with the
|
and
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
|
{
"start": 2960,
"end": 9974
}
|
class ____ extends ESTestCase {
private boolean enableRandomFailures;
@Before
public void injectRandomFailures() {
enableRandomFailures = randomBoolean();
}
protected void disableRandomFailures() {
enableRandomFailures = false;
}
protected void failRandomly() {
if (enableRandomFailures && rarely()) {
throw new RuntimeException("dummy test failure");
}
}
/**
* Checks if cluster state matches internal state of IndicesClusterStateService instance
*
* @param state cluster state used for matching
*/
public void assertClusterStateMatchesNodeState(ClusterState state, IndicesClusterStateService indicesClusterStateService) {
MockIndicesService indicesService = (MockIndicesService) indicesClusterStateService.indicesService;
ConcurrentMap<ShardId, ShardRouting> failedShardsCache = indicesClusterStateService.failedShardsCache;
RoutingNode localRoutingNode = state.getRoutingNodes().node(state.getNodes().getLocalNodeId());
if (localRoutingNode != null) {
if (enableRandomFailures == false) {
// initializing a shard should succeed when enableRandomFailures is disabled
// active shards can be failed if state persistence was disabled in an earlier CS update
if (failedShardsCache.values().stream().anyMatch(ShardRouting::initializing)) {
fail("failed shard cache should not contain initializing shard routing: " + failedShardsCache.values());
}
}
// check that all shards in local routing nodes have been allocated
for (ShardRouting shardRouting : localRoutingNode) {
Index index = shardRouting.index();
IndexMetadata indexMetadata = state.metadata().getProject().getIndexSafe(index);
MockIndexShard shard = indicesService.getShardOrNull(shardRouting.shardId());
ShardRouting failedShard = failedShardsCache.get(shardRouting.shardId());
if (state.blocks().disableStatePersistence()) {
if (shard != null) {
fail("Shard with id " + shardRouting + " should be removed from indicesService due to disabled state persistence");
}
} else {
if (failedShard != null && failedShard.isSameAllocation(shardRouting) == false) {
fail("Shard cache has not been properly cleaned for " + failedShard);
}
if (shard == null && failedShard == null) {
// shard must either be there or there must be a failure
fail("Shard with id " + shardRouting + " expected but missing in indicesService and failedShardsCache");
}
if (enableRandomFailures == false) {
if (shard == null && shardRouting.initializing() && failedShard == shardRouting) {
// initializing a shard should succeed when enableRandomFailures is disabled
fail("Shard with id " + shardRouting + " expected but missing in indicesService " + failedShard);
}
}
if (shard != null) {
AllocatedIndex<? extends Shard> indexService = indicesService.indexService(index);
assertTrue("Index " + index + " expected but missing in indicesService", indexService != null);
// index metadata has been updated
assertThat(indexService.getIndexSettings().getIndexMetadata(), equalTo(indexMetadata));
// shard has been created
if (enableRandomFailures == false || failedShard == null) {
assertTrue("Shard with id " + shardRouting + " expected but missing in indexService", shard != null);
// shard has latest shard routing
assertThat(shard.routingEntry(), equalTo(shardRouting));
}
if (shard.routingEntry().primary() && shard.routingEntry().active()) {
IndexShardRoutingTable shardRoutingTable = state.routingTable().shardRoutingTable(shard.shardId());
Set<String> inSyncIds = state.metadata()
.getProject()
.index(shard.shardId().getIndex())
.inSyncAllocationIds(shard.shardId().id());
assertThat(
shard.routingEntry() + " isn't updated with in-sync aIDs",
shard.inSyncAllocationIds,
equalTo(inSyncIds)
);
assertThat(
shard.routingEntry() + " isn't updated with routing table",
shard.routingTable,
equalTo(shardRoutingTable)
);
}
}
}
}
}
// all other shards / indices have been cleaned up
for (AllocatedIndex<? extends Shard> indexService : indicesService) {
final Index index = indexService.getIndexSettings().getIndex();
if (state.blocks().disableStatePersistence()) {
fail("Index service " + index + " should be removed from indicesService due to disabled state persistence");
}
assertTrue(state.metadata().getProject().getIndexSafe(index) != null);
boolean shardsFound = false;
for (Shard shard : indexService) {
shardsFound = true;
ShardRouting persistedShardRouting = shard.routingEntry();
ShardRouting shardRouting = localRoutingNode.getByShardId(persistedShardRouting.shardId());
if (shardRouting == null) {
fail("Shard with id " + persistedShardRouting + " locally exists but missing in routing table");
}
if (shardRouting.equals(persistedShardRouting) == false) {
fail("Local shard " + persistedShardRouting + " has stale routing" + shardRouting);
}
}
if (shardsFound == false) {
// check if we have shards of that index in failedShardsCache
// if yes, we might not have cleaned the index as failedShardsCache can be populated by another thread
assertFalse(failedShardsCache.keySet().stream().noneMatch(shardId -> shardId.getIndex().equals(index)));
}
}
}
/**
* Mock for {@link IndicesService}
*/
protected
|
AbstractIndicesClusterStateServiceTestCase
|
java
|
elastic__elasticsearch
|
client/rest/src/main/java/org/elasticsearch/client/ResponseException.java
|
{
"start": 1192,
"end": 2458
}
|
class ____ extends IOException {
private final Response response;
public ResponseException(Response response) throws IOException {
super(buildMessage(response));
this.response = response;
}
static String buildMessage(Response response) throws IOException {
String message = String.format(
Locale.ROOT,
"method [%s], host [%s], URI [%s], status line [%s]",
response.getRequestLine().getMethod(),
response.getHost(),
response.getRequestLine().getUri(),
response.getStatusLine().toString()
);
if (response.hasWarnings()) {
message += "\nWarnings: " + response.getWarnings();
}
HttpEntity entity = response.getEntity();
if (entity != null) {
if (entity.isRepeatable() == false) {
entity = new BufferedHttpEntity(entity);
response.getHttpResponse().setEntity(entity);
}
message += "\n" + EntityUtils.toString(entity);
}
return message;
}
/**
* Returns the {@link Response} that caused this exception to be thrown.
*/
public Response getResponse() {
return response;
}
}
|
ResponseException
|
java
|
hibernate__hibernate-orm
|
hibernate-vector/src/main/java/org/hibernate/vector/AbstractSparseVector.java
|
{
"start": 310,
"end": 373
}
|
class ____ sparse vectors.
*
* @since 7.2
*/
public abstract
|
for
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/placement/csmappingrule/MappingRuleMatchers.java
|
{
"start": 3613,
"end": 4893
}
|
class ____ implements MappingRuleMatcher {
/**
* The group which should match the users's groups.
*/
private String group;
UserGroupMatcher(String value) {
this.group = value;
}
/**
* The method will match (return true) if the user is in the provided group.
* This matcher expect an extraVariableSet to be present in the variable
* context, if it's not present, we return false.
* If the expected group is null we always return false.
* @param variables The variable context, which contains all the variables
* @return true if user is member of the group
*/
@Override
public boolean match(VariableContext variables) {
Set<String> groups = variables.getExtraDataset("groups");
if (group == null || groups == null) {
return false;
}
String substituted = variables.replaceVariables(group);
return groups.contains(substituted);
}
@Override
public String toString() {
return "GroupMatcher{" +
"group='" + group + '\'' +
'}';
}
}
/**
* AndMatcher is a basic boolean matcher which takes multiple other
* matcher as it's arguments, and on match it checks if all of them are true.
*/
public static
|
UserGroupMatcher
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/sql/performance/TestSelectPerformance.java
|
{
"start": 934,
"end": 2233
}
|
class ____ extends TestCase {
private final int COUNT = 1000 * 1000;
private String sql = "SELECT distinct a.id \"id\", a.col \"col\", a.position \"position\", a.panel_id \"panelId\" FROM (select * from view_position_info) a LEFT JOIN db1.view_portal b ON a.panel_id = b.panel_id LEFT JOIN (select * from view_portal_panel) c ON a.panel_id = c.panel_id WHERE b.user_id = ? and ((b.is_grid='y' and c.param_name='is_hidden' and c.param_value='false') or b.is_grid != 'y') and b.user_id in (select user_id from table1 where id = 1) ORDER BY a.col ASC, a.position ASC";
public void test_simple() throws Exception {
for (int i = 0; i < 5; ++i) {
f();
}
}
private void f() throws Exception {
long start = System.currentTimeMillis();
for (int i = 0; i < COUNT; ++i) {
List<SQLStatement> statementList = new SQLStatementParser(sql).parseStatementList();
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statementList.get(0).accept(visitor);
// stmtList.toString();
}
long time = System.currentTimeMillis() - start;
System.out.println(NumberFormat.getInstance().format(time));
}
}
|
TestSelectPerformance
|
java
|
spring-projects__spring-framework
|
spring-core-test/src/test/java/org/springframework/core/test/tools/TestCompilerTests.java
|
{
"start": 4444,
"end": 4959
}
|
class ____ {
public static void main(String[] args) {
new Hello().get();
}
}
""");
assertThatExceptionOfType(CompilationException.class).isThrownBy(
() -> TestCompiler.forSystem().failOnWarning().withSources(
SourceFile.of(HELLO_DEPRECATED), main).compile(compiled -> {
}));
}
@Test
@SuppressWarnings("unchecked")
void compileWhenSourceUseDeprecateCodeAndFailOnWarningWithSuppressWarnings() {
SourceFile main = SourceFile.of("""
package com.example;
public
|
Main
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreams.java
|
{
"start": 5264,
"end": 13832
}
|
class ____ extends InputStream
implements Seekable, PositionedReadable, ByteBufferReadable,
HasFileDescriptor, CanSetDropBehind, CanSetReadahead,
HasEnhancedByteBufferAccess, CanUnbuffer,
StreamCapabilities, ByteBufferPositionedReadable {
private final byte[] oneByteBuf = new byte[1];
private int pos = 0;
private final byte[] data;
private final int length;
private boolean closed = false;
FakeInputStream(DataInputBuffer in) {
data = in.getData();
length = in.getLength();
}
@Override
public void seek(long pos) throws IOException {
if (pos > length) {
throw new IOException("Cannot seek after EOF.");
}
if (pos < 0) {
throw new IOException("Cannot seek to negative offset.");
}
checkStream();
this.pos = (int)pos;
}
@Override
public long getPos() throws IOException {
return pos;
}
@Override
public int available() throws IOException {
return length - pos;
}
@Override
public int read(byte b[], int off, int len) throws IOException {
if (b == null) {
throw new NullPointerException();
} else if (off < 0 || len < 0 || len > b.length - off) {
throw new IndexOutOfBoundsException();
} else if (len == 0) {
return 0;
}
checkStream();
if (pos < length) {
int n = (int) Math.min(len, length - pos);
System.arraycopy(data, pos, b, off, n);
pos += n;
return n;
}
return -1;
}
private void checkStream() throws IOException {
if (closed) {
throw new IOException("Stream is closed!");
}
}
@Override
public int read(ByteBuffer buf) throws IOException {
checkStream();
if (pos < length) {
int n = (int) Math.min(buf.remaining(), length - pos);
if (n > 0) {
buf.put(data, pos, n);
}
pos += n;
return n;
}
return -1;
}
@Override
public long skip(long n) throws IOException {
checkStream();
if ( n > 0 ) {
if( n + pos > length ) {
n = length - pos;
}
pos += n;
return n;
}
return n < 0 ? -1 : 0;
}
@Override
public void close() throws IOException {
closed = true;
}
@Override
public int read(long position, byte[] b, int off, int len)
throws IOException {
if (b == null) {
throw new NullPointerException();
} else if (off < 0 || len < 0 || len > b.length - off) {
throw new IndexOutOfBoundsException();
} else if (len == 0) {
return 0;
}
if (position > length) {
throw new IOException("Cannot read after EOF.");
}
if (position < 0) {
throw new IOException("Cannot read to negative offset.");
}
checkStream();
if (position < length) {
int n = (int) Math.min(len, length - position);
System.arraycopy(data, (int)position, b, off, n);
return n;
}
return -1;
}
@Override
public int read(long position, ByteBuffer buf) throws IOException {
if (buf == null) {
throw new NullPointerException();
} else if (!buf.hasRemaining()) {
return 0;
}
if (position > length) {
throw new IOException("Cannot read after EOF.");
}
if (position < 0) {
throw new IOException("Cannot read to negative offset.");
}
checkStream();
if (position < length) {
int n = (int) Math.min(buf.remaining(), length - position);
buf.put(data, (int) position, n);
return n;
}
return -1;
}
@Override
public void readFully(long position, ByteBuffer buf) throws IOException {
if (buf == null) {
throw new NullPointerException();
} else if (!buf.hasRemaining()) {
return;
}
if (position > length) {
throw new IOException("Cannot read after EOF.");
}
if (position < 0) {
throw new IOException("Cannot read to negative offset.");
}
checkStream();
if (position + buf.remaining() > length) {
throw new EOFException("Reach the end of stream.");
}
buf.put(data, (int) position, buf.remaining());
}
@Override
public void readFully(long position, byte[] b, int off, int len)
throws IOException {
if (b == null) {
throw new NullPointerException();
} else if (off < 0 || len < 0 || len > b.length - off) {
throw new IndexOutOfBoundsException();
} else if (len == 0) {
return;
}
if (position > length) {
throw new IOException("Cannot read after EOF.");
}
if (position < 0) {
throw new IOException("Cannot read to negative offset.");
}
checkStream();
if (position + len > length) {
throw new EOFException("Reach the end of stream.");
}
System.arraycopy(data, (int)position, b, off, len);
}
@Override
public void readFully(long position, byte[] buffer) throws IOException {
readFully(position, buffer, 0, buffer.length);
}
@Override
public ByteBuffer read(ByteBufferPool bufferPool, int maxLength,
EnumSet<ReadOption> opts) throws IOException,
UnsupportedOperationException {
if (bufferPool == null) {
throw new IOException("Please specify buffer pool.");
}
ByteBuffer buffer = bufferPool.getBuffer(true, maxLength);
int pos = buffer.position();
int n = read(buffer);
if (n >= 0) {
buffer.position(pos);
return buffer;
}
return null;
}
@Override
public void releaseBuffer(ByteBuffer buffer) {
}
@Override
public void setReadahead(Long readahead) throws IOException,
UnsupportedOperationException {
}
@Override
public void setDropBehind(Boolean dropCache) throws IOException,
UnsupportedOperationException {
}
@Override
public void unbuffer() {
}
@Override
public boolean hasCapability(String capability) {
switch (capability.toLowerCase()) {
case StreamCapabilities.READAHEAD:
case StreamCapabilities.DROPBEHIND:
case StreamCapabilities.UNBUFFER:
case StreamCapabilities.READBYTEBUFFER:
case StreamCapabilities.PREADBYTEBUFFER:
return true;
default:
return false;
}
}
@Override
public FileDescriptor getFileDescriptor() throws IOException {
return null;
}
@Override
public boolean seekToNewSource(long targetPos) throws IOException {
if (targetPos > length) {
throw new IOException("Attempted to read past end of file.");
}
if (targetPos < 0) {
throw new IOException("Cannot seek after EOF.");
}
checkStream();
this.pos = (int)targetPos;
return false;
}
@Override
public int read() throws IOException {
int ret = read( oneByteBuf, 0, 1 );
return ( ret <= 0 ) ? -1 : (oneByteBuf[0] & 0xff);
}
}
/**
* This tests {@link StreamCapabilities#hasCapability(String)} for the
* the underlying streams.
*/
@Test
@Timeout(value = 120)
public void testHasCapability() throws Exception {
// verify hasCapability returns what FakeOutputStream is set up for
CryptoOutputStream cos =
(CryptoOutputStream) getOutputStream(defaultBufferSize, key, iv);
assertCapabilities(cos,
new String[] {
StreamCapabilities.HFLUSH,
StreamCapabilities.HSYNC,
StreamCapabilities.DROPBEHIND
},
new String[] {
StreamCapabilities.READAHEAD,
StreamCapabilities.UNBUFFER
}
);
// verify hasCapability for input stream
CryptoInputStream cis =
(CryptoInputStream) getInputStream(defaultBufferSize, key, iv);
assertCapabilities(cis,
new String[] {
StreamCapabilities.DROPBEHIND,
StreamCapabilities.READAHEAD,
StreamCapabilities.UNBUFFER,
StreamCapabilities.READBYTEBUFFER,
StreamCapabilities.PREADBYTEBUFFER
},
new String[] {
StreamCapabilities.HFLUSH,
StreamCapabilities.HSYNC
}
);
}
}
|
FakeInputStream
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/StandbyTaskAssignorFactoryTest.java
|
{
"start": 1996,
"end": 4641
}
|
enum ____ {
DISABLED,
ENABLED,
NULL
}
private RackAwareTaskAssignor rackAwareTaskAssignor;
public void setUp(final State state, final boolean needValidRack) {
if (state == State.ENABLED || state == State.DISABLED) {
rackAwareTaskAssignor = mock(RackAwareTaskAssignor.class);
if (needValidRack) {
when(rackAwareTaskAssignor.validClientRack()).thenReturn(state.equals(State.ENABLED));
}
} else {
rackAwareTaskAssignor = null;
}
}
@ParameterizedTest
@EnumSource(State.class)
public void shouldReturnClientTagAwareStandbyTaskAssignorWhenRackAwareAssignmentTagsIsSet(final State state) {
setUp(state, false);
final StandbyTaskAssignor standbyTaskAssignor = StandbyTaskAssignorFactory.create(newAssignmentConfigs(singletonList("az")), rackAwareTaskAssignor);
assertInstanceOf(ClientTagAwareStandbyTaskAssignor.class, standbyTaskAssignor);
if (state != State.NULL) {
verify(rackAwareTaskAssignor, never()).racksForProcess();
verify(rackAwareTaskAssignor, never()).validClientRack();
}
}
@ParameterizedTest
@EnumSource(State.class)
public void shouldReturnDefaultOrRackAwareStandbyTaskAssignorWhenRackAwareAssignmentTagsIsEmpty(final State state) {
setUp(state, true);
final StandbyTaskAssignor standbyTaskAssignor = StandbyTaskAssignorFactory.create(newAssignmentConfigs(Collections.emptyList()), rackAwareTaskAssignor);
if (state == State.ENABLED) {
assertInstanceOf(ClientTagAwareStandbyTaskAssignor.class, standbyTaskAssignor);
verify(rackAwareTaskAssignor, times(1)).racksForProcess();
verify(rackAwareTaskAssignor, times(1)).validClientRack();
} else if (state == State.DISABLED) {
assertInstanceOf(DefaultStandbyTaskAssignor.class, standbyTaskAssignor);
verify(rackAwareTaskAssignor, never()).racksForProcess();
verify(rackAwareTaskAssignor, times(1)).validClientRack();
} else {
assertInstanceOf(DefaultStandbyTaskAssignor.class, standbyTaskAssignor);
}
}
private static AssignmentConfigs newAssignmentConfigs(final List<String> rackAwareAssignmentTags) {
return new AssignmentConfigs(ACCEPTABLE_RECOVERY_LAG,
MAX_WARMUP_REPLICAS,
NUMBER_OF_STANDBY_REPLICAS,
PROBING_REBALANCE_INTERVAL_MS,
rackAwareAssignmentTags);
}
}
|
State
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/pool/bonecp/TestPSCache.java
|
{
"start": 1264,
"end": 4368
}
|
class ____ extends TestCase {
public void test_boneCP() throws Exception {
BoneCPDataSource ds = new BoneCPDataSource();
ds.setJdbcUrl("jdbc:mock:test");
ds.setPartitionCount(1);
ds.setMaxConnectionsPerPartition(10);
ds.setMinConnectionsPerPartition(0);
ds.setPreparedStatementsCacheSize(10);
for (int i = 0; i < 10; ++i) {
f(ds, 5);
System.out.println("--------------------------------------------");
}
}
public void f_test_druid() throws Exception {
DruidDataSource ds = new DruidDataSource();
ds.setUrl("jdbc:mock:test");
ds.setMaxIdle(10);
for (int i = 0; i < 10; ++i) {
f(ds, 5);
System.out.println("--------------------------------------------");
}
}
public void f_test_dbcp() throws Exception {
BasicDataSource ds = new BasicDataSource();
ds.setUrl("jdbc:mock:test");
ds.setMaxIdle(10);
ds.setPoolPreparedStatements(true);
ds.setMaxOpenPreparedStatements(10);
for (int i = 0; i < 10; ++i) {
f(ds, 5);
System.out.println("--------------------------------------------");
}
}
public void f_test_c3p0() throws Exception {
ComboPooledDataSource ds = new ComboPooledDataSource();
ds.setJdbcUrl("jdbc:mock:test");
ds.setMaxPoolSize(10);
ds.setMinPoolSize(0);
ds.setMaxStatements(10);
for (int i = 0; i < 10; ++i) {
f(ds, 5);
System.out.println("--------------------------------------------");
}
}
public static void f(DataSource ds, int count) throws Exception {
Connection conn = ds.getConnection();
for (int i = 0; i < count; ++i) {
PreparedStatement stmt = conn.prepareStatement("SELECT 1");
System.out.println(System.identityHashCode(unwrap(stmt)));
stmt.close();
}
conn.close();
}
public static MockPreparedStatement unwrap(PreparedStatement stmt) throws Exception {
if (stmt instanceof NewProxyPreparedStatement) {
Field field = NewProxyPreparedStatement.class.getDeclaredField("inner");
field.setAccessible(true);
return (MockPreparedStatement) field.get(stmt);
}
MockPreparedStatement mockStmt = stmt.unwrap(MockPreparedStatement.class);
return mockStmt;
}
public static MockConnection unwrap(Connection conn) throws Exception {
if (conn instanceof ConnectionHandle) {
ConnectionHandle handle = (ConnectionHandle) conn;
return (MockConnection) handle.getInternalConnection();
}
if (conn instanceof NewProxyConnection) {
NewProxyConnection handle = (NewProxyConnection) conn;
Field field = NewProxyConnection.class.getDeclaredField("inner");
field.setAccessible(true);
return (MockConnection) field.get(handle);
}
return conn.unwrap(MockConnection.class);
}
}
|
TestPSCache
|
java
|
elastic__elasticsearch
|
x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java
|
{
"start": 435,
"end": 1132
}
|
class ____ {
public static ScoreNormalizer valueOf(String normalizer) {
if (MinMaxScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) {
return MinMaxScoreNormalizer.INSTANCE;
} else if (L2ScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) {
return L2ScoreNormalizer.INSTANCE;
} else if (IdentityScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) {
return IdentityScoreNormalizer.INSTANCE;
} else {
throw new IllegalArgumentException("Unknown normalizer [" + normalizer + "]");
}
}
public abstract String getName();
public abstract ScoreDoc[] normalizeScores(ScoreDoc[] docs);
}
|
ScoreNormalizer
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/PrivateSecurityManagerStackTraceUtil.java
|
{
"start": 3083,
"end": 3706
}
|
class ____ was called next, and so on, until the last element: the
* method that called {@link SecurityManager#getClassContext()} to capture the stack.
* </p>
*
* @return the execution stack.
*/
// benchmarks show that using the SecurityManager is much faster than looping through getCallerClass(int)
static Deque<Class<?>> getCurrentStackTrace() {
final Class<?>[] array = SECURITY_MANAGER.getClassContext();
final Deque<Class<?>> classes = new ArrayDeque<>(array.length);
Collections.addAll(classes, array);
return classes;
}
private static final
|
that
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLDumpStatement.java
|
{
"start": 199,
"end": 1436
}
|
class ____ extends SQLStatementImpl {
private boolean overwrite;
private SQLExprTableSource into;
private SQLSelect select;
public SQLDumpStatement() {
}
public SQLSelect getSelect() {
return select;
}
public void setSelect(SQLSelect x) {
if (x != null) {
x.setParent(this);
}
this.select = x;
}
public SQLExprTableSource getInto() {
return into;
}
public void setInto(SQLExpr x) {
if (x == null) {
return;
}
setInto(new SQLExprTableSource(x));
}
public void setInto(SQLExprTableSource x) {
if (x != null) {
x.setParent(this);
}
this.into = x;
}
public boolean isOverwrite() {
return overwrite;
}
public void setOverwrite(boolean overwrite) {
this.overwrite = overwrite;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
if (into != null) {
into.accept(visitor);
}
if (select != null) {
select.accept(visitor);
}
}
visitor.endVisit(this);
}
}
|
SQLDumpStatement
|
java
|
quarkusio__quarkus
|
extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/ConfigUrlMissingDefaultDatasourceStaticInjectionTest.java
|
{
"start": 1756,
"end": 1919
}
|
class ____ {
@Inject
DataSource ds;
public void useDatasource() throws SQLException {
ds.getConnection();
}
}
}
|
MyBean
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFeaturesTests.java
|
{
"start": 1175,
"end": 5686
}
|
class ____ extends ESTestCase {

    private NamedWriteableRegistry namedWriteableRegistry;

    // Registers the search-module writeables so ConnectorFeatures instances can be
    // round-tripped through the transport layer in copyInstance().
    @Before
    public void registerNamedObjects() {
        SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList());
        List<NamedWriteableRegistry.Entry> namedWriteables = searchModule.getNamedWriteables();
        namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables);
    }

    // Transport (wire) serialization round-trip over randomly generated instances.
    public final void testRandomSerialization() throws IOException {
        for (int runs = 0; runs < 10; runs++) {
            ConnectorFeatures testInstance = ConnectorTestUtils.getRandomConnectorFeatures();
            assertTransportSerialization(testInstance);
        }
    }

    // XContent round-trip with all feature sections present.
    public void testToXContent() throws IOException {
        String content = XContentHelper.stripWhitespace("""
            {
               "document_level_security": {
                   "enabled": true
               },
               "sync_rules": {
                   "advanced": {
                       "enabled": false
                   },
                   "basic": {
                       "enabled": true
                   }
               }
            }
            """);
        testToXContentChecker(content);
    }

    // Round-trip when the optional document_level_security section is absent.
    public void testToXContentMissingDocumentLevelSecurity() throws IOException {
        String content = XContentHelper.stripWhitespace("""
            {
               "sync_rules": {
                   "advanced": {
                       "enabled": false
                   },
                   "basic": {
                       "enabled": true
                   }
               }
            }
            """);
        testToXContentChecker(content);
    }

    // Round-trip when the optional sync_rules section is absent.
    public void testToXContentMissingSyncRules() throws IOException {
        String content = XContentHelper.stripWhitespace("""
            {
               "document_level_security": {
                   "enabled": true
               }
            }
            """);
        testToXContentChecker(content);
    }

    // Round-trip when only sync_rules.basic is present (no advanced rules).
    public void testToXContentMissingSyncRulesAdvanced() throws IOException {
        String content = XContentHelper.stripWhitespace("""
            {
               "sync_rules": {
                   "basic": {
                       "enabled": true
                   }
               }
            }
            """);
        testToXContentChecker(content);
    }

    // Round-trip with the native_connector_api_keys section enabled.
    public void testToXContent_NativeConnectorAPIKeysEnabled() throws IOException {
        String content = XContentHelper.stripWhitespace("""
            {
               "document_level_security": {
                   "enabled": true
               },
               "sync_rules": {
                   "advanced": {
                       "enabled": false
                   },
                   "basic": {
                       "enabled": true
                   }
               },
               "native_connector_api_keys": {
                   "enabled": true
               }
            }
            """);
        testToXContentChecker(content);
    }

    // Parses the given JSON, re-serializes it (shuffled field order), re-parses, and
    // asserts both XContent renderings are equivalent.
    private void testToXContentChecker(String content) throws IOException {
        ConnectorFeatures features = ConnectorFeatures.fromXContentBytes(new BytesArray(content), XContentType.JSON);
        boolean humanReadable = true;
        BytesReference originalBytes = toShuffledXContent(features, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
        ConnectorFeatures parsed;
        try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) {
            parsed = ConnectorFeatures.fromXContent(parser);
        }
        assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON);
    }

    // Wire round-trip: the deserialized copy must be a distinct but equal instance.
    private void assertTransportSerialization(ConnectorFeatures testInstance) throws IOException {
        ConnectorFeatures deserializedInstance = copyInstance(testInstance);
        assertNotSame(testInstance, deserializedInstance);
        assertThat(testInstance, equalTo(deserializedInstance));
    }

    private ConnectorFeatures copyInstance(ConnectorFeatures instance) throws IOException {
        return copyWriteable(instance, namedWriteableRegistry, ConnectorFeatures::new);
    }
}
|
ConnectorFeaturesTests
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
|
{
"start": 1649,
"end": 29305
}
|
class ____ extends LazyBinaryFormat<String> implements StringData {
// Shared instance representing the empty string.
public static final BinaryStringData EMPTY_UTF8 =
        BinaryStringData.fromBytes(StringUtf8Utils.encodeUTF8(""));

// Uninitialized (fully lazy) instance.
public BinaryStringData() {}

// Backed only by a Java string; the UTF-8 bytes are materialized on demand.
public BinaryStringData(String javaObject) {
    super(javaObject);
}

// Backed only by binary data (UTF-8 bytes held in memory segments).
public BinaryStringData(MemorySegment[] segments, int offset, int sizeInBytes) {
    super(segments, offset, sizeInBytes);
}

// Both representations already available.
public BinaryStringData(
        MemorySegment[] segments, int offset, int sizeInBytes, String javaObject) {
    super(segments, offset, sizeInBytes, javaObject);
}
// ------------------------------------------------------------------------------------------
// Construction Utilities
// ------------------------------------------------------------------------------------------

/**
 * Creates a {@link BinaryStringData} instance from the given address (base and offset) and
 * length. The segments are referenced, not copied.
 */
public static BinaryStringData fromAddress(MemorySegment[] segments, int offset, int numBytes) {
    return new BinaryStringData(segments, offset, numBytes);
}
/** Creates a {@link BinaryStringData} instance from the given Java string; {@code null} maps to {@code null}. */
public static BinaryStringData fromString(String str) {
    return str == null ? null : new BinaryStringData(str);
}
/** Creates a {@link BinaryStringData} instance from the given UTF-8 bytes. */
public static BinaryStringData fromBytes(byte[] bytes) {
    return fromBytes(bytes, 0, bytes.length);
}

/**
 * Creates a {@link BinaryStringData} instance from the given UTF-8 bytes with offset and number
 * of bytes.
 *
 * <p>NOTE(review): the array is wrapped, not copied — callers should not mutate it afterwards.
 */
public static BinaryStringData fromBytes(byte[] bytes, int offset, int numBytes) {
    return new BinaryStringData(
            new MemorySegment[] {MemorySegmentFactory.wrap(bytes)}, offset, numBytes);
}
/** Creates a {@link BinaryStringData} instance consisting of {@code length} ASCII spaces. */
public static BinaryStringData blankString(int length) {
    byte[] blanks = new byte[length];
    Arrays.fill(blanks, (byte) ' ');
    return fromBytes(blanks);
}
// ------------------------------------------------------------------------------------------
// Public Interfaces
// ------------------------------------------------------------------------------------------

/** Returns a byte array holding the UTF-8 encoding of this string. */
@Override
public byte[] toBytes() {
    ensureMaterialized();
    return BinarySegmentUtils.getBytes(
            binarySection.segments, binarySection.offset, binarySection.sizeInBytes);
}

@Override
public boolean equals(Object o) {
    if (o instanceof BinaryStringData) {
        BinaryStringData other = (BinaryStringData) o;
        // Fast path: both sides already cache their Java string.
        if (javaObject != null && other.javaObject != null) {
            return javaObject.equals(other.javaObject);
        }
        // Otherwise compare the binary (UTF-8) representations.
        ensureMaterialized();
        other.ensureMaterialized();
        return binarySection.equals(other.binarySection);
    } else {
        return false;
    }
}

@Override
public int hashCode() {
    // Always hashes the binary representation, keeping the contract with equals().
    ensureMaterialized();
    return binarySection.hashCode();
}

@Override
public String toString() {
    if (javaObject == null) {
        // Decode lazily and cache the result for subsequent calls.
        byte[] bytes = BinarySegmentUtils.allocateReuseBytes(binarySection.sizeInBytes);
        BinarySegmentUtils.copyToBytes(
                binarySection.segments,
                binarySection.offset,
                bytes,
                0,
                binarySection.sizeInBytes);
        javaObject = StringUtf8Utils.decodeUTF8(bytes, 0, binarySection.sizeInBytes);
    }
    return javaObject;
}
/**
 * Compares two strings lexicographically. Since UTF-8 uses groups of six bits, it is sometimes
 * useful to use octal notation which uses 3-bit groups. With a calculator which can convert
 * between hexadecimal and octal it can be easier to manually create or interpret UTF-8 compared
 * with using binary. So we just compare the binary.
 *
 * <p>NOTE(review): the string fast path orders by UTF-16 code unit while the binary path orders
 * by UTF-8 byte; these can differ for supplementary characters — confirm callers accept this.
 */
@Override
public int compareTo(@Nonnull StringData o) {
    // BinaryStringData is the only implementation of StringData
    BinaryStringData other = (BinaryStringData) o;
    if (javaObject != null && other.javaObject != null) {
        return javaObject.compareTo(other.javaObject);
    }
    ensureMaterialized();
    other.ensureMaterialized();
    if (binarySection.segments.length == 1 && other.binarySection.segments.length == 1) {
        // Single-segment fast path: unsigned byte-wise comparison.
        int len = Math.min(binarySection.sizeInBytes, other.binarySection.sizeInBytes);
        MemorySegment seg1 = binarySection.segments[0];
        MemorySegment seg2 = other.binarySection.segments[0];
        for (int i = 0; i < len; i++) {
            int res =
                    (seg1.get(binarySection.offset + i) & 0xFF)
                            - (seg2.get(other.binarySection.offset + i) & 0xFF);
            if (res != 0) {
                return res;
            }
        }
        // Common prefix equal: the shorter string sorts first.
        return binarySection.sizeInBytes - other.binarySection.sizeInBytes;
    }
    // if there are multi segments.
    return compareMultiSegments(other);
}
/**
 * Find the boundaries of segments, and then compare MemorySegment.
 *
 * <p>Walks both strings segment by segment, comparing unsigned bytes over the overlap of the
 * current segments, then advances whichever side exhausted its segment first.
 */
private int compareMultiSegments(BinaryStringData other) {
    if (binarySection.sizeInBytes == 0 || other.binarySection.sizeInBytes == 0) {
        return binarySection.sizeInBytes - other.binarySection.sizeInBytes;
    }
    // len tracks how many bytes of the common prefix remain to be compared.
    int len = Math.min(binarySection.sizeInBytes, other.binarySection.sizeInBytes);
    MemorySegment seg1 = binarySection.segments[0];
    MemorySegment seg2 = other.binarySection.segments[0];
    int segmentSize = binarySection.segments[0].size();
    int otherSegmentSize = other.binarySection.segments[0].size();
    // Bytes of each string that live in its first (possibly partially used) segment.
    int sizeOfFirst1 = segmentSize - binarySection.offset;
    int sizeOfFirst2 = otherSegmentSize - other.binarySection.offset;
    int varSegIndex1 = 1;
    int varSegIndex2 = 1;
    // find the first segment of this string.
    while (sizeOfFirst1 <= 0) {
        sizeOfFirst1 += segmentSize;
        seg1 = binarySection.segments[varSegIndex1++];
    }
    while (sizeOfFirst2 <= 0) {
        sizeOfFirst2 += otherSegmentSize;
        seg2 = other.binarySection.segments[varSegIndex2++];
    }
    int offset1 = segmentSize - sizeOfFirst1;
    int offset2 = otherSegmentSize - sizeOfFirst2;
    // Compare only as many bytes as both current segments (and the prefix) allow.
    int needCompare = Math.min(Math.min(sizeOfFirst1, sizeOfFirst2), len);
    while (needCompare > 0) {
        // compare in one segment.
        for (int i = 0; i < needCompare; i++) {
            int res = (seg1.get(offset1 + i) & 0xFF) - (seg2.get(offset2 + i) & 0xFF);
            if (res != 0) {
                return res;
            }
        }
        if (needCompare == len) {
            break;
        }
        len -= needCompare;
        // next segment
        if (sizeOfFirst1 < sizeOfFirst2) { // I am smaller
            seg1 = binarySection.segments[varSegIndex1++];
            offset1 = 0;
            offset2 += needCompare;
            sizeOfFirst1 = segmentSize;
            sizeOfFirst2 -= needCompare;
        } else if (sizeOfFirst1 > sizeOfFirst2) { // other is smaller
            seg2 = other.binarySection.segments[varSegIndex2++];
            offset2 = 0;
            offset1 += needCompare;
            sizeOfFirst2 = otherSegmentSize;
            sizeOfFirst1 -= needCompare;
        } else { // same, should go ahead both.
            seg1 = binarySection.segments[varSegIndex1++];
            seg2 = other.binarySection.segments[varSegIndex2++];
            offset1 = 0;
            offset2 = 0;
            sizeOfFirst1 = segmentSize;
            sizeOfFirst2 = otherSegmentSize;
        }
        needCompare = Math.min(Math.min(sizeOfFirst1, sizeOfFirst2), len);
    }
    checkArgument(needCompare == len);
    // Entire common prefix equal: the shorter string sorts first.
    return binarySection.sizeInBytes - other.binarySection.sizeInBytes;
}
// ------------------------------------------------------------------------------------------
// Public methods on BinaryStringData
// ------------------------------------------------------------------------------------------

/** Returns the number of UTF-8 code points in the string. */
public int numChars() {
    ensureMaterialized();
    if (inFirstSegment()) {
        // Single-segment fast path: stride through bytes one code point at a time.
        int len = 0;
        for (int i = 0;
                i < binarySection.sizeInBytes;
                i += numBytesForFirstByte(getByteOneSegment(i))) {
            len++;
        }
        return len;
    } else {
        return numCharsMultiSegs();
    }
}

/** Slow path of {@link #numChars()} for data spanning multiple memory segments. */
private int numCharsMultiSegs() {
    int len = 0;
    int segSize = binarySection.segments[0].size();
    BinaryStringData.SegmentAndOffset index = firstSegmentAndOffset(segSize);
    int i = 0;
    while (i < binarySection.sizeInBytes) {
        // The width of the current code point is derived from its first byte.
        int charBytes = numBytesForFirstByte(index.value());
        i += charBytes;
        len++;
        index.skipBytes(charBytes, segSize);
    }
    return len;
}
/**
 * Returns the {@code byte} value at the specified index. An index ranges from {@code 0} to
 * {@code binarySection.sizeInBytes - 1}.
 *
 * @param index the index of the {@code byte} value.
 * @return the {@code byte} value at the specified index of this UTF-8 bytes.
 * @exception IndexOutOfBoundsException if the {@code index} argument is negative or not less
 *     than the length of this UTF-8 bytes.
 */
public byte byteAt(int index) {
    ensureMaterialized();
    int globalOffset = binarySection.offset + index;
    int size = binarySection.segments[0].size();
    if (globalOffset < size) {
        // Fast path: the byte lives in the first segment.
        return binarySection.segments[0].get(globalOffset);
    } else {
        // All segments share the same size, so plain div/mod locates the byte.
        return binarySection.segments[globalOffset / size].get(globalOffset % size);
    }
}
// The binary accessors below materialize first so the binary view is guaranteed to exist.

@Override
public MemorySegment[] getSegments() {
    ensureMaterialized();
    return super.getSegments();
}

@Override
public int getOffset() {
    ensureMaterialized();
    return super.getOffset();
}

@Override
public int getSizeInBytes() {
    ensureMaterialized();
    return super.getSizeInBytes();
}

/** Ensures the binary (UTF-8) representation exists, encoding the Java string if necessary. */
public void ensureMaterialized() {
    ensureMaterialized(null);
}
// Encodes the cached Java string to UTF-8; custom serializers are rejected because
// the binary layout of a string is fixed.
@Override
protected BinarySection materialize(TypeSerializer<String> serializer) {
    if (serializer != null) {
        throw new IllegalArgumentException(
                "BinaryStringData does not support custom serializers");
    }
    byte[] bytes = StringUtf8Utils.encodeUTF8(javaObject);
    return new BinarySection(
            new MemorySegment[] {MemorySegmentFactory.wrap(bytes)}, 0, bytes.length);
}

/** Copy a new {@code BinaryStringData} whose bytes are compacted into a single fresh segment. */
public BinaryStringData copy() {
    ensureMaterialized();
    byte[] copy =
            BinarySegmentUtils.copyToBytes(
                    binarySection.segments, binarySection.offset, binarySection.sizeInBytes);
    return new BinaryStringData(
            new MemorySegment[] {MemorySegmentFactory.wrap(copy)},
            0,
            binarySection.sizeInBytes,
            javaObject);
}
/**
 * Returns a binary string that is a substring of this binary string. The substring begins at
 * the specified {@code beginIndex} and extends to the character at index {@code endIndex - 1}.
 *
 * <p>Examples:
 *
 * <blockquote>
 *
 * <pre>
 * fromString("hamburger").substring(4, 8) returns binary string "urge"
 * fromString("smiles").substring(1, 5) returns binary string "mile"
 * </pre>
 *
 * </blockquote>
 *
 * @param beginIndex the beginning index, inclusive.
 * @param endIndex the ending index, exclusive.
 * @return the specified substring, return EMPTY_UTF8 when index out of bounds instead of
 *     StringIndexOutOfBoundsException.
 */
public BinaryStringData substring(int beginIndex, int endIndex) {
    ensureMaterialized();
    if (endIndex <= beginIndex || beginIndex >= binarySection.sizeInBytes) {
        return EMPTY_UTF8;
    }
    if (inFirstSegment()) {
        MemorySegment segment = binarySection.segments[0];
        // Skip code points up to beginIndex (i = byte position, c = char position).
        int i = 0;
        int c = 0;
        while (i < binarySection.sizeInBytes && c < beginIndex) {
            i += numBytesForFirstByte(segment.get(i + binarySection.offset));
            c += 1;
        }
        // Advance through code points up to endIndex; [j, i) is the byte range wanted.
        int j = i;
        while (i < binarySection.sizeInBytes && c < endIndex) {
            i += numBytesForFirstByte(segment.get(i + binarySection.offset));
            c += 1;
        }
        if (i > j) {
            byte[] bytes = new byte[i - j];
            segment.get(binarySection.offset + j, bytes, 0, i - j);
            return fromBytes(bytes);
        } else {
            return EMPTY_UTF8;
        }
    } else {
        return substringMultiSegs(beginIndex, endIndex);
    }
}
/** Slow path of {@link #substring(int, int)} for data spanning multiple memory segments. */
private BinaryStringData substringMultiSegs(final int start, final int until) {
    int segSize = binarySection.segments[0].size();
    BinaryStringData.SegmentAndOffset index = firstSegmentAndOffset(segSize);
    // Skip code points up to 'start' (i = byte position, c = char position).
    int i = 0;
    int c = 0;
    while (i < binarySection.sizeInBytes && c < start) {
        int charSize = numBytesForFirstByte(index.value());
        i += charSize;
        index.skipBytes(charSize, segSize);
        c += 1;
    }
    // Advance to 'until'; [j, i) is the byte range to copy out.
    int j = i;
    while (i < binarySection.sizeInBytes && c < until) {
        int charSize = numBytesForFirstByte(index.value());
        i += charSize;
        index.skipBytes(charSize, segSize);
        c += 1;
    }
    if (i > j) {
        return fromBytes(
                BinarySegmentUtils.copyToBytes(
                        binarySection.segments, binarySection.offset + j, i - j));
    } else {
        return EMPTY_UTF8;
    }
}
/**
 * Returns true if and only if this BinaryStringData contains the specified sequence of bytes
 * values.
 *
 * @param s the sequence to search for
 * @return true if this BinaryStringData contains {@code s}, false otherwise
 */
public boolean contains(final BinaryStringData s) {
    ensureMaterialized();
    s.ensureMaterialized();
    // The empty string is contained in every string.
    if (s.binarySection.sizeInBytes == 0) {
        return true;
    }
    int find =
            BinarySegmentUtils.find(
                    binarySection.segments,
                    binarySection.offset,
                    binarySection.sizeInBytes,
                    s.binarySection.segments,
                    s.binarySection.offset,
                    s.binarySection.sizeInBytes);
    return find != -1;
}
/**
 * Tests if this BinaryStringData starts with the specified prefix.
 *
 * @param prefix the prefix.
 * @return {@code true} if the bytes represented by the argument is a prefix of the bytes
 *     represented by this string; {@code false} otherwise. Note also that {@code true} will be
 *     returned if the argument is an empty BinaryStringData or is equal to this {@code
 *     BinaryStringData} object as determined by the {@link #equals(Object)} method.
 */
public boolean startsWith(final BinaryStringData prefix) {
    ensureMaterialized();
    prefix.ensureMaterialized();
    return matchAt(prefix, 0);
}

/**
 * Tests if this BinaryStringData ends with the specified suffix.
 *
 * @param suffix the suffix.
 * @return {@code true} if the bytes represented by the argument is a suffix of the bytes
 *     represented by this object; {@code false} otherwise. Note that the result will be {@code
 *     true} if the argument is the empty string or is equal to this {@code BinaryStringData}
 *     object as determined by the {@link #equals(Object)} method.
 */
public boolean endsWith(final BinaryStringData suffix) {
    ensureMaterialized();
    suffix.ensureMaterialized();
    return matchAt(suffix, binarySection.sizeInBytes - suffix.binarySection.sizeInBytes);
}
/**
 * Returns a string whose value is this string, with any leading and trailing whitespace
 * removed. Only the ASCII space character (0x20) is treated as whitespace.
 *
 * @return A string whose value is this string, with any leading and trailing white space
 *     removed, or this string if it has no leading or trailing white space.
 */
public BinaryStringData trim() {
    ensureMaterialized();
    if (inFirstSegment()) {
        int s = 0;
        int e = this.binarySection.sizeInBytes - 1;
        // skip all of the space (0x20) in the left side
        while (s < this.binarySection.sizeInBytes && getByteOneSegment(s) == 0x20) {
            s++;
        }
        // skip all of the space (0x20) in the right side
        while (e >= s && getByteOneSegment(e) == 0x20) {
            e--;
        }
        if (s > e) {
            // empty string
            return EMPTY_UTF8;
        } else {
            return copyBinaryStringInOneSeg(s, e - s + 1);
        }
    } else {
        return trimMultiSegs();
    }
}
/** Slow path of {@link #trim()} for data spanning multiple memory segments. */
private BinaryStringData trimMultiSegs() {
    int s = 0;
    int e = this.binarySection.sizeInBytes - 1;
    int segSize = binarySection.segments[0].size();
    // Cursor walking forward from the first byte.
    BinaryStringData.SegmentAndOffset front = firstSegmentAndOffset(segSize);
    // skip all of the space (0x20) in the left side
    while (s < this.binarySection.sizeInBytes && front.value() == 0x20) {
        s++;
        front.nextByte(segSize);
    }
    // Cursor walking backward from the last byte.
    BinaryStringData.SegmentAndOffset behind = lastSegmentAndOffset(segSize);
    // skip all of the space (0x20) in the right side
    while (e >= s && behind.value() == 0x20) {
        e--;
        behind.previousByte(segSize);
    }
    if (s > e) {
        // empty string
        return EMPTY_UTF8;
    } else {
        return copyBinaryString(s, e);
    }
}
/**
 * Returns the index within this string of the first occurrence of the specified substring,
 * starting at the specified index. Indices count UTF-8 code points, not bytes.
 *
 * @param str the substring to search for.
 * @param fromIndex the index from which to start the search.
 * @return the index of the first occurrence of the specified substring, starting at the
 *     specified index, or {@code -1} if there is no such occurrence.
 */
public int indexOf(BinaryStringData str, int fromIndex) {
    ensureMaterialized();
    str.ensureMaterialized();
    // The empty string matches at position 0, regardless of fromIndex.
    if (str.binarySection.sizeInBytes == 0) {
        return 0;
    }
    if (inFirstSegment()) {
        // position in byte
        int byteIdx = 0;
        // position is char
        int charIdx = 0;
        // Skip ahead to fromIndex code points.
        while (byteIdx < binarySection.sizeInBytes && charIdx < fromIndex) {
            byteIdx += numBytesForFirstByte(getByteOneSegment(byteIdx));
            charIdx++;
        }
        // Try to match the pattern at each subsequent code-point boundary.
        do {
            if (byteIdx + str.binarySection.sizeInBytes > binarySection.sizeInBytes) {
                return -1;
            }
            if (BinarySegmentUtils.equals(
                    binarySection.segments,
                    binarySection.offset + byteIdx,
                    str.binarySection.segments,
                    str.binarySection.offset,
                    str.binarySection.sizeInBytes)) {
                return charIdx;
            }
            byteIdx += numBytesForFirstByte(getByteOneSegment(byteIdx));
            charIdx++;
        } while (byteIdx < binarySection.sizeInBytes);
        return -1;
    } else {
        return indexOfMultiSegs(str, fromIndex);
    }
}
/**
 * Slow path of {@link #indexOf(BinaryStringData, int)} for data spanning multiple memory
 * segments. Skips {@code fromIndex} code points, then tries to match {@code str} at each
 * subsequent code-point boundary.
 *
 * <p>Fix: the matching loop previously read the current byte via
 * {@code index.segment.get(index.offset)} while the skip loop above used
 * {@code index.value()} for the identical purpose; both loops now use {@code index.value()}
 * for consistency.
 */
private int indexOfMultiSegs(BinaryStringData str, int fromIndex) {
    // position in byte
    int byteIdx = 0;
    // position is char
    int charIdx = 0;
    int segSize = binarySection.segments[0].size();
    BinaryStringData.SegmentAndOffset index = firstSegmentAndOffset(segSize);
    // Skip ahead to fromIndex code points.
    while (byteIdx < binarySection.sizeInBytes && charIdx < fromIndex) {
        int charBytes = numBytesForFirstByte(index.value());
        byteIdx += charBytes;
        charIdx++;
        index.skipBytes(charBytes, segSize);
    }
    do {
        if (byteIdx + str.binarySection.sizeInBytes > binarySection.sizeInBytes) {
            return -1;
        }
        if (BinarySegmentUtils.equals(
                binarySection.segments,
                binarySection.offset + byteIdx,
                str.binarySection.segments,
                str.binarySection.offset,
                str.binarySection.sizeInBytes)) {
            return charIdx;
        }
        int charBytes = numBytesForFirstByte(index.value());
        byteIdx += charBytes;
        charIdx++;
        index.skipBytes(charBytes, segSize);
    } while (byteIdx < binarySection.sizeInBytes);
    return -1;
}
/**
 * Converts all of the characters in this {@code BinaryStringData} to upper case.
 *
 * <p>Fast path converts pure-ASCII data byte by byte; any multi-byte code point, or any byte
 * whose uppercase form leaves the ASCII range, falls back to the JDK implementation.
 *
 * <p>Fix: removed the dead store {@code bytes[0] = (byte) Character.toTitleCase(...)} — the
 * loop below starts at {@code i == 0} and always overwrites {@code bytes[0]} before the array
 * is used.
 *
 * @return the {@code BinaryStringData}, converted to uppercase.
 */
public BinaryStringData toUpperCase() {
    // If the Java string is already cached, the JDK path is cheapest.
    if (javaObject != null) {
        return javaToUpperCase();
    }
    if (binarySection.sizeInBytes == 0) {
        return EMPTY_UTF8;
    }
    int size = binarySection.segments[0].size();
    BinaryStringData.SegmentAndOffset segmentAndOffset = startSegmentAndOffset(size);
    byte[] bytes = new byte[binarySection.sizeInBytes];
    for (int i = 0; i < binarySection.sizeInBytes; i++) {
        byte b = segmentAndOffset.value();
        if (numBytesForFirstByte(b) != 1) {
            // Multi-byte (non-ASCII) code point: fall back to the JDK implementation.
            return javaToUpperCase();
        }
        int upper = Character.toUpperCase((int) b);
        if (upper > 127) {
            // Uppercasing escaped the ASCII range: fall back.
            return javaToUpperCase();
        }
        bytes[i] = (byte) upper;
        segmentAndOffset.nextByte(size);
    }
    return fromBytes(bytes);
}
// JDK-based fallback for non-ASCII data.
// NOTE(review): String.toUpperCase() uses the default locale (Turkish-i issue) — confirm
// whether locale-sensitive casing is intended here.
private BinaryStringData javaToUpperCase() {
    return fromString(toString().toUpperCase());
}
/**
 * Converts all of the characters in this {@code BinaryStringData} to lower case.
 *
 * <p>Fast path converts pure-ASCII data byte by byte; any multi-byte code point, or any byte
 * whose lowercase form leaves the ASCII range, falls back to the JDK implementation.
 *
 * <p>Fix: removed the dead store {@code bytes[0] = (byte) Character.toTitleCase(...)} — the
 * loop below starts at {@code i == 0} and always overwrites {@code bytes[0]} before the array
 * is used.
 *
 * @return the {@code BinaryStringData}, converted to lowercase.
 */
public BinaryStringData toLowerCase() {
    // If the Java string is already cached, the JDK path is cheapest.
    if (javaObject != null) {
        return javaToLowerCase();
    }
    if (binarySection.sizeInBytes == 0) {
        return EMPTY_UTF8;
    }
    int size = binarySection.segments[0].size();
    BinaryStringData.SegmentAndOffset segmentAndOffset = startSegmentAndOffset(size);
    byte[] bytes = new byte[binarySection.sizeInBytes];
    for (int i = 0; i < binarySection.sizeInBytes; i++) {
        byte b = segmentAndOffset.value();
        if (numBytesForFirstByte(b) != 1) {
            // Multi-byte (non-ASCII) code point: fall back to the JDK implementation.
            return javaToLowerCase();
        }
        int lower = Character.toLowerCase((int) b);
        if (lower > 127) {
            // Lowercasing escaped the ASCII range: fall back.
            return javaToLowerCase();
        }
        bytes[i] = (byte) lower;
        segmentAndOffset.nextByte(size);
    }
    return fromBytes(bytes);
}
// JDK-based fallback for non-ASCII data.
// NOTE(review): String.toLowerCase() uses the default locale — confirm whether
// locale-sensitive casing is intended here.
private BinaryStringData javaToLowerCase() {
    return fromString(toString().toLowerCase());
}
// ------------------------------------------------------------------------------------------
// Internal methods on BinaryStringData
// ------------------------------------------------------------------------------------------

/** Reads byte {@code i} assuming the data lies entirely in the first segment. */
byte getByteOneSegment(int i) {
    return binarySection.segments[0].get(binarySection.offset + i);
}

/** True when the whole string fits inside the first memory segment (enables fast paths). */
boolean inFirstSegment() {
    return binarySection.sizeInBytes + binarySection.offset <= binarySection.segments[0].size();
}

/** Byte-wise match of {@code s} against this string at byte position {@code pos}. */
private boolean matchAt(final BinaryStringData s, int pos) {
    return (inFirstSegment() && s.inFirstSegment())
            ? matchAtOneSeg(s, pos)
            : matchAtVarSeg(s, pos);
}

// Single-segment variant: delegates to MemorySegment.equalTo after bounds checks.
private boolean matchAtOneSeg(final BinaryStringData s, int pos) {
    return s.binarySection.sizeInBytes + pos <= binarySection.sizeInBytes
            && pos >= 0
            && binarySection.segments[0].equalTo(
                    s.binarySection.segments[0],
                    binarySection.offset + pos,
                    s.binarySection.offset,
                    s.binarySection.sizeInBytes);
}

// Multi-segment variant: delegates to BinarySegmentUtils.equals after bounds checks.
private boolean matchAtVarSeg(final BinaryStringData s, int pos) {
    return s.binarySection.sizeInBytes + pos <= binarySection.sizeInBytes
            && pos >= 0
            && BinarySegmentUtils.equals(
                    binarySection.segments,
                    binarySection.offset + pos,
                    s.binarySection.segments,
                    s.binarySection.offset,
                    s.binarySection.sizeInBytes);
}

/** Copies {@code len} bytes starting at byte {@code start}, assuming single-segment data. */
BinaryStringData copyBinaryStringInOneSeg(int start, int len) {
    byte[] newBytes = new byte[len];
    binarySection.segments[0].get(binarySection.offset + start, newBytes, 0, len);
    return fromBytes(newBytes);
}

/** Copies the inclusive byte range [start, end] out of possibly multiple segments. */
BinaryStringData copyBinaryString(int start, int end) {
    int len = end - start + 1;
    byte[] newBytes = new byte[len];
    BinarySegmentUtils.copyToBytes(
            binarySection.segments, binarySection.offset + start, newBytes, 0, len);
    return fromBytes(newBytes);
}

/** Cursor positioned at the first byte of the string (segments assumed equally sized). */
BinaryStringData.SegmentAndOffset firstSegmentAndOffset(int segSize) {
    int segIndex = binarySection.offset / segSize;
    return new BinaryStringData.SegmentAndOffset(segIndex, binarySection.offset % segSize);
}

/** Cursor positioned at the last byte of the string. */
BinaryStringData.SegmentAndOffset lastSegmentAndOffset(int segSize) {
    int lastOffset = binarySection.offset + binarySection.sizeInBytes - 1;
    int segIndex = lastOffset / segSize;
    return new BinaryStringData.SegmentAndOffset(segIndex, lastOffset % segSize);
}

// Like firstSegmentAndOffset, but avoids the div/mod when the data is single-segment.
private BinaryStringData.SegmentAndOffset startSegmentAndOffset(int segSize) {
    return inFirstSegment()
            ? new BinaryStringData.SegmentAndOffset(0, binarySection.offset)
            : firstSegmentAndOffset(segSize);
}
/** CurrentSegment and positionInSegment. */
|
BinaryStringData
|
java
|
apache__spark
|
core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
|
{
"start": 2019,
"end": 24057
}
|
class ____ extends MemoryConsumer {
private static final SparkLogger logger =
        SparkLoggerFactory.getLogger(UnsafeExternalSorter.class);

// Defines the sort order of record prefixes; null when no comparison-based sort is needed.
@Nullable
private final PrefixComparator prefixComparator;

/**
 * {@link RecordComparator} may probably keep the reference to the records they compared last
 * time, so we should not keep a {@link RecordComparator} instance inside
 * {@link UnsafeExternalSorter}, because {@link UnsafeExternalSorter} is referenced by
 * {@link TaskContext} and thus can not be garbage collected until the end of the task.
 */
@Nullable
private final Supplier<RecordComparator> recordComparatorSupplier;

// Collaborators injected at construction time.
private final TaskMemoryManager taskMemoryManager;
private final BlockManager blockManager;
private final SerializerManager serializerManager;
private final TaskContext taskContext;

/** The buffer size to use when writing spills using DiskBlockObjectWriter */
private final int fileBufferSizeBytes;

/**
 * Force this sorter to spill when there are this many elements in memory.
 */
private final int numElementsForSpillThreshold;

/**
 * Force this sorter to spill when the in memory size in bytes is beyond this threshold.
 */
private final long sizeInBytesForSpillThreshold;

/**
 * Memory pages that hold the records being sorted. The pages in this list are freed when
 * spilling, although in principle we could recycle these pages across spills (on the other hand,
 * this might not be necessary if we maintained a pool of re-usable pages in the TaskMemoryManager
 * itself).
 */
private final LinkedList<MemoryBlock> allocatedPages = new LinkedList<>();

private final LinkedList<UnsafeSorterSpillWriter> spillWriters = new LinkedList<>();

// These variables are reset after spilling:
@Nullable private volatile UnsafeInMemorySorter inMemSorter;

// Running total of bytes held in allocatedPages; kept in sync by allocation/free paths.
private long totalPageMemoryUsageBytes = 0;
// Page currently being appended to, and the write cursor within it.
private MemoryBlock currentPage = null;
private long pageCursor = -1;
// High-water mark of getMemoryUsage(), and cumulative spill/sort accounting.
private long peakMemoryUsedBytes = 0;
private long totalSpillBytes = 0L;
private long totalSortTimeNanos = 0L;
// Non-null while an iterator returned to the caller is being consumed; spills route through it.
private volatile SpillableIterator readingIterator = null;
/**
 * Creates a sorter that adopts an already-populated in-memory sorter, immediately spills its
 * contents to disk, and then detaches it — the returned sorter is used only for inserting
 * further records.
 */
public static UnsafeExternalSorter createWithExistingInMemorySorter(
        TaskMemoryManager taskMemoryManager,
        BlockManager blockManager,
        SerializerManager serializerManager,
        TaskContext taskContext,
        Supplier<RecordComparator> recordComparatorSupplier,
        PrefixComparator prefixComparator,
        int initialSize,
        long pageSizeBytes,
        int numElementsForSpillThreshold,
        long sizeInBytesForSpillThreshold,
        UnsafeInMemorySorter inMemorySorter,
        long existingMemoryConsumption) throws IOException {
    UnsafeExternalSorter sorter = new UnsafeExternalSorter(taskMemoryManager, blockManager,
            serializerManager, taskContext, recordComparatorSupplier, prefixComparator, initialSize,
            pageSizeBytes, numElementsForSpillThreshold, sizeInBytesForSpillThreshold,
            inMemorySorter, false /* ignored */);
    // Force an immediate spill of the adopted sorter's contents.
    sorter.spill(Long.MAX_VALUE, sorter);
    taskContext.taskMetrics().incMemoryBytesSpilled(existingMemoryConsumption);
    sorter.totalSpillBytes += existingMemoryConsumption;
    // The external sorter will be used to insert records, in-memory sorter is not needed.
    sorter.inMemSorter = null;
    return sorter;
}
/** Creates a fresh sorter with its own empty {@link UnsafeInMemorySorter}. */
public static UnsafeExternalSorter create(
        TaskMemoryManager taskMemoryManager,
        BlockManager blockManager,
        SerializerManager serializerManager,
        TaskContext taskContext,
        Supplier<RecordComparator> recordComparatorSupplier,
        PrefixComparator prefixComparator,
        int initialSize,
        long pageSizeBytes,
        int numElementsForSpillThreshold,
        long sizeInBytesForSpillThreshold,
        boolean canUseRadixSort) {
    return new UnsafeExternalSorter(taskMemoryManager, blockManager, serializerManager,
            taskContext, recordComparatorSupplier, prefixComparator, initialSize, pageSizeBytes,
            numElementsForSpillThreshold, sizeInBytesForSpillThreshold, null, canUseRadixSort);
}
// Private: use the static factories create(...) / createWithExistingInMemorySorter(...).
private UnsafeExternalSorter(
        TaskMemoryManager taskMemoryManager,
        BlockManager blockManager,
        SerializerManager serializerManager,
        TaskContext taskContext,
        Supplier<RecordComparator> recordComparatorSupplier,
        PrefixComparator prefixComparator,
        int initialSize,
        long pageSizeBytes,
        int numElementsForSpillThreshold,
        long sizeInBytesForSpillThreshold,
        @Nullable UnsafeInMemorySorter existingInMemorySorter,
        boolean canUseRadixSort) {
    super(taskMemoryManager, pageSizeBytes, taskMemoryManager.getTungstenMemoryMode());
    this.taskMemoryManager = taskMemoryManager;
    this.blockManager = blockManager;
    this.serializerManager = serializerManager;
    this.taskContext = taskContext;
    this.recordComparatorSupplier = recordComparatorSupplier;
    this.prefixComparator = prefixComparator;
    // Use getSizeAsKb (not bytes) to maintain backwards compatibility for units
    // this.fileBufferSizeBytes = (int) conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024
    this.fileBufferSizeBytes = 32 * 1024;
    if (existingInMemorySorter == null) {
        RecordComparator comparator = null;
        if (recordComparatorSupplier != null) {
            comparator = recordComparatorSupplier.get();
        }
        this.inMemSorter = new UnsafeInMemorySorter(
                this,
                taskMemoryManager,
                comparator,
                prefixComparator,
                initialSize,
                canUseRadixSort);
    } else {
        // Adopt the caller's sorter; canUseRadixSort is ignored in this branch.
        this.inMemSorter = existingInMemorySorter;
    }
    this.peakMemoryUsedBytes = getMemoryUsage();
    this.sizeInBytesForSpillThreshold = sizeInBytesForSpillThreshold;
    this.numElementsForSpillThreshold = numElementsForSpillThreshold;
    // Register a cleanup task with TaskContext to ensure that memory is guaranteed to be freed at
    // the end of the task. This is necessary to avoid memory leaks in when the downstream operator
    // does not fully consume the sorter's output (e.g. sort followed by limit).
    taskContext.addTaskCompletionListener(context -> {
        cleanupResources();
    });
}
/**
 * Marks the current page as no-more-space-available, and as a result, either allocate a
 * new page or spill when we see the next record.
 */
@VisibleForTesting
public void closeCurrentPage() {
    if (currentPage != null) {
        // Moving the cursor to the page's end makes the next insert see zero free space.
        pageCursor = currentPage.getBaseOffset() + currentPage.size();
    }
}
/**
 * Sort and spill the current records in response to memory pressure.
 *
 * @param size requested number of bytes to free (ignored; everything is spilled)
 * @param trigger the consumer that triggered the spill; only self-triggered spills (or spills
 *     routed through an active reading iterator) release memory here
 * @return the number of bytes freed
 */
@Override
public long spill(long size, MemoryConsumer trigger) throws IOException {
    if (trigger != this) {
        // Externally-triggered spill: only possible while a caller is consuming our iterator.
        if (readingIterator != null) {
            return readingIterator.spill();
        }
        return 0L; // this should throw exception
    }
    if (inMemSorter == null || inMemSorter.numRecords() <= 0) {
        // There could still be some memory allocated when there are no records in the in-memory
        // sorter. We will not spill it however, to ensure that we can always process at least one
        // record before spilling. See the comments in `allocateMemoryForRecordIfNecessary` for why
        // this is necessary.
        return 0L;
    }
    // Note: the logged count reflects spills completed before this one.
    logger.info("Thread {} spilling sort data of {} to disk ({} {} so far)",
            MDC.of(LogKeys.THREAD_ID, Thread.currentThread().getId()),
            MDC.of(LogKeys.MEMORY_SIZE, Utils.bytesToString(getMemoryUsage())),
            MDC.of(LogKeys.NUM_SPILL_WRITERS, spillWriters.size()),
            MDC.of(LogKeys.SPILL_TIMES, spillWriters.size() > 1 ? "times" : "time"));
    ShuffleWriteMetrics writeMetrics = new ShuffleWriteMetrics();
    final UnsafeSorterSpillWriter spillWriter =
            new UnsafeSorterSpillWriter(blockManager, fileBufferSizeBytes, writeMetrics,
                    inMemSorter.numRecords());
    spillWriters.add(spillWriter);
    spillIterator(inMemSorter.getSortedIterator(), spillWriter);
    final long spillSize = freeMemory();
    // Note that this is more-or-less going to be a multiple of the page size, so wasted space in
    // pages will currently be counted as memory spilled even though that space isn't actually
    // written to disk. This also counts the space needed to store the sorter's pointer array.
    inMemSorter.freeMemory();
    // Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
    // records. Otherwise, if the task is over allocated memory, then without freeing the memory
    // pages, we might not be able to get memory for the pointer array.
    taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
    taskContext.taskMetrics().incDiskBytesSpilled(writeMetrics.bytesWritten());
    totalSpillBytes += spillSize;
    return spillSize;
}
/**
* Return the total memory usage of this sorter, including the data pages and the sorter's pointer
* array.
*/
private long getMemoryUsage() {
return ((inMemSorter == null) ? 0 : inMemSorter.getMemoryUsage()) + totalPageMemoryUsageBytes;
}
private void updatePeakMemoryUsed() {
long mem = getMemoryUsage();
if (mem > peakMemoryUsedBytes) {
peakMemoryUsedBytes = mem;
}
}
  /**
   * Return the peak memory used so far, in bytes.
   */
  public long getPeakMemoryUsedBytes() {
    // Fold the current usage into the peak first, so the returned value is never stale.
    updatePeakMemoryUsed();
    return peakMemoryUsedBytes;
  }
/**
* @return the total amount of time spent sorting data (in-memory only).
*/
public long getSortTimeNanos() {
UnsafeInMemorySorter sorter = inMemSorter;
if (sorter != null) {
return sorter.getSortTimeNanos();
}
return totalSortTimeNanos;
}
  /**
   * Return the total number of bytes that has been spilled into disk so far.
   */
  public long getSpillSize() {
    // Accumulated from the bytes freed by each spill; excludes in-memory data not yet spilled.
    return totalSpillBytes;
  }
  /**
   * Returns the number of data pages currently allocated by this sorter. Test-only accessor.
   */
  @VisibleForTesting
  public int getNumberOfAllocatedPages() {
    return allocatedPages.size();
  }
/**
* Free this sorter's data pages.
*
* @return the number of bytes freed.
*/
private long freeMemory() {
List<MemoryBlock> pagesToFree = clearAndGetAllocatedPagesToFree();
long memoryFreed = 0;
for (MemoryBlock block : pagesToFree) {
memoryFreed += block.size();
freePage(block);
totalPageMemoryUsageBytes -= block.size();
}
return memoryFreed;
}
  /**
   * Clear the allocated pages and return the list of allocated pages to let
   * the caller free the page. This is to prevent the deadlock by nested locks
   * if the caller locks the UnsafeExternalSorter and call freePage which locks the
   * TaskMemoryManager and cause nested locks.
   *
   * @return list of allocated pages to free
   */
  private List<MemoryBlock> clearAndGetAllocatedPagesToFree() {
    // Snapshot the peak before the pages are handed off; once the caller frees them,
    // getMemoryUsage() will no longer account for these pages.
    updatePeakMemoryUsed();
    List<MemoryBlock> pagesToFree = new LinkedList<>(allocatedPages);
    allocatedPages.clear();
    // Reset the write position so the next insert allocates a fresh page.
    currentPage = null;
    pageCursor = 0;
    return pagesToFree;
  }
/**
* Deletes any spill files created by this sorter.
*/
private void deleteSpillFiles() {
for (UnsafeSorterSpillWriter spill : spillWriters) {
File file = spill.getFile();
if (file != null && file.exists()) {
if (!file.delete()) {
logger.error("Was unable to delete spill file {}",
MDC.of(LogKeys.PATH, file.getAbsolutePath()));
}
}
}
}
/**
* Frees this sorter's in-memory data structures and cleans up its spill files.
*/
public void cleanupResources() {
// To avoid deadlocks, we can't call methods that lock the TaskMemoryManager
// (such as various free() methods) while synchronizing on the UnsafeExternalSorter.
// Instead, we will manipulate UnsafeExternalSorter state inside the synchronized
// lock and perform the actual free() calls outside it.
UnsafeInMemorySorter inMemSorterToFree = null;
List<MemoryBlock> pagesToFree = null;
try {
synchronized (this) {
deleteSpillFiles();
pagesToFree = clearAndGetAllocatedPagesToFree();
if (inMemSorter != null) {
inMemSorterToFree = inMemSorter;
inMemSorter = null;
}
}
} finally {
for (MemoryBlock pageToFree : pagesToFree) {
freePage(pageToFree);
totalPageMemoryUsageBytes -= pageToFree.size();
}
if (inMemSorterToFree != null) {
inMemSorterToFree.freeMemory();
}
}
}
  /**
   * Checks whether there is enough space to insert an additional record in to the sort pointer
   * array and grows the array if additional space is required. If the required space cannot be
   * obtained, then the in-memory data will be spilled to disk.
   *
   * @throws IOException if spilling the in-memory data fails
   */
  private void growPointerArrayIfNecessary() throws IOException {
    assert(inMemSorter != null);
    if (!inMemSorter.hasSpaceForAnotherRecord()) {
      if (inMemSorter.numRecords() <= 0) {
        // Spilling was triggered just before this method was called. The pointer array was freed
        // during the spill, so a new pointer array needs to be allocated here.
        LongArray array = allocateArray(inMemSorter.getInitialSize());
        inMemSorter.expandPointerArray(array);
        return;
      }
      long used = inMemSorter.getMemoryUsage();
      LongArray array = null;
      try {
        // Request double the current capacity (`used` is bytes; each entry is 8 bytes).
        // This allocation could itself trigger spilling.
        array = allocateArray(used / 8 * 2);
      } catch (TooLargePageException e) {
        // The pointer array is too big to fit in a single page, spill.
        spill();
      } catch (SparkOutOfMemoryError e) {
        if (inMemSorter.numRecords() > 0) {
          logger.error("Unable to grow the pointer array");
          throw e;
        }
        // The new array could not be allocated, but that is not an issue as it is no longer
        // needed, as all records were spilled.
      }
      if (inMemSorter.numRecords() <= 0) {
        // Spilling was triggered while trying to allocate the new array.
        if (array != null) {
          // We succeeded in allocating the new array, but, since all records were spilled, a
          // smaller array would also suffice.
          freeArray(array);
        }
        // The pointer array was freed during the spill, so a new pointer array needs to be
        // allocated here.
        array = allocateArray(inMemSorter.getInitialSize());
      }
      inMemSorter.expandPointerArray(array);
    }
  }
/**
* Allocates an additional page in order to insert an additional record. This will request
* additional memory from the memory manager and spill if the requested memory can not be
* obtained.
*
* @param required the required space in the data page, in bytes, including space for storing
* the record size.
*/
private void acquireNewPageIfNecessary(int required) {
if (currentPage == null ||
pageCursor + required > currentPage.getBaseOffset() + currentPage.size()) {
// TODO: try to find space on previous pages
currentPage = allocatePage(required);
pageCursor = currentPage.getBaseOffset();
allocatedPages.add(currentPage);
totalPageMemoryUsageBytes += currentPage.size();
}
}
  /**
   * Allocates more memory in order to insert an additional record. This will request additional
   * memory from the memory manager and spill if the requested memory can not be obtained.
   *
   * @param required the required space in the data page, in bytes, including space for storing
   *                 the record size.
   * @throws IOException if a spill triggered while growing the pointer array fails
   */
  private void allocateMemoryForRecordIfNecessary(int required) throws IOException {
    // Step 1:
    // Ensure that the pointer array has space for another record. This may cause a spill.
    growPointerArrayIfNecessary();
    // Step 2:
    // Ensure that the last page has space for another record. This may cause a spill.
    acquireNewPageIfNecessary(required);
    // Step 3:
    // The allocation in step 2 could have caused a spill, which would have freed the pointer
    // array allocated in step 1. Therefore we need to check again whether we have to allocate
    // a new pointer array.
    //
    // If the allocation in this step causes a spill event then it will not cause the page
    // allocated in the previous step to be freed. The function `spill` only frees memory if at
    // least one record has been inserted in the in-memory sorter. This will not be the case if
    // we have spilled in the previous step.
    //
    // If we did not spill in the previous step then `growPointerArrayIfNecessary` will be a
    // no-op that does not allocate any memory, and therefore can't cause a spill event.
    //
    // Thus there is no need to call `acquireNewPageIfNecessary` again after this step.
    growPointerArrayIfNecessary();
  }
  /**
   * Write a record to the sorter.
   *
   * @param recordBase the base object holding the record's bytes
   * @param recordOffset the offset of the record within {@code recordBase}
   * @param length the record length in bytes, not counting the length header written here
   * @param prefix the sort-key prefix stored alongside the record pointer
   * @param prefixIsNull whether the prefix corresponds to a null sort key
   * @throws IOException if spilling to disk fails while making room for the record
   */
  public void insertRecord(
      Object recordBase, long recordOffset, int length, long prefix, boolean prefixIsNull)
      throws IOException {
    assert(inMemSorter != null);
    if (inMemSorter.numRecords() >= numElementsForSpillThreshold) {
      logger.info("Spilling data because number of spilledRecords ({}) crossed the threshold {}",
        MDC.of(LogKeys.NUM_ELEMENTS_SPILL_RECORDS, inMemSorter.numRecords()),
        MDC.of(LogKeys.NUM_ELEMENTS_SPILL_THRESHOLD, numElementsForSpillThreshold));
      spill();
    }
    // TODO: Ideally we only need to check the spill threshold when new memory needs to be
    // allocated (both this sorter and the underlying UnsafeInMemorySorter may allocate
    // new memory), but it's simpler to check the total memory usage of these two sorters
    // before inserting each record.
    final long usedMemory = getMemoryUsage();
    if (usedMemory >= sizeInBytesForSpillThreshold) {
      logger.info("Spilling data because memory usage ({}) crossed the threshold {}",
        MDC.of(LogKeys.SPILL_RECORDS_SIZE, usedMemory),
        MDC.of(LogKeys.SPILL_RECORDS_SIZE_THRESHOLD, sizeInBytesForSpillThreshold));
      spill();
    }
    final int uaoSize = UnsafeAlignedOffset.getUaoSize();
    // Need 4 or 8 bytes to store the record length.
    final int required = length + uaoSize;
    allocateMemoryForRecordIfNecessary(required);
    // Layout in the page: [length header (uaoSize bytes)][record bytes (length bytes)].
    final Object base = currentPage.getBaseObject();
    final long recordAddress = taskMemoryManager.encodePageNumberAndOffset(currentPage, pageCursor);
    UnsafeAlignedOffset.putSize(base, pageCursor, length);
    pageCursor += uaoSize;
    Platform.copyMemory(recordBase, recordOffset, base, pageCursor, length);
    pageCursor += length;
    // Register the encoded address so the in-memory sorter can sort by prefix and locate the data.
    inMemSorter.insertRecord(recordAddress, prefix, prefixIsNull);
  }
  /**
   * Write a key-value record to the sorter. The key and value will be put together in-memory,
   * using the following format:
   *
   * record length (4 bytes), key length (4 bytes), key data, value data
   *
   * record length = key length + value length + 4
   *
   * @param keyBase base object holding the key bytes
   * @param keyOffset offset of the key within {@code keyBase}
   * @param keyLen key length in bytes
   * @param valueBase base object holding the value bytes
   * @param valueOffset offset of the value within {@code valueBase}
   * @param valueLen value length in bytes
   * @param prefix the sort-key prefix stored alongside the record pointer
   * @param prefixIsNull whether the prefix corresponds to a null sort key
   * @throws IOException if spilling to disk fails while making room for the record
   */
  public void insertKVRecord(Object keyBase, long keyOffset, int keyLen,
      Object valueBase, long valueOffset, int valueLen, long prefix, boolean prefixIsNull)
      throws IOException {
    final int uaoSize = UnsafeAlignedOffset.getUaoSize();
    // Two headers are written: the total record length and the key length.
    final int required = keyLen + valueLen + (2 * uaoSize);
    allocateMemoryForRecordIfNecessary(required);
    final Object base = currentPage.getBaseObject();
    final long recordAddress = taskMemoryManager.encodePageNumberAndOffset(currentPage, pageCursor);
    UnsafeAlignedOffset.putSize(base, pageCursor, keyLen + valueLen + uaoSize);
    pageCursor += uaoSize;
    UnsafeAlignedOffset.putSize(base, pageCursor, keyLen);
    pageCursor += uaoSize;
    Platform.copyMemory(keyBase, keyOffset, base, pageCursor, keyLen);
    pageCursor += keyLen;
    Platform.copyMemory(valueBase, valueOffset, base, pageCursor, valueLen);
    pageCursor += valueLen;
    assert(inMemSorter != null);
    inMemSorter.insertRecord(recordAddress, prefix, prefixIsNull);
  }
  /**
   * Merges another UnsafeExternalSorters into this one, the other one will be emptied.
   *
   * @param other the sorter to absorb; it is emptied and its resources cleaned up
   * @throws IOException if spilling the other sorter's in-memory data fails
   */
  public void merge(UnsafeExternalSorter other) throws IOException {
    // Force the other sorter's in-memory records to disk first, so everything it holds
    // is represented by spill files we can take over.
    other.spill();
    totalSpillBytes += other.totalSpillBytes;
    spillWriters.addAll(other.spillWriters);
    // remove them from `spillWriters`, or the files will be deleted in `cleanupResources`.
    other.spillWriters.clear();
    other.cleanupResources();
  }
  /**
   * Returns a sorted iterator. It is the caller's responsibility to call `cleanupResources()`
   * after consuming this iterator.
   *
   * @return an iterator over all inserted records in sorted order
   * @throws IOException if a reader for a spill file cannot be opened
   */
  public UnsafeSorterIterator getSortedIterator() throws IOException {
    assert(recordComparatorSupplier != null);
    if (spillWriters.isEmpty()) {
      // Nothing was spilled: serve everything directly from the in-memory sorter.
      assert(inMemSorter != null);
      readingIterator = new SpillableIterator(inMemSorter.getSortedIterator());
      return readingIterator;
    } else {
      // Merge all spill files, plus any records still in memory, into one sorted stream.
      final UnsafeSorterSpillMerger spillMerger = new UnsafeSorterSpillMerger(
        recordComparatorSupplier.get(), prefixComparator, spillWriters.size());
      for (UnsafeSorterSpillWriter spillWriter : spillWriters) {
        spillMerger.addSpillIfNotEmpty(spillWriter.getReader(serializerManager));
      }
      if (inMemSorter != null) {
        readingIterator = new SpillableIterator(inMemSorter.getSortedIterator());
        spillMerger.addSpillIfNotEmpty(readingIterator);
      }
      return spillMerger.getSortedIterator();
    }
  }
  // Test-only hook: delegates to the in-memory sorter's pointer-array capacity check.
  @VisibleForTesting boolean hasSpaceForAnotherRecord() {
    return inMemSorter.hasSpaceForAnotherRecord();
  }
private static void spillIterator(UnsafeSorterIterator inMemIterator,
UnsafeSorterSpillWriter spillWriter) throws IOException {
while (inMemIterator.hasNext()) {
inMemIterator.loadNext();
final Object baseObject = inMemIterator.getBaseObject();
final long baseOffset = inMemIterator.getBaseOffset();
final int recordLength = inMemIterator.getRecordLength();
spillWriter.write(baseObject, baseOffset, recordLength, inMemIterator.getKeyPrefix());
}
spillWriter.close();
}
/**
* An UnsafeSorterIterator that support spilling.
*/
|
UnsafeExternalSorter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java
|
{
"start": 1945,
"end": 2482
}
|
class ____ all actions related to retention leases. Note carefully that these actions are executed under a primary permit. Care is
* taken to thread the listener through the invocations so that for the sync APIs we do not notify the listener until these APIs have
* responded with success. Additionally, note the use of
* {@link TransportSingleShardAction#asyncShardOperation(SingleShardRequest, ShardId, ActionListener)} to handle the case when acquiring
* permits goes asynchronous because acquiring permits is blocked
*/
public
|
holds
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/association/toone/CriteriaJoinFetchTest.java
|
{
"start": 6646,
"end": 7884
}
|
class ____ {
@Id
private Integer id;
private String street;
private String city;
@OneToOne(cascade = CascadeType.ALL)
@JoinColumn(name = "NOTE_FK")
private Note note;
@OneToOne(cascade = CascadeType.ALL)
@JoinColumn(name = "CUSTOMER_FK")
private Customer customer;
public Address() {
}
public Address(
Integer id,
String street,
String city,
Note note,
Customer customer) {
this.id = id;
this.street = street;
this.city = city;
this.note = note;
this.note.setAddress( this );
this.customer = customer;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public Note getNote() {
return note;
}
public void setNote(Note note) {
this.note = note;
}
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
}
@Entity(name = "Note")
@Table(name = "NOTE_TABLE")
public static
|
Address
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/SimpleEmbeddableTest.java
|
{
"start": 575,
"end": 1039
}
|
class ____ {
@Test
public void testLifecycle(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Book book = new Book();
book.setTitle("High-Performance Java Persistence");
book.setAuthor("Vlad Mihalcea");
book.setPublisher(
new Publisher(
"Amazon",
"USA"
)
);
entityManager.persist(book);
});
}
//tag::embeddable-type-mapping-example[]
@Entity(name = "Book")
public static
|
SimpleEmbeddableTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ClassCanBeStaticTest.java
|
{
"start": 3055,
"end": 3129
}
|
class ____ {}
}
}
// inner
|
Inner8
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/jls/JLS_15_12_2_5Test.java
|
{
"start": 9077,
"end": 11259
}
|
class ____ {
@Before
public void setUp() throws Exception {
Assume.assumeTrue(
ClassFileVersion.of(JLS_15_12_2_5_Java8_Test.class).isAtLeast(JAVA_V8));
}
@Test
public void with_single_arg() throws Exception {
SingleOverload mock = mock(SingleOverload.class);
when(mock.oneArg(isNull())).thenReturn("ok");
assertThat(mock.oneArg(null))
.describedAs("Most specific method chosen for matcher and for null")
.isEqualTo("ok");
}
@Test
public void with_single_arg_and_null_Object_reference() throws Exception {
SingleOverload mock = mock(SingleOverload.class);
when(mock.oneArg(isNull())).thenReturn("ok");
Object arg = null;
assertThat(mock.oneArg(arg)).describedAs("not the stubbed method").isEqualTo(null);
}
@Test
public void with_variable_arg() throws Exception {
SingleOverload mock = mock(SingleOverload.class);
when(mock.varargs(isNull())).thenReturn("ok");
assertThat(mock.varargs(null))
.describedAs("Most specific method chosen for matcher and for null")
.isEqualTo("ok");
}
@Test
public void with_variable_arg_and_null_Object_array() throws Exception {
SingleOverload mock = mock(SingleOverload.class);
when(mock.varargs(isNull())).thenReturn("ok");
Object[] args = null;
assertThat(mock.varargs(args))
.describedAs("Most specific method chosen for matcher")
.isEqualTo(null);
}
@Test
public void with_variable_arg_and_null_Object_arg() throws Exception {
SingleOverload mock = mock(SingleOverload.class);
when(mock.varargs(isNull())).thenReturn("ok");
Object arg = null;
assertThat(mock.varargs(arg))
.describedAs("Most specific method chosen for matcher")
.isEqualTo(null);
}
}
|
JLS_15_12_2_5_Java8_Test
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BinaryImplNode.java
|
{
"start": 616,
"end": 1175
}
|
class ____ extends BinaryNode {
/* ---- begin visitor ---- */
@Override
public <Scope> void visit(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
irTreeVisitor.visitBinaryImpl(this, scope);
}
@Override
public <Scope> void visitChildren(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
getLeftNode().visit(irTreeVisitor, scope);
getRightNode().visit(irTreeVisitor, scope);
}
/* ---- end visitor ---- */
public BinaryImplNode(Location location) {
super(location);
}
}
|
BinaryImplNode
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/utils/DiffRepository.java
|
{
"start": 34482,
"end": 36390
}
|
class ____ {
private final Class<?> clazz;
private final DiffRepository baseRepository;
private final Filter filter;
private final int indent;
Key(Class<?> clazz, DiffRepository baseRepository, Filter filter, int indent) {
this.clazz = requireNonNull(clazz, "clazz");
this.baseRepository = baseRepository;
this.filter = filter;
this.indent = indent;
}
@Override
public int hashCode() {
return Objects.hash(clazz, baseRepository, filter);
}
@Override
public boolean equals(Object obj) {
return this == obj
|| obj instanceof Key
&& clazz.equals(((Key) obj).clazz)
&& Objects.equals(baseRepository, ((Key) obj).baseRepository)
&& Objects.equals(filter, ((Key) obj).filter);
}
DiffRepository toRepo() {
final URL refFile = findFile(clazz, ".xml");
final File file = Sources.of(refFile).file();
final String refFilePath = file.getAbsolutePath();
final String logFilePath =
file.length() > 0 ? refFilePath.replace(".xml", "_actual.xml") : refFilePath;
final File logFile = new File(logFilePath);
assert file.length() == 0 || !refFilePath.equals(logFile.getAbsolutePath());
return new DiffRepository(refFile, logFile, baseRepository, filter, indent);
}
}
private static Iterable<Node> iterate(NodeList nodeList) {
return new AbstractList<Node>() {
@Override
public Node get(int index) {
return nodeList.item(index);
}
@Override
public int size() {
return nodeList.getLength();
}
};
}
}
|
Key
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/savepoints/SavepointHandlers.java
|
{
"start": 5399,
"end": 7710
}
|
class ____<B extends RequestBody>
extends AbstractRestHandler<
RestfulGateway, B, TriggerResponse, SavepointTriggerMessageParameters> {
SavepointHandlerBase(
final GatewayRetriever<? extends RestfulGateway> leaderRetriever,
final Duration timeout,
Map<String, String> responseHeaders,
final MessageHeaders<B, TriggerResponse, SavepointTriggerMessageParameters>
messageHeaders) {
super(leaderRetriever, timeout, responseHeaders, messageHeaders);
}
protected AsynchronousJobOperationKey createOperationKey(final HandlerRequest<B> request) {
final JobID jobId = request.getPathParameter(JobIDPathParameter.class);
return AsynchronousJobOperationKey.of(
extractTriggerId(request.getRequestBody()).orElseGet(TriggerId::new), jobId);
}
protected abstract Optional<TriggerId> extractTriggerId(B request);
public CompletableFuture<TriggerResponse> handleRequest(
@Nonnull HandlerRequest<B> request, @Nonnull RestfulGateway gateway)
throws RestHandlerException {
final AsynchronousJobOperationKey operationKey = createOperationKey(request);
return triggerOperation(request, operationKey, gateway)
.handle(
(acknowledge, throwable) -> {
if (throwable == null) {
return new TriggerResponse(operationKey.getTriggerId());
} else {
throw new CompletionException(
createInternalServerError(
throwable, operationKey, "triggering"));
}
});
}
protected abstract CompletableFuture<Acknowledge> triggerOperation(
HandlerRequest<B> request,
AsynchronousJobOperationKey operationKey,
RestfulGateway gateway)
throws RestHandlerException;
}
/** HTTP handler to stop a job with a savepoint. */
public
|
SavepointHandlerBase
|
java
|
google__guava
|
guava/src/com/google/common/util/concurrent/Futures.java
|
{
"start": 49797,
"end": 65658
}
|
class ____<V extends @Nullable Object> implements Runnable {
final Future<V> future;
final FutureCallback<? super V> callback;
CallbackListener(Future<V> future, FutureCallback<? super V> callback) {
this.future = future;
this.callback = callback;
}
@Override
public void run() {
if (future instanceof InternalFutureFailureAccess) {
Throwable failure =
InternalFutures.tryInternalFastPathGetFailure((InternalFutureFailureAccess) future);
if (failure != null) {
callback.onFailure(failure);
return;
}
}
V value;
try {
value = getDone(future);
} catch (ExecutionException e) {
callback.onFailure(e.getCause());
return;
} catch (Throwable e) {
// Any Exception is either a RuntimeException or sneaky checked exception.
callback.onFailure(e);
return;
}
callback.onSuccess(value);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this).addValue(callback).toString();
}
}
/**
* Returns the result of the input {@code Future}, which must have already completed.
*
* <p>The benefits of this method are twofold. First, the name "getDone" suggests to readers that
* the {@code Future} is already done. Second, if buggy code calls {@code getDone} on a {@code
* Future} that is still pending, the program will throw instead of block. This can be important
* for APIs like {@link #whenAllComplete whenAllComplete(...)}{@code .}{@link
* FutureCombiner#call(Callable, Executor) call(...)}, where it is easy to use a new input from
* the {@code call} implementation but forget to add it to the arguments of {@code
* whenAllComplete}.
*
* <p>If you are looking for a method to determine whether a given {@code Future} is done, use the
* instance method {@link Future#isDone()}.
*
* @throws ExecutionException if the {@code Future} failed with an exception
* @throws CancellationException if the {@code Future} was cancelled
* @throws IllegalStateException if the {@code Future} is not done
* @since 20.0
*/
@CanIgnoreReturnValue
// TODO(cpovirk): Consider calling getDone() in our own code.
@ParametricNullness
public static <V extends @Nullable Object> V getDone(Future<V> future) throws ExecutionException {
/*
* We throw IllegalStateException, since the call could succeed later. Perhaps we "should" throw
* IllegalArgumentException, since the call could succeed with a different argument. Those
* exceptions' docs suggest that either is acceptable. Google's Java Practices page recommends
* IllegalArgumentException here, in part to keep its recommendation simple: Static methods
* should throw IllegalStateException only when they use static state.
*
* Why do we deviate here? The answer: We want for fluentFuture.getDone() to throw the same
* exception as Futures.getDone(fluentFuture).
*/
checkState(future.isDone(), "Future was expected to be done: %s", future);
return getUninterruptibly(future);
}
/**
* Returns the result of {@link Future#get()}, converting most exceptions to a new instance of the
* given checked exception type. This reduces boilerplate for a common use of {@code Future} in
* which it is unnecessary to programmatically distinguish between exception types or to extract
* other information from the exception instance.
*
* <p>Exceptions from {@code Future.get} are treated as follows:
*
* <ul>
* <li>Any {@link ExecutionException} has its <i>cause</i> wrapped in an {@code X} if the cause
* is a checked exception, an {@link UncheckedExecutionException} if the cause is a {@code
* RuntimeException}, or an {@link ExecutionError} if the cause is an {@code Error}.
* <li>Any {@link InterruptedException} is wrapped in an {@code X} (after restoring the
* interrupt).
* <li>Any {@link CancellationException} is propagated untouched, as is any other {@link
* RuntimeException} (though {@code get} implementations are discouraged from throwing such
* exceptions).
* </ul>
*
* <p>The overall principle is to continue to treat every checked exception as a checked
* exception, every unchecked exception as an unchecked exception, and every error as an error. In
* addition, the cause of any {@code ExecutionException} is wrapped in order to ensure that the
* new stack trace matches that of the current thread.
*
* <p>Instances of {@code exceptionClass} are created by choosing an arbitrary public constructor
* that accepts zero or more arguments, all of type {@code String} or {@code Throwable}
* (preferring constructors with at least one {@code String}, then preferring constructors with at
* least one {@code Throwable}) and calling the constructor via reflection. If the exception did
* not already have a cause, one is set by calling {@link Throwable#initCause(Throwable)} on it.
* If no such constructor exists, an {@code IllegalArgumentException} is thrown.
*
* @throws X if {@code get} throws any checked exception except for an {@code ExecutionException}
* whose cause is not itself a checked exception
* @throws UncheckedExecutionException if {@code get} throws an {@code ExecutionException} with a
* {@code RuntimeException} as its cause
* @throws ExecutionError if {@code get} throws an {@code ExecutionException} with an {@code
* Error} as its cause
* @throws CancellationException if {@code get} throws a {@code CancellationException}
* @throws IllegalArgumentException if {@code exceptionClass} extends {@code RuntimeException} or
* does not have a suitable constructor
* @since 19.0 (in 10.0 as {@code get})
*/
@CanIgnoreReturnValue
@J2ktIncompatible
@GwtIncompatible // reflection
@ParametricNullness
public static <V extends @Nullable Object, X extends Exception> V getChecked(
Future<V> future, Class<X> exceptionClass) throws X {
return FuturesGetChecked.getChecked(future, exceptionClass);
}
/**
* Returns the result of {@link Future#get(long, TimeUnit)}, converting most exceptions to a new
* instance of the given checked exception type. This reduces boilerplate for a common use of
* {@code Future} in which it is unnecessary to programmatically distinguish between exception
* types or to extract other information from the exception instance.
*
* <p>Exceptions from {@code Future.get} are treated as follows:
*
* <ul>
* <li>Any {@link ExecutionException} has its <i>cause</i> wrapped in an {@code X} if the cause
* is a checked exception, an {@link UncheckedExecutionException} if the cause is a {@code
* RuntimeException}, or an {@link ExecutionError} if the cause is an {@code Error}.
* <li>Any {@link InterruptedException} is wrapped in an {@code X} (after restoring the
* interrupt).
* <li>Any {@link TimeoutException} is wrapped in an {@code X}.
* <li>Any {@link CancellationException} is propagated untouched, as is any other {@link
* RuntimeException} (though {@code get} implementations are discouraged from throwing such
* exceptions).
* </ul>
*
* <p>The overall principle is to continue to treat every checked exception as a checked
* exception, every unchecked exception as an unchecked exception, and every error as an error. In
* addition, the cause of any {@code ExecutionException} is wrapped in order to ensure that the
* new stack trace matches that of the current thread.
*
* <p>Instances of {@code exceptionClass} are created by choosing an arbitrary public constructor
* that accepts zero or more arguments, all of type {@code String} or {@code Throwable}
* (preferring constructors with at least one {@code String}, then preferring constructors with at
* least one {@code Throwable}) and calling the constructor via reflection. If the exception did
* not already have a cause, one is set by calling {@link Throwable#initCause(Throwable)} on it.
* If no such constructor exists, an {@code IllegalArgumentException} is thrown.
*
* @throws X if {@code get} throws any checked exception except for an {@code ExecutionException}
* whose cause is not itself a checked exception
* @throws UncheckedExecutionException if {@code get} throws an {@code ExecutionException} with a
* {@code RuntimeException} as its cause
* @throws ExecutionError if {@code get} throws an {@code ExecutionException} with an {@code
* Error} as its cause
* @throws CancellationException if {@code get} throws a {@code CancellationException}
* @throws IllegalArgumentException if {@code exceptionClass} extends {@code RuntimeException} or
* does not have a suitable constructor
* @since 28.0 (but only since 33.4.0 in the Android flavor)
*/
@CanIgnoreReturnValue
@J2ktIncompatible
@GwtIncompatible // reflection
@ParametricNullness
public static <V extends @Nullable Object, X extends Exception> V getChecked(
Future<V> future, Class<X> exceptionClass, Duration timeout) throws X {
return getChecked(future, exceptionClass, toNanosSaturated(timeout), TimeUnit.NANOSECONDS);
}
/**
* Returns the result of {@link Future#get(long, TimeUnit)}, converting most exceptions to a new
* instance of the given checked exception type. This reduces boilerplate for a common use of
* {@code Future} in which it is unnecessary to programmatically distinguish between exception
* types or to extract other information from the exception instance.
*
* <p>Exceptions from {@code Future.get} are treated as follows:
*
* <ul>
* <li>Any {@link ExecutionException} has its <i>cause</i> wrapped in an {@code X} if the cause
* is a checked exception, an {@link UncheckedExecutionException} if the cause is a {@code
* RuntimeException}, or an {@link ExecutionError} if the cause is an {@code Error}.
* <li>Any {@link InterruptedException} is wrapped in an {@code X} (after restoring the
* interrupt).
* <li>Any {@link TimeoutException} is wrapped in an {@code X}.
* <li>Any {@link CancellationException} is propagated untouched, as is any other {@link
* RuntimeException} (though {@code get} implementations are discouraged from throwing such
* exceptions).
* </ul>
*
* <p>The overall principle is to continue to treat every checked exception as a checked
* exception, every unchecked exception as an unchecked exception, and every error as an error. In
* addition, the cause of any {@code ExecutionException} is wrapped in order to ensure that the
* new stack trace matches that of the current thread.
*
* <p>Instances of {@code exceptionClass} are created by choosing an arbitrary public constructor
* that accepts zero or more arguments, all of type {@code String} or {@code Throwable}
* (preferring constructors with at least one {@code String}) and calling the constructor via
* reflection. If the exception did not already have a cause, one is set by calling {@link
* Throwable#initCause(Throwable)} on it. If no such constructor exists, an {@code
* IllegalArgumentException} is thrown.
*
* @throws X if {@code get} throws any checked exception except for an {@code ExecutionException}
* whose cause is not itself a checked exception
* @throws UncheckedExecutionException if {@code get} throws an {@code ExecutionException} with a
* {@code RuntimeException} as its cause
* @throws ExecutionError if {@code get} throws an {@code ExecutionException} with an {@code
* Error} as its cause
* @throws CancellationException if {@code get} throws a {@code CancellationException}
* @throws IllegalArgumentException if {@code exceptionClass} extends {@code RuntimeException} or
* does not have a suitable constructor
* @since 19.0 (in 10.0 as {@code get} and with different parameter order)
*/
@CanIgnoreReturnValue
@J2ktIncompatible
@GwtIncompatible // reflection
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
@ParametricNullness
public static <V extends @Nullable Object, X extends Exception> V getChecked(
Future<V> future, Class<X> exceptionClass, long timeout, TimeUnit unit) throws X {
return FuturesGetChecked.getChecked(future, exceptionClass, timeout, unit);
}
/**
* Returns the result of calling {@link Future#get()} uninterruptibly on a task known not to throw
* a checked exception. This makes {@code Future} more suitable for lightweight, fast-running
* tasks that, barring bugs in the code, will not fail. This gives it exception-handling behavior
* similar to that of {@code ForkJoinTask.join}.
*
* <p>Exceptions from {@code Future.get} are treated as follows:
*
* <ul>
* <li>Any {@link ExecutionException} has its <i>cause</i> wrapped in an {@link
* UncheckedExecutionException} (if the cause is an {@code Exception}) or {@link
* ExecutionError} (if the cause is an {@code Error}).
* <li>Any {@link InterruptedException} causes a retry of the {@code get} call. The interrupt is
* restored before {@code getUnchecked} returns.
* <li>Any {@link CancellationException} is propagated untouched. So is any other {@link
* RuntimeException} ({@code get} implementations are discouraged from throwing such
* exceptions).
* </ul>
*
* <p>The overall principle is to eliminate all checked exceptions: to loop to avoid {@code
* InterruptedException}, to pass through {@code CancellationException}, and to wrap any exception
* from the underlying computation in an {@code UncheckedExecutionException} or {@code
* ExecutionError}.
*
* <p>For an uninterruptible {@code get} that preserves other exceptions, see {@link
* Uninterruptibles#getUninterruptibly(Future)}.
*
* @throws UncheckedExecutionException if {@code get} throws an {@code ExecutionException} with an
* {@code Exception} as its cause
* @throws ExecutionError if {@code get} throws an {@code ExecutionException} with an {@code
* Error} as its cause
* @throws CancellationException if {@code get} throws a {@code CancellationException}
* @since 10.0
*/
@CanIgnoreReturnValue
@ParametricNullness
public static <V extends @Nullable Object> V getUnchecked(Future<V> future) {
checkNotNull(future);
try {
return getUninterruptibly(future);
} catch (ExecutionException wrapper) {
if (wrapper.getCause() instanceof Error) {
throw new ExecutionError((Error) wrapper.getCause());
}
/*
* It's an Exception. (Or it's a non-Error, non-Exception Throwable. From my survey of such
* classes, I believe that most users intended to extend Exception, so we'll treat it like an
* Exception.)
*/
throw new UncheckedExecutionException(wrapper.getCause());
}
}
/*
* Arguably we don't need a timed getUnchecked because any operation slow enough to require a
* timeout is heavyweight enough to throw a checked exception and therefore be inappropriate to
* use with getUnchecked. Further, it's not clear that converting the checked TimeoutException to
* a RuntimeException -- especially to an UncheckedExecutionException, since it wasn't thrown by
* the computation -- makes sense, and if we don't convert it, the user still has to write a
* try-catch block.
*
* If you think you would use this method, let us know. You might also look into the
* Fork-Join framework: http://docs.oracle.com/javase/tutorial/essential/concurrency/forkjoin.html
*/
}
|
CallbackListener
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/support/JettyHeadersAdapter.java
|
{
"start": 7418,
"end": 8242
}
|
class ____ implements Iterator<String> {
private final Iterator<String> iterator;
private @Nullable String currentName;
private HeaderNamesIterator(Iterator<String> iterator) {
this.iterator = iterator;
}
@Override
public boolean hasNext() {
return this.iterator.hasNext();
}
@Override
public String next() {
this.currentName = this.iterator.next();
return this.currentName;
}
@Override
public void remove() {
HttpFields.Mutable mutableHttpFields = mutableFields();
if (this.currentName == null) {
throw new IllegalStateException("No current Header in iterator");
}
if (!headers.contains(this.currentName)) {
throw new IllegalStateException("Header not present: " + this.currentName);
}
mutableHttpFields.remove(this.currentName);
}
}
}
|
HeaderNamesIterator
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/impl/WorkerTaskQueue.java
|
{
"start": 640,
"end": 1053
}
|
class ____ extends TaskQueue {
public WorkerTaskQueue() {
}
/**
* Shutdown the task queue.
*
* @param executor an executor that can block in order to join threads.
* @param
*/
void shutdown(EventLoop executor, Promise<Void> completion) {
TaskQueue.CloseResult closeResult = close();
// Maintain context invariant: serialize task execution while interrupting tasks
|
WorkerTaskQueue
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/IrateDoubleAggregatorFunctionSupplier.java
|
{
"start": 650,
"end": 1736
}
|
class ____ implements AggregatorFunctionSupplier {
private final boolean isDelta;
public IrateDoubleAggregatorFunctionSupplier(boolean isDelta) {
this.isDelta = isDelta;
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
throw new UnsupportedOperationException("non-grouping aggregator is not supported");
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return IrateDoubleGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public AggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
throw new UnsupportedOperationException("non-grouping aggregator is not supported");
}
@Override
public IrateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return IrateDoubleGroupingAggregatorFunction.create(channels, driverContext, isDelta);
}
@Override
public String describe() {
return IrateDoubleAggregator.describe();
}
}
|
IrateDoubleAggregatorFunctionSupplier
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/controlbus/ControlBusRestartRouteTest.java
|
{
"start": 2114,
"end": 2545
}
|
class ____ extends RoutePolicySupport {
private int start;
private int stop;
@Override
public void onStart(Route route) {
start++;
}
@Override
public void onStop(Route route) {
stop++;
}
public int getStart() {
return start;
}
public int getStop() {
return stop;
}
}
}
|
MyRoutePolicy
|
java
|
apache__camel
|
components/camel-lra/src/main/java/org/apache/camel/service/lra/LRAUrlBuilder.java
|
{
"start": 1089,
"end": 3819
}
|
class ____ {
private String host;
private String path = "";
private String query = "";
public LRAUrlBuilder() {
}
public LRAUrlBuilder(String host, String path, String query) {
this.host = host;
this.path = path;
this.query = query;
}
public LRAUrlBuilder host(String host) {
if (this.host != null) {
throw new IllegalStateException("Host already set");
}
LRAUrlBuilder copy = copy();
copy.host = host;
return copy;
}
public LRAUrlBuilder path(String path) {
LRAUrlBuilder copy = copy();
copy.path = joinPath(this.path, path);
return copy;
}
public LRAUrlBuilder compensation(Optional<Endpoint> endpoint) {
if (endpoint.isPresent()) {
return compensation(endpoint.get().getEndpointUri());
}
return this;
}
public LRAUrlBuilder compensation(String uri) {
return this.query(URL_COMPENSATION_KEY, uri);
}
public LRAUrlBuilder completion(Optional<Endpoint> endpoint) {
if (endpoint.isPresent()) {
return completion(endpoint.get().getEndpointUri());
}
return this;
}
public LRAUrlBuilder completion(String uri) {
return this.query(URL_COMPLETION_KEY, uri);
}
public LRAUrlBuilder options(Map<String, ?> options) {
LRAUrlBuilder result = this;
for (Map.Entry<String, ?> entry : options.entrySet()) {
result = result.query(entry.getKey(), entry.getValue());
}
return result;
}
public LRAUrlBuilder query(String key, Object value) {
LRAUrlBuilder copy = copy();
if (copy.query.isEmpty()) {
copy.query += "?";
} else {
copy.query += "&";
}
copy.query += toNonnullString(key) + "=" + toNonnullString(value);
return copy;
}
public String build() {
if (this.host == null) {
throw new IllegalStateException("Host not set");
}
return joinPath(this.host, this.path) + query;
}
private String joinPath(String first, String second) {
first = toNonnullString(first);
second = toNonnullString(second);
while (first.endsWith("/")) {
first = first.substring(0, first.length() - 1);
}
while (second.startsWith("/")) {
second = second.substring(1);
}
return first + "/" + second;
}
private String toNonnullString(Object obj) {
return obj != null ? obj.toString() : "";
}
private LRAUrlBuilder copy() {
return new LRAUrlBuilder(this.host, this.path, this.query);
}
}
|
LRAUrlBuilder
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/SubcomponentCreatorValidationTest.java
|
{
"start": 22850,
"end": 23086
}
|
interface ____ extends Parent<TestModule> {",
" ChildComponent build();",
" void set2(TestModule s);",
" }")
.addLinesIf(
FACTORY,
"
|
Builder
|
java
|
quarkusio__quarkus
|
test-framework/common/src/main/java/io/quarkus/test/common/TestResourceManager.java
|
{
"start": 32140,
"end": 33146
}
|
class ____ {
private final QuarkusTestResourceLifecycleManager testResource;
private final Map<String, String> args;
private final Annotation configAnnotation;
public TestResourceStartInfo(QuarkusTestResourceLifecycleManager testResource) {
this(testResource, Collections.emptyMap(), null);
}
public TestResourceStartInfo(QuarkusTestResourceLifecycleManager testResource, Map<String, String> args,
Annotation configAnnotation) {
this.testResource = testResource;
this.args = args;
this.configAnnotation = configAnnotation;
}
public QuarkusTestResourceLifecycleManager getTestResource() {
return testResource;
}
public Map<String, String> getArgs() {
return args;
}
public Annotation getConfigAnnotation() {
return configAnnotation;
}
}
// visible for testing
static
|
TestResourceStartInfo
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/client/support/RestGatewaySupport.java
|
{
"start": 1224,
"end": 2319
}
|
class ____ {
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
private RestTemplate restTemplate;
/**
* Construct a new instance of the {@link RestGatewaySupport}, with default parameters.
*/
public RestGatewaySupport() {
this.restTemplate = new RestTemplate();
}
/**
* Construct a new instance of the {@link RestGatewaySupport}, with the given {@link ClientHttpRequestFactory}.
* @see RestTemplate#RestTemplate(ClientHttpRequestFactory)
*/
public RestGatewaySupport(ClientHttpRequestFactory requestFactory) {
Assert.notNull(requestFactory, "'requestFactory' must not be null");
this.restTemplate = new RestTemplate(requestFactory);
}
/**
* Sets the {@link RestTemplate} for the gateway.
*/
public void setRestTemplate(RestTemplate restTemplate) {
Assert.notNull(restTemplate, "'restTemplate' must not be null");
this.restTemplate = restTemplate;
}
/**
* Returns the {@link RestTemplate} for the gateway.
*/
public RestTemplate getRestTemplate() {
return this.restTemplate;
}
}
|
RestGatewaySupport
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/ObservationFilterChainDecorator.java
|
{
"start": 11340,
"end": 11819
}
|
interface ____ extends FilterObservation {
AroundFilterObservation NOOP = new AroundFilterObservation() {
};
static AroundFilterObservation create(Observation before, Observation after) {
if (before.isNoop() || after.isNoop()) {
return NOOP;
}
return new SimpleAroundFilterObservation(before, after);
}
default Observation before() {
return Observation.NOOP;
}
default Observation after() {
return Observation.NOOP;
}
|
AroundFilterObservation
|
java
|
spring-projects__spring-boot
|
module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/autoconfigure/ElasticsearchRestClientConfigurations.java
|
{
"start": 6951,
"end": 8927
}
|
class ____ implements Rest5ClientBuilderCustomizer, Ordered {
private final ElasticsearchProperties properties;
private final ElasticsearchConnectionDetails connectionDetails;
DefaultRest5ClientBuilderCustomizer(ElasticsearchProperties properties,
ElasticsearchConnectionDetails connectionDetails) {
this.properties = properties;
this.connectionDetails = connectionDetails;
}
@Override
public void customize(Rest5ClientBuilder restClientBuilder) {
}
@Override
public void customize(HttpAsyncClientBuilder httpClientBuilder) {
httpClientBuilder
.setDefaultCredentialsProvider(new ConnectionDetailsCredentialsProvider(this.connectionDetails));
PropertyMapper map = PropertyMapper.get();
map.from(this.properties::isSocketKeepAlive)
.to((keepAlive) -> httpClientBuilder
.setIOReactorConfig(IOReactorConfig.custom().setSoKeepAlive(keepAlive).build()));
}
@Override
public void customize(ConnectionConfig.Builder connectionConfigBuilder) {
PropertyMapper map = PropertyMapper.get();
map.from(this.properties::getConnectionTimeout)
.as(Timeout::of)
.to(connectionConfigBuilder::setConnectTimeout);
map.from(this.properties::getSocketTimeout).as(Timeout::of).to(connectionConfigBuilder::setSocketTimeout);
}
@Override
public void customize(PoolingAsyncClientConnectionManagerBuilder connectionManagerBuilder) {
SslBundle sslBundle = this.connectionDetails.getSslBundle();
if (sslBundle != null) {
SSLContext sslContext = sslBundle.createSslContext();
SslOptions sslOptions = sslBundle.getOptions();
DefaultClientTlsStrategy tlsStrategy = new DefaultClientTlsStrategy(sslContext,
sslOptions.getEnabledProtocols(), sslOptions.getCiphers(), SSLBufferMode.STATIC,
NoopHostnameVerifier.INSTANCE);
connectionManagerBuilder.setTlsStrategy(tlsStrategy);
}
}
@Override
public int getOrder() {
return 0;
}
}
private static
|
DefaultRest5ClientBuilderCustomizer
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/http/HttpHeadersAssert.java
|
{
"start": 1293,
"end": 9962
}
|
class ____ extends AbstractObjectAssert<HttpHeadersAssert, HttpHeaders> {
private final AbstractCollectionAssert<?, Collection<? extends String>, String, ObjectAssert<String>> namesAssert;
public HttpHeadersAssert(HttpHeaders actual) {
super(actual, HttpHeadersAssert.class);
as("HTTP headers");
this.namesAssert = Assertions.assertThat(actual.headerNames())
.as("HTTP header names");
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name}.
* @param name the name of an expected HTTP header
*/
public HttpHeadersAssert containsHeader(String name) {
this.namesAssert
.as("check headers contain HTTP header '%s'", name)
.contains(name);
return this.myself;
}
/**
* Verify that the actual HTTP headers contain the headers with the given
* {@code names}.
* @param names the names of expected HTTP headers
*/
public HttpHeadersAssert containsHeaders(String... names) {
this.namesAssert
.as("check headers contain HTTP headers '%s'", Arrays.toString(names))
.contains(names);
return this.myself;
}
/**
* Verify that the actual HTTP headers contain only the headers with the
* given {@code names}, in any order and in a case-insensitive manner.
* @param names the names of expected HTTP headers
* @since 7.0
*/
public HttpHeadersAssert containsOnlyHeaders(String... names) {
this.namesAssert
.as("check headers contain only HTTP headers '%s'", Arrays.toString(names))
.containsOnly(names);
return this.myself;
}
/**
* Verify that the actual HTTP headers do not contain a header with the
* given {@code name}.
* @param name the name of an HTTP header that should not be present
*/
public HttpHeadersAssert doesNotContainHeader(String name) {
this.namesAssert
.as("check headers do not contain HTTP header '%s'", name)
.doesNotContain(name);
return this.myself;
}
/**
* Verify that the actual HTTP headers do not contain any of the headers
* with the given {@code names}.
* @param names the names of HTTP headers that should not be present
* @since 6.2.2
*/
public HttpHeadersAssert doesNotContainHeaders(String... names) {
this.namesAssert
.as("check headers do not contain HTTP headers '%s'", Arrays.toString(names))
.doesNotContain(names);
return this.myself;
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} that satisfies the given {@code valueRequirements}.
* @param name the name of the header
* @param valueRequirements the group of assertions to run against the
* values of the header with the given name
* @since 7.0
*/
@SuppressWarnings("unchecked")
public HttpHeadersAssert hasHeaderSatisfying(String name, Consumer<List<String>> valueRequirements) {
containsHeader(name);
Assertions.assertThat(this.actual.get(name))
.as("check all values for HTTP header '%s'", name)
.satisfies(values -> valueRequirements.accept((List<String>) values));
return this.myself;
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} and {@link String} primary {@code value}.
* @param name the name of the header
* @param value the expected value of the header
*/
public HttpHeadersAssert hasValue(String name, String value) {
containsHeader(name);
Assertions.assertThat(this.actual.getFirst(name))
.as("check primary value for HTTP header '%s'", name)
.isEqualTo(value);
return this.myself;
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} and {@code long} primary {@code value}.
* @param name the name of the header
* @param value the expected value of the header
*/
public HttpHeadersAssert hasValue(String name, long value) {
containsHeader(name);
Assertions.assertThat(this.actual.getFirst(name))
.as("check primary long value for HTTP header '%s'", name)
.asLong().isEqualTo(value);
return this.myself;
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} and {@link Instant} primary {@code value}.
* @param name the name of the header
* @param value the expected value of the header
*/
public HttpHeadersAssert hasValue(String name, Instant value) {
containsHeader(name);
Assertions.assertThat(this.actual.getFirstZonedDateTime(name))
.as("check primary date value for HTTP header '%s'", name)
.isCloseTo(value.atZone(ZoneOffset.UTC), Assertions.within(999, ChronoUnit.MILLIS));
return this.myself;
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} and {@link String} primary {@code value}.
* <p>This assertion fails if the header has secondary values.
* @param name the name of the header
* @param value the expected value of the header
* @since 7.0
*/
public HttpHeadersAssert hasSingleValue(String name, String value) {
doesNotHaveSecondaryValues(name);
return hasValue(name, value);
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} and {@code long} primary {@code value}.
* <p>This assertion fails if the header has secondary values.
* @param name the name of the header
* @param value the expected value of the header
* @since 7.0
*/
public HttpHeadersAssert hasSingleValue(String name, long value) {
doesNotHaveSecondaryValues(name);
return hasValue(name, value);
}
/**
* Verify that the actual HTTP headers contain a header with the given
* {@code name} and {@link Instant} primary {@code value}.
* <p>This assertion fails if the header has secondary values.
* @param name the name of the header
* @param value the expected value of the header
* @since 7.0
*/
public HttpHeadersAssert hasSingleValue(String name, Instant value) {
doesNotHaveSecondaryValues(name);
return hasValue(name, value);
}
/**
* Verify that the given header has a full list of values exactly equal to
* the given list of values, and in the same order.
* @param name the considered header name (case-insensitive)
* @param values the exhaustive list of expected values
* @since 7.0
*/
public HttpHeadersAssert hasExactlyValues(String name, List<String> values) {
containsHeader(name);
Assertions.assertThat(this.actual.get(name))
.as("check all values of HTTP header '%s'", name)
.containsExactlyElementsOf(values);
return this.myself;
}
/**
* Verify that the given header has a full list of values exactly equal to
* the given list of values, in any order.
* @param name the considered header name (case-insensitive)
* @param values the exhaustive list of expected values
* @since 7.0
*/
public HttpHeadersAssert hasExactlyValuesInAnyOrder(String name, List<String> values) {
containsHeader(name);
Assertions.assertThat(this.actual.get(name))
.as("check all values of HTTP header '%s' in any order", name)
.containsExactlyInAnyOrderElementsOf(values);
return this.myself;
}
/**
* Verify that the actual HTTP headers are empty and no header is present.
*/
public HttpHeadersAssert isEmpty() {
this.namesAssert
.as("check headers are empty")
.isEmpty();
return this.myself;
}
/**
* Verify that the actual HTTP headers are not empty and at least one header
* is present.
*/
public HttpHeadersAssert isNotEmpty() {
this.namesAssert
.as("check headers are not empty")
.isNotEmpty();
return this.myself;
}
/**
* Verify that there are exactly {@code expected} headers present, when
* considering header names in a case-insensitive manner.
* @param expected the expected number of headers
*/
public HttpHeadersAssert hasSize(int expected) {
this.namesAssert
.as("check headers have size '%s'", expected)
.hasSize(expected);
return this.myself;
}
/**
* Verify that the number of actual headers is the same as in the given
* {@code HttpHeaders}.
* @param other the {@code HttpHeaders} to compare size with
* @since 7.0
*/
public HttpHeadersAssert hasSameSizeAs(HttpHeaders other) {
this.namesAssert
.as("check headers have same size as '%s'", other)
.hasSize(other.size());
return this.myself;
}
private HttpHeadersAssert doesNotHaveSecondaryValues(String name) {
containsHeader(name);
List<String> values = this.actual.get(name);
if (values != null && !values.isEmpty()) {
int size = values.size();
Assertions.assertThat(size)
.withFailMessage("Expected HTTP header '%s' to be present " +
"without secondary values, but found <%s> secondary value(s)", name, size - 1)
.isOne();
}
return this.myself;
}
}
|
HttpHeadersAssert
|
java
|
alibaba__nacos
|
ai/src/main/java/com/alibaba/nacos/ai/controller/A2aAdminController.java
|
{
"start": 2545,
"end": 6993
}
|
class ____ {
private final A2aServerOperationService a2aServerOperationService;
public A2aAdminController(A2aServerOperationService a2aServerOperationService) {
this.a2aServerOperationService = a2aServerOperationService;
}
/**
* Register agent.
*
* @param form the agent detail form to register
* @return result of the registration operation
* @throws NacosException if the agent registration fails due to invalid input or internal error
*/
@PostMapping
@Secured(action = ActionTypes.WRITE, signType = SignType.AI, apiType = ApiType.ADMIN_API)
public Result<String> registerAgent(AgentCardForm form) throws NacosException {
form.validate();
AgentCard agentCard = AgentRequestUtil.parseAgentCard(form);
a2aServerOperationService.registerAgent(agentCard, form.getNamespaceId(), form.getRegistrationType());
return Result.success("ok");
}
/**
* Get agent card.
*
* @param form the agent form to get
* @return result of the get operation
* @throws NacosApiException if the agent get fails due to invalid input or internal error
*/
@GetMapping
@Secured(action = ActionTypes.READ, signType = SignType.AI, apiType = ApiType.ADMIN_API)
public Result<AgentCardDetailInfo> getAgentCard(AgentForm form) throws NacosApiException {
form.validate();
return Result.success(
a2aServerOperationService.getAgentCard(form.getNamespaceId(), form.getAgentName(), form.getVersion(),
form.getRegistrationType()));
}
/**
* Update agent.
*
* @param form the agent update form to update
* @return result of the update operation
* @throws NacosException if the agent update fails due to invalid input or internal error
*/
@PutMapping
@Secured(action = ActionTypes.WRITE, signType = SignType.AI, apiType = ApiType.ADMIN_API)
public Result<String> updateAgentCard(AgentCardUpdateForm form) throws NacosException {
form.validate();
AgentCard agentCard = AgentRequestUtil.parseAgentCard(form);
a2aServerOperationService.updateAgentCard(agentCard, form.getNamespaceId(), form.getRegistrationType(),
form.getSetAsLatest());
return Result.success("ok");
}
/**
* Delete agent.
*
* @param form the agent form to delete
* @return result of the deletion operation
* @throws NacosException if the agent deletion fails due to invalid input or internal error
*/
@DeleteMapping
@Secured(action = ActionTypes.WRITE, signType = SignType.AI, apiType = ApiType.ADMIN_API)
public Result<String> deleteAgent(AgentForm form) throws NacosException {
form.validate();
a2aServerOperationService.deleteAgent(form.getNamespaceId(), form.getAgentName(), form.getVersion());
return Result.success("ok");
}
/**
* List agents.
*
* @param agentListForm the agent list form to list
* @param pageForm the page form to list
* @return result of the list operation
* @throws NacosException if the agent list fails due to invalid input or internal error
*/
@GetMapping("/list")
@Secured(action = ActionTypes.READ, signType = SignType.AI, apiType = ApiType.ADMIN_API)
public Result<Page<AgentCardVersionInfo>> listAgents(AgentListForm agentListForm, PageForm pageForm)
throws NacosException {
agentListForm.validate();
pageForm.validate();
return Result.success(
a2aServerOperationService.listAgents(agentListForm.getNamespaceId(), agentListForm.getAgentName(),
agentListForm.getSearch(), pageForm.getPageNo(), pageForm.getPageSize()));
}
/**
* List all versions for target Agent.
*
* @param agentForm agent form
* @return all version for target agent.
* @throws NacosException nacos exception
*/
@GetMapping("/version/list")
@Secured(action = ActionTypes.READ, signType = SignType.AI, apiType = ApiType.ADMIN_API)
public Result<List<AgentVersionDetail>> listAgentVersions(AgentForm agentForm) throws NacosException {
agentForm.validate();
return Result.success(
a2aServerOperationService.listAgentVersions(agentForm.getNamespaceId(), agentForm.getAgentName()));
}
}
|
A2aAdminController
|
java
|
apache__flink
|
flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/TestingWatchCallbackHandler.java
|
{
"start": 1062,
"end": 2698
}
|
class ____<T> implements WatchCallbackHandler<T> {
private final Consumer<List<T>> onAddedConsumer;
private final Consumer<List<T>> onModifiedConsumer;
private final Consumer<List<T>> onDeletedConsumer;
private final Consumer<List<T>> onErrorConsumer;
private final Consumer<Throwable> handleErrorConsumer;
private TestingWatchCallbackHandler(
Consumer<List<T>> onAddedConsumer,
Consumer<List<T>> onModifiedConsumer,
Consumer<List<T>> onDeletedConsumer,
Consumer<List<T>> onErrorConsumer,
Consumer<Throwable> handleErrorConsumer) {
this.onAddedConsumer = onAddedConsumer;
this.onModifiedConsumer = onModifiedConsumer;
this.onDeletedConsumer = onDeletedConsumer;
this.onErrorConsumer = onErrorConsumer;
this.handleErrorConsumer = handleErrorConsumer;
}
@Override
public void onAdded(List<T> resources) {
onAddedConsumer.accept(resources);
}
@Override
public void onModified(List<T> resources) {
onModifiedConsumer.accept(resources);
}
@Override
public void onDeleted(List<T> resources) {
onDeletedConsumer.accept(resources);
}
@Override
public void onError(List<T> resources) {
onErrorConsumer.accept(resources);
}
@Override
public void handleError(Throwable throwable) {
handleErrorConsumer.accept(throwable);
}
public static <T> Builder<T> builder() {
return new Builder<>();
}
/** Builder for {@link TestingWatchCallbackHandler}. */
public static
|
TestingWatchCallbackHandler
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/output/VectorMetadataParserUnitTests.java
|
{
"start": 1264,
"end": 7584
}
|
class ____ {
@Test
void shouldParseVectorMetadata() {
// Arrange
List<Object> vinfoOutput = Arrays.asList("quant-type", "int8", "vector-dim", 300, "size", 3000000, "max-level", 12,
"vset-uid", 1, "ef-construction", 200, "hnsw-max-node-uid", 3000000, "hnsw-m", 16, "attributes-count", 0,
"projection-input-dim", 0);
ArrayComplexData complexData = new ArrayComplexData(vinfoOutput.size());
for (Object item : vinfoOutput) {
complexData.storeObject(item);
}
// Act
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
// Assert
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isEqualTo(QuantizationType.Q8);
assertThat(metadata.getDimensionality()).isEqualTo(300);
assertThat(metadata.getSize()).isEqualTo(3000000);
assertThat(metadata.getvSetUid()).isEqualTo(1);
assertThat(metadata.getMaxNodeUid()).isEqualTo(3000000);
assertThat(metadata.getMaxNodes()).isEqualTo(16);
assertThat(metadata.getMaxLevel()).isEqualTo(12);
assertThat(metadata.getProjectionInputDim()).isEqualTo(0);
assertThat(metadata.getAttributesCount()).isEqualTo(0);
}
@Test
void shouldParseVectorMetadataWithNoQuantization() {
// Arrange
List<Object> vinfoOutput = Arrays.asList("quant-type", "float32", "vector-dim", 512, "size", 500);
ArrayComplexData complexData = new ArrayComplexData(vinfoOutput.size());
for (Object item : vinfoOutput) {
complexData.storeObject(item);
}
// Act
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
// Assert
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isEqualTo(QuantizationType.NO_QUANTIZATION);
assertThat(metadata.getDimensionality()).isEqualTo(512);
assertThat(metadata.getSize()).isEqualTo(500);
}
@Test
void shouldHandleUnknownFields() {
// Arrange
List<Object> vinfoOutput = Arrays.asList("quant-type", "int8", "vector-dim", 300, "unknown-field", "unknown-value",
"size", 3000000);
ArrayComplexData complexData = new ArrayComplexData(vinfoOutput.size());
for (Object item : vinfoOutput) {
complexData.storeObject(item);
}
// Act
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
// Assert
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isEqualTo(QuantizationType.Q8);
assertThat(metadata.getDimensionality()).isEqualTo(300);
assertThat(metadata.getSize()).isEqualTo(3000000);
}
@Test
void shouldHandleUnknownQuantizationType() {
// Arrange
List<Object> vinfoOutput = Arrays.asList("quant-type", "unknown", "vector-dim", 300, "size", 3000000);
ArrayComplexData complexData = new ArrayComplexData(vinfoOutput.size());
for (Object item : vinfoOutput) {
complexData.storeObject(item);
}
// Act
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
// Assert
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isNull();
assertThat(metadata.getDimensionality()).isEqualTo(300);
assertThat(metadata.getSize()).isEqualTo(3000000);
}
@Test
void shouldHandleNullInput() {
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(null);
assertThat(metadata).isNull();
}
@Test
void shouldHandleEmptyInput() {
// Arrange
ArrayComplexData complexData = new ArrayComplexData(1);
// Act & Assert
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isNull();
assertThat(metadata.getDimensionality()).isNull();
assertThat(metadata.getSize()).isNull();
assertThat(metadata.getMaxNodeUid()).isNull();
assertThat(metadata.getvSetUid()).isNull();
assertThat(metadata.getMaxNodes()).isNull();
assertThat(metadata.getProjectionInputDim()).isNull();
assertThat(metadata.getAttributesCount()).isNull();
assertThat(metadata.getMaxLevel()).isNull();
}
@Test
void shouldHandleOddNumberOfElements() {
// Arrange
List<Object> vinfoOutput = Arrays.asList("quant-type", "int8", "vector-dim", 300, "size" // Missing value for "size"
);
ArrayComplexData complexData = new ArrayComplexData(vinfoOutput.size());
for (Object item : vinfoOutput) {
complexData.storeObject(item);
}
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isNull();
assertThat(metadata.getDimensionality()).isNull();
assertThat(metadata.getSize()).isNull();
assertThat(metadata.getMaxNodeUid()).isNull();
assertThat(metadata.getvSetUid()).isNull();
assertThat(metadata.getMaxNodes()).isNull();
assertThat(metadata.getProjectionInputDim()).isNull();
assertThat(metadata.getAttributesCount()).isNull();
assertThat(metadata.getMaxLevel()).isNull();
}
@Test
void shouldHandleNonListInput() {
// Arrange
MapComplexData complexData = new MapComplexData(2);
complexData.storeObject("key");
complexData.storeObject("value");
VectorMetadata metadata = VectorMetadataParser.INSTANCE.parse(complexData);
assertThat(metadata).isNotNull();
assertThat(metadata.getType()).isNull();
assertThat(metadata.getDimensionality()).isNull();
assertThat(metadata.getSize()).isNull();
assertThat(metadata.getMaxNodeUid()).isNull();
assertThat(metadata.getvSetUid()).isNull();
assertThat(metadata.getMaxNodes()).isNull();
assertThat(metadata.getProjectionInputDim()).isNull();
assertThat(metadata.getAttributesCount()).isNull();
assertThat(metadata.getMaxLevel()).isNull();
}
}
|
VectorMetadataParserUnitTests
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/beans/factory/ScopeBeanFactory.java
|
{
"start": 15577,
"end": 16252
}
|
class ____<T> {
private final String name;
private final Class<T> beanClass;
private final Supplier<T> beanFactory;
private volatile boolean initialized;
BeanDefinition(String name, Class<T> beanClass) {
this.name = name;
this.beanClass = beanClass;
beanFactory = null;
}
BeanDefinition(String name, Class<T> beanClass, Supplier<T> beanFactory) {
this.name = name;
this.beanClass = beanClass;
this.beanFactory = beanFactory;
}
}
public Set<Class<?>> getRegisteredClasses() {
return registeredClasses;
}
}
|
BeanDefinition
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Sets.java
|
{
"start": 58366,
"end": 63341
}
|
class ____<E> extends ForwardingCollection<List<E>>
implements Set<List<E>> {
private final transient ImmutableList<ImmutableSet<E>> axes;
private final transient CartesianList<E> delegate;
static <E> Set<List<E>> create(List<? extends Set<? extends E>> sets) {
ImmutableList.Builder<ImmutableSet<E>> axesBuilder = new ImmutableList.Builder<>(sets.size());
for (Set<? extends E> set : sets) {
ImmutableSet<E> copy = ImmutableSet.copyOf(set);
if (copy.isEmpty()) {
return ImmutableSet.of();
}
axesBuilder.add(copy);
}
ImmutableList<ImmutableSet<E>> axes = axesBuilder.build();
ImmutableList<List<E>> listAxes =
new ImmutableList<List<E>>() {
@Override
public int size() {
return axes.size();
}
@Override
public List<E> get(int index) {
return axes.get(index).asList();
}
@Override
boolean isPartialView() {
return true;
}
// redeclare to help optimizers with b/310253115
@SuppressWarnings("RedundantOverride")
@Override
@J2ktIncompatible
@GwtIncompatible
Object writeReplace() {
return super.writeReplace();
}
};
return new CartesianSet<E>(axes, new CartesianList<E>(listAxes));
}
private CartesianSet(ImmutableList<ImmutableSet<E>> axes, CartesianList<E> delegate) {
this.axes = axes;
this.delegate = delegate;
}
@Override
protected Collection<List<E>> delegate() {
return delegate;
}
@Override
public boolean contains(@Nullable Object object) {
if (!(object instanceof List)) {
return false;
}
List<?> list = (List<?>) object;
if (list.size() != axes.size()) {
return false;
}
int i = 0;
for (Object o : list) {
if (!axes.get(i).contains(o)) {
return false;
}
i++;
}
return true;
}
@Override
public boolean equals(@Nullable Object object) {
// Warning: this is broken if size() == 0, so it is critical that we
// substitute an empty ImmutableSet to the user in place of this
if (object instanceof CartesianSet) {
CartesianSet<?> that = (CartesianSet<?>) object;
return this.axes.equals(that.axes);
}
if (object instanceof Set) {
Set<?> that = (Set<?>) object;
return this.size() == that.size() && this.containsAll(that);
}
return false;
}
@Override
public int hashCode() {
// Warning: this is broken if size() == 0, so it is critical that we
// substitute an empty ImmutableSet to the user in place of this
// It's a weird formula, but tests prove it works.
int adjust = size() - 1;
for (int i = 0; i < axes.size(); i++) {
adjust *= 31;
adjust = ~~adjust;
// in GWT, we have to deal with integer overflow carefully
}
int hash = 1;
for (Set<E> axis : axes) {
hash = 31 * hash + (size() / axis.size() * axis.hashCode());
hash = ~~hash;
}
hash += adjust;
return ~~hash;
}
}
/**
* Returns the set of all possible subsets of {@code set}. For example, {@code
* powerSet(ImmutableSet.of(1, 2))} returns the set {@code {{}, {1}, {2}, {1, 2}}}.
*
* <p>Elements appear in these subsets in the same iteration order as they appeared in the input
* set. The order in which these subsets appear in the outer set is undefined. Note that the power
* set of the empty set is not the empty set, but a one-element set containing the empty set.
*
* <p>The returned set and its constituent sets use {@code equals} to decide whether two elements
* are identical, even if the input set uses a different concept of equivalence.
*
* <p><i>Performance notes:</i> while the power set of a set with size {@code n} is of size {@code
* 2^n}, its memory usage is only {@code O(n)}. When the power set is constructed, the input set
* is merely copied. Only as the power set is iterated are the individual subsets created, and
* these subsets themselves occupy only a small constant amount of memory.
*
* @param set the set of elements to construct a power set from
* @return the power set, as an immutable set of immutable sets
* @throws IllegalArgumentException if {@code set} has more than 30 unique elements (causing the
* power set size to exceed the {@code int} range)
* @throws NullPointerException if {@code set} is or contains {@code null}
* @see <a href="http://en.wikipedia.org/wiki/Power_set">Power set article at Wikipedia</a>
* @since 4.0
*/
public static <E> Set<Set<E>> powerSet(Set<E> set) {
return new PowerSet<E>(set);
}
private static final
|
CartesianSet
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringRoundRobinLoadBalanceTest.java
|
{
"start": 1046,
"end": 1321
}
|
class ____ extends RoundRobinLoadBalanceTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/roundRobinLoadBalance.xml");
}
}
|
SpringRoundRobinLoadBalanceTest
|
java
|
spring-projects__spring-framework
|
spring-context-support/src/test/java/org/springframework/cache/jcache/config/JCacheCustomInterceptorTests.java
|
{
"start": 3677,
"end": 4212
}
|
class ____ {
@Bean
public CacheManager cacheManager() {
SimpleCacheManager cm = new SimpleCacheManager();
cm.setCaches(Arrays.asList(
defaultCache(),
exceptionCache()));
return cm;
}
@Bean
public JCacheableService<?> service() {
return new AnnotatedJCacheableService(defaultCache());
}
@Bean
public Cache defaultCache() {
return new ConcurrentMapCache("default");
}
@Bean
public Cache exceptionCache() {
return new ConcurrentMapCache("exception");
}
}
static
|
EnableCachingConfig
|
java
|
netty__netty
|
testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketConnectTest.java
|
{
"start": 2104,
"end": 10191
}
|
class ____ extends AbstractSocketTest {
@Test
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testCloseTwice(TestInfo testInfo) throws Throwable {
run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
@Override
public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
testCloseTwice(serverBootstrap, bootstrap);
}
});
}
public void testCloseTwice(ServerBootstrap sb, Bootstrap cb) throws Throwable {
Channel serverChannel = null;
Channel clientChannel = null;
try {
serverChannel = sb.childHandler(new ChannelInboundHandlerAdapter()).bind().syncUninterruptibly().channel();
final BlockingQueue<ChannelFuture> futures = new LinkedBlockingQueue<>();
clientChannel = cb.handler(new ChannelInboundHandlerAdapter() {
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
futures.add(ctx.close());
}
})
.connect(serverChannel.localAddress()).syncUninterruptibly().channel();
clientChannel.pipeline().fireUserEventTriggered("test");
clientChannel.close().syncUninterruptibly();
futures.take().sync();
clientChannel = null;
serverChannel.close().syncUninterruptibly();
serverChannel.close().syncUninterruptibly();
serverChannel = null;
} finally {
if (clientChannel != null) {
clientChannel.close().syncUninterruptibly();
}
if (serverChannel != null) {
serverChannel.close().syncUninterruptibly();
}
}
}
@Test
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testLocalAddressAfterConnect(TestInfo testInfo) throws Throwable {
run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
@Override
public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
testLocalAddressAfterConnect(serverBootstrap, bootstrap);
}
});
}
public void testLocalAddressAfterConnect(ServerBootstrap sb, Bootstrap cb) throws Throwable {
Channel serverChannel = null;
Channel clientChannel = null;
try {
final Promise<InetSocketAddress> localAddressPromise = ImmediateEventExecutor.INSTANCE.newPromise();
serverChannel = sb.childHandler(new ChannelInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
localAddressPromise.setSuccess((InetSocketAddress) ctx.channel().localAddress());
}
}).bind().syncUninterruptibly().channel();
clientChannel = cb.handler(new ChannelInboundHandlerAdapter()).register().syncUninterruptibly().channel();
assertNull(clientChannel.localAddress());
assertNull(clientChannel.remoteAddress());
clientChannel.connect(serverChannel.localAddress()).syncUninterruptibly().channel();
assertLocalAddress((InetSocketAddress) clientChannel.localAddress());
assertNotNull(clientChannel.remoteAddress());
assertLocalAddress(localAddressPromise.get());
} finally {
if (clientChannel != null) {
clientChannel.close().syncUninterruptibly();
}
if (serverChannel != null) {
serverChannel.close().syncUninterruptibly();
}
}
}
@Test
@Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
public void testChannelEventsFiredWhenClosedDirectly(TestInfo testInfo) throws Throwable {
run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
@Override
public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
testChannelEventsFiredWhenClosedDirectly(serverBootstrap, bootstrap);
}
});
}
public void testChannelEventsFiredWhenClosedDirectly(ServerBootstrap sb, Bootstrap cb) throws Throwable {
final BlockingQueue<Integer> events = new LinkedBlockingQueue<Integer>();
Channel sc = null;
Channel cc = null;
try {
sb.childHandler(new ChannelInboundHandlerAdapter());
sc = sb.bind().syncUninterruptibly().channel();
cb.handler(new ChannelInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
events.add(0);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
events.add(1);
}
});
// Connect and directly close again.
cc = cb.connect(sc.localAddress()).addListener(ChannelFutureListener.CLOSE).
syncUninterruptibly().channel();
assertEquals(0, events.take().intValue());
assertEquals(1, events.take().intValue());
} finally {
if (cc != null) {
cc.close();
}
if (sc != null) {
sc.close();
}
}
}
@Test
@Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
public void testWriteWithFastOpenBeforeConnect(TestInfo testInfo) throws Throwable {
run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
@Override
public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
testWriteWithFastOpenBeforeConnect(serverBootstrap, bootstrap);
}
});
}
public void testWriteWithFastOpenBeforeConnect(ServerBootstrap sb, Bootstrap cb) throws Throwable {
enableTcpFastOpen(sb, cb);
sb.childOption(ChannelOption.AUTO_READ, true);
cb.option(ChannelOption.AUTO_READ, true);
sb.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
protected void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast(new EchoServerHandler());
}
});
Channel sc = sb.bind().sync().channel();
connectAndVerifyDataTransfer(cb, sc);
connectAndVerifyDataTransfer(cb, sc);
}
private static void connectAndVerifyDataTransfer(Bootstrap cb, Channel sc)
throws InterruptedException {
BufferingClientHandler handler = new BufferingClientHandler();
cb.handler(handler);
ChannelFuture register = cb.register();
Channel channel = register.sync().channel();
ChannelFuture write = channel.write(writeAscii(DEFAULT, "[fastopen]"));
SocketAddress remoteAddress = sc.localAddress();
ChannelFuture connectFuture = channel.connect(remoteAddress);
Channel cc = connectFuture.sync().channel();
cc.writeAndFlush(writeAscii(DEFAULT, "[normal data]")).sync();
write.sync();
String expectedString = "[fastopen][normal data]";
String result = handler.collectBuffer(expectedString.getBytes(US_ASCII).length);
cc.disconnect().sync();
assertEquals(expectedString, result);
}
protected void enableTcpFastOpen(ServerBootstrap sb, Bootstrap cb) {
// TFO is an almost-pure optimisation and should not change any observable behaviour in our tests.
sb.option(ChannelOption.TCP_FASTOPEN, 5);
cb.option(ChannelOption.TCP_FASTOPEN_CONNECT, true);
}
private static void assertLocalAddress(InetSocketAddress address) {
assertTrue(address.getPort() > 0);
assertFalse(address.getAddress().isAnyLocalAddress());
}
private static
|
SocketConnectTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
|
{
"start": 2689,
"end": 12871
}
|
class ____ {
private final String historyFileName =
"job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
private final String historyFileNameZeroReduceTasks =
"job_1416424547277_0002-1416424775281-root-TeraGen-1416424785433-2-0-SUCCEEDED-default-1416424779349.jhist";
private final String confFileName = "job_1329348432655_0001_conf.xml";
private final Configuration conf = new Configuration();
private final JobACLsManager jobAclsManager = new JobACLsManager(conf);
private boolean loadTasks;
private JobId jobId = MRBuilderUtils.newJobId(1329348432655l, 1, 1);
Path fullHistoryPath =
new Path(this.getClass().getClassLoader().getResource(historyFileName)
.getFile());
Path fullHistoryPathZeroReduces =
new Path(this.getClass().getClassLoader().getResource(historyFileNameZeroReduceTasks)
.getFile());
Path fullConfPath =
new Path(this.getClass().getClassLoader().getResource(confFileName)
.getFile());
private CompletedJob completedJob;
public void initTestJobHistoryEntities(boolean paramLoadTasks) throws Exception {
this.loadTasks = paramLoadTasks;
}
public static Collection<Object[]> data() {
List<Object[]> list = new ArrayList<Object[]>(2);
list.add(new Object[] { true });
list.add(new Object[] { false });
return list;
}
/* Verify some expected values based on the history file */
@MethodSource("data")
@ParameterizedTest
@Timeout(value = 100)
public void testCompletedJob(boolean paramLoadTasks) throws Exception {
initTestJobHistoryEntities(paramLoadTasks);
HistoryFileInfo info = mock(HistoryFileInfo.class);
when(info.getConfFile()).thenReturn(fullConfPath);
when(info.getHistoryFile()).thenReturn(fullHistoryPath);
//Re-initialize to verify the delayed load.
completedJob =
new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user",
info, jobAclsManager);
//Verify tasks loaded based on loadTask parameter.
assertEquals(loadTasks, completedJob.tasksLoaded.get());
assertEquals(1, completedJob.getAMInfos().size());
assertEquals(10, completedJob.getCompletedMaps());
assertEquals(1, completedJob.getCompletedReduces());
assertEquals(12, completedJob.getTasks().size());
//Verify tasks loaded at this point.
assertThat(completedJob.tasksLoaded.get()).isTrue();
assertEquals(10, completedJob.getTasks(TaskType.MAP).size());
assertEquals(2, completedJob.getTasks(TaskType.REDUCE).size());
assertEquals("user", completedJob.getUserName());
assertEquals(JobState.SUCCEEDED, completedJob.getState());
JobReport jobReport = completedJob.getReport();
assertEquals("user", jobReport.getUser());
assertEquals(JobState.SUCCEEDED, jobReport.getJobState());
assertEquals(fullHistoryPath.toString(), jobReport.getHistoryFile());
}
@MethodSource("data")
@ParameterizedTest
@Timeout(value = 100)
public void testCopmletedJobReportWithZeroTasks(boolean paramLoadTasks) throws Exception {
initTestJobHistoryEntities(paramLoadTasks);
HistoryFileInfo info = mock(HistoryFileInfo.class);
when(info.getConfFile()).thenReturn(fullConfPath);
when(info.getHistoryFile()).thenReturn(fullHistoryPathZeroReduces);
completedJob =
new CompletedJob(conf, jobId, fullHistoryPathZeroReduces, loadTasks, "user",
info, jobAclsManager);
JobReport jobReport = completedJob.getReport();
// Make sure that the number reduces (completed and total) are equal to zero.
assertEquals(0, completedJob.getTotalReduces());
assertEquals(0, completedJob.getCompletedReduces());
// Verify that the reduce progress is 1.0 (not NaN)
assertEquals(1.0, jobReport.getReduceProgress(), 0.001);
assertEquals(fullHistoryPathZeroReduces.toString(),
jobReport.getHistoryFile());
}
@MethodSource("data")
@ParameterizedTest
@Timeout(value = 10)
public void testCompletedTask(boolean paramLoadTasks) throws Exception {
initTestJobHistoryEntities(paramLoadTasks);
HistoryFileInfo info = mock(HistoryFileInfo.class);
when(info.getConfFile()).thenReturn(fullConfPath);
completedJob =
new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user",
info, jobAclsManager);
TaskId mt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
TaskId rt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
Map<TaskId, Task> mapTasks = completedJob.getTasks(TaskType.MAP);
Map<TaskId, Task> reduceTasks = completedJob.getTasks(TaskType.REDUCE);
assertEquals(10, mapTasks.size());
assertEquals(2, reduceTasks.size());
Task mt1 = mapTasks.get(mt1Id);
assertEquals(1, mt1.getAttempts().size());
assertEquals(TaskState.SUCCEEDED, mt1.getState());
TaskReport mt1Report = mt1.getReport();
assertEquals(TaskState.SUCCEEDED, mt1Report.getTaskState());
assertEquals(mt1Id, mt1Report.getTaskId());
Task rt1 = reduceTasks.get(rt1Id);
assertEquals(1, rt1.getAttempts().size());
assertEquals(TaskState.SUCCEEDED, rt1.getState());
TaskReport rt1Report = rt1.getReport();
assertEquals(TaskState.SUCCEEDED, rt1Report.getTaskState());
assertEquals(rt1Id, rt1Report.getTaskId());
}
@MethodSource("data")
@ParameterizedTest
@Timeout(value = 10)
public void testCompletedTaskAttempt(boolean paramLoadTasks) throws Exception {
initTestJobHistoryEntities(paramLoadTasks);
HistoryFileInfo info = mock(HistoryFileInfo.class);
when(info.getConfFile()).thenReturn(fullConfPath);
completedJob =
new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user",
info, jobAclsManager);
TaskId mt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
TaskId rt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
TaskAttemptId mta1Id = MRBuilderUtils.newTaskAttemptId(mt1Id, 0);
TaskAttemptId rta1Id = MRBuilderUtils.newTaskAttemptId(rt1Id, 0);
Task mt1 = completedJob.getTask(mt1Id);
Task rt1 = completedJob.getTask(rt1Id);
TaskAttempt mta1 = mt1.getAttempt(mta1Id);
assertEquals(TaskAttemptState.SUCCEEDED, mta1.getState());
assertEquals("localhost:45454", mta1.getAssignedContainerMgrAddress());
assertEquals("localhost:9999", mta1.getNodeHttpAddress());
TaskAttemptReport mta1Report = mta1.getReport();
assertEquals(TaskAttemptState.SUCCEEDED, mta1Report.getTaskAttemptState());
assertEquals("localhost", mta1Report.getNodeManagerHost());
assertEquals(45454, mta1Report.getNodeManagerPort());
assertEquals(9999, mta1Report.getNodeManagerHttpPort());
TaskAttempt rta1 = rt1.getAttempt(rta1Id);
assertEquals(TaskAttemptState.SUCCEEDED, rta1.getState());
assertEquals("localhost:45454", rta1.getAssignedContainerMgrAddress());
assertEquals("localhost:9999", rta1.getNodeHttpAddress());
TaskAttemptReport rta1Report = rta1.getReport();
assertEquals(TaskAttemptState.SUCCEEDED, rta1Report.getTaskAttemptState());
assertEquals("localhost", rta1Report.getNodeManagerHost());
assertEquals(45454, rta1Report.getNodeManagerPort());
assertEquals(9999, rta1Report.getNodeManagerHttpPort());
}
/**
* Simple test of some methods of CompletedJob
* @throws Exception
*/
@MethodSource("data")
@ParameterizedTest
@Timeout(value = 30)
public void testGetTaskAttemptCompletionEvent(boolean paramLoadTasks) throws Exception {
initTestJobHistoryEntities(paramLoadTasks);
HistoryFileInfo info = mock(HistoryFileInfo.class);
when(info.getConfFile()).thenReturn(fullConfPath);
completedJob =
new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user",
info, jobAclsManager);
TaskCompletionEvent[] events= completedJob.getMapAttemptCompletionEvents(0,1000);
assertEquals(10, completedJob.getMapAttemptCompletionEvents(0,10).length);
int currentEventId=0;
for (TaskCompletionEvent taskAttemptCompletionEvent : events) {
int eventId= taskAttemptCompletionEvent.getEventId();
assertTrue(eventId>=currentEventId);
currentEventId=eventId;
}
assertNull(completedJob.loadConfFile() );
// job name
assertEquals("Sleep job",completedJob.getName());
// queue name
assertEquals("default",completedJob.getQueueName());
// progress
assertEquals(1.0, completedJob.getProgress(),0.001);
// 12 rows in answer
assertEquals(12,completedJob.getTaskAttemptCompletionEvents(0,1000).length);
// select first 10 rows
assertEquals(10,completedJob.getTaskAttemptCompletionEvents(0,10).length);
// select 5-10 rows include 5th
assertEquals(7,completedJob.getTaskAttemptCompletionEvents(5,10).length);
// without errors
assertEquals(1,completedJob.getDiagnostics().size());
assertEquals("",completedJob.getDiagnostics().get(0));
assertEquals(0, completedJob.getJobACLs().size());
}
@MethodSource("data")
@ParameterizedTest
@Timeout(value = 30)
public void testCompletedJobWithDiagnostics(boolean paramLoadTasks) throws Exception {
initTestJobHistoryEntities(paramLoadTasks);
final String jobError = "Job Diagnostics";
JobInfo jobInfo = spy(new JobInfo());
when(jobInfo.getErrorInfo()).thenReturn(jobError);
when(jobInfo.getJobStatus()).thenReturn(JobState.FAILED.toString());
when(jobInfo.getAMInfos()).thenReturn(Collections.<JobHistoryParser.AMInfo>emptyList());
final JobHistoryParser mockParser = mock(JobHistoryParser.class);
when(mockParser.parse()).thenReturn(jobInfo);
HistoryFileInfo info = mock(HistoryFileInfo.class);
when(info.getConfFile()).thenReturn(fullConfPath);
when(info.getHistoryFile()).thenReturn(fullHistoryPath);
CompletedJob job =
new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user",
info, jobAclsManager) {
@Override
protected JobHistoryParser createJobHistoryParser(
Path historyFileAbsolute) throws IOException {
return mockParser;
}
};
assertEquals(jobError, job.getReport().getDiagnostics());
}
}
|
TestJobHistoryEntities
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/http/upload/HttpUploadClient.java
|
{
"start": 2348,
"end": 76099
}
|
class ____ {
static final String BASE_URL = System.getProperty("baseUrl", "http://127.0.0.1:8080/");
static final String FILE = System.getProperty("file", "upload.txt");
public static void main(String[] args) throws Exception {
String postSimple, postFile, get;
if (BASE_URL.endsWith("/")) {
postSimple = BASE_URL + "formpost";
postFile = BASE_URL + "formpostmultipart";
get = BASE_URL + "formget";
} else {
postSimple = BASE_URL + "/formpost";
postFile = BASE_URL + "/formpostmultipart";
get = BASE_URL + "/formget";
}
URI uriSimple = new URI(postSimple);
String scheme = uriSimple.getScheme() == null? "http" : uriSimple.getScheme();
String host = uriSimple.getHost() == null? "127.0.0.1" : uriSimple.getHost();
int port = uriSimple.getPort();
if (port == -1) {
if ("http".equalsIgnoreCase(scheme)) {
port = 80;
} else if ("https".equalsIgnoreCase(scheme)) {
port = 443;
}
}
if (!"http".equalsIgnoreCase(scheme) && !"https".equalsIgnoreCase(scheme)) {
System.err.println("Only HTTP(S) is supported.");
return;
}
final boolean ssl = "https".equalsIgnoreCase(scheme);
final SslContext sslCtx;
if (ssl) {
sslCtx = SslContextBuilder.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE).build();
} else {
sslCtx = null;
}
URI uriFile = new URI(postFile);
File file = new File(FILE);
if (!file.canRead()) {
throw new FileNotFoundException(FILE);
}
// Configure the client.
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
// setup the factory: here using a mixed memory/disk based on size threshold
HttpDataFactory factory = new DefaultHttpDataFactory(DefaultHttpDataFactory.MINSIZE); // Disk if MINSIZE exceed
DiskFileUpload.deleteOnExitTemporaryFile = true; // should delete file on exit (in normal exit)
DiskFileUpload.baseDirectory = null; // system temp directory
DiskAttribute.deleteOnExitTemporaryFile = true; // should delete file on exit (in normal exit)
DiskAttribute.baseDirectory = null; // system temp directory
try {
Bootstrap b = new Bootstrap();
b.group(group).channel(NioSocketChannel.class).handler(new HttpUploadClientInitializer(sslCtx));
// Simple Get form: no factory used (not usable)
List<Entry<String, String>> headers = formget(b, host, port, get, uriSimple);
if (headers == null) {
factory.cleanAllHttpData();
return;
}
// Simple Post form: factory used for big attributes
List<InterfaceHttpData> bodylist = formpost(b, host, port, uriSimple, file, factory, headers);
if (bodylist == null) {
factory.cleanAllHttpData();
return;
}
// Multipart Post form: factory used
formpostmultipart(b, host, port, uriFile, factory, headers, bodylist);
} finally {
// Shut down executor threads to exit.
group.shutdownGracefully();
// Really clean all temporary files if they still exist
factory.cleanAllHttpData();
}
}
/**
* Standard usage of HTTP API in Netty without file Upload (get is not able to achieve File upload
* due to limitation on request size).
*
* @return the list of headers that will be used in every example after
**/
private static List<Entry<String, String>> formget(
Bootstrap bootstrap, String host, int port, String get, URI uriSimple) throws Exception {
// XXX /formget
// No use of HttpPostRequestEncoder since not a POST
Channel channel = bootstrap.connect(host, port).sync().channel();
// Prepare the HTTP request.
QueryStringEncoder encoder = new QueryStringEncoder(get);
// add Form attribute
encoder.addParam("getform", "GET");
encoder.addParam("info", "first value");
encoder.addParam("secondinfo", "secondvalue ���&");
// not the big one since it is not compatible with GET size
// encoder.addParam("thirdinfo", textArea);
encoder.addParam("thirdinfo", "third value\r\ntest second line\r\n\r\nnew line\r\n");
encoder.addParam("Send", "Send");
URI uriGet = new URI(encoder.toString());
HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, uriGet.toASCIIString());
HttpHeaders headers = request.headers();
headers.set(HttpHeaderNames.HOST, host);
headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
headers.set(HttpHeaderNames.ACCEPT_ENCODING, HttpHeaderValues.GZIP + "," + HttpHeaderValues.DEFLATE);
headers.set(HttpHeaderNames.ACCEPT_CHARSET, "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
headers.set(HttpHeaderNames.ACCEPT_LANGUAGE, "fr");
headers.set(HttpHeaderNames.REFERER, uriSimple.toString());
headers.set(HttpHeaderNames.USER_AGENT, "Netty Simple Http Client side");
headers.set(HttpHeaderNames.ACCEPT, "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
//connection will not close but needed
// headers.set("Connection","keep-alive");
// headers.set("Keep-Alive","300");
headers.set(
HttpHeaderNames.COOKIE, ClientCookieEncoder.STRICT.encode(
new DefaultCookie("my-cookie", "foo"),
new DefaultCookie("another-cookie", "bar"))
);
// send request
channel.write(request);
channel.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
// Wait for the server to close the connection.
channel.closeFuture().sync();
// convert headers to list
return headers.entries();
}
/**
* Standard post without multipart but already support on Factory (memory management)
*
* @return the list of HttpData object (attribute and file) to be reused on next post
*/
private static List<InterfaceHttpData> formpost(
Bootstrap bootstrap,
String host, int port, URI uriSimple, File file, HttpDataFactory factory,
List<Entry<String, String>> headers) throws Exception {
// XXX /formpost
// Start the connection attempt.
ChannelFuture future = bootstrap.connect(SocketUtils.socketAddress(host, port));
// Wait until the connection attempt succeeds or fails.
Channel channel = future.sync().channel();
// Prepare the HTTP request.
HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, uriSimple.toASCIIString());
// Use the PostBody encoder
HttpPostRequestEncoder bodyRequestEncoder =
new HttpPostRequestEncoder(factory, request, false); // false => not multipart
// it is legal to add directly header or cookie into the request until finalize
for (Entry<String, String> entry : headers) {
request.headers().set(entry.getKey(), entry.getValue());
}
// add Form attribute
bodyRequestEncoder.addBodyAttribute("getform", "POST");
bodyRequestEncoder.addBodyAttribute("info", "first value");
bodyRequestEncoder.addBodyAttribute("secondinfo", "secondvalue ���&");
bodyRequestEncoder.addBodyAttribute("thirdinfo", textArea);
bodyRequestEncoder.addBodyAttribute("fourthinfo", textAreaLong);
bodyRequestEncoder.addBodyFileUpload("myfile", file, "application/x-zip-compressed", false);
// finalize request
request = bodyRequestEncoder.finalizeRequest();
// Create the bodylist to be reused on the last version with Multipart support
List<InterfaceHttpData> bodylist = bodyRequestEncoder.getBodyListAttributes();
// send request
channel.write(request);
// test if request was chunked and if so, finish the write
if (bodyRequestEncoder.isChunked()) { // could do either request.isChunked()
// either do it through ChunkedWriteHandler
channel.write(bodyRequestEncoder);
}
channel.flush();
// Do not clear here since we will reuse the InterfaceHttpData on the next request
// for the example (limit action on client side). Take this as a broadcast of the same
// request on both Post actions.
//
// On standard program, it is clearly recommended to clean all files after each request
// bodyRequestEncoder.cleanFiles();
// Wait for the server to close the connection.
channel.closeFuture().sync();
return bodylist;
}
/**
* Multipart example
*/
private static void formpostmultipart(
Bootstrap bootstrap, String host, int port, URI uriFile, HttpDataFactory factory,
Iterable<Entry<String, String>> headers, List<InterfaceHttpData> bodylist) throws Exception {
// XXX /formpostmultipart
// Start the connection attempt.
ChannelFuture future = bootstrap.connect(SocketUtils.socketAddress(host, port));
// Wait until the connection attempt succeeds or fails.
Channel channel = future.sync().channel();
// Prepare the HTTP request.
HttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, uriFile.toASCIIString());
// Use the PostBody encoder
HttpPostRequestEncoder bodyRequestEncoder =
new HttpPostRequestEncoder(factory, request, true); // true => multipart
// it is legal to add directly header or cookie into the request until finalize
for (Entry<String, String> entry : headers) {
request.headers().set(entry.getKey(), entry.getValue());
}
// add Form attribute from previous request in formpost()
bodyRequestEncoder.setBodyHttpDatas(bodylist);
// finalize request
bodyRequestEncoder.finalizeRequest();
// send request
channel.write(request);
// test if request was chunked and if so, finish the write
if (bodyRequestEncoder.isChunked()) {
channel.write(bodyRequestEncoder);
}
channel.flush();
// Now no more use of file representation (and list of HttpData)
bodyRequestEncoder.cleanFiles();
// Wait for the server to close the connection.
channel.closeFuture().sync();
}
// use to simulate a small TEXTAREA field in a form
private static final String textArea = "short text";
// use to simulate a big TEXTAREA field in a form
private static final String textAreaLong =
"lkjlkjlKJLKJLKJLKJLJlkj lklkj\r\n\r\nLKJJJJJJJJKKKKKKKKKKKKKKK ����&\r\n\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n" +
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\r\n";
}
|
HttpUploadClient
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/i18n/LocaleResolverTests.java
|
{
"start": 1591,
"end": 6048
}
|
class ____ {
@Test
void acceptHeaderLocaleResolver() {
doTest(new AcceptHeaderLocaleResolver(), false);
}
@Test
void fixedLocaleResolver() {
doTest(new FixedLocaleResolver(Locale.UK), false);
}
@Test
void cookieLocaleResolver() {
doTest(new CookieLocaleResolver(), true);
}
@Test
void sessionLocaleResolver() {
doTest(new SessionLocaleResolver(), true);
}
private void doTest(LocaleResolver localeResolver, boolean shouldSet) {
// create mocks
MockServletContext context = new MockServletContext();
MockHttpServletRequest request = new MockHttpServletRequest(context);
request.addPreferredLocale(Locale.UK);
MockHttpServletResponse response = new MockHttpServletResponse();
// check original locale
Locale locale = localeResolver.resolveLocale(request);
assertThat(locale).isEqualTo(Locale.UK);
// set new locale
try {
localeResolver.setLocale(request, response, Locale.GERMANY);
assertThat(shouldSet).as("should not be able to set Locale").isTrue();
// check new locale
locale = localeResolver.resolveLocale(request);
assertThat(locale).isEqualTo(Locale.GERMANY);
}
catch (UnsupportedOperationException ex) {
assertThat(shouldSet).as("should be able to set Locale").isFalse();
}
// check LocaleContext
if (localeResolver instanceof LocaleContextResolver localeContextResolver) {
LocaleContext localeContext = localeContextResolver.resolveLocaleContext(request);
if (shouldSet) {
assertThat(localeContext.getLocale()).isEqualTo(Locale.GERMANY);
}
else {
assertThat(localeContext.getLocale()).isEqualTo(Locale.UK);
}
boolean condition2 = localeContext instanceof TimeZoneAwareLocaleContext;
assertThat(condition2).isTrue();
assertThat(((TimeZoneAwareLocaleContext) localeContext).getTimeZone()).isNull();
if (localeContextResolver instanceof AbstractLocaleContextResolver) {
((AbstractLocaleContextResolver) localeContextResolver).setDefaultTimeZone(TimeZone.getTimeZone("GMT+1"));
request.removeAttribute(CookieLocaleResolver.LOCALE_REQUEST_ATTRIBUTE_NAME);
localeContextResolver.resolveLocaleContext(request);
assertThat(TimeZone.getTimeZone("GMT+1")).isEqualTo(((TimeZoneAwareLocaleContext) localeContext).getTimeZone());
}
try {
localeContextResolver.setLocaleContext(request, response, new SimpleLocaleContext(Locale.US));
if (!shouldSet) {
fail("should not be able to set Locale");
}
localeContext = localeContextResolver.resolveLocaleContext(request);
assertThat(localeContext.getLocale()).isEqualTo(Locale.US);
if (localeContextResolver instanceof AbstractLocaleContextResolver) {
assertThat(TimeZone.getTimeZone("GMT+1")).isEqualTo(((TimeZoneAwareLocaleContext) localeContext).getTimeZone());
}
else {
assertThat(((TimeZoneAwareLocaleContext) localeContext).getTimeZone()).isNull();
}
localeContextResolver.setLocaleContext(request, response,
new SimpleTimeZoneAwareLocaleContext(Locale.GERMANY, TimeZone.getTimeZone("GMT+2")));
localeContext = localeContextResolver.resolveLocaleContext(request);
assertThat(localeContext.getLocale()).isEqualTo(Locale.GERMANY);
boolean condition1 = localeContext instanceof TimeZoneAwareLocaleContext;
assertThat(condition1).isTrue();
assertThat(TimeZone.getTimeZone("GMT+2")).isEqualTo(((TimeZoneAwareLocaleContext) localeContext).getTimeZone());
localeContextResolver.setLocaleContext(request, response,
new SimpleTimeZoneAwareLocaleContext(null, TimeZone.getTimeZone("GMT+3")));
localeContext = localeContextResolver.resolveLocaleContext(request);
assertThat(localeContext.getLocale()).isEqualTo(Locale.UK);
boolean condition = localeContext instanceof TimeZoneAwareLocaleContext;
assertThat(condition).isTrue();
assertThat(TimeZone.getTimeZone("GMT+3")).isEqualTo(((TimeZoneAwareLocaleContext) localeContext).getTimeZone());
if (localeContextResolver instanceof AbstractLocaleContextResolver) {
((AbstractLocaleContextResolver) localeContextResolver).setDefaultLocale(Locale.GERMANY);
request.removeAttribute(CookieLocaleResolver.LOCALE_REQUEST_ATTRIBUTE_NAME);
localeContextResolver.resolveLocaleContext(request);
assertThat(localeContext.getLocale()).isEqualTo(Locale.GERMANY);
}
}
catch (UnsupportedOperationException ex) {
if (shouldSet) {
fail("should be able to set Locale");
}
}
}
}
}
|
LocaleResolverTests
|
java
|
netty__netty
|
codec-http/src/test/java/io/netty/handler/codec/http/websocketx/WebSocket00FrameEncoderTest.java
|
{
"start": 990,
"end": 1854
}
|
class ____ {
// Test for https://github.com/netty/netty/issues/2768
@Test
public void testMultipleWebSocketCloseFrames() {
EmbeddedChannel channel = new EmbeddedChannel(new WebSocket00FrameEncoder());
assertTrue(channel.writeOutbound(new CloseWebSocketFrame()));
assertTrue(channel.writeOutbound(new CloseWebSocketFrame()));
assertTrue(channel.finish());
assertCloseWebSocketFrame(channel);
assertCloseWebSocketFrame(channel);
assertNull(channel.readOutbound());
}
private static void assertCloseWebSocketFrame(EmbeddedChannel channel) {
ByteBuf buf = channel.readOutbound();
assertEquals(2, buf.readableBytes());
assertEquals((byte) 0xFF, buf.readByte());
assertEquals((byte) 0x00, buf.readByte());
buf.release();
}
}
|
WebSocket00FrameEncoderTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/embedded/SpreadDeal.java
|
{
"start": 1218,
"end": 2290
}
|
class ____ extends NotonialDeal {
/**
* Swap with the tenor.
*/
private Swap longSwap;
@Embedded
public Swap getLongSwap() {
return longSwap;
}
public void setLongSwap(Swap swap) {
this.longSwap = swap;
}
/**
* Swap with the longer tenor.
*/
private Swap shortSwap;
@Embedded
@AttributeOverrides(value = {
@AttributeOverride(name = "tenor", column = @Column(name = "SHORT_TENOR")),
@AttributeOverride(name = "fixedLeg.paymentFrequency", column = @Column(name = "SHORT_FIXED_FREQUENCY")),
@AttributeOverride(name = "fixedLeg.rate", column = @Column(name = "SHORT_FIXED_RATE")),
@AttributeOverride(name = "floatLeg.paymentFrequency", column = @Column(name = "SHORT_FLOAT_FREQUENCY")),
@AttributeOverride(name = "floatLeg.rateIndex", column = @Column(name = "SHORT_FLOAT_RATEINDEX")),
@AttributeOverride(name = "floatLeg.rateSpread", column = @Column(name = "SHORT_FLOAT_RATESPREAD"))
})
public Swap getShortSwap() {
return shortSwap;
}
public void setShortSwap(Swap shortSwap) {
this.shortSwap = shortSwap;
}
}
|
SpreadDeal
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/DiscoverySelectors.java
|
{
"start": 20719,
"end": 21176
}
|
class ____
* @since 6.0
* @see #selectClass(Class)
* @see #selectClasses(List)
* @see ClassSelector
*/
@API(status = EXPERIMENTAL, since = "6.0")
public static List<ClassSelector> selectClasses(Class<?>... classes) {
return selectClasses(List.of(classes));
}
/**
* Create a {@code ClassSelector} for each supplied {@link Class}.
*
* @param classes the classes to select; never {@code null} and never containing
* {@code null}
|
references
|
java
|
apache__kafka
|
coordinator-common/src/main/java/org/apache/kafka/coordinator/common/runtime/CoordinatorRuntimeMetrics.java
|
{
"start": 1101,
"end": 2825
}
|
interface ____ extends AutoCloseable {
/**
* Called when the partition state changes.
* @param oldState The old state.
* @param newState The new state to transition to.
*/
void recordPartitionStateChange(CoordinatorState oldState, CoordinatorState newState);
/**
* Record the partition load metric.
* @param startTimeMs The partition load start time.
* @param endTimeMs The partition load end time.
*/
void recordPartitionLoadSensor(long startTimeMs, long endTimeMs);
/**
* Update the event queue time.
*
* @param durationMs The queue time.
*/
void recordEventQueueTime(long durationMs);
/**
* Update the event queue processing time.
*
* @param durationMs The event processing time.
*/
void recordEventProcessingTime(long durationMs);
/**
* Record the event purgatory time.
*
* @param durationMs The time the event was completed.
*/
void recordEventPurgatoryTime(long durationMs);
/**
* Record the effective batch linger time.
*
* @param durationMs The linger time in milliseconds.
*/
void recordLingerTime(long durationMs);
/**
* Record the flush time.
*
* @param durationMs The flush time in milliseconds.
*/
void recordFlushTime(long durationMs);
/**
* Record the thread idle time.
* @param idleTimeMs The idle time in milliseconds.
*/
void recordThreadIdleTime(double idleTimeMs);
/**
* Register the event queue size gauge.
*
* @param sizeSupplier The size supplier.
*/
void registerEventQueueSizeGauge(Supplier<Integer> sizeSupplier);
}
|
CoordinatorRuntimeMetrics
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/SessionManagementConfigurerTests.java
|
{
"start": 31524,
"end": 31719
}
|
class ____ {
@RequestMapping("/")
String encoded(HttpServletResponse response) {
response.encodeURL("/foo");
response.encodeRedirectURL("/foo");
return "encoded";
}
}
}
|
EncodesUrls
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/query/AuditQueryCreator.java
|
{
"start": 6717,
"end": 8584
}
|
enum ____ of class {@link org.hibernate.envers.RevisionType})</li>
* </ol>
* Additional conditions that the results must satisfy may be specified.
*
* @param c Class of the entities for which to query.
* @param selectEntitiesOnly If true, instead of a list of three-element arrays, a list of entities will be
* returned as a result of executing this query.
* @param selectDeletedEntities If true, also revisions where entities were deleted will be returned. The additional
* entities will have revision type "delete", and contain no data (all fields null), except for the id field.
*
* @return A query for revisions at which instances of the given entity were modified, to which
* conditions can be added (for example - a specific id of an entity of class <code>c</code>), and which
* can then be executed. The results of the query will be sorted in ascending order by the revision number,
* unless an order or projection is added.
*/
public AuditQuery forRevisionsOfEntity(Class<?> c, boolean selectEntitiesOnly, boolean selectDeletedEntities) {
c = getTargetClassIfProxied( c );
checkEntityAudited( c.getName() );
return new RevisionsOfEntityQuery(
enversService,
auditReaderImplementor,
c,
selectEntitiesOnly,
selectDeletedEntities,
false,
false
);
}
/**
* Creates a query, which selects the revisions, at which the given entity was modified and with a given entityName.
* Unless an explicit projection is set, the result will be a list of three-element arrays, containing:
* <ol>
* <li>the entity instance</li>
* <li>revision entity, corresponding to the revision at which the entity was modified. If no custom
* revision entity is used, this will be an instance of {@link org.hibernate.envers.DefaultRevisionEntity}</li>
* <li>type of the revision (an
|
instance
|
java
|
apache__dubbo
|
dubbo-common/src/test/java/org/apache/dubbo/common/model/person/Cgeneric.java
|
{
"start": 886,
"end": 1525
}
|
class ____<T> implements Serializable {
public static String NAME = "C";
private String name = NAME;
private T data;
private Ageneric<T> a;
private Bgeneric<PersonInfo> b;
public T getData() {
return data;
}
public void setData(T data) {
this.data = data;
}
public String getName() {
return name;
}
public Ageneric<T> getA() {
return a;
}
public void setA(Ageneric<T> a) {
this.a = a;
}
public Bgeneric<PersonInfo> getB() {
return b;
}
public void setB(Bgeneric<PersonInfo> b) {
this.b = b;
}
}
|
Cgeneric
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/core/PreparedStatementSetter.java
|
{
"start": 820,
"end": 1148
}
|
interface ____ values on a {@link java.sql.PreparedStatement} provided
* by the JdbcTemplate class, for each of a number of updates in a batch using the
* same SQL. Implementations are responsible for setting any necessary parameters.
* SQL with placeholders will already have been supplied.
*
* <p>It's easier to use this
|
sets
|
java
|
apache__logging-log4j2
|
src/site/antora/modules/ROOT/examples/manual/markers/MarkerExample.java
|
{
"start": 1006,
"end": 2441
}
|
class ____ {
private static final Logger LOGGER = LogManager.getLogger("example.MarkerExample");
// tag::create-marker[]
private static final Marker SQL_MARKER = MarkerManager.getMarker("SQL");
// end::create-marker[]
// tag::create-marker-parent[]
private static final Marker QUERY_MARKER =
MarkerManager.getMarker("SQL_QUERY").addParents(SQL_MARKER);
private static final Marker UPDATE_MARKER =
MarkerManager.getMarker("UPDATE").addParents(SQL_MARKER);
// end::create-marker-parent[]
public static void main(final String[] args) {
doQuery("my_table");
doQueryParent("my_table");
doUpdate("my_table", "column", "value");
}
public static void doQuery(String table) {
// Do business logic here
// tag::use-marker[]
LOGGER.debug(SQL_MARKER, "SELECT * FROM {}", table);
// end::use-marker[]
}
public static void doQueryParent(String table) {
// Do business logic here
// tag::use-marker-parent[]
LOGGER.debug(QUERY_MARKER, "SELECT * FROM {}", table);
// end::use-marker-parent[]
}
public static void doUpdate(String table, String column, String value) {
// Do business logic here
// tag::use-marker-parent[]
LOGGER.debug(UPDATE_MARKER, "UPDATE {} SET {} = {}", table, column, value);
// end::use-marker-parent[]
}
}
|
MarkerExample
|
java
|
apache__avro
|
lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
|
{
"start": 28201,
"end": 28412
}
|
class ____ implements org.apache.thrift.scheme.SchemeFactory {
public ping_argsTupleScheme getScheme() {
return new ping_argsTupleScheme();
}
}
private static
|
ping_argsTupleSchemeFactory
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/XPathFilterTest.java
|
{
"start": 973,
"end": 2018
}
|
class ____ extends ContextTestSupport {
protected String matchingBody = "<person name='James' city='London'/>";
protected final String notMatchingBody = "<person name='Hiram' city='Tampa'/>";
@Test
public void testSendMatchingMessage() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived(matchingBody);
sendBody("direct:start", matchingBody);
assertMockEndpointsSatisfied();
}
@Test
public void testSendNotMatchingMessage() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(0);
sendBody("direct:start", notMatchingBody);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: example
from("direct:start").filter().xpath("/person[@name='James']").to("mock:result");
// END SNIPPET: example
}
};
}
}
|
XPathFilterTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/orderby/DepartmentId.java
|
{
"start": 194,
"end": 302
}
|
class ____ implements Serializable {
private ECompany company;
private String departmentCode;
}
|
DepartmentId
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/CamelConfiguration.java
|
{
"start": 851,
"end": 892
}
|
class ____ Camel applications.
*
* This
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestGetQueryRulesetAction.java
|
{
"start": 964,
"end": 1814
}
|
class ____ extends EnterpriseSearchBaseRestHandler {
public RestGetQueryRulesetAction(XPackLicenseState licenseState) {
super(licenseState, LicenseUtils.Product.QUERY_RULES);
}
@Override
public String getName() {
return "query_ruleset_get_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/" + EnterpriseSearch.QUERY_RULES_API_ENDPOINT + "/{ruleset_id}"));
}
@Override
protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
GetQueryRulesetAction.Request request = new GetQueryRulesetAction.Request(restRequest.param("ruleset_id"));
return channel -> client.execute(GetQueryRulesetAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}
|
RestGetQueryRulesetAction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/queryhint/QueryHintHANATest.java
|
{
"start": 6409,
"end": 6567
}
|
class ____ {
@Id
@GeneratedValue
public long id;
@ManyToOne(fetch = FetchType.LAZY)
public Department department;
}
@Entity
public static
|
Employee
|
java
|
apache__rocketmq
|
test/src/main/java/org/apache/rocketmq/test/factory/TagMessage.java
|
{
"start": 983,
"end": 3320
}
|
class ____ {
private List<String> tags = null;
private String topic = null;
private int msgSize = 0;
private Map<String, List<Object>> rmqMsgs = new HashMap<String, List<Object>>();
public TagMessage(String tag, String topic, int msgSize) {
String[] tags = {tag};
this.tags = Arrays.asList(tags);
this.topic = topic;
this.msgSize = msgSize;
init();
}
public TagMessage(String[] tags, String topic, int msgSize) {
this(Arrays.asList(tags), topic, msgSize);
}
public TagMessage(List<String> tags, String topic, int msgSize) {
this.tags = tags;
this.topic = topic;
this.msgSize = msgSize;
init();
}
private void init() {
for (String tag : tags) {
List<Object> tagMsgs = MQMessageFactory.getRMQMessage(tag, topic, msgSize);
rmqMsgs.put(tag, tagMsgs);
}
}
public List<Object> getMessageByTag(String tag) {
if (tags.contains(tag)) {
return rmqMsgs.get(tag);
} else {
return new ArrayList<Object>();
}
}
public List<Object> getMixedTagMessages() {
List<Object> mixedMsgs = new ArrayList<Object>();
for (int i = 0; i < msgSize; i++) {
for (String tag : tags) {
mixedMsgs.add(rmqMsgs.get(tag).get(i));
}
}
return mixedMsgs;
}
public List<Object> getMessageBodyByTag(String tag) {
if (tags.contains(tag)) {
return MQMessageFactory.getMessageBody(rmqMsgs.get(tag));
} else {
return new ArrayList<Object>();
}
}
public List<Object> getMessageBodyByTag(String... tag) {
return this.getMessageBodyByTag(Arrays.asList(tag));
}
public List<Object> getMessageBodyByTag(List<String> tags) {
List<Object> msgBodys = new ArrayList<Object>();
for (String tag : tags) {
msgBodys.addAll(MQMessageFactory.getMessageBody(rmqMsgs.get(tag)));
}
return msgBodys;
}
public List<Object> getAllTagMessageBody() {
List<Object> msgs = new ArrayList<Object>();
for (String tag : tags) {
msgs.addAll(MQMessageFactory.getMessageBody(rmqMsgs.get(tag)));
}
return msgs;
}
}
|
TagMessage
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/AvroMissingFieldException.java
|
{
"start": 980,
"end": 1530
}
|
class ____ extends AvroRuntimeException {
private List<Field> chainOfFields = new ArrayList<>(8);
public AvroMissingFieldException(String message, Field field) {
super(message);
chainOfFields.add(field);
}
public void addParentField(Field field) {
chainOfFields.add(field);
}
@Override
public String toString() {
StringBuilder result = new StringBuilder();
for (Field field : chainOfFields) {
result.insert(0, " --> " + field.name());
}
return "Path in schema:" + result;
}
}
|
AvroMissingFieldException
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/builder/ParentContextApplicationContextInitializer.java
|
{
"start": 2101,
"end": 2809
}
|
class ____ implements ApplicationListener<ContextRefreshedEvent>, Ordered {
private static final EventPublisher INSTANCE = new EventPublisher();
@Override
public int getOrder() {
return Ordered.HIGHEST_PRECEDENCE;
}
@Override
public void onApplicationEvent(ContextRefreshedEvent event) {
ApplicationContext context = event.getApplicationContext();
if (context instanceof ConfigurableApplicationContext configurableApplicationContext
&& context == event.getSource()) {
context.publishEvent(new ParentContextAvailableEvent(configurableApplicationContext));
}
}
}
/**
* {@link ApplicationEvent} fired when a parent context is available.
*/
public static
|
EventPublisher
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/allocator/StateSizeEstimates.java
|
{
"start": 1888,
"end": 5208
}
|
class ____ {
private final Map<ExecutionVertexID, Long> stateSizes;
public StateSizeEstimates(Map<ExecutionVertexID, Long> stateSizes) {
this.stateSizes = stateSizes;
}
public Optional<Long> estimate(ExecutionVertexID jobVertexId) {
return Optional.ofNullable(stateSizes.get(jobVertexId));
}
public static StateSizeEstimates fromGraphAndState(
final ExecutionGraph executionGraph, final CompletedCheckpoint latestCheckpoint) {
Objects.requireNonNull(executionGraph);
Objects.requireNonNull(latestCheckpoint);
return new StateSizeEstimates(
merge(
fromCompletedCheckpoint(latestCheckpoint),
mapVerticesToOperators(executionGraph)));
}
/**
* Map {@link ExecutionVertexID}s to state sizes according to the supplied mappings of operators
* to state sizes and {@link JobVertexID}s to {@link OperatorID}s.
*/
private static Map<ExecutionVertexID, Long> merge(
Map<OperatorID, Map<Integer, Long>> operatorsToSubtaskSizes,
Map<JobVertexID, Set<OperatorID>> verticesToOperators) {
Map<ExecutionVertexID, Long> result = new HashMap<>();
for (Entry<JobVertexID, Set<OperatorID>> vertexAndOperators :
verticesToOperators.entrySet()) {
for (OperatorID operatorID : vertexAndOperators.getValue()) {
for (Entry<Integer, Long> subtaskIdAndSize :
operatorsToSubtaskSizes.getOrDefault(operatorID, emptyMap()).entrySet()) {
result.merge(
new ExecutionVertexID(
vertexAndOperators.getKey(), subtaskIdAndSize.getKey()),
subtaskIdAndSize.getValue(),
Long::sum);
}
}
}
return result;
}
private static Map<JobVertexID, Set<OperatorID>> mapVerticesToOperators(
ExecutionGraph executionGraph) {
return executionGraph.getAllVertices().entrySet().stream()
.collect(toMap(Entry::getKey, e -> getOperatorIDS(e.getValue())));
}
private static Set<OperatorID> getOperatorIDS(ExecutionJobVertex v) {
return v.getOperatorIDs().stream()
.map(OperatorIDPair::getGeneratedOperatorID)
.collect(Collectors.toSet());
}
private static Map<OperatorID, Map<Integer, Long>> fromCompletedCheckpoint(
CompletedCheckpoint cp) {
Map<OperatorID, Map<Integer, Long>> result = new HashMap<>();
for (Entry<OperatorID, OperatorState> e : cp.getOperatorStates().entrySet()) {
result.put(e.getKey(), calculateStateSizeInBytes(e.getValue()));
}
return result;
}
private static Map<Integer, Long> calculateStateSizeInBytes(OperatorState state) {
Map<Integer, Long> sizesPerSubtask = new HashMap<>();
for (Entry<Integer, OperatorSubtaskState> e : state.getSubtaskStates().entrySet()) {
for (KeyedStateHandle handle : e.getValue().getManagedKeyedState()) {
sizesPerSubtask.merge(e.getKey(), handle.getStateSize(), Long::sum);
}
}
return sizesPerSubtask;
}
}
|
StateSizeEstimates
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/scheduling/support/PeriodicTrigger.java
|
{
"start": 1555,
"end": 2101
}
|
interface ____ defines methods for scheduling
* tasks at fixed-rate or with fixed-delay. Both also support an optional value
* for the initial delay. Those methods should be used directly whenever
* possible. The value of this Trigger implementation is that it can be used
* within components that rely on the Trigger abstraction. For example, it may
* be convenient to allow periodic triggers, cron-based triggers, and even
* custom Trigger implementations to be used interchangeably.
*
* @author Mark Fisher
* @since 3.0
*/
public
|
already
|
java
|
apache__maven
|
its/core-it-suite/src/test/resources/mng-5958-lifecycle-phases/mng5958-extension/src/main/java/org/apache/maven/its/mng5958/BadLifecycleMapping.java
|
{
"start": 1003,
"end": 1401
}
|
class ____ extends AbstractLifecycleMapping {
@Override
protected Map<String, String> initPhases() {
Map phases = new LinkedHashMap<>();
LifecyclePhase lp = new LifecyclePhase();
lp.set("org.apache.maven.its.plugins:mng-5958-pkg-type-mojo-configuration-plugin:2.1-SNAPSHOT:test");
phases.put("validate", lp);
return phases;
}
}
|
BadLifecycleMapping
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/ChannelPipelineException.java
|
{
"start": 790,
"end": 1435
}
|
class ____ extends ChannelException {
private static final long serialVersionUID = 3379174210419885980L;
/**
* Creates a new instance.
*/
public ChannelPipelineException() {
}
/**
* Creates a new instance.
*/
public ChannelPipelineException(String message, Throwable cause) {
super(message, cause);
}
/**
* Creates a new instance.
*/
public ChannelPipelineException(String message) {
super(message);
}
/**
* Creates a new instance.
*/
public ChannelPipelineException(Throwable cause) {
super(cause);
}
}
|
ChannelPipelineException
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
|
{
"start": 4455,
"end": 6915
}
|
class ____ extends ESTestCase {
/**
* Index for which mock settings contain a default pipeline.
*/
private static final String WITH_DEFAULT_PIPELINE = "index_with_default_pipeline";
private static final String WITH_DEFAULT_PIPELINE_ALIAS = "alias_for_index_with_default_pipeline";
private static final String WITH_FAILURE_STORE_ENABLED = "data-stream-failure-store-enabled";
private static final Settings SETTINGS = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build();
private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE);
private FeatureService mockFeatureService;
private static final ExecutorService writeCoordinationExecutor = new NamedDirectExecutorService("write_coordination");
private static final ExecutorService systemWriteCoordinationExecutor = new NamedDirectExecutorService("system_write_coordination");
private final ProjectId projectId = randomProjectIdOrDefault();
/** Services needed by bulk action */
TransportService transportService;
ClusterService clusterService;
IngestService ingestService;
ThreadPool threadPool;
/** Arguments to callbacks we want to capture, but which require generics, so we must use @Captor */
@Captor
ArgumentCaptor<Function<String, Boolean>> redirectPredicate;
@Captor
ArgumentCaptor<TriConsumer<Integer, String, Exception>> redirectHandler;
@Captor
ArgumentCaptor<TriConsumer<Integer, Exception, IndexDocFailureStoreStatus>> failureHandler;
@Captor
ArgumentCaptor<ActionListener<Void>> listener;
@Captor
ArgumentCaptor<TransportResponseHandler<BulkResponse>> remoteResponseHandler;
@Captor
ArgumentCaptor<Iterable<DocWriteRequest<?>>> bulkDocsItr;
/** The actual action we want to test, with real indexing mocked */
TestTransportBulkAction action;
/** Single item bulk write action that wraps index requests */
TestSingleItemBulkWriteAction singleItemBulkWriteAction;
/** True if the next call to the index action should act as an ingest node */
boolean localIngest;
/** The nodes that forwarded index requests should be cycled through. */
DiscoveryNodes nodes;
DiscoveryNode remoteNode1;
DiscoveryNode remoteNode2;
/** A subclass of the real bulk action to allow skipping real bulk indexing, and marking when it would have happened. */
|
TransportBulkActionIngestTests
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/FluxIntervalTest.java
|
{
"start": 1296,
"end": 6274
}
|
class ____ {
Scheduler exec;
@BeforeEach
public void before() {
exec = Schedulers.newSingle("interval-test");
}
@AfterEach
public void after() {
exec.dispose();
}
@Test
public void normal() {
try {
AssertSubscriber<Long> ts = AssertSubscriber.create();
ts.values()
.add(System.currentTimeMillis());
Flux.interval(Duration.ofMillis(100), Duration.ofMillis(100), exec)
.take(5, false)
.map(v -> System.currentTimeMillis())
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValueCount(5)
.assertNoError()
.assertComplete();
List<Long> list = ts.values();
for (int i = 0; i < list.size() - 1; i++) {
long diff = list.get(i + 1) - list.get(i);
if (diff < 50 || diff > 150) {
fail("Period failure: " + diff);
}
}
}
finally {
exec.dispose();
}
}
Flux<Integer> flatMapScenario() {
return Flux.interval(Duration.ofSeconds(3))
.flatMap(v -> Flux.fromIterable(Arrays.asList("A"))
.flatMap(w -> Mono.fromCallable(() -> Arrays.asList(1, 2))
.subscribeOn(Schedulers.parallel())
.flatMapMany(Flux::fromIterable))).log();
}
@Test
public void flatMap() throws Exception {
StepVerifier.withVirtualTime(this::flatMapScenario)
.thenAwait(Duration.ofSeconds(3))
.expectNext(1)
.expectNext(2)
.thenCancel()
.verify();
}
Flux<Long> scenario2(){
return Flux.interval(Duration.ofMillis(500));
}
@Test
public void normal2() {
StepVerifier.withVirtualTime(this::scenario2)
.thenAwait(Duration.ofMillis(5_000))
.expectNextCount(10)
.thenCancel()
.verify();
}
Flux<Long> scenario3(){
return Flux.interval(Duration.ofMillis(500), Duration.ofMillis(1000));
}
@Test
public void normal3() {
StepVerifier.withVirtualTime(this::scenario3)
.thenAwait(Duration.ofMillis(1500))
.expectNext(0L)
.thenAwait(Duration.ofSeconds(4))
.expectNextCount(4)
.thenCancel()
.verify();
}
Flux<Long> scenario4(){
return Flux.interval(Duration.ofMillis(500), Duration.ofMillis(1000));
}
@Test
public void normal4() {
StepVerifier.withVirtualTime(this::scenario4)
.thenAwait(Duration.ofMillis(1500))
.expectNext(0L)
.thenAwait(Duration.ofSeconds(3))
.expectNextCount(4)
.thenCancel()
.verify();
}
@Test
public void normal5() {
// Prior to gh-1734, sub millis period would round to 0 and this would fail.
Duration period = Duration.ofNanos(1_000);
Duration timespan = Duration.ofSeconds(1);
StepVerifier.withVirtualTime(() ->
Flux.interval(Duration.ofNanos(0), period).take(timespan).count()
)
.thenAwait(timespan)
.expectNext(timespan.toNanos() / period.toNanos())
.verifyComplete();
}
@Test
public void scanOperator() {
final Flux<Long> interval = Flux.interval(Duration.ofSeconds(1));
assertThat(interval).isInstanceOf(Scannable.class);
assertThat(from(interval).scan(Scannable.Attr.RUN_ON)).isSameAs(Schedulers.parallel());
assertThat(from(interval).scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.ASYNC);
}
@Test
public void scanIntervalRunnable() {
Scheduler.Worker worker = Schedulers.single().createWorker();
try {
CoreSubscriber<Long> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FluxInterval.IntervalRunnable test = new FluxInterval.IntervalRunnable(actual, worker);
assertThat(test.scan(Scannable.Attr.RUN_ON)).isSameAs(worker);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.ASYNC);
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
finally {
worker.dispose();
}
}
@Test
void tickOverflow() {
StepVerifier.withVirtualTime(() -> Flux.interval(Duration.ofMillis(50)), 0)
.expectSubscription()
.thenRequest(10)
.thenAwait(Duration.ofMillis(550))
.expectNextCount(10)
.expectErrorSatisfies(e -> assertThat(e)
.matches(Exceptions::isOverflow)
.hasMessage("Could not emit tick 10 due to lack of requests (interval doesn't support small downstream requests that replenish slower than the ticks)")
)
.verify(Duration.ofSeconds(1));
}
@Test
public void shouldBeAbleToScheduleIntervalsWithLowGranularity() {
StepVerifier.create(Flux.interval(Duration.ofNanos(1)))
.expectSubscription()
.expectNext(0L)
.expectNext(1L)
.expectNext(2L)
.thenCancel()
.verify();
}
}
|
FluxIntervalTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java
|
{
"start": 7664,
"end": 8445
}
|
class ____ extends StoredFieldsReader {
protected final StoredFieldsReader in;
FilterStoredFieldsReader(StoredFieldsReader fieldsReader) {
this.in = fieldsReader;
}
@Override
public void close() throws IOException {
in.close();
}
@Override
public void document(int docID, StoredFieldVisitor visitor) throws IOException {
in.document(docID, visitor);
}
@Override
public abstract StoredFieldsReader clone();
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();
}
}
private static
|
FilterStoredFieldsReader
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ErrorTranslation.java
|
{
"start": 9456,
"end": 9579
}
|
class
____ new HttpChannelEOFException(path, message, thrown);
}
// there's ambiguity about what exception
|
return
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.