language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java
{ "start": 506, "end": 15592 }
class ____ extends ComputeTestCase { static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); public void testEmptyVector() { // all these "empty" vectors should be equivalent List<DoubleVector> vectors = List.of( blockFactory.newDoubleArrayVector(new double[] {}, 0), blockFactory.newDoubleArrayVector(new double[] { 0 }, 0), blockFactory.newConstantDoubleVector(0, 0), blockFactory.newConstantDoubleBlockWith(0, 0).filter().asVector(), blockFactory.newDoubleBlockBuilder(0).build().asVector(), blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().asVector().filter() ); assertAllEquals(vectors); } public void testEmptyBlock() { // all these "empty" vectors should be equivalent List<DoubleBlock> blocks = List.of( blockFactory.newDoubleArrayBlock( new double[] {}, 0, new int[] { 0 }, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values()) ), blockFactory.newDoubleArrayBlock( new double[] { 0 }, 0, new int[] { 0 }, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values()) ), blockFactory.newConstantDoubleBlockWith(0, 0), blockFactory.newDoubleBlockBuilder(0).build(), blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().filter(), blockFactory.newDoubleBlockBuilder(0).appendNull().build().filter(), (ConstantNullBlock) blockFactory.newConstantNullBlock(0) ); assertAllEquals(blocks); Releasables.close(blocks); } public void testVectorEquality() { // all these vectors should be equivalent List<DoubleVector> vectors = List.of( blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock().asVector(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).filter(0, 1, 2), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), blockFactory.newDoubleArrayVector(new double[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), 
blockFactory.newDoubleArrayVector(new double[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector().filter(0, 1, 2), blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(2) .appendDouble(3) .build() .filter(0, 2, 3) .asVector(), blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(2) .appendDouble(3) .build() .asVector() .filter(0, 2, 3) ); assertAllEquals(vectors); // all these constant-like vectors should be equivalent List<DoubleVector> moreVectors = List.of( blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1 }, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1 }, 3).asBlock().asVector(), blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1, 1 }, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1 }, 3).filter(0, 1, 2), blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), blockFactory.newDoubleArrayVector(new double[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), blockFactory.newConstantDoubleBlockWith(1, 3).asVector(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(1).appendDouble(1).build().asVector(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(1).appendDouble(1).build().asVector().filter(0, 1, 2), blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(1) .appendDouble(1) .build() .filter(0, 2, 3) .asVector(), blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(1) .appendDouble(1) .build() .asVector() .filter(0, 2, 3) ); assertAllEquals(moreVectors); } public void testBlockEquality() { // all these blocks should be equivalent List<DoubleBlock> blocks = List.of( 
blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), new DoubleArrayBlock( new double[] { 1, 2, 3 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b000 }), randomFrom(Block.MvOrdering.values()), blockFactory ), new DoubleArrayBlock( new double[] { 1, 2, 3, 4 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b1000 }), randomFrom(Block.MvOrdering.values()), blockFactory ), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).filter(0, 1, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().filter(0, 1, 2), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(4).appendDouble(2).appendDouble(3).build().filter(0, 2, 3), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendNull().appendDouble(2).appendDouble(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); // all these constant-like blocks should be equivalent List<DoubleBlock> moreBlocks = List.of( blockFactory.newDoubleArrayVector(new double[] { 9, 9 }, 2).asBlock(), new DoubleArrayBlock( new double[] { 9, 9 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b000 }), randomFrom(Block.MvOrdering.values()), blockFactory ), new DoubleArrayBlock( new double[] { 9, 9, 4 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b100 }), randomFrom(Block.MvOrdering.values()), blockFactory ), blockFactory.newDoubleArrayVector(new double[] { 9, 9 }, 2).filter(0, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 
3).filter(0, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), blockFactory.newConstantDoubleBlockWith(9, 2), blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build(), blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build().filter(0, 1), blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(4).appendDouble(9).build().filter(0, 2), blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendNull().appendDouble(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); } public void testVectorInequality() { // all these vectors should NOT be equivalent List<DoubleVector> notEqualVectors = List.of( blockFactory.newDoubleArrayVector(new double[] { 1 }, 1), blockFactory.newDoubleArrayVector(new double[] { 9 }, 1), blockFactory.newDoubleArrayVector(new double[] { 1, 2 }, 2), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4 }, 3), blockFactory.newConstantDoubleBlockWith(9, 2).asVector(), blockFactory.newDoubleBlockBuilder(2).appendDouble(1).appendDouble(2).build().asVector().filter(1), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build().asVector(), blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } public void testBlockInequality() { // all these blocks should NOT be equivalent List<DoubleBlock> notEqualBlocks = List.of( blockFactory.newDoubleArrayVector(new double[] { 1 }, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 9 }, 1).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2 }, 2).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4 }, 3).asBlock(), blockFactory.newConstantDoubleBlockWith(9, 2), 
blockFactory.newDoubleBlockBuilder(2).appendDouble(1).appendDouble(2).build().filter(1), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build(), blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build(), blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendNull().build(), blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendNull().appendDouble(3).build(), blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(3).build(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1).beginPositionEntry().appendDouble(2).appendDouble(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List<DoubleBlock> blocks = List.of( blockFactory.newDoubleBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build(), blockFactory.newDoubleBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); assertAllEquals(blocks); } public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 
0 : positions); ConstantNullBlock.Builder builder3 = new ConstantNullBlock.Builder(blockFactory); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); builder3.appendNull(); } DoubleBlock block1 = builder1.build(); DoubleBlock block2 = builder2.build(); Block block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); List<Block> blocks = List.of(block1, block2, block3); assertAllEquals(blocks); } public void testSimpleBlockWithSingleMultiValue() { List<DoubleBlock> blocks = List.of( blockFactory.newDoubleBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build(), blockFactory.newDoubleBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build() ); assert blocks.get(0).getPositionCount() == 1 && blocks.get(0).getValueCount(0) == 2; assertAllEquals(blocks); } public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); DoubleBlock.Builder builder3 = blockFactory.newDoubleBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); for (int i = 0; i < values; i++) { double value = randomDouble(); builder1.appendDouble(value); builder2.appendDouble(value); builder3.appendDouble(value); } builder1.endPositionEntry(); builder2.endPositionEntry(); builder3.endPositionEntry(); } DoubleBlock block1 = builder1.build(); DoubleBlock block2 = builder2.build(); DoubleBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); } static void assertAllEquals(List<?> objs) { for (Object obj1 : objs) { for (Object obj2 : objs) { assertEquals(obj1, obj2); assertEquals(obj2, obj1); // equal objects must generate the same hash code assertEquals(obj1.hashCode(), obj2.hashCode()); } } } static void assertAllNotEquals(List<?> objs) { for (Object obj1 : objs) { for (Object obj2 : objs) { if (obj1 == obj2) { continue; // skip self } assertNotEquals(obj1, obj2); assertNotEquals(obj2, obj1); // unequal objects SHOULD generate the different hash code assertNotEquals(obj1.hashCode(), obj2.hashCode()); } } } }
DoubleBlockEqualityTests
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/processor/TracerConfigurationTest.java
{ "start": 1064, "end": 1847 }
class ____ extends ContextTestSupport { @Override protected CamelContext createCamelContext() throws Exception { CamelContext context = super.createCamelContext(); context.setTracing(true); return context; } @Test public void testTracerConfiguration() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("direct:start").to("mock:result"); } }; } }
TracerConfigurationTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cascade/multicircle/jpa/identity/EntityG.java
{ "start": 235, "end": 816 }
class ____ extends AbstractEntity { private static final long serialVersionUID = 325417437L; @jakarta.persistence.ManyToOne(optional = false) private EntityB b; @jakarta.persistence.OneToMany(mappedBy = "g") private java.util.Set<EntityF> fCollection = new java.util.HashSet<EntityF>(); public EntityB getB() { return b; } public void setB(EntityB parameter){ this.b = parameter; } public java.util.Set<EntityF> getFCollection() { return fCollection; } public void setFCollection( java.util.Set<EntityF> parameter) { this.fCollection = parameter; } }
EntityG
java
mockito__mockito
mockito-core/src/test/java/org/mockito/internal/invocation/InvocationMarkerTest.java
{ "start": 493, "end": 2078 }
class ____ extends TestBase { @Test public void shouldMarkInvocationAsVerified() { // given Invocation i = new InvocationBuilder().toInvocation(); InvocationMatcher im = new InvocationBuilder().toInvocationMatcher(); assertFalse(i.isVerified()); // when InvocationMarker.markVerified(Arrays.asList(i), im); // then assertTrue(i.isVerified()); } @Test public void shouldCaptureArguments() { // given Invocation i = new InvocationBuilder().toInvocation(); final AtomicReference<Invocation> box = new AtomicReference<Invocation>(); MatchableInvocation c = new InvocationMatcher(i) { public void captureArgumentsFrom(Invocation i) { box.set(i); } }; // when InvocationMarker.markVerified(Arrays.asList(i), c); // then assertEquals(i, box.get()); } @Test public void shouldMarkInvocationsAsVerifiedInOrder() { // given InOrderContextImpl context = new InOrderContextImpl(); Invocation i = new InvocationBuilder().toInvocation(); InvocationMatcher im = new InvocationBuilder().toInvocationMatcher(); assertFalse(context.isVerified(i)); assertFalse(i.isVerified()); // when InvocationMarker.markVerifiedInOrder(Arrays.asList(i), im, context); // then assertTrue(context.isVerified(i)); assertTrue(i.isVerified()); } }
InvocationMarkerTest
java
alibaba__nacos
core/src/main/java/com/alibaba/nacos/core/model/request/LogUpdateRequest.java
{ "start": 748, "end": 1160 }
class ____ { private String logName; private String logLevel; public String getLogName() { return logName; } public void setLogName(String logName) { this.logName = logName; } public String getLogLevel() { return logLevel; } public void setLogLevel(String logLevel) { this.logLevel = logLevel; } }
LogUpdateRequest
java
elastic__elasticsearch
modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
{ "start": 56673, "end": 57144 }
class ____ [[%s], %s]", typeToCanonicalTypeName(typeParameter), targetCanonicalClassName, typesToCanonicalTypeNames(typeParameters) ); } Class<?> javaTypeParameter = constructorParameterTypes[typeParameterIndex]; if (isValidType(javaTypeParameter) == false) { throw lookupException( "type parameter [%s] not found for
binding
java
apache__camel
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFtpAggregateIT.java
{ "start": 1327, "end": 3011 }
class ____ extends FtpServerTestSupport { protected String getFtpUrl() { return "ftp://admin@localhost:{{ftp.server.port}}/filter?password=admin&binary=false&noop=true"; } @BeforeEach public void prepareFtpServer() { // Send multiple files to the FTP server before the Camel route starts sendFile(getFtpUrl(), "Message 1", "file1.txt"); sendFile(getFtpUrl(), "Message 2", "file2.txt"); sendFile(getFtpUrl(), "Message 3", "file3.txt"); } @Test @SuppressWarnings("unchecked") public void testAggregateCompletionFromBatchConsumer() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(1); mock.assertIsSatisfied(); // Verify the grouped exchange contains all files Exchange out = mock.getExchanges().get(0); List<Exchange> grouped = out.getIn().getBody(List.class); assertEquals(3, grouped.size()); assertEquals("Message 1", grouped.get(0).getIn().getBody(String.class)); assertEquals("Message 2", grouped.get(1).getIn().getBody(String.class)); assertEquals("Message 3", grouped.get(2).getIn().getBody(String.class)); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from(getFtpUrl()) .aggregate(constant(true), new GroupedExchangeAggregationStrategy()) .completionFromBatchConsumer() .eagerCheckCompletion() .to("mock:result"); } }; } }
FromFtpAggregateIT
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/validation/StereotypeOnMethodTest.java
{ "start": 629, "end": 1307 }
class ____ { @RegisterExtension public ArcTestContainer container = ArcTestContainer.builder() .beanClasses(BeanWithStereotypeOnMethod.class, Audited.class) .shouldFail() .build(); /** * Verify that DefinitionException is thrown if there is a stereotype applied on a non-producer method. */ @Test public void testStereotypeOnNonProducerMethod() { Throwable error = container.getFailure(); assertNotNull(error); assertTrue(error instanceof DefinitionException); assertTrue(error.getMessage().contains("auditedMethod")); } @ApplicationScoped static
StereotypeOnMethodTest
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/EnumUtils.java
{ "start": 17640, "end": 17909 }
enum ____ is valid, otherwise false. */ public static <E extends Enum<E>> boolean isValidEnum(final Class<E> enumClass, final String enumName) { return getEnum(enumClass, enumName) != null; } /** * Checks if the specified name is a valid
name
java
alibaba__fastjson
src/test/java/com/alibaba/json/test/benchmark/encode/ArrayBoolean1000Encode.java
{ "start": 153, "end": 599 }
class ____ extends BenchmarkCase { private Object object; public ArrayBoolean1000Encode(){ super("BooleanArray1000Encode"); boolean[] array = new boolean[1000]; for (int i = 0; i < array.length; ++i) { array[i] = (i % 2 == 0); } this.object = array; } @Override public void execute(Codec codec) throws Exception { codec.encode(object); } }
ArrayBoolean1000Encode
java
netty__netty
common/src/main/java/io/netty/util/internal/ReadOnlyIterator.java
{ "start": 711, "end": 1226 }
class ____<T> implements Iterator<T> { private final Iterator<? extends T> iterator; public ReadOnlyIterator(Iterator<? extends T> iterator) { this.iterator = ObjectUtil.checkNotNull(iterator, "iterator"); } @Override public boolean hasNext() { return iterator.hasNext(); } @Override public T next() { return iterator.next(); } @Override public void remove() { throw new UnsupportedOperationException("read-only"); } }
ReadOnlyIterator
java
spring-projects__spring-framework
spring-context/src/testFixtures/java/org/springframework/context/testfixture/AbstractApplicationContextTests.java
{ "start": 1871, "end": 7580 }
class ____ extends AbstractListableBeanFactoryTests { protected ConfigurableApplicationContext applicationContext; /** Subclass must register this */ protected TestApplicationListener listener = new TestApplicationListener(); protected TestApplicationListener parentListener = new TestApplicationListener(); @BeforeEach protected void setup() throws Exception { this.applicationContext = createContext(); } @Override protected BeanFactory getBeanFactory() { return this.applicationContext; } protected ApplicationContext getApplicationContext() { return this.applicationContext; } /** * Must register a TestListener. * Must register standard beans. * Parent must register rod with name Roderick * and father with name Albert. */ protected abstract ConfigurableApplicationContext createContext() throws Exception; @Test protected void contextAwareSingletonWasCalledBack() { ACATester aca = (ACATester) applicationContext.getBean("aca"); assertThat(aca.getApplicationContext()).as("has had context set").isSameAs(applicationContext); Object aca2 = applicationContext.getBean("aca"); assertThat(aca).as("Same instance").isSameAs(aca2); assertThat(applicationContext.isSingleton("aca")).as("Says is singleton").isTrue(); } @Test protected void contextAwarePrototypeWasCalledBack() { ACATester aca = (ACATester) applicationContext.getBean("aca-prototype"); assertThat(aca.getApplicationContext()).as("has had context set").isSameAs(applicationContext); Object aca2 = applicationContext.getBean("aca-prototype"); assertThat(aca).as("NOT Same instance").isNotSameAs(aca2); boolean condition = !applicationContext.isSingleton("aca-prototype"); assertThat(condition).as("Says is prototype").isTrue(); } @Test protected void parentNonNull() { assertThat(applicationContext.getParent()).as("parent isn't null").isNotNull(); } @Test protected void grandparentNull() { assertThat(applicationContext.getParent().getParent()).as("grandparent is null").isNull(); } @Test protected void overrideWorked() { TestBean 
rod = (TestBean) applicationContext.getParent().getBean("rod"); assertThat(rod.getName().equals("Roderick")).as("Parent's name differs").isTrue(); } @Test protected void grandparentDefinitionFound() { TestBean dad = (TestBean) applicationContext.getBean("father"); assertThat(dad.getName().equals("Albert")).as("Dad has correct name").isTrue(); } @Test protected void grandparentTypedDefinitionFound() { TestBean dad = applicationContext.getBean("father", TestBean.class); assertThat(dad.getName().equals("Albert")).as("Dad has correct name").isTrue(); } @Test protected void closeTriggersDestroy() { LifecycleBean lb = (LifecycleBean) applicationContext.getBean("lifecycle"); boolean condition = !lb.isDestroyed(); assertThat(condition).as("Not destroyed").isTrue(); applicationContext.close(); if (applicationContext.getParent() != null) { ((ConfigurableApplicationContext) applicationContext.getParent()).close(); } assertThat(lb.isDestroyed()).as("Destroyed").isTrue(); applicationContext.close(); if (applicationContext.getParent() != null) { ((ConfigurableApplicationContext) applicationContext.getParent()).close(); } assertThat(lb.isDestroyed()).as("Destroyed").isTrue(); } @Test protected void messageSource() throws NoSuchMessageException { assertThat(applicationContext.getMessage("code1", null, Locale.getDefault())).isEqualTo("message1"); assertThat(applicationContext.getMessage("code2", null, Locale.getDefault())).isEqualTo("message2"); assertThatExceptionOfType(NoSuchMessageException.class).isThrownBy(() -> applicationContext.getMessage("code0", null, Locale.getDefault())); } @Test protected void events() throws Exception { doTestEvents(this.listener, this.parentListener, new MyEvent(this)); } @Test protected void eventsWithNoSource() throws Exception { // See SPR-10945 Serialized events result in a null source MyEvent event = new MyEvent(this); ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); 
oos.writeObject(event); oos.close(); event = (MyEvent) new ObjectInputStream(new ByteArrayInputStream( bos.toByteArray())).readObject(); doTestEvents(this.listener, this.parentListener, event); } protected void doTestEvents(TestApplicationListener listener, TestApplicationListener parentListener, MyEvent event) { listener.zeroCounter(); parentListener.zeroCounter(); assertThat(listener.getEventCount()).as("0 events before publication").isEqualTo(0); assertThat(parentListener.getEventCount()).as("0 parent events before publication").isEqualTo(0); this.applicationContext.publishEvent(event); assertThat(listener.getEventCount()).as("1 events after publication, not " + listener.getEventCount()) .isEqualTo(1); assertThat(parentListener.getEventCount()).as("1 parent events after publication").isEqualTo(1); } @Test protected void beanAutomaticallyHearsEvents() { //String[] listenerNames = ((ListableBeanFactory) applicationContext).getBeanDefinitionNames(ApplicationListener.class); //assertTrue("listeners include beanThatListens", Arrays.asList(listenerNames).contains("beanThatListens")); BeanThatListens b = (BeanThatListens) applicationContext.getBean("beanThatListens"); b.zero(); assertThat(b.getEventCount()).as("0 events before publication").isEqualTo(0); this.applicationContext.publishEvent(new MyEvent(this)); assertThat(b.getEventCount()).as("1 events after publication, not " + b.getEventCount()).isEqualTo(1); } @SuppressWarnings("serial") public static
AbstractApplicationContextTests
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/superclass/auditoverride/EmbeddableTest.java
{ "start": 11507, "end": 12062 }
class ____ { @Id @GeneratedValue private Integer id; @Embedded @AuditOverride(name = "intValue", forClass = SimpleAbstractMappedSuperclass.class) private SimpleEmbeddable embeddable; public SimpleEmbeddable getEmbeddable() { return embeddable; } public void setEmbeddable(SimpleEmbeddable embeddable) { this.embeddable = embeddable; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } } @Entity(name = "see") @Audited public static
SimpleEmbeddableWithPropertyOverrideEntity
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
{ "start": 18830, "end": 19994 }
class ____ extends QueryContext { public QueryContext query() { return getRuleContext(QueryContext.class,0); } public TerminalNode PIPE() { return getToken(EsqlBaseParser.PIPE, 0); } public ProcessingCommandContext processingCommand() { return getRuleContext(ProcessingCommandContext.class,0); } @SuppressWarnings("this-escape") public CompositeQueryContext(QueryContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCompositeQuery(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitCompositeQuery(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitCompositeQuery(this); else return visitor.visitChildren(this); } } @SuppressWarnings("CheckReturnValue") public static
CompositeQueryContext
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/connections/AbstractBeforeCompletionReleaseTest.java
{ "start": 6527, "end": 6677 }
class ____ { @Id public Integer id; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } } }
Thing
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListener.java
{ "start": 1182, "end": 2693 }
class ____<Response> { private final ActionListener<Response> listenerWithTimeout; private final AtomicBoolean completed = new AtomicBoolean(); public TimedListener(@Nullable TimeValue timeout, ActionListener<Response> listener, ThreadPool threadPool) { listenerWithTimeout = getListener(Objects.requireNonNull(listener), timeout, Objects.requireNonNull(threadPool)); } private ActionListener<Response> getListener( ActionListener<Response> origListener, @Nullable TimeValue timeout, ThreadPool threadPool ) { ActionListener<Response> notificationListener = ActionListener.wrap(result -> { completed.set(true); origListener.onResponse(result); }, e -> { completed.set(true); origListener.onFailure(e); }); if (timeout == null) { return notificationListener; } return ListenerTimeouts.wrapWithTimeout( threadPool, timeout, threadPool.executor(UTILITY_THREAD_POOL_NAME), notificationListener, (ignored) -> notificationListener.onFailure( new ElasticsearchStatusException(Strings.format("Request timed out after [%s]", timeout), RestStatus.REQUEST_TIMEOUT) ) ); } public boolean hasCompleted() { return completed.get(); } public ActionListener<Response> getListener() { return listenerWithTimeout; } }
TimedListener
java
apache__flink
flink-docs/src/test/java/org/apache/flink/docs/configuration/data/TestCommonOptions.java
{ "start": 1161, "end": 2235 }
class ____ { public static final String SECTION_1 = "test_A"; public static final String SECTION_2 = "other"; @Documentation.Section({SECTION_1, SECTION_2}) public static final ConfigOption<Integer> COMMON_OPTION = ConfigOptions.key("first.option.a") .intType() .defaultValue(2) .withDescription("This is the description for the common option."); public static final ConfigOption<String> GENERIC_OPTION = ConfigOptions.key("second.option.a") .stringType() .noDefaultValue() .withDescription("This is the description for the generic option."); @Documentation.Section(value = SECTION_1, position = 2) public static final ConfigOption<Integer> COMMON_POSITIONED_OPTION = ConfigOptions.key("third.option.a") .intType() .defaultValue(3) .withDescription("This is the description for the positioned common option."); }
TestCommonOptions
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/BasicGenericTypesHandlingTest.java
{ "start": 2600, "end": 2852 }
class ____ { private final String inMessage; public Input(String inMessage) { this.inMessage = inMessage; } public String getInMessage() { return inMessage; } } public static
Input
java
quarkusio__quarkus
integration-tests/hibernate-search-standalone-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/devservices/HibernateSearchDevServicesTestResource.java
{ "start": 561, "end": 1663 }
class ____ { @Inject SearchMapping searchMapping; @GET @Path("/hosts") @Transactional public String hosts() { return searchMapping.backend().unwrap(ElasticsearchBackend.class).client(RestClient.class) .getNodes().stream() .map(n -> n.getHost().toHostString()) .collect(Collectors.joining()); } @PUT @Path("/init-data") @Transactional public void initData() { try (var searchSession = searchMapping.createSession()) { IndexedEntity entity = new IndexedEntity(1, "John Irving"); searchSession.indexingPlan().add(entity); } searchMapping.scope(IndexedEntity.class).workspace().refresh(); } @GET @Path("/count") @Produces(MediaType.TEXT_PLAIN) public long count() { try (var searchSession = searchMapping.createSession()) { return searchSession.search(IndexedEntity.class) .where(f -> f.matchAll()) .fetchTotalHitCount(); } } }
HibernateSearchDevServicesTestResource
java
spring-projects__spring-boot
module/spring-boot-mustache/src/test/java/org/springframework/boot/mustache/autoconfigure/MustacheWebFluxTestIntegrationTests.java
{ "start": 1265, "end": 1566 }
class ____ { @Autowired private ApplicationContext applicationContext; @Test void mustacheAutoConfigurationWasImported() { assertThat(this.applicationContext).has(importedAutoConfiguration(MustacheAutoConfiguration.class)); } @SpringBootConfiguration static
MustacheWebFluxTestIntegrationTests
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/IsInstanceIncompatibleTypeTest.java
{ "start": 2736, "end": 3280 }
class ____ { <T extends Number> Optional<T> f(Optional<T> t) { // BUG: Diagnostic contains: Number cannot be cast to String return t.filter(String.class::isInstance); } } """) .doTest(); } @Test public void negativeInstanceOf() { testHelper .addSourceLines( "Test.java", """ import java.util.Optional; import java.util.HashMap; import java.util.LinkedHashMap;
Test
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterAllResolver.java
{ "start": 2854, "end": 15439 }
class ____ { /** Directory that will be in a HASH_ALL mount point. */ private static final String TEST_DIR_HASH_ALL = "/hashall"; /** Directory that will be in a RANDOM mount point. */ private static final String TEST_DIR_RANDOM = "/random"; /** Directory that will be in a SPACE mount point. */ private static final String TEST_DIR_SPACE = "/space"; /** Number of namespaces. */ private static final int NUM_NAMESPACES = 2; /** Mini HDFS clusters with Routers and State Store. */ private static StateStoreDFSCluster cluster; /** Router for testing. */ private static RouterContext routerContext; /** Router/federated filesystem. */ private static FileSystem routerFs; /** Filesystem for each namespace. */ private static List<FileSystem> nsFss = new LinkedList<>(); @BeforeEach public void setup() throws Exception { // 2 nameservices with 1 namenode each (no HA needed for this test) cluster = new StateStoreDFSCluster( false, NUM_NAMESPACES, MultipleDestinationMountTableResolver.class); // Start NNs and DNs and wait until ready cluster.startCluster(); // Build and start a Router with: State Store + Admin + RPC Configuration routerConf = new RouterConfigBuilder() .stateStore() .admin() .rpc() .build(); cluster.addRouterOverrides(routerConf); cluster.startRouters(); routerContext = cluster.getRandomRouter(); // Register and verify all NNs with all routers cluster.registerNamenodes(); cluster.waitNamenodeRegistration(); // Setup the test mount point createMountTableEntry(TEST_DIR_HASH_ALL, DestinationOrder.HASH_ALL); createMountTableEntry(TEST_DIR_RANDOM, DestinationOrder.RANDOM); createMountTableEntry(TEST_DIR_SPACE, DestinationOrder.SPACE); // Get filesystems for federated and each namespace routerFs = routerContext.getFileSystem(); for (String nsId : cluster.getNameservices()) { List<NamenodeContext> nns = cluster.getNamenodes(nsId); for (NamenodeContext nn : nns) { FileSystem nnFs = nn.getFileSystem(); nsFss.add(nnFs); } } assertEquals(NUM_NAMESPACES, nsFss.size()); } 
@AfterEach public void cleanup() { cluster.shutdown(); cluster = null; routerContext = null; routerFs = null; nsFss.clear(); } @Test public void testHashAll() throws Exception { testAll(TEST_DIR_HASH_ALL); } @Test public void testRandomAll() throws Exception { testAll(TEST_DIR_RANDOM); } @Test public void testSpaceAll() throws Exception { testAll(TEST_DIR_SPACE); } /** * Tests that the resolver spreads files across subclusters in the whole * tree. * @throws Exception If the resolver is not working. */ private void testAll(final String path) throws Exception { // Create directories in different levels routerFs.mkdirs(new Path(path + "/dir0")); routerFs.mkdirs(new Path(path + "/dir1")); routerFs.mkdirs(new Path(path + "/dir2/dir20")); routerFs.mkdirs(new Path(path + "/dir2/dir21")); routerFs.mkdirs(new Path(path + "/dir2/dir22")); routerFs.mkdirs(new Path(path + "/dir2/dir22/dir220")); routerFs.mkdirs(new Path(path + "/dir2/dir22/dir221")); routerFs.mkdirs(new Path(path + "/dir2/dir22/dir222")); assertDirsEverywhere(path, 9); // Create 14 files at different levels of the tree createTestFile(routerFs, path + "/dir0/file1.txt"); createTestFile(routerFs, path + "/dir0/file2.txt"); createTestFile(routerFs, path + "/dir1/file2.txt"); createTestFile(routerFs, path + "/dir1/file3.txt"); createTestFile(routerFs, path + "/dir2/dir20/file4.txt"); createTestFile(routerFs, path + "/dir2/dir20/file5.txt"); createTestFile(routerFs, path + "/dir2/dir21/file6.txt"); createTestFile(routerFs, path + "/dir2/dir21/file7.txt"); createTestFile(routerFs, path + "/dir2/dir22/file8.txt"); createTestFile(routerFs, path + "/dir2/dir22/file9.txt"); createTestFile(routerFs, path + "/dir2/dir22/dir220/file10.txt"); createTestFile(routerFs, path + "/dir2/dir22/dir220/file11.txt"); createTestFile(routerFs, path + "/dir2/dir22/dir220/file12.txt"); createTestFile(routerFs, path + "/dir2/dir22/dir220/file13.txt"); assertDirsEverywhere(path, 9); assertFilesDistributed(path, 14); // Test append String 
testFile = path + "/dir2/dir22/dir220/file-append.txt"; createTestFile(routerFs, testFile); Path testFilePath = new Path(testFile); assertTrue(routerFs.getFileStatus(testFilePath).getLen() > 50, "Created file is too small"); appendTestFile(routerFs, testFile); assertTrue(routerFs.getFileStatus(testFilePath).getLen() > 110, "Append file is too small"); assertDirsEverywhere(path, 9); assertFilesDistributed(path, 15); // Test truncate String testTruncateFile = path + "/dir2/dir22/dir220/file-truncate.txt"; createTestFile(routerFs, testTruncateFile); Path testTruncateFilePath = new Path(testTruncateFile); routerFs.truncate(testTruncateFilePath, 10); TestFileTruncate.checkBlockRecovery(testTruncateFilePath, (DistributedFileSystem) routerFs); assertEquals(10, routerFs.getFileStatus(testTruncateFilePath).getLen(), "Truncate file fails"); assertDirsEverywhere(path, 9); assertFilesDistributed(path, 16); // Removing a directory should remove it from every subcluster routerFs.delete(new Path(path + "/dir2/dir22/dir220"), true); assertDirsEverywhere(path, 8); assertFilesDistributed(path, 10); // Removing all sub directories routerFs.delete(new Path(path + "/dir0"), true); routerFs.delete(new Path(path + "/dir1"), true); routerFs.delete(new Path(path + "/dir2"), true); assertDirsEverywhere(path, 0); assertFilesDistributed(path, 0); } /** * Directories in HASH_ALL mount points must be in every namespace. * @param path Path to check under. * @param expectedNumDirs Expected number of directories. * @throws IOException If it cannot check the directories. 
*/ private void assertDirsEverywhere(String path, int expectedNumDirs) throws IOException { // Check for the directories in each filesystem List<FileStatus> files = listRecursive(routerFs, path); int numDirs = 0; for (FileStatus file : files) { if (file.isDirectory()) { numDirs++; Path dirPath = file.getPath(); Path checkPath = getRelativePath(dirPath); for (FileSystem nsFs : nsFss) { FileStatus fileStatus1 = nsFs.getFileStatus(checkPath); assertTrue(fileStatus1.isDirectory(), file + " should be a directory"); } } } assertEquals(expectedNumDirs, numDirs); } /** * Check that the files are somewhat spread across namespaces. * @param path Path to check under. * @param expectedNumFiles Number of files expected. * @throws IOException If the files cannot be checked. */ private void assertFilesDistributed(String path, int expectedNumFiles) throws IOException { // Check where the files went List<FileStatus> routerFiles = listRecursive(routerFs, path); List<List<FileStatus>> nssFiles = new LinkedList<>(); for (FileSystem nsFs : nsFss) { List<FileStatus> nsFiles = listRecursive(nsFs, path); nssFiles.add(nsFiles); } // We should see all the files in the federated view int numRouterFiles = getNumTxtFiles(routerFiles); assertEquals(numRouterFiles, expectedNumFiles); // All the files should be spread somewhat evenly across subclusters List<Integer> numNsFiles = new LinkedList<>(); int sumNsFiles = 0; for (int i = 0; i < NUM_NAMESPACES; i++) { List<FileStatus> nsFiles = nssFiles.get(i); int numFiles = getNumTxtFiles(nsFiles); numNsFiles.add(numFiles); sumNsFiles += numFiles; } assertEquals(numRouterFiles, sumNsFiles); if (expectedNumFiles > 0) { for (int numFiles : numNsFiles) { assertTrue(numFiles > 0, "Files not distributed: " + numNsFiles); } } } /** * Create a test file in the filesystem and check if it was written. * @param fs Filesystem. * @param filename Name of the file to create. * @throws IOException If it cannot create the file. 
*/ private static void createTestFile( final FileSystem fs, final String filename)throws IOException { final Path path = new Path(filename); // Write the data FSDataOutputStream os = fs.create(path); os.writeUTF("Test data " + filename); os.close(); // Read the data and check FSDataInputStream is = fs.open(path); String read = is.readUTF(); assertEquals("Test data " + filename, read); is.close(); } /** * Append to a test file in the filesystem and check if we appended. * @param fs Filesystem. * @param filename Name of the file to append to. * @throws IOException */ private static void appendTestFile( final FileSystem fs, final String filename) throws IOException { final Path path = new Path(filename); // Write the data FSDataOutputStream os = fs.append(path); os.writeUTF("Test append data " + filename); os.close(); // Read the data previous data FSDataInputStream is = fs.open(path); String read = is.readUTF(); assertEquals(read, "Test data " + filename); // Read the new data and check read = is.readUTF(); assertEquals(read, "Test append data " + filename); is.close(); } /** * Count the number of text files in a list. * @param files File list. * @return Number of .txt files. */ private static int getNumTxtFiles(final List<FileStatus> files) { int numFiles = 0; for (FileStatus file : files) { if (file.getPath().getName().endsWith(".txt")) { numFiles++; } } return numFiles; } /** * Get the relative path within a filesystem (removes the filesystem prefix). * @param path Path to check. * @return File within the filesystem. */ private static Path getRelativePath(final Path path) { URI uri = path.toUri(); String uriPath = uri.getPath(); return new Path(uriPath); } /** * Get the list the files/dirs under a path. * @param fs Filesystem to check in. * @param path Path to check for. * @return List of files. * @throws IOException If it cannot list the files. 
*/ private List<FileStatus> listRecursive( final FileSystem fs, final String path) throws IOException { List<FileStatus> ret = new LinkedList<>(); List<Path> temp = new LinkedList<>(); temp.add(new Path(path)); while (!temp.isEmpty()) { Path p = temp.remove(0); for (FileStatus fileStatus : fs.listStatus(p)) { ret.add(fileStatus); if (fileStatus.isDirectory()) { temp.add(fileStatus.getPath()); } } } return ret; } /** * Add a mount table entry in all nameservices and wait until it is * available in all routers. * @param mountPoint Name of the mount point. * @param order Order of the mount table entry. * @throws Exception If the entry could not be created. */ private void createMountTableEntry( final String mountPoint, final DestinationOrder order) throws Exception { RouterClient admin = routerContext.getAdminClient(); MountTableManager mountTable = admin.getMountTableManager(); Map<String, String> destMap = new HashMap<>(); for (String nsId : cluster.getNameservices()) { destMap.put(nsId, mountPoint); } MountTable newEntry = MountTable.newInstance(mountPoint, destMap); newEntry.setDestOrder(order); AddMountTableEntryRequest addRequest = AddMountTableEntryRequest.newInstance(newEntry); AddMountTableEntryResponse addResponse = mountTable.addMountTableEntry(addRequest); boolean created = addResponse.getStatus(); assertTrue(created); // Refresh the caches to get the mount table Router router = routerContext.getRouter(); StateStoreService stateStore = router.getStateStore(); stateStore.refreshCaches(true); // Check for the path GetMountTableEntriesRequest getRequest = GetMountTableEntriesRequest.newInstance(mountPoint); GetMountTableEntriesResponse getResponse = mountTable.getMountTableEntries(getRequest); List<MountTable> entries = getResponse.getEntries(); assertEquals(1, entries.size()); assertEquals(mountPoint, entries.get(0).getSourcePath()); } }
TestRouterAllResolver
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
{ "start": 4161, "end": 45116 }
class ____ extends ESTestCase { private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); @Override public void tearDown() throws Exception { super.tearDown(); ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); } private MockTransportService startTransport( String id, List<DiscoveryNode> knownNodes, VersionInformation version, TransportVersion transportVersion ) { return startTransport(id, knownNodes, version, transportVersion, threadPool); } public static MockTransportService startTransport( String id, List<DiscoveryNode> knownNodes, VersionInformation version, TransportVersion transportVersion, ThreadPool threadPool ) { return startTransport(id, knownNodes, version, transportVersion, threadPool, Settings.EMPTY); } public static MockTransportService startTransport( final String id, final List<DiscoveryNode> knownNodes, final VersionInformation version, final TransportVersion transportVersion, final ThreadPool threadPool, final Settings settings ) { boolean success = false; final Settings s = Settings.builder().put(settings).put("node.name", id).build(); ClusterName clusterName = ClusterName.CLUSTER_NAME_SETTING.get(s); MockTransportService newService = MockTransportService.createNewService(s, version, transportVersion, threadPool, null); try { newService.registerRequestHandler( TransportSearchShardsAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchShardsRequest::new, (request, channel, task) -> { if ("index_not_found".equals(request.preference())) { channel.sendResponse(new IndexNotFoundException("index")); } else { channel.sendResponse(new SearchShardsResponse(List.of(), knownNodes, Collections.emptyMap())); } } ); newService.registerRequestHandler( TransportSearchAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchRequest::new, (request, channel, task) -> { if ("index_not_found".equals(request.preference())) { channel.sendResponse(new IndexNotFoundException("index")); return; } SearchHits searchHits; if 
("null_target".equals(request.preference())) { searchHits = SearchHits.unpooled( new SearchHit[] { SearchHit.unpooled(0) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F ); } else { searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); } try (var searchResponseRef = ReleasableRef.of(SearchResponseUtils.successfulResponse(searchHits))) { channel.sendResponse(searchResponseRef.get()); } } ); newService.registerRequestHandler( ClusterStateAction.NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, RemoteClusterStateRequest::new, (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); } ClusterState build = ClusterState.builder(clusterName).nodes(builder.build()).build(); channel.sendResponse(new ClusterStateResponse(clusterName, build, false)); } ); if (RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.get(s)) { newService.registerRequestHandler( RemoteClusterNodesAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, RemoteClusterNodesAction.Request::new, (request, channel, task) -> channel.sendResponse(new RemoteClusterNodesAction.Response(knownNodes)) ); } newService.start(); newService.acceptIncomingRequests(); success = true; return newService; } finally { if (success == false) { newService.close(); } } } @SuppressForbidden(reason = "calls getLocalHost here but it's fine in this case") public void testSlowNodeCanBeCancelled() throws IOException, InterruptedException { try (ServerSocket socket = new MockServerSocket()) { socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); socket.setReuseAddress(true); DiscoveryNode seedNode = DiscoveryNodeUtils.create( "TEST", new TransportAddress(socket.getInetAddress(), socket.getLocalPort()), emptyMap(), emptySet() ); CountDownLatch acceptedLatch = new CountDownLatch(1); CountDownLatch closeRemote = new CountDownLatch(1); Thread t = new Thread() { @Override public void run() { try 
(Socket accept = socket.accept()) { acceptedLatch.countDown(); closeRemote.await(); } catch (IOException e) { // that's fine we might close } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } }; t.start(); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { service.start(); service.acceptIncomingRequests(); CountDownLatch listenerCalled = new CountDownLatch(1); AtomicReference<Exception> exceptionReference = new AtomicReference<>(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, randomBoolean())) { ActionListener<Void> listener = ActionListener.wrap(x -> { listenerCalled.countDown(); fail("expected exception"); }, x -> { exceptionReference.set(x); listenerCalled.countDown(); }); connection.ensureConnected(listener); acceptedLatch.await(); connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on assertTrue(connection.assertNoRunningConnections()); } closeRemote.countDown(); listenerCalled.await(); Exception e = exceptionReference.get(); assertNotNull(e); assertThat(e, either(instanceOf(AlreadyClosedException.class)).or(instanceOf(ConnectTransportException.class))); } } } private static List<String> addresses(final DiscoveryNode... 
seedNodes) { return Arrays.stream(seedNodes).map(s -> s.getAddress().toString()).collect(Collectors.toCollection(ArrayList::new)); } public void testCloseWhileConcurrentlyConnecting() throws IOException, InterruptedException, BrokenBarrierException { List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); try ( MockTransportService seedTransport = startTransport( "seed_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current() ); MockTransportService seedTransport1 = startTransport( "seed_node_1", knownNodes, VersionInformation.CURRENT, TransportVersion.current() ); MockTransportService discoverableTransport = startTransport( "discoverable_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current() ) ) { DiscoveryNode seedNode = seedTransport.getLocalNode(); DiscoveryNode seedNode1 = seedTransport1.getLocalNode(); knownNodes.add(seedTransport.getLocalNode()); knownNodes.add(discoverableTransport.getLocalNode()); knownNodes.add(seedTransport1.getLocalNode()); Collections.shuffle(knownNodes, random()); List<String> seedNodes = addresses(seedNode1, seedNode); Collections.shuffle(seedNodes, random()); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { service.start(); service.acceptIncomingRequests(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, seedNodes); try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, false)) { int numThreads = randomIntBetween(4, 10); Thread[] threads = new Thread[numThreads]; CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); for (int i = 0; i < threads.length; i++) { final int numConnectionAttempts = randomIntBetween(10, 100); threads[i] = new Thread() { @Override public void run() { try { barrier.await(); CountDownLatch latch = new CountDownLatch(numConnectionAttempts); for (int i = 0; i < 
numConnectionAttempts; i++) { AtomicReference<Exception> executed = new AtomicReference<>(); ActionListener<Void> listener = ActionListener.wrap(x -> { if (executed.compareAndSet(null, new RuntimeException())) { latch.countDown(); } else { throw new AssertionError("shit's been called twice", executed.get()); } }, x -> { if (executed.compareAndSet(null, x)) { latch.countDown(); } else { final String message = x.getMessage(); if ((executed.get().getClass() == x.getClass() && "operation was cancelled reason [connect handler is closed]".equals(message) && message.equals(executed.get().getMessage())) == false) { // we do cancel the operation and that means that if timing allows it, the caller // of a blocking call as well as the handler will get the exception from the // ExecutionCancelledException concurrently. unless that is the case we fail // if we get called more than once! AssertionError assertionError = new AssertionError("shit's been called twice", x); assertionError.addSuppressed(executed.get()); throw assertionError; } } if (x instanceof RejectedExecutionException || x instanceof AlreadyClosedException) { // that's fine } else { throw new AssertionError(x); } }); try { connection.ensureConnected(listener); } catch (Exception e) { // it's ok if we're shutting down assertThat(e.getMessage(), containsString("threadcontext is already closed")); latch.countDown(); } } safeAwait(latch); } catch (Exception ex) { throw new AssertionError(ex); } } }; threads[i].start(); } barrier.await(); } } } } public void testGetConnectionInfo() throws Exception { doTestGetConnectionInfo(false); doTestGetConnectionInfo(true); } private void doTestGetConnectionInfo(boolean hasClusterCredentials) throws Exception { List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); final Settings seedTransportSettings; if (hasClusterCredentials) { seedTransportSettings = Settings.builder() .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") 
.put(RemoteClusterPortSettings.PORT.getKey(), "0") .build(); } else { seedTransportSettings = Settings.EMPTY; } try ( MockTransportService transport1 = startTransport( "seed_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current(), threadPool, seedTransportSettings ); MockTransportService transport2 = startTransport( "seed_node_1", knownNodes, VersionInformation.CURRENT, TransportVersion.current(), threadPool, seedTransportSettings ); MockTransportService transport3 = startTransport( "discoverable_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current(), threadPool, seedTransportSettings ) ) { DiscoveryNode node1 = transport1.getLocalNode(); DiscoveryNode node2 = transport3.getLocalNode(); DiscoveryNode node3 = transport2.getLocalNode(); if (hasClusterCredentials) { node1 = node1.withTransportAddress(transport1.boundRemoteAccessAddress().publishAddress()); node2 = node2.withTransportAddress(transport3.boundRemoteAccessAddress().publishAddress()); node3 = node3.withTransportAddress(transport2.boundRemoteAccessAddress().publishAddress()); } knownNodes.add(node1); knownNodes.add(node2); knownNodes.add(node3); Collections.shuffle(knownNodes, random()); List<String> seedNodes = addresses(node3, node1, node2); Collections.shuffle(seedNodes, random()); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { service.start(); service.acceptIncomingRequests(); int maxNumConnections = randomIntBetween(1, 5); String clusterAlias = "test-cluster"; Settings settings = Settings.builder() .put(buildSniffSettings(clusterAlias, seedNodes)) .put(SniffConnectionStrategySettings.REMOTE_CONNECTIONS_PER_CLUSTER.getKey(), maxNumConnections) .build(); if (hasClusterCredentials) { final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString( 
RemoteClusterSettings.REMOTE_CLUSTER_CREDENTIALS.getConcreteSettingForNamespace(clusterAlias).getKey(), randomAlphaOfLength(20) ); settings = Settings.builder().put(settings).setSecureSettings(secureSettings).build(); } try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, hasClusterCredentials)) { // test no nodes connected RemoteConnectionInfo remoteConnectionInfo = assertSerialization(connection.getConnectionInfo()); assertNotNull(remoteConnectionInfo); SniffConnectionStrategy.SniffModeInfo sniffInfo = (SniffConnectionStrategy.SniffModeInfo) remoteConnectionInfo.modeInfo; assertEquals(0, sniffInfo.numNodesConnected); assertEquals(3, sniffInfo.seedNodes.size()); assertEquals(maxNumConnections, sniffInfo.maxConnectionsPerCluster); assertEquals(clusterAlias, remoteConnectionInfo.clusterAlias); assertEquals(hasClusterCredentials, remoteConnectionInfo.hasClusterCredentials); } } } } public void testRemoteConnectionInfo() throws IOException { List<String> remoteAddresses = Collections.singletonList("seed:1"); String serverName = "the_server_name"; RemoteConnectionInfo.ModeInfo modeInfo1; RemoteConnectionInfo.ModeInfo modeInfo2; if (randomBoolean()) { modeInfo1 = new SniffConnectionStrategy.SniffModeInfo(remoteAddresses, 4, 4); modeInfo2 = new SniffConnectionStrategy.SniffModeInfo(remoteAddresses, 4, 3); } else { modeInfo1 = new ProxyConnectionStrategy.ProxyModeInfo(remoteAddresses.get(0), serverName, 18, 18); modeInfo2 = new ProxyConnectionStrategy.ProxyModeInfo(remoteAddresses.get(0), serverName, 18, 17); } RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster", modeInfo1, TimeValue.timeValueMinutes(30), false, false); assertSerialization(stats); RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster", modeInfo1, TimeValue.timeValueMinutes(30), true, false); assertSerialization(stats1); assertNotEquals(stats, stats1); stats1 = new RemoteConnectionInfo("test_cluster_1", modeInfo1, 
TimeValue.timeValueMinutes(30), false, false); assertSerialization(stats1); assertNotEquals(stats, stats1); stats1 = new RemoteConnectionInfo("test_cluster", modeInfo1, TimeValue.timeValueMinutes(325), false, false); assertSerialization(stats1); assertNotEquals(stats, stats1); stats1 = new RemoteConnectionInfo("test_cluster", modeInfo2, TimeValue.timeValueMinutes(30), false, false); assertSerialization(stats1); assertNotEquals(stats, stats1); stats1 = new RemoteConnectionInfo("test_cluster", modeInfo1, TimeValue.timeValueMinutes(30), false, true); assertSerialization(stats1); assertNotEquals(stats, stats1); } private static RemoteConnectionInfo assertSerialization(RemoteConnectionInfo info) throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { out.setTransportVersion(TransportVersion.current()); info.writeTo(out); StreamInput in = out.bytes().streamInput(); in.setTransportVersion(TransportVersion.current()); RemoteConnectionInfo remoteConnectionInfo = new RemoteConnectionInfo(in); assertEquals(info, remoteConnectionInfo); assertEquals(info.hashCode(), remoteConnectionInfo.hashCode()); return randomBoolean() ? 
info : remoteConnectionInfo; } } public void testRenderConnectionInfoXContent() throws IOException { List<String> remoteAddresses = Arrays.asList("seed:1", "seed:2"); String serverName = "the_server_name"; RemoteConnectionInfo.ModeInfo modeInfo; boolean sniff = randomBoolean(); if (sniff) { modeInfo = new SniffConnectionStrategy.SniffModeInfo(remoteAddresses, 3, 2); } else { modeInfo = new ProxyConnectionStrategy.ProxyModeInfo(remoteAddresses.get(0), serverName, 18, 16); } final boolean hasClusterCredentials = randomBoolean(); RemoteConnectionInfo stats = new RemoteConnectionInfo( "test_cluster", modeInfo, TimeValue.timeValueMinutes(30), true, hasClusterCredentials ); stats = assertSerialization(stats); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); stats.toXContent(builder, null); builder.endObject(); if (sniff) { assertEquals(XContentHelper.stripWhitespace(Strings.format(""" { "test_cluster": { "connected": true, "mode": "sniff", "seeds": [ "seed:1", "seed:2" ], "num_nodes_connected": 2, "max_connections_per_cluster": 3, "initial_connect_timeout": "30m", "skip_unavailable": true%s } }""", hasClusterCredentials ? ",\"cluster_credentials\":\"::es_redacted::\"" : "")), Strings.toString(builder)); } else { assertEquals(XContentHelper.stripWhitespace(Strings.format(""" { "test_cluster": { "connected": true, "mode": "proxy", "proxy_address": "seed:1", "server_name": "the_server_name", "num_proxy_sockets_connected": 16, "max_proxy_socket_connections": 18, "initial_connect_timeout": "30m", "skip_unavailable": true%s } }""", hasClusterCredentials ? 
",\"cluster_credentials\":\"::es_redacted::\"" : "")), Strings.toString(builder)); } } public void testCollectNodes() throws Exception { doTestCollectNodes(false); doTestCollectNodes(true); } private void doTestCollectNodes(boolean hasClusterCredentials) throws Exception { List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); final Settings seedTransportSettings; if (hasClusterCredentials) { seedTransportSettings = Settings.builder() .put(RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED.getKey(), "true") .put(RemoteClusterPortSettings.PORT.getKey(), "0") .build(); } else { seedTransportSettings = Settings.EMPTY; } try ( MockTransportService seedTransport = startTransport( "seed_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current(), threadPool, seedTransportSettings ) ) { DiscoveryNode seedNode = seedTransport.getLocalNode(); if (hasClusterCredentials) { seedNode = seedNode.withTransportAddress(seedTransport.boundRemoteAccessAddress().publishAddress()); } knownNodes.add(seedNode); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { service.start(); service.acceptIncomingRequests(); service.addSendBehavior((connection, requestId, action, request, options) -> { if (hasClusterCredentials) { assertThat( action, oneOf(RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME, RemoteClusterNodesAction.TYPE.name()) ); } else { assertThat(action, oneOf(TransportService.HANDSHAKE_ACTION_NAME, ClusterStateAction.NAME)); } connection.sendRequest(requestId, action, request, options); }); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); if (hasClusterCredentials) { final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString( RemoteClusterSettings.REMOTE_CLUSTER_CREDENTIALS.getConcreteSettingForNamespace(clusterAlias).getKey(), 
randomAlphaOfLength(20) ); settings = Settings.builder().put(settings).setSecureSettings(secureSettings).build(); } try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, hasClusterCredentials)) { CountDownLatch responseLatch = new CountDownLatch(1); AtomicReference<Function<String, DiscoveryNode>> reference = new AtomicReference<>(); AtomicReference<Exception> failReference = new AtomicReference<>(); ActionListener<Function<String, DiscoveryNode>> shardsListener = ActionListener.wrap(x -> { reference.set(x); responseLatch.countDown(); }, x -> { failReference.set(x); responseLatch.countDown(); }); connection.collectNodes(shardsListener); responseLatch.await(); assertNull(failReference.get()); assertNotNull(reference.get()); Function<String, DiscoveryNode> function = reference.get(); assertEquals(seedNode, function.apply(seedNode.getId())); assertNull(function.apply(seedNode.getId() + "foo")); assertTrue(connection.assertNoRunningConnections()); } } } } public void testNoChannelsExceptREG() throws Exception { List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); try ( MockTransportService seedTransport = startTransport( "seed_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current() ) ) { DiscoveryNode seedNode = seedTransport.getLocalNode(); knownNodes.add(seedTransport.getLocalNode()); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { service.start(); service.acceptIncomingRequests(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, false)) { PlainActionFuture<Void> plainActionFuture = new PlainActionFuture<>(); connection.ensureConnected(plainActionFuture); plainActionFuture.get(10, TimeUnit.SECONDS); for 
(TransportRequestOptions.Type type : TransportRequestOptions.Type.values()) { if (type != TransportRequestOptions.Type.REG) { assertThat( expectThrows( IllegalStateException.class, () -> connection.getConnection() .sendRequest( randomNonNegativeLong(), "arbitrary", new EmptyRequest(), TransportRequestOptions.of(null, type) ) ).getMessage(), allOf(containsString("can't select"), containsString(type.toString())) ); } } } } } } public void testConnectedNodesConcurrentAccess() throws IOException, InterruptedException { List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); List<MockTransportService> discoverableTransports = new CopyOnWriteArrayList<>(); try { final int numDiscoverableNodes = randomIntBetween(5, 20); List<DiscoveryNode> discoverableNodes = new ArrayList<>(numDiscoverableNodes); for (int i = 0; i < numDiscoverableNodes; i++) { MockTransportService transportService = startTransport( "discoverable_node" + i, knownNodes, VersionInformation.CURRENT, TransportVersion.current() ); discoverableNodes.add(transportService.getLocalNode()); discoverableTransports.add(transportService); } List<String> seedNodes = new CopyOnWriteArrayList<>( randomSubsetOf( randomIntBetween(1, discoverableNodes.size()), discoverableNodes.stream().map(d -> d.getAddress().toString()).toList() ) ); Collections.shuffle(seedNodes, random()); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { service.start(); service.acceptIncomingRequests(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, seedNodes); try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, randomBoolean())) { final int numGetThreads = randomIntBetween(4, 10); final Thread[] getThreads = new Thread[numGetThreads]; final int numModifyingThreads = randomIntBetween(4, 10); final Thread[] modifyingThreads = new 
Thread[numModifyingThreads]; CyclicBarrier barrier = new CyclicBarrier(numGetThreads + numModifyingThreads); for (int i = 0; i < getThreads.length; i++) { final int numGetCalls = randomIntBetween(1000, 10000); getThreads[i] = new Thread(() -> { try { safeAwait(barrier); for (int j = 0; j < numGetCalls; j++) { try { Transport.Connection lowLevelConnection = connection.getConnection(); assertNotNull(lowLevelConnection); } catch (ConnectTransportException e) { // ignore, this is an expected exception } } } catch (Exception ex) { throw new AssertionError(ex); } }); getThreads[i].start(); } for (int i = 0; i < modifyingThreads.length; i++) { final int numDisconnects = randomIntBetween(5, 10); modifyingThreads[i] = new Thread(() -> { try { safeAwait(barrier); for (int j = 0; j < numDisconnects; j++) { DiscoveryNode node = randomFrom(discoverableNodes); try { connection.getConnectionManager().getConnection(node); } catch (ConnectTransportException e) { // Ignore } } } catch (Exception ex) { throw new AssertionError(ex); } }); modifyingThreads[i].start(); } for (Thread thread : getThreads) { thread.join(); } for (Thread thread : modifyingThreads) { thread.join(); } } } } finally { IOUtils.closeWhileHandlingException(discoverableTransports); } } public void testGetConnection() throws Exception { List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); try ( MockTransportService seedTransport = startTransport( "seed_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current() ); MockTransportService disconnectedTransport = startTransport( "disconnected_node", knownNodes, VersionInformation.CURRENT, TransportVersion.current() ) ) { DiscoveryNode seedNode = seedTransport.getLocalNode(); knownNodes.add(seedNode); DiscoveryNode disconnectedNode = disconnectedTransport.getLocalNode(); try ( MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, VersionInformation.CURRENT, TransportVersion.current(), threadPool, null ) ) { 
service.start(); service.acceptIncomingRequests(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); try (RemoteClusterConnection connection = createConnection(clusterAlias, settings, service, false)) { safeAwait(listener -> connection.ensureConnected(listener.map(x -> null))); for (int i = 0; i < 10; i++) { // always a direct connection as the remote node is already connected Transport.Connection remoteConnection = connection.getConnection(seedNode); assertEquals(seedNode, remoteConnection.getNode()); } for (int i = 0; i < 10; i++) { // we don't use the transport service connection manager so we will get a proxy connection for the local node Transport.Connection remoteConnection = connection.getConnection(service.getLocalNode()); assertThat(remoteConnection, instanceOf(RemoteConnectionManager.ProxyConnection.class)); assertThat(remoteConnection.getNode(), equalTo(service.getLocalNode())); } for (int i = 0; i < 10; i++) { // always a proxy connection as the target node is not connected Transport.Connection remoteConnection = connection.getConnection(disconnectedNode); assertThat(remoteConnection, instanceOf(RemoteConnectionManager.ProxyConnection.class)); assertThat(remoteConnection.getNode(), sameInstance(disconnectedNode)); } } } } } private Settings buildRandomSettings(String clusterAlias, List<String> addresses) { if (randomBoolean()) { return buildProxySettings(clusterAlias, addresses); } else { return buildSniffSettings(clusterAlias, addresses); } } private static Settings buildProxySettings(String clusterAlias, List<String> addresses) { Settings.Builder builder = Settings.builder(); builder.put(ProxyConnectionStrategySettings.PROXY_ADDRESS.getConcreteSettingForNamespace(clusterAlias).getKey(), addresses.get(0)); builder.put(RemoteClusterSettings.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).getKey(), "proxy"); return builder.build(); } private static Settings 
buildSniffSettings(String clusterAlias, List<String> seedNodes) { Settings.Builder builder = Settings.builder(); builder.put(RemoteClusterSettings.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).getKey(), "sniff"); builder.put( SniffConnectionStrategySettings.REMOTE_CLUSTER_SEEDS.getConcreteSettingForNamespace(clusterAlias).getKey(), Strings.collectionToCommaDelimitedString(seedNodes) ); return builder.build(); } private static RemoteClusterCredentialsManager buildCredentialsManager(String clusterAlias) { Objects.requireNonNull(clusterAlias); final Settings.Builder builder = Settings.builder(); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("cluster.remote." + clusterAlias + ".credentials", randomAlphaOfLength(20)); builder.setSecureSettings(secureSettings); return new RemoteClusterCredentialsManager(builder.build()); } private RemoteClusterConnection createConnection( String alias, Settings settings, TransportService transportService, boolean hasCredentials ) { return new RemoteClusterConnection( RemoteClusterSettings.toConfig(alias, settings), transportService, hasCredentials ? buildCredentialsManager(alias) : RemoteClusterCredentialsManager.EMPTY, false ); } }
RemoteClusterConnectionTests
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java
{ "start": 807, "end": 2865 }
class ____ extends AbstractBulkIndexByScrollRequestBuilder<ReindexRequest, ReindexRequestBuilder> { private final IndexRequestBuilder destinationBuilder; private RemoteInfo remoteInfo; public ReindexRequestBuilder(ElasticsearchClient client) { this(client, new SearchRequestBuilder(client), new IndexRequestBuilder(client)); } private ReindexRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search, IndexRequestBuilder destination) { super(client, ReindexAction.INSTANCE, search); this.destinationBuilder = destination; } @Override protected ReindexRequestBuilder self() { return this; } public IndexRequestBuilder destination() { return destinationBuilder; } /** * Set the destination index. */ public ReindexRequestBuilder destination(String index) { destinationBuilder.setIndex(index); return this; } /** * Setup reindexing from a remote cluster. */ public ReindexRequestBuilder setRemoteInfo(RemoteInfo remoteInfo) { this.remoteInfo = remoteInfo; return this; } @Override public ReindexRequest request() { SearchRequest source = source().request(); try { IndexRequest destination = destinationBuilder.request(); try { ReindexRequest reindexRequest = new ReindexRequest(source, destination, false); try { super.apply(reindexRequest); if (remoteInfo != null) { reindexRequest.setRemoteInfo(remoteInfo); } return reindexRequest; } catch (Exception e) { reindexRequest.decRef(); throw e; } } catch (Exception e) { destination.decRef(); throw e; } } catch (Exception e) { source.decRef(); throw e; } } }
ReindexRequestBuilder
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLocatedFileStatusFetcher.java
{ "start": 3300, "end": 3889 }
class ____ extends LocalFileSystem { @Override public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException { // The executor service now is running tasks LATCH.countDown(); try { // Try to sleep some time to // let LocatedFileStatusFetcher#getFileStatuses be interrupted before // the getting file info task finishes. Thread.sleep(5000); } catch (InterruptedException e) { // Ignore this exception } return super.globStatus(pathPattern, filter); } } }
MockFileSystem
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/bug/Issue1695.java
{ "start": 163, "end": 1276 }
class ____ extends TestCase { public void test_for_mysql() throws Exception { String sql = "select ht.* from t_books ht"; String result = PagerUtils.count(sql, JdbcConstants.MYSQL); assertEquals("SELECT COUNT(*)\n" + "FROM t_books ht", result); } public void test_for_pg() throws Exception { String sql = "select ht.* from t_books ht"; String result = PagerUtils.count(sql, JdbcConstants.POSTGRESQL); assertEquals("SELECT COUNT(*)\n" + "FROM t_books ht", result); } public void test_for_oracle() throws Exception { String sql = "select ht.* from t_books ht"; String result = PagerUtils.count(sql, JdbcConstants.ORACLE); assertEquals("SELECT COUNT(*)\n" + "FROM t_books ht", result); } public void test_for_sqlserver() throws Exception { String sql = "select ht.* from t_books ht"; String result = PagerUtils.count(sql, JdbcConstants.SQL_SERVER); assertEquals("SELECT COUNT(*)\n" + "FROM t_books ht", result); } }
Issue1695
java
google__error-prone
core/src/main/java/com/google/errorprone/bugpatterns/SelfAssertion.java
{ "start": 2147, "end": 6857 }
class ____ extends BugChecker implements MethodInvocationTreeMatcher { private final Matcher<MethodInvocationTree> equalsMatcher = anyOf( allOf( instanceMethod() .anyClass() .namedAnyOf( "isEqualTo", "isSameInstanceAs", "containsExactlyElementsIn", "containsAtLeastElementsIn", "areEqualTo"), this::truthSameArguments), allOf( staticMethod() .onClassAny( "org.junit.Assert", "junit.framework.Assert", "junit.framework.TestCase") .namedAnyOf("assertEquals", "assertArrayEquals"), this::junitSameArguments)); private final Matcher<MethodInvocationTree> notEqualsMatcher = anyOf( allOf( instanceMethod().anyClass().namedAnyOf("isNotEqualTo", "isNotSameInstanceAs"), this::truthSameArguments), allOf( staticMethod() .onClassAny( "org.junit.Assert", "junit.framework.Assert", "junit.framework.TestCase") .namedAnyOf("assertNotEquals"), this::junitSameArguments)); private static final Matcher<ExpressionTree> ASSERT_THAT = anyOf( staticMethod().anyClass().named("assertThat"), instanceMethod().onDescendantOf("com.google.common.truth.TestVerb").named("that"), instanceMethod() .onDescendantOf("com.google.common.truth.StandardSubjectBuilder") .named("that")); private final ConstantExpressions constantExpressions; @Inject SelfAssertion(ConstantExpressions constantExpressions) { this.constantExpressions = constantExpressions; } @Override public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) { if (tree.getArguments().isEmpty()) { return NO_MATCH; } if (equalsMatcher.matches(tree, state)) { return buildDescription(tree) .setMessage(generateSummary(getSymbol(tree).getSimpleName().toString(), "passes")) .build(); } if (notEqualsMatcher.matches(tree, state)) { return buildDescription(tree) .setMessage(generateSummary(getSymbol(tree).getSimpleName().toString(), "fails")) .build(); } return NO_MATCH; } private static String generateSummary(String methodName, String constantOutput) { return format( "You are passing identical arguments to the %s method, so this 
assertion always %s. THIS IS" + " LIKELY A BUG! If you are trying to test the correctness of an equals()" + " implementation, use EqualsTester instead.", methodName, constantOutput); } private boolean junitSameArguments(MethodInvocationTree tree, VisitorState state) { var arguments = tree.getArguments(); if (arguments.isEmpty()) { return false; } var firstArgument = tree.getArguments().get(0); ExpressionTree expected; ExpressionTree actual; if (tree.getArguments().size() > 2 && isSameType(getType(firstArgument), state.getSymtab().stringType, state)) { expected = arguments.get(1); actual = arguments.get(2); } else { expected = arguments.get(0); actual = arguments.get(1); } return sameExpression(state, expected, actual); } private boolean truthSameArguments(MethodInvocationTree tree, VisitorState state) { ExpressionTree rec = getReceiver(tree); if (rec == null) { return false; } if (!ASSERT_THAT.matches(rec, state)) { return false; } if (((MethodInvocationTree) rec).getArguments().size() != 1 || tree.getArguments().size() != 1) { // Oops: we over-matched and this doesn't look like a normal assertion. return false; } ExpressionTree receiverExpression = getOnlyElement(((MethodInvocationTree) rec).getArguments()); ExpressionTree invocationExpression = getOnlyElement(tree.getArguments()); return sameExpression(state, receiverExpression, invocationExpression); } private boolean sameExpression( VisitorState state, ExpressionTree receiverExpression, ExpressionTree invocationExpression) { if (sameVariable(receiverExpression, invocationExpression)) { return true; } var receiverConstant = constantExpressions.constantExpression(receiverExpression, state); var invocationConstant = constantExpressions.constantExpression(invocationExpression, state); return receiverConstant.isPresent() && receiverConstant.equals(invocationConstant); } }
SelfAssertion
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java
{ "start": 23983, "end": 24450 }
class ____ { public String[] getMessage(boolean b) { return b ? null : new String[0]; } } """) .addOutputLines( "out/com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java", """ package com.google.errorprone.bugpatterns.nullness; import org.checkerframework.checker.nullness.qual.Nullable; public
LiteralNullReturnTest
java
apache__maven
impl/maven-core/src/main/java/org/apache/maven/project/artifact/PluginArtifact.java
{ "start": 1171, "end": 2027 }
class ____ extends DefaultArtifact implements ArtifactWithDependencies { private Plugin plugin; public PluginArtifact(Plugin plugin, Artifact pluginArtifact) { super( plugin.getGroupId(), plugin.getArtifactId(), plugin.getVersion(), null, "maven-plugin", null, new PluginArtifactHandler()); this.plugin = plugin; setFile(pluginArtifact.getFile()); setResolved(true); } @Override public List<Dependency> getDependencies() { return plugin.getDependencies(); } @Override public List<Dependency> getManagedDependencies() { return Collections.emptyList(); } // TODO: this is duplicate of MavenPluginArtifactHandlerProvider provided one static
PluginArtifact
java
square__moshi
moshi/src/test/java/com/squareup/moshi/CircularAdaptersTest.java
{ "start": 1217, "end": 2475 }
class ____ { final String name; final Team[] teams; Project(String name, Team... teams) { this.name = name; this.teams = teams; } } @Test public void circularAdapters() throws Exception { Moshi moshi = new Moshi.Builder().build(); JsonAdapter<Team> teamAdapter = moshi.adapter(Team.class); Team team = new Team( "Alice", new Project("King", new Team("Charlie", new Project("Delivery", (Team[]) null)))); assertThat(teamAdapter.toJson(team)) .isEqualTo( "{\"lead\":\"Alice\",\"projects\":[{\"name\":" + "\"King\",\"teams\":[{\"lead\":\"Charlie\",\"projects\":[{\"name\":\"Delivery\"}]}]}]}"); Team fromJson = teamAdapter.fromJson( "{\"lead\":\"Alice\",\"projects\":[{\"name\":" + "\"King\",\"teams\":[{\"lead\":\"Charlie\",\"projects\":[{\"name\":\"Delivery\"}]}]}]}"); assertThat(fromJson.lead).isEqualTo("Alice"); assertThat(fromJson.projects[0].name).isEqualTo("King"); assertThat(fromJson.projects[0].teams[0].lead).isEqualTo("Charlie"); assertThat(fromJson.projects[0].teams[0].projects[0].name).isEqualTo("Delivery"); } @Retention(RUNTIME) @JsonQualifier public @
Project
java
elastic__elasticsearch
plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java
{ "start": 1183, "end": 2116 }
class ____ extends ESBlobStoreRepositoryIntegTestCase { @Override protected String repositoryType() { return "hdfs"; } @Override protected Settings repositorySettings(String repoName) { return Settings.builder() .put("uri", "hdfs:///") .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) .put("path", "foo") .put("chunk_size", randomIntBetween(100, 1000) + "k") .put("compress", randomBoolean()) .build(); } @Override public void testSnapshotAndRestore() throws Exception { // the HDFS mockup doesn't preserve the repository contents after removing the repository testSnapshotAndRestore(false); } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singletonList(HdfsPlugin.class); } }
HdfsBlobStoreRepositoryTests
java
elastic__elasticsearch
modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java
{ "start": 1029, "end": 5226 }
class ____ extends ESTokenStreamTestCase { /** * Test PatternAnalyzer when it is configured with a non-word pattern. */ public void testNonWordPattern() throws IOException { // Split on non-letter pattern, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\W+"), false, null); assertAnalyzesTo( a, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "The", "quick", "brown", "Fox", "the", "abcd1234", "56", "78", "dc" } ); // split on non-letter pattern, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo( b, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "quick", "brown", "fox", "abcd1234", "56", "78", "dc" } ); } /** * Test PatternAnalyzer when it is configured with a whitespace pattern. * Behavior can be similar to WhitespaceAnalyzer (depending upon options) */ public void testWhitespacePattern() throws IOException { // Split on whitespace patterns, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); assertAnalyzesTo( a, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "The", "quick", "brown", "Fox,the", "abcd1234", "(56.78)", "dc." } ); // Split on whitespace patterns, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo( b, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." } ); } /** * Test PatternAnalyzer when it is configured with a custom pattern. 
In this * case, text is tokenized on the comma "," */ public void testCustomPattern() throws IOException { // Split on comma, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile(","), false, null); assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,", new String[] { "Here", "Are", "some", "Comma", "separated", "words" }); // split on comma, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile(","), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,", new String[] { "here", "some", "comma", "separated", "words" }); } /** * Test PatternAnalyzer against a large document. */ public void testHugeDocument() throws IOException { StringBuilder document = new StringBuilder(); // 5000 a's char largeWord[] = new char[5000]; Arrays.fill(largeWord, 'a'); document.append(largeWord); // a space document.append(' '); // 2000 b's char largeWord2[] = new char[2000]; Arrays.fill(largeWord2, 'b'); document.append(largeWord2); // Split on whitespace patterns, do not lowercase, no stopwords PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); assertAnalyzesTo(a, document.toString(), new String[] { new String(largeWord), new String(largeWord2) }); } /** blast some random strings through the analyzer */ public void testRandomStrings() throws Exception { Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); checkRandomData(random(), a, 10000 * RANDOM_MULTIPLIER); } public void testNormalize() { PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); assertEquals(new BytesRef("FooBar"), a.normalize("dummy", "FooBar")); a = new PatternAnalyzer(Pattern.compile("\\s+"), true, null); assertEquals(new BytesRef("foobar"), a.normalize("dummy", "FooBar")); } }
PatternAnalyzerTests
java
spring-projects__spring-boot
module/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/endpoint/jackson/Jackson2EndpointAutoConfigurationTests.java
{ "start": 1780, "end": 4395 }
class ____ { private final ApplicationContextRunner runner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(Jackson2EndpointAutoConfiguration.class)); @Test void endpointObjectMapperWhenNoProperty() { this.runner.run((context) -> assertThat(context) .hasSingleBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class)); } @Test void endpointObjectMapperWhenPropertyTrue() { this.runner.run((context) -> assertThat(context) .hasSingleBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class)); } @Test void endpointObjectMapperWhenPropertyFalse() { this.runner.withPropertyValues("management.endpoints.jackson2.isolated-object-mapper=false") .run((context) -> assertThat(context) .doesNotHaveBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class)); } @Test void endpointObjectMapperWhenSpringWebIsAbsent() { this.runner.withClassLoader(new FilteredClassLoader(Jackson2ObjectMapperBuilder.class)) .run((context) -> assertThat(context) .doesNotHaveBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class)); } @Test void endpointObjectMapperDoesNotSerializeDatesAsTimestamps() { this.runner.run((context) -> { ObjectMapper objectMapper = context .getBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class) .get(); Instant now = Instant.now(); String json = objectMapper.writeValueAsString(Map.of("timestamp", now)); assertThat(json).contains(DateTimeFormatter.ISO_INSTANT.format(now)); }); } @Test void endpointObjectMapperDoesNotSerializeDurationsAsTimestamps() { this.runner.run((context) -> { ObjectMapper objectMapper = context .getBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class) .get(); Duration duration = Duration.ofSeconds(42); String json = objectMapper.writeValueAsString(Map.of("duration", duration)); assertThat(json).contains(duration.toString()); }); 
} @Test void endpointObjectMapperDoesNotSerializeNullValues() { this.runner.run((context) -> { ObjectMapper objectMapper = context .getBean(org.springframework.boot.actuate.endpoint.jackson.EndpointJackson2ObjectMapper.class) .get(); HashMap<String, String> map = new HashMap<>(); map.put("key", null); String json = objectMapper.writeValueAsString(map); assertThat(json).isEqualTo("{}"); }); } @Configuration(proxyBeanMethods = false) static
Jackson2EndpointAutoConfigurationTests
java
alibaba__nacos
ai/src/main/java/com/alibaba/nacos/ai/form/mcp/admin/McpImportForm.java
{ "start": 1042, "end": 4224 }
class ____ extends McpForm { @Serial private static final long serialVersionUID = 8016131725604983671L; private String importType; private String data; private boolean overrideExisting = false; private boolean validateOnly = false; /** * Whether to skip invalid servers when executing import. */ private boolean skipInvalid = false; private String[] selectedServers; /** * Optional start cursor for URL-based import pagination. */ private String cursor; /** * Optional page size for URL-based import (items per page). */ private Integer limit; /** * Optional fuzzy search keyword for registry import listing. * Only used when importType is 'url'. */ private String search; @Override public void validate() throws NacosApiException { fillDefaultValue(); if (StringUtils.isEmpty(importType)) { throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.PARAMETER_MISSING, "Required parameter 'importType' is not present"); } if (StringUtils.isEmpty(data)) { throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.PARAMETER_MISSING, "Required parameter 'data' is not present"); } if (ExternalDataTypeEnum.parseType(importType) == null) { throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.PARAMETER_VALIDATE_ERROR, "importType must be one of: json, url, file"); } } public String getImportType() { return importType; } public void setImportType(String importType) { this.importType = importType; } public String getData() { return data; } public void setData(String data) { this.data = data; } public boolean isOverrideExisting() { return overrideExisting; } public void setOverrideExisting(boolean overrideExisting) { this.overrideExisting = overrideExisting; } public boolean isValidateOnly() { return validateOnly; } public void setValidateOnly(boolean validateOnly) { this.validateOnly = validateOnly; } public boolean isSkipInvalid() { return skipInvalid; } public void setSkipInvalid(boolean skipInvalid) { this.skipInvalid = skipInvalid; } public String[] 
getSelectedServers() { return selectedServers; } public void setSelectedServers(String[] selectedServers) { this.selectedServers = selectedServers; } public String getCursor() { return cursor; } public void setCursor(String cursor) { this.cursor = cursor; } public Integer getLimit() { return limit; } public void setLimit(Integer limit) { this.limit = limit; } public String getSearch() { return search; } public void setSearch(String search) { this.search = search; } }
McpImportForm
java
spring-projects__spring-framework
spring-expression/src/test/java/org/springframework/expression/spel/SpelDocumentationTests.java
{ "start": 5462, "end": 9381 }
class ____ { @Test void propertyNavigation() { EvaluationContext context = TestScenarioCreator.getTestEvaluationContext(); // evaluates to 1856 int year = (Integer) parser.parseExpression("Birthdate.Year + 1900").getValue(context); // 1856 assertThat(year).isEqualTo(1856); // evaluates to "Smiljan" String city = (String) parser.parseExpression("placeOfBirth.City").getValue(context); assertThat(city).isEqualTo("Smiljan"); } @Test void indexingIntoArraysAndCollections() { ExpressionParser parser = new SpelExpressionParser(); StandardEvaluationContext teslaContext = TestScenarioCreator.getTestEvaluationContext(); StandardEvaluationContext societyContext = new StandardEvaluationContext(); societyContext.setRootObject(new IEEE()); // Inventions Array // evaluates to "Induction motor" String invention = parser.parseExpression("inventions[3]").getValue(teslaContext, String.class); assertThat(invention).isEqualTo("Induction motor"); // Members List // evaluates to "Nikola Tesla" String name = parser.parseExpression("members[0].Name").getValue(societyContext, String.class); assertThat(name).isEqualTo("Nikola Tesla"); // List and Array Indexing // evaluates to "Wireless communication" invention = parser.parseExpression("members[0].Inventions[6]").getValue(societyContext, String.class); assertThat(invention).isEqualTo("Wireless communication"); } @Test void indexingIntoStrings() { ExpressionParser parser = new SpelExpressionParser(); StandardEvaluationContext societyContext = new StandardEvaluationContext(); societyContext.setRootObject(new IEEE()); // evaluates to "T" (8th letter of "Nikola Tesla") String character = parser.parseExpression("members[0].name[7]") .getValue(societyContext, String.class); assertThat(character).isEqualTo("T"); } @Test void indexingIntoMaps() { StandardEvaluationContext societyContext = new StandardEvaluationContext(); societyContext.setRootObject(new IEEE()); // Officer's Map // evaluates to Inventor("Pupin") Inventor pupin = 
parser.parseExpression("officers['president']") .getValue(societyContext, Inventor.class); assertThat(pupin).isNotNull(); assertThat(pupin.getName()).isEqualTo("Pupin"); // evaluates to "Idvor" String city = parser.parseExpression("officers['president'].placeOfBirth.city") .getValue(societyContext, String.class); assertThat(city).isEqualTo("Idvor"); String countryExpression = "officers['advisors'][0].placeOfBirth.Country"; // setting values parser.parseExpression(countryExpression) .setValue(societyContext, "Croatia"); // evaluates to "Croatia" String country = parser.parseExpression(countryExpression) .getValue(societyContext, String.class); assertThat(country).isEqualTo("Croatia"); } @Test void indexingIntoObjects() { ExpressionParser parser = new SpelExpressionParser(); // Create an inventor to use as the root context object. Inventor tesla = new Inventor("Nikola Tesla"); // evaluates to "Nikola Tesla" String name = parser.parseExpression("#root['name']") .getValue(context, tesla, String.class); assertThat(name).isEqualTo("Nikola Tesla"); } @Test void indexingIntoCustomStructure() { // Create a ReflectiveIndexAccessor for FruitMap IndexAccessor fruitMapAccessor = new ReflectiveIndexAccessor( FruitMap.class, Color.class, "getFruit", "setFruit"); // Register the IndexAccessor for FruitMap context.addIndexAccessor(fruitMapAccessor); // Register the fruitMap variable context.setVariable("fruitMap", new FruitMap()); // evaluates to "cherry" String fruit = parser.parseExpression("#fruitMap[T(example.Color).RED]") .getValue(context, String.class); assertThat(fruit).isEqualTo("cherry"); } } @Nested
PropertiesArraysListsMapsAndIndexers
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openshiftai/request/embeddings/OpenShiftAiEmbeddingsRequest.java
{ "start": 1071, "end": 1250 }
class ____ responsible for creating a request to the OpenShift AI embeddings endpoint. * It constructs an HTTP POST request with the necessary headers and body content. */ public
is
java
quarkusio__quarkus
test-framework/junit5-component/src/test/java/io/quarkus/test/component/declarative/InterceptorMockingTest.java
{ "start": 1150, "end": 1354 }
class ____ { @SimpleBinding String ping() { return "true"; } } @Target({ TYPE, METHOD }) @Retention(RUNTIME) @InterceptorBinding public @
TheComponent
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java
{ "start": 4601, "end": 5202 }
class ____ implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; private final EvalOperator.ExpressionEvaluator.Factory in; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory in) { this.source = source; this.in = in; } @Override public ToDateNanosFromStringEvaluator get(DriverContext context) { return new ToDateNanosFromStringEvaluator(source, in.get(context), context); } @Override public String toString() { return "ToDateNanosFromStringEvaluator[" + "in=" + in + "]"; } } }
Factory
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtosLegacy.java
{ "start": 142362, "end": 148981 }
interface ____ { /** * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code> */ public abstract void sleep( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done); } public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new TestProtobufRpcHandoffProto() { @java.lang.Override public void sleep( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done) { impl.sleep(controller, request, done); } }; } public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request) throws com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + "wrong service type."); } switch(method.getIndex()) { case 0: return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)request); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() 
given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } }; } /** * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code> */ public abstract void sleep( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done); public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(6); } public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request, com.google.protobuf.RpcCallback< com.google.protobuf.Message> done) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method descriptor for wrong " + "service type."); } switch(method.getIndex()) { case 0: this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)request, 
com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2>specializeCallback( done)); return; default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final
Interface
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/jobgraph/InputOutputFormatContainerTest.java
{ "start": 7116, "end": 7712 }
class ____ extends GenericInputFormat<Object> { private final String name; TestInputFormat(String name) { this.name = name; } public String getName() { return name; } @Override public boolean reachedEnd() { return true; } @Override public Object nextRecord(Object reuse) { return null; } @Override public GenericInputSplit[] createInputSplits(int numSplits) { return null; } } private static final
TestInputFormat
java
grpc__grpc-java
interop-testing/src/generated/main/grpc/io/grpc/testing/integration/LoadBalancerStatsServiceGrpc.java
{ "start": 243, "end": 8098 }
class ____ { private LoadBalancerStatsServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "grpc.testing.LoadBalancerStatsService"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.LoadBalancerStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerStatsResponse> getGetClientStatsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetClientStats", requestType = io.grpc.testing.integration.Messages.LoadBalancerStatsRequest.class, responseType = io.grpc.testing.integration.Messages.LoadBalancerStatsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.LoadBalancerStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerStatsResponse> getGetClientStatsMethod() { io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.LoadBalancerStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerStatsResponse> getGetClientStatsMethod; if ((getGetClientStatsMethod = LoadBalancerStatsServiceGrpc.getGetClientStatsMethod) == null) { synchronized (LoadBalancerStatsServiceGrpc.class) { if ((getGetClientStatsMethod = LoadBalancerStatsServiceGrpc.getGetClientStatsMethod) == null) { LoadBalancerStatsServiceGrpc.getGetClientStatsMethod = getGetClientStatsMethod = io.grpc.MethodDescriptor.<io.grpc.testing.integration.Messages.LoadBalancerStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerStatsResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetClientStats")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( io.grpc.testing.integration.Messages.LoadBalancerStatsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( 
io.grpc.testing.integration.Messages.LoadBalancerStatsResponse.getDefaultInstance())) .setSchemaDescriptor(new LoadBalancerStatsServiceMethodDescriptorSupplier("GetClientStats")) .build(); } } } return getGetClientStatsMethod; } private static volatile io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsResponse> getGetClientAccumulatedStatsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetClientAccumulatedStats", requestType = io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsRequest.class, responseType = io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsResponse> getGetClientAccumulatedStatsMethod() { io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsResponse> getGetClientAccumulatedStatsMethod; if ((getGetClientAccumulatedStatsMethod = LoadBalancerStatsServiceGrpc.getGetClientAccumulatedStatsMethod) == null) { synchronized (LoadBalancerStatsServiceGrpc.class) { if ((getGetClientAccumulatedStatsMethod = LoadBalancerStatsServiceGrpc.getGetClientAccumulatedStatsMethod) == null) { LoadBalancerStatsServiceGrpc.getGetClientAccumulatedStatsMethod = getGetClientAccumulatedStatsMethod = io.grpc.MethodDescriptor.<io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsRequest, io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsResponse>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetClientAccumulatedStats")) .setSampledToLocalTracing(true) 
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( io.grpc.testing.integration.Messages.LoadBalancerAccumulatedStatsResponse.getDefaultInstance())) .setSchemaDescriptor(new LoadBalancerStatsServiceMethodDescriptorSupplier("GetClientAccumulatedStats")) .build(); } } } return getGetClientAccumulatedStatsMethod; } /** * Creates a new async stub that supports all call types for the service */ public static LoadBalancerStatsServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceStub>() { @java.lang.Override public LoadBalancerStatsServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new LoadBalancerStatsServiceStub(channel, callOptions); } }; return LoadBalancerStatsServiceStub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports all types of calls on the service */ public static LoadBalancerStatsServiceBlockingV2Stub newBlockingV2Stub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceBlockingV2Stub>() { @java.lang.Override public LoadBalancerStatsServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new LoadBalancerStatsServiceBlockingV2Stub(channel, callOptions); } }; return LoadBalancerStatsServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static LoadBalancerStatsServiceBlockingStub newBlockingStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceBlockingStub> factory = new 
io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceBlockingStub>() { @java.lang.Override public LoadBalancerStatsServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new LoadBalancerStatsServiceBlockingStub(channel, callOptions); } }; return LoadBalancerStatsServiceBlockingStub.newStub(factory, channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static LoadBalancerStatsServiceFutureStub newFutureStub( io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<LoadBalancerStatsServiceFutureStub>() { @java.lang.Override public LoadBalancerStatsServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new LoadBalancerStatsServiceFutureStub(channel, callOptions); } }; return LoadBalancerStatsServiceFutureStub.newStub(factory, channel); } /** * <pre> * A service used to obtain stats for verifying LB behavior. * </pre> */ public
LoadBalancerStatsServiceGrpc
java
apache__camel
components/camel-wal/src/main/java/org/apache/camel/component/wal/LogReader.java
{ "start": 7173, "end": 7258 }
class ____ { int metadata; int length; byte[] data; } }
Slot
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestAdminHelper.java
{ "start": 1058, "end": 1792 }
class ____ { @Test public void prettifyExceptionWithNpe() { String pretty = AdminHelper.prettifyException(new NullPointerException()); assertTrue(pretty.startsWith("NullPointerException at org.apache.hadoop.hdfs.tools" + ".TestAdminHelper.prettifyExceptionWithNpe"), "Prettified exception message doesn't contain the required exception " + "message"); } @Test public void prettifyException() { String pretty = AdminHelper.prettifyException( new IllegalArgumentException("Something is wrong", new IllegalArgumentException("Something is illegal"))); assertEquals( "IllegalArgumentException: Something is wrong", pretty); } }
TestAdminHelper
java
quarkusio__quarkus
extensions/smallrye-context-propagation/deployment/src/main/java/io/quarkus/smallrye/context/deployment/SmallRyeContextPropagationProcessor.java
{ "start": 21455, "end": 22122 }
class ____ { String[] cleared; String[] propagated; String[] unchanged; ThreadConfig(AnnotationValue cleared, AnnotationValue propagated, AnnotationValue unchanged) { this.cleared = cleared == null ? ThreadContextConfig.Literal.DEFAULT_INSTANCE.cleared() : cleared.asStringArray(); this.propagated = propagated == null ? ThreadContextConfig.Literal.DEFAULT_INSTANCE.propagated() : propagated.asStringArray(); this.unchanged = unchanged == null ? ThreadContextConfig.Literal.DEFAULT_INSTANCE.unchanged() : unchanged.asStringArray(); } } }
ThreadConfig
java
quarkusio__quarkus
integration-tests/opentelemetry-reactive/src/main/java/io/quarkus/it/opentelemetry/reactive/CustomSecurityIdentityAugmentor.java
{ "start": 462, "end": 1918 }
class ____ implements SecurityIdentityAugmentor { @Override public Uni<SecurityIdentity> augment(SecurityIdentity securityIdentity, AuthenticationRequestContext authenticationRequestContext) { return augment(securityIdentity, authenticationRequestContext, Map.of()); } @Override public Uni<SecurityIdentity> augment(SecurityIdentity identity, AuthenticationRequestContext context, Map<String, Object> attributes) { var routingContext = HttpSecurityUtils.getRoutingContextAttribute(attributes); if (routingContext != null) { var augmentorScenario = routingContext.normalizedPath().contains("-augmentor"); var configRolesMappingScenario = routingContext.normalizedPath().contains("roles-mapping-http-perm"); if (augmentorScenario || configRolesMappingScenario) { var builder = QuarkusSecurityIdentity.builder(identity); if (augmentorScenario) { builder.addRole("AUGMENTOR"); } if (configRolesMappingScenario) { // this role is supposed to be re-mapped by HTTP roles mapping (not path-specific) builder.addRole("ROLES-ALLOWED-MAPPING-ROLE"); } return Uni.createFrom().item(builder.build()); } } return Uni.createFrom().item(identity); } }
CustomSecurityIdentityAugmentor
java
google__gson
gson/src/test/java/com/google/gson/functional/ObjectTest.java
{ "start": 24463, "end": 24596 }
class ____ { static String s = "initial"; } @SuppressWarnings("PrivateConstructorForUtilityClass") static
ClassWithStaticField
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/registry/classloading/internal/ClassLoaderServiceImpl.java
{ "start": 2441, "end": 2563 }
class ____ orderedClassLoaderSet.add( ClassLoaderServiceImpl.class.getClassLoader() ); // now build the aggregated
loader
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/bidirectional/Person.java
{ "start": 547, "end": 1640 }
class ____ implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.AUTO) private Integer id; @Basic private String name; @OneToOne(mappedBy = "id") private PersonInfo personInfo; public Integer getId() { return this.id; } public void setId(Integer id) { this.id = id; } public String getName() { return this.name; } public void setName(String name) { this.name = name; } public int hashCode() { int hash = 0; hash += ( this.id != null ? this.id.hashCode() : 0 ); return hash; } public boolean equals(Object object) { if ( !( object instanceof Person ) ) { return false; } Person other = (Person) object; return ( ( this.id != null ) || ( other.id == null ) ) && ( ( this.id == null ) || ( this.id.equals( other.id ) ) ); } public String toString() { return "nogroup.hibertest.Person[ id=" + this.id + " ]"; } public PersonInfo getPersonInfo() { return this.personInfo; } public void setPersonInfo(PersonInfo personInfo) { this.personInfo = personInfo; } }
Person
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/support/BasePoolConfig.java
{ "start": 273, "end": 2182 }
class ____ { /** * The default value for the {@code testOnCreate} configuration attribute. */ public static final boolean DEFAULT_TEST_ON_CREATE = false; /** * The default value for the {@code testOnAcquire} configuration attribute. */ public static final boolean DEFAULT_TEST_ON_ACQUIRE = false; /** * The default value for the {@code testOnRelease} configuration attribute. */ public static final boolean DEFAULT_TEST_ON_RELEASE = false; private final boolean testOnCreate; private final boolean testOnAcquire; private final boolean testOnRelease; protected BasePoolConfig(boolean testOnCreate, boolean testOnAcquire, boolean testOnRelease) { this.testOnCreate = testOnCreate; this.testOnAcquire = testOnAcquire; this.testOnRelease = testOnRelease; } /** * Get the value for the {@code testOnCreate} configuration attribute for pools created with this configuration instance. * * @return the current setting of {@code testOnCreate} for this configuration instance. */ public boolean isTestOnCreate() { return testOnCreate; } /** * Get the value for the {@code testOnAcquire} configuration attribute for pools created with this configuration instance. * * @return the current setting of {@code testOnAcquire} for this configuration instance. */ public boolean isTestOnAcquire() { return testOnAcquire; } /** * Get the value for the {@code testOnRelease} configuration attribute for pools created with this configuration instance. * * @return the current setting of {@code testOnRelease} for this configuration instance. */ public boolean isTestOnRelease() { return testOnRelease; } /** * Builder for {@link BasePoolConfig}. */ public abstract static
BasePoolConfig
java
apache__camel
components/camel-netty/src/main/java/org/apache/camel/component/netty/handlers/ServerResponseFutureListener.java
{ "start": 1421, "end": 3354 }
class ____ implements ChannelFutureListener { private static final Logger LOG = LoggerFactory.getLogger(ServerResponseFutureListener.class); private final NettyConsumer consumer; private final Exchange exchange; private final SocketAddress remoteAddress; public ServerResponseFutureListener(NettyConsumer consumer, Exchange exchange, SocketAddress remoteAddress) { this.consumer = consumer; this.exchange = exchange; this.remoteAddress = remoteAddress; } @Override public void operationComplete(ChannelFuture future) throws Exception { // if it was not a success then thrown an exception if (!future.isSuccess()) { Exception e = new CamelExchangeException("Cannot write response to " + remoteAddress, exchange, future.cause()); consumer.getExceptionHandler().handleException(e); } // should channel be closed after complete? Boolean close; if (exchange.hasOut()) { close = exchange.getOut().getHeader(NettyConstants.NETTY_CLOSE_CHANNEL_WHEN_COMPLETE, Boolean.class); } else { close = exchange.getIn().getHeader(NettyConstants.NETTY_CLOSE_CHANNEL_WHEN_COMPLETE, Boolean.class); } // check the setting on the exchange property if (close == null) { close = exchange.getProperty(NettyConstants.NETTY_CLOSE_CHANNEL_WHEN_COMPLETE, Boolean.class); } // should we disconnect, the header can override the configuration boolean disconnect = consumer.getConfiguration().isDisconnect(); if (close != null) { disconnect = close; } if (disconnect) { if (LOG.isTraceEnabled()) { LOG.trace("Closing channel when complete at address: {}", remoteAddress); } NettyHelper.close(future.channel()); } } }
ServerResponseFutureListener
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/DefaultCharsetTest.java
{ "start": 1945, "end": 2837 }
class ____ { void f(String s, byte[] b, OutputStream out, InputStream in) throws Exception { // BUG: Diagnostic contains: s.getBytes(UTF_8); s.getBytes(); // BUG: Diagnostic contains: new String(b, UTF_8); new String(b); // BUG: Diagnostic contains: new String(b, 0, 0, UTF_8); new String(b, 0, 0); // BUG: Diagnostic contains: new OutputStreamWriter(out, UTF_8); new OutputStreamWriter(out); // BUG: Diagnostic contains: new InputStreamReader(in, UTF_8); new InputStreamReader(in); } } """) .doTest(); } @Test public void reader() { compilationHelper .addSourceLines( "Test.java", """ import java.io.*;
Test
java
apache__flink
flink-tests/src/test/java/org/apache/flink/test/state/ChangelogRecoveryCachingITCase.java
{ "start": 10127, "end": 11715 }
class ____ extends LocalFileSystem { private final Set<Path> openedPaths = new HashSet<>(); @Override public FSDataInputStream open(Path f) throws IOException { Assert.assertTrue(f + " was already opened", openedPaths.add(f)); return super.open(f); } @Override public boolean isDistributedFS() { return true; } private boolean hasOpenedPaths() { return !openedPaths.isEmpty(); } } private static void registerFileSystem(FileSystem fs, String scheme) { FileSystem.initialize( new Configuration(), new TestingPluginManager( singletonMap( FileSystemFactory.class, Collections.singleton( new FileSystemFactory() { @Override public FileSystem create(URI fsUri) { return fs; } @Override public String getScheme() { return scheme; } }) .iterator()))); } }
OpenOnceFileSystem
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ZendeskEndpointBuilderFactory.java
{ "start": 1442, "end": 1577 }
interface ____ { /** * Builder for endpoint consumers for the Zendesk component. */ public
ZendeskEndpointBuilderFactory
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/ArrayAggFunctionTest.java
{ "start": 4320, "end": 4812 }
class ____ extends NumberArrayAggFunctionTestBase<Float> { @Override protected Float getValue(String v) { return Float.valueOf(v); } @Override protected AggregateFunction<ArrayData, ArrayAggFunction.ArrayAggAccumulator<Float>> getAggregator() { return new ArrayAggFunction<>(DataTypes.FLOAT().getLogicalType(), true); } } /** Test for {@link DoubleType}. */ @Nested final
FloatArrayAggTest
java
apache__hadoop
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputPolicy.java
{ "start": 2424, "end": 5270 }
enum ____ { Normal(FS_OPTION_OPENFILE_READ_POLICY_DEFAULT, false, true), Random(FS_OPTION_OPENFILE_READ_POLICY_RANDOM, true, false), Sequential(FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL, false, false); /** Policy name. */ private final String policy; /** Is this random IO? */ private final boolean randomIO; /** Is this an adaptive policy? */ private final boolean adaptive; S3AInputPolicy(String policy, boolean randomIO, boolean adaptive) { this.policy = policy; this.randomIO = randomIO; this.adaptive = adaptive; } @Override public String toString() { return policy; } String getPolicy() { return policy; } boolean isRandomIO() { return randomIO; } boolean isAdaptive() { return adaptive; } /** * Choose an access policy. * @param name strategy name from a configuration option, etc. * @param defaultPolicy default policy to fall back to. * @return the chosen strategy or null if there was no match and * the value of {@code defaultPolicy} was "null". */ public static S3AInputPolicy getPolicy( String name, @Nullable S3AInputPolicy defaultPolicy) { String trimmed = name.trim().toLowerCase(Locale.ENGLISH); switch (trimmed) { case FS_OPTION_OPENFILE_READ_POLICY_ADAPTIVE: case FS_OPTION_OPENFILE_READ_POLICY_DEFAULT: case Constants.INPUT_FADV_NORMAL: return Normal; // all these options currently map to random IO. case FS_OPTION_OPENFILE_READ_POLICY_HBASE: case FS_OPTION_OPENFILE_READ_POLICY_RANDOM: case FS_OPTION_OPENFILE_READ_POLICY_VECTOR: return Random; // columnar formats currently map to random IO, // though in future this may be enhanced. case FS_OPTION_OPENFILE_READ_POLICY_COLUMNAR: case FS_OPTION_OPENFILE_READ_POLICY_ORC: case FS_OPTION_OPENFILE_READ_POLICY_PARQUET: return Random; // handle the sequential formats. 
case FS_OPTION_OPENFILE_READ_POLICY_AVRO: case FS_OPTION_OPENFILE_READ_POLICY_CSV: case FS_OPTION_OPENFILE_READ_POLICY_JSON: case FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL: case FS_OPTION_OPENFILE_READ_POLICY_WHOLE_FILE: return Sequential; default: return defaultPolicy; } } /** * Scan the list of input policies, returning the first one supported. * @param policies list of policies. * @param defaultPolicy fallback * @return a policy or the defaultPolicy, which may be null */ public static S3AInputPolicy getFirstSupportedPolicy( Collection<String> policies, @Nullable S3AInputPolicy defaultPolicy) { for (String s : policies) { S3AInputPolicy nextPolicy = S3AInputPolicy.getPolicy(s, null); if (nextPolicy != null) { return nextPolicy; } } return defaultPolicy; } }
S3AInputPolicy
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
{ "start": 1656, "end": 1757 }
class ____ GZ compression codec, base class * of none and LZO compression classes. * */ public
using
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/support/component/AbstractApiProducer.java
{ "start": 1288, "end": 1338 }
class ____ API based Producers */ public abstract
for
java
quarkusio__quarkus
independent-projects/qute/core/src/test/java/io/quarkus/qute/MapResolverTest.java
{ "start": 186, "end": 1170 }
class ____ { @Test public void tesMapResolver() { Map<String, String> map = new LinkedHashMap<>(); map.put("name", "Lu"); map.put("foo.bar", "Ondrej"); map.put("foo and bar", "Bug"); Engine engine = Engine.builder().addDefaults().build(); assertEquals("Lu,Lu,3,false,true,[name,foo.bar,foo and bar],Ondrej,Ondrej,Bug", engine.parse( "{this.name}," + "{this['name']}," + "{this.size}," + "{this.empty}," + "{this.containsKey('name')}," + "[{#each this.keys}{it}{#if it_hasNext},{/if}{/each}]," + "{this.get('foo.bar')}," + "{this['foo.bar']}," + "{this['foo and bar']}") .render(map)); } }
MapResolverTest
java
apache__thrift
lib/java/src/main/java/org/apache/thrift/TNonblockingMultiFetchClient.java
{ "start": 6246, "end": 6388 }
class ____ does real fetch job. Users are not allowed to directly use this class, as * its run() function may run forever. */ private
that
java
apache__maven
compat/maven-compat/src/main/java/org/apache/maven/repository/DelegatingLocalArtifactRepository.java
{ "start": 1504, "end": 5531 }
class ____ extends MavenArtifactRepository { private LocalArtifactRepository buildReactor; private LocalArtifactRepository ideWorkspace; private ArtifactRepository userLocalArtifactRepository; public DelegatingLocalArtifactRepository(ArtifactRepository artifactRepository) { this.userLocalArtifactRepository = artifactRepository; } public void setBuildReactor(LocalArtifactRepository localRepository) { this.buildReactor = localRepository; } public void setIdeWorkspace(LocalArtifactRepository localRepository) { this.ideWorkspace = localRepository; } /** * @deprecated instead use {@link #getIdeWorkspace()} */ @Deprecated public LocalArtifactRepository getIdeWorspace() { return ideWorkspace; } public LocalArtifactRepository getIdeWorkspace() { return getIdeWorspace(); } @Override public Artifact find(Artifact artifact) { if (!artifact.isRelease() && buildReactor != null) { artifact = buildReactor.find(artifact); } if (!artifact.isResolved() && ideWorkspace != null) { artifact = ideWorkspace.find(artifact); } if (!artifact.isResolved()) { artifact = userLocalArtifactRepository.find(artifact); } return artifact; } @Override public List<String> findVersions(Artifact artifact) { Collection<String> versions = new LinkedHashSet<>(); if (buildReactor != null) { versions.addAll(buildReactor.findVersions(artifact)); } if (ideWorkspace != null) { versions.addAll(ideWorkspace.findVersions(artifact)); } versions.addAll(userLocalArtifactRepository.findVersions(artifact)); return Collections.unmodifiableList(new ArrayList<>(versions)); } @Override public String pathOfLocalRepositoryMetadata(ArtifactMetadata metadata, ArtifactRepository repository) { return userLocalArtifactRepository.pathOfLocalRepositoryMetadata(metadata, repository); } @Override public String getId() { return userLocalArtifactRepository.getId(); } @Override public String pathOf(Artifact artifact) { return userLocalArtifactRepository.pathOf(artifact); } @Override public String getBasedir() { return 
(userLocalArtifactRepository != null) ? userLocalArtifactRepository.getBasedir() : null; } @Override public ArtifactRepositoryLayout getLayout() { return userLocalArtifactRepository.getLayout(); } @Override public ArtifactRepositoryPolicy getReleases() { return userLocalArtifactRepository.getReleases(); } @Override public ArtifactRepositoryPolicy getSnapshots() { return userLocalArtifactRepository.getSnapshots(); } @Override public String getKey() { return userLocalArtifactRepository.getKey(); } @Override public String getUrl() { return userLocalArtifactRepository.getUrl(); } @Override public int hashCode() { int hash = 17; hash = hash * 31 + (buildReactor == null ? 0 : buildReactor.hashCode()); hash = hash * 31 + (ideWorkspace == null ? 0 : ideWorkspace.hashCode()); hash = hash * 31 + (userLocalArtifactRepository == null ? 0 : userLocalArtifactRepository.hashCode()); return hash; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } DelegatingLocalArtifactRepository other = (DelegatingLocalArtifactRepository) obj; return eq(buildReactor, other.buildReactor) && eq(ideWorkspace, other.ideWorkspace) && eq(userLocalArtifactRepository, other.userLocalArtifactRepository); } }
DelegatingLocalArtifactRepository
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
{ "start": 681, "end": 1157 }
interface ____ extends VersionedNamedWriteable { Set<String> RESERVED_ML_FIELD_NAMES = new HashSet<>( Arrays.asList(WarningInferenceResults.WARNING.getPreferredName(), TrainedModelConfig.MODEL_ID.getPreferredName()) ); boolean isSupported(InferenceConfig config); /** * Is this an empty update. * @return True if empty */ default boolean isEmpty() { return false; } String getResultsField();
InferenceConfigUpdate
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/schemafilter/SchemaFilterTest.java
{ "start": 5918, "end": 6169 }
class ____ { @Id private long id; public long getId() { return id; } public void setId( long id ) { this.id = id; } } @Entity @jakarta.persistence.Table(name = "the_entity_2", schema = "the_schema_1") public static
Schema1Entity1
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncEndpointRecipientListBeanTest.java
{ "start": 2970, "end": 3111 }
class ____ { @RecipientList public String doSomething() { return "async:bye:camel"; } } }
MyRecipientBean
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/builder/CompareToBuilderTest.java
{ "start": 47086, "end": 51226 }
class ____(to1, to2, to3, true, null); // subclass assertXYZCompareOrder(tso1, tso2, tso3, true, null); } @Test void testReflectionHierarchyCompareExcludeFields() { final String[] excludeFields = { "b" }; testReflectionHierarchyCompare(true, excludeFields); TestSubObject x; TestSubObject y; TestSubObject z; x = new TestSubObject(1, 1); y = new TestSubObject(2, 1); z = new TestSubObject(3, 1); assertXYZCompareOrder(x, y, z, true, excludeFields); x = new TestSubObject(1, 3); y = new TestSubObject(2, 2); z = new TestSubObject(3, 1); assertXYZCompareOrder(x, y, z, true, excludeFields); } @Test void testReflectionHierarchyCompareTransients() { testReflectionHierarchyCompare(true, null); TestTransientSubObject x; TestTransientSubObject y; TestTransientSubObject z; x = new TestTransientSubObject(1, 1); y = new TestTransientSubObject(2, 2); z = new TestTransientSubObject(3, 3); assertXYZCompareOrder(x, y, z, true, null); x = new TestTransientSubObject(1, 1); y = new TestTransientSubObject(1, 2); z = new TestTransientSubObject(1, 3); assertXYZCompareOrder(x, y, z, true, null); } @Test void testShort() { final short o1 = 1; final short o2 = 2; assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison()); assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0); assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0); assertTrue(new CompareToBuilder().append(o1, Short.MAX_VALUE).toComparison() < 0); assertTrue(new CompareToBuilder().append(Short.MAX_VALUE, o1).toComparison() > 0); assertTrue(new CompareToBuilder().append(o1, Short.MIN_VALUE).toComparison() > 0); assertTrue(new CompareToBuilder().append(Short.MIN_VALUE, o1).toComparison() < 0); } @Test void testShortArray() { final short[] obj1 = new short[2]; obj1[0] = 5; obj1[1] = 6; final short[] obj2 = new short[2]; obj2[0] = 5; obj2[1] = 6; final short[] obj3 = new short[3]; obj3[0] = 5; obj3[1] = 6; obj3[2] = 7; assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison()); 
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison()); assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0); assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0); obj1[1] = 7; assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0); assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0); assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0); assertEquals(0, new CompareToBuilder().append((short[]) null, null).toComparison()); assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0); } @Test void testShortArrayHiddenByObject() { final short[] array1 = new short[2]; array1[0] = 5; array1[1] = 6; final short[] array2 = new short[2]; array2[0] = 5; array2[1] = 6; final short[] array3 = new short[3]; array3[0] = 5; array3[1] = 6; array3[2] = 7; final Object obj1 = array1; final Object obj2 = array2; final Object obj3 = array3; assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison()); assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison()); assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0); assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0); array1[1] = 7; assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0); assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0); } }
assertXYZCompareOrder
java
google__dagger
javatests/dagger/hilt/android/EarlyEntryPointCustomApplicationTest.java
{ "start": 1936, "end": 1998 }
interface ____ { Foo foo(); } public static
FooEntryPoint
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/asm/ClassVisitor.java
{ "start": 8787, "end": 9304 }
class ____ of the annotation class. * @param visible {@literal true} if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or {@literal null} if this visitor is not * interested in visiting this annotation. */ public AnnotationVisitor visitAnnotation(final String descriptor, final boolean visible) { if (cv != null) { return cv.visitAnnotation(descriptor, visible); } return null; } /** * Visits an annotation on a type in the
descriptor
java
apache__hadoop
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditorFlags.java
{ "start": 933, "end": 1036 }
enum ____ { /** * Are out of band operations allowed? */ PermitOutOfBandOperations }
AuditorFlags
java
elastic__elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Authenticator.java
{ "start": 4264, "end": 10941 }
class ____ implements Closeable { private final ThreadContext threadContext; private final AuthenticationService.AuditableRequest request; private final User fallbackUser; private final boolean allowAnonymous; private final boolean extractCredentials; private final Realms realms; private final List<AuthenticationToken> authenticationTokens; private final List<String> unsuccessfulMessages = new ArrayList<>(); private boolean handleNullToken = true; private SecureString bearerString = null; private SecureString apiKeyString = null; private List<Realm> defaultOrderedRealmList = null; private List<Realm> unlicensedRealms = null; /** * Context constructor that provides the authentication token directly as an argument. * This avoids extracting any tokens from the thread context, which is the regular way that authn works. * In this case, the authentication process will simply verify the provided token, and will never fall back to the null-token case * (i.e. in case the token CAN NOT be verified, the user IS NOT authenticated as the anonymous or the fallback user, and * instead the authentication process fails, see {@link AuthenticatorChain#doAuthenticate}). If a {@code null} token is provided * the authentication will invariably fail. */ Context( ThreadContext threadContext, AuthenticationService.AuditableRequest request, Realms realms, @Nullable AuthenticationToken token ) { this.threadContext = threadContext; this.request = request; this.realms = realms; // when a token is directly supplied for authn, don't extract other tokens, and don't handle the null-token case this.authenticationTokens = token != null ? List.of(token) : List.of(); // no other tokens should be added this.extractCredentials = false; this.handleNullToken = false; // if handleNullToken is false, fallbackUser and allowAnonymous are irrelevant this.fallbackUser = null; this.allowAnonymous = false; } /** * Context constructor where authentication looks for credentials in the thread context. 
*/ public Context( ThreadContext threadContext, AuthenticationService.AuditableRequest request, User fallbackUser, boolean allowAnonymous, Realms realms ) { this.threadContext = threadContext; this.request = request; this.extractCredentials = true; // the extracted tokens, in order, for each {@code Authenticator} this.authenticationTokens = new ArrayList<>(); this.fallbackUser = fallbackUser; this.allowAnonymous = allowAnonymous; this.realms = realms; } public ThreadContext getThreadContext() { return threadContext; } public AuthenticationService.AuditableRequest getRequest() { return request; } public User getFallbackUser() { return fallbackUser; } public boolean isAllowAnonymous() { return allowAnonymous; } public void setHandleNullToken(boolean value) { handleNullToken = value; } public boolean shouldHandleNullToken() { return handleNullToken; } /** * Returns {@code true}, if {@code Authenticator}s should first be tried in order to extract the credentials token * from the thread context. The extracted tokens are appended to this authenticator context with * {@link #addAuthenticationToken(AuthenticationToken)}. * If {@code false}, the credentials token is directly passed in to this authenticator context, and the authenticators * themselves are only consulted to authenticate the token, and never to extract any tokens from the thread context. */ public boolean shouldExtractCredentials() { return extractCredentials; } public List<String> getUnsuccessfulMessages() { return unsuccessfulMessages; } public void addAuthenticationToken(AuthenticationToken authenticationToken) { authenticationTokens.add(authenticationToken); } @Nullable public AuthenticationToken getMostRecentAuthenticationToken() { return authenticationTokens.isEmpty() ? 
null : authenticationTokens.get(authenticationTokens.size() - 1); } public SecureString getBearerString() { if (bearerString == null) { bearerString = extractBearerTokenFromHeader(threadContext); } return bearerString; } public SecureString getApiKeyString() { if (apiKeyString == null) { apiKeyString = extractApiKeyFromHeader(threadContext); } return apiKeyString; } public List<Realm> getDefaultOrderedRealmList() { if (defaultOrderedRealmList == null) { defaultOrderedRealmList = realms.getActiveRealms(); } return defaultOrderedRealmList; } public List<Realm> getUnlicensedRealms() { if (unlicensedRealms == null) { unlicensedRealms = realms.getUnlicensedRealms(); } return unlicensedRealms; } public void addUnsuccessfulMessage(String message) { unsuccessfulMessages.add(message); } @Override public void close() throws IOException { authenticationTokens.forEach(AuthenticationToken::clearCredentials); } public void addUnsuccessfulMessageToMetadata(final ElasticsearchSecurityException ese) { if (false == getUnsuccessfulMessages().isEmpty()) { ese.addMetadata("es.additional_unsuccessful_credentials", getUnsuccessfulMessages()); } } @Override public String toString() { return Strings.format( "%s{tokens=%s, messages=%s}", getClass().getSimpleName(), this.authenticationTokens.stream() .map(t -> t.getClass().getSimpleName() + ":" + t.principal()) .collect(Collectors.joining(",", "[", "]")), this.unsuccessfulMessages ); } } }
Context
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/serializer/prettyFormat/ArrayListFieldTest.java
{ "start": 713, "end": 903 }
class ____ { private final List<Entity> entries = new ArrayList<Entity>(); public List<Entity> getEntries() { return entries; } } public static
VO
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/issues/Issue170Test.java
{ "start": 1023, "end": 2064 }
class ____ extends ContextTestSupport { protected final String qOne = "seda:Q1"; protected final String qTwo = "mock:Q2"; protected final String qThree = "mock:Q3"; @Test public void testSendMessagesGetCorrectCounts() throws Exception { MockEndpoint q2 = getMockEndpoint(qTwo); MockEndpoint q3 = getMockEndpoint(qThree); String body1 = "<message id='1'/>"; String body2 = "<message id='2'/>"; q2.expectedBodiesReceived(body1, body2); q3.expectedBodiesReceived(body1, body2); template.sendBodyAndHeader("direct:start", body1, "counter", 1); template.sendBodyAndHeader("direct:start", body2, "counter", 2); assertMockEndpointsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("direct:start").to(qOne); from(qOne).to(qTwo, qThree); // write to Q3 but not to Q2 } }; } }
Issue170Test
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/jmx/ITestBean.java
{ "start": 667, "end": 758 }
interface ____ { void setName(String name) throws Exception; String getName(); }
ITestBean
java
google__dagger
javatests/dagger/hilt/processor/internal/definecomponent/DefineComponentProcessorTest.java
{ "start": 4481, "end": 5217 }
class ____ {}"); HiltCompilerTests.hiltCompiler(component) .compile( subject -> { subject.hasErrorCount(1); subject.hasErrorContaining( "@DefineComponent is only allowed on interfaces. Found: test.FooComponent"); }); } @Test public void testDefineComponentWithTypeParameters_fails() { Source component = HiltCompilerTests.javaSource( "test.FooComponent", "package test;", "", "import dagger.hilt.components.SingletonComponent;", "import dagger.hilt.DefineComponent;", "", "@DefineComponent( parent = SingletonComponent.class )", "
FooComponent
java
spring-projects__spring-boot
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/web/WebEndpointHttpMethod.java
{ "start": 814, "end": 979 }
enum ____ { /** * An HTTP GET request. */ GET, /** * An HTTP POST request. */ POST, /** * An HTTP DELETE request. */ DELETE }
WebEndpointHttpMethod
java
elastic__elasticsearch
x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java
{ "start": 1237, "end": 7245 }
class ____ extends RRFRetrieverBuilderIT { private static final String LAST_30D_FIELD = "views.last30d"; private static final String ALL_TIME_FIELD = "views.all"; @Override protected void setupIndex() { String mapping = """ { "properties": { "vector": { "type": "dense_vector", "dims": 1, "element_type": "float", "similarity": "l2_norm", "index": true, "index_options": { "type": "hnsw" } }, "text": { "type": "text" }, "doc": { "type": "keyword" }, "topic": { "type": "keyword" }, "views": { "type": "nested", "properties": { "last30d": { "type": "integer" }, "all": { "type": "integer" } } } } } """; createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)).build()); admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term", LAST_30D_FIELD, 100); indexDoc( INDEX, "doc_2", DOC_FIELD, "doc_2", TOPIC_FIELD, "astronomy", TEXT_FIELD, "search term term", VECTOR_FIELD, new float[] { 2.0f }, LAST_30D_FIELD, 3 ); indexDoc(INDEX, "doc_3", DOC_FIELD, "doc_3", TOPIC_FIELD, "technology", VECTOR_FIELD, new float[] { 3.0f }); indexDoc( INDEX, "doc_4", DOC_FIELD, "doc_4", TOPIC_FIELD, "technology", TEXT_FIELD, "term term term term", ALL_TIME_FIELD, 100, LAST_30D_FIELD, 40 ); indexDoc(INDEX, "doc_5", DOC_FIELD, "doc_5", TOPIC_FIELD, "science", TEXT_FIELD, "irrelevant stuff"); indexDoc( INDEX, "doc_6", DOC_FIELD, "doc_6", TEXT_FIELD, "search term term term term term term", VECTOR_FIELD, new float[] { 6.0f }, LAST_30D_FIELD, 15 ); indexDoc( INDEX, "doc_7", DOC_FIELD, "doc_7", TOPIC_FIELD, "biology", TEXT_FIELD, "term term term term term term term", VECTOR_FIELD, new float[] { 7.0f }, ALL_TIME_FIELD, 1000 ); refresh(INDEX); } public void testRRFRetrieverWithNestedQuery() { final int rankWindowSize = 100; final int rankConstant = 10; SearchSourceBuilder source = new SearchSourceBuilder(); // this one retrieves docs 1 
StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gte(50L), ScoreMode.Avg) ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( QueryBuilders.boolQuery() .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); // this one retrieves docs 6 KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( VECTOR_FIELD, new float[] { 6.0f }, null, 1, 100, null, null, null ); source.retriever( new RRFRetrieverBuilder( Arrays.asList( new CompoundRetrieverBuilder.RetrieverSource(standard0, null), new CompoundRetrieverBuilder.RetrieverSource(standard1, null), new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) ), rankWindowSize, rankConstant ) ); source.fetchField(TOPIC_FIELD); source.explain(true); SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); assertThat(resp.getHits().getTotalHits().value(), equalTo(3L)); assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat((double) resp.getHits().getAt(0).getScore(), closeTo(0.1742, 1e-4)); assertThat( Arrays.stream(resp.getHits().getHits()).skip(1).map(SearchHit::getId).toList(), containsInAnyOrder("doc_1", "doc_2") ); assertThat((double) resp.getHits().getAt(1).getScore(), closeTo(0.0909, 1e-4)); assertThat((double) 
resp.getHits().getAt(2).getScore(), closeTo(0.0909, 1e-4)); }); } }
RRFRetrieverBuilderNestedDocsIT
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/uniqueconstraint/UniqueConstraintBatchingTest.java
{ "start": 1320, "end": 2723 }
class ____ { @RegisterExtension public LoggerInspectionExtension logInspection = LoggerInspectionExtension.builder().setLogger( SQLExceptionLogging.ERROR_LOG ).build(); private Triggerable triggerable; @BeforeEach public void setUp() { triggerable = logInspection.watchForLogMessages( "Unique index" ); triggerable.reset(); } @Test public void testBatching(EntityManagerFactoryScope scope) throws Exception { Room livingRoom = new Room(); scope.inTransaction( entityManager -> { livingRoom.setId( 1L ); livingRoom.setName( "livingRoom" ); entityManager.persist( livingRoom ); } ); scope.inTransaction( entityManager -> { House house = new House(); house.setId( 1L ); house.setCost( 100 ); house.setHeight( 1000L ); house.setRoom( livingRoom ); entityManager.persist( house ); } ); try { scope.inTransaction( entityManager -> { House house2 = new House(); house2.setId( 2L ); house2.setCost( 100 ); house2.setHeight( 1001L ); house2.setRoom( livingRoom ); entityManager.persist( house2 ); } ); fail( "Should throw exception" ); } catch (PersistenceException e) { assertEquals( 1, triggerable.triggerMessages().size() ); assertTrue( triggerable.triggerMessage().startsWith( "Unique index or primary key violation" ) ); } } }
UniqueConstraintBatchingTest
java
grpc__grpc-java
services/src/generated/main/grpc/io/grpc/reflection/v1/ServerReflectionGrpc.java
{ "start": 12056, "end": 12232 }
class ____ extends ServerReflectionBaseDescriptorSupplier { ServerReflectionFileDescriptorSupplier() {} } private static final
ServerReflectionFileDescriptorSupplier
java
micronaut-projects__micronaut-core
http-client-tck/src/main/java/io/micronaut/http/client/tck/tests/FormUrlEncodedTest.java
{ "start": 1967, "end": 2235 }
class ____ { @Post("/submit") @Consumes(MediaType.APPLICATION_FORM_URLENCODED) @Produces(MediaType.TEXT_HTML) String submit(@Body Map<String, String> form) { return form.get("firstName"); } } }
EncodingTestController
java
apache__camel
components/camel-platform-http-vertx/src/test/java/org/apache/camel/component/platform/http/vertx/model/Category.java
{ "start": 1001, "end": 1182 }
class ____ adhere to the schema defined in the Pet Store OpenAPI specification JSON / YAML. */ @JsonInclude(JsonInclude.Include.NON_NULL) @XmlRootElement(name = "Category") public
must
java
square__retrofit
retrofit/src/main/java/retrofit2/ParameterHandler.java
{ "start": 8750, "end": 9496 }
class ____<T> extends ParameterHandler<T> { private final String name; private final Converter<T, String> valueConverter; private final boolean encoded; Field(String name, Converter<T, String> valueConverter, boolean encoded) { this.name = Objects.requireNonNull(name, "name == null"); this.valueConverter = valueConverter; this.encoded = encoded; } @Override void apply(RequestBuilder builder, @Nullable T value) throws IOException { if (value == null) return; // Skip null values. String fieldValue = valueConverter.convert(value); if (fieldValue == null) return; // Skip null converted values builder.addFormField(name, fieldValue, encoded); } } static final
Field
java
apache__kafka
streams/src/test/java/org/apache/kafka/streams/processor/internals/assignment/GraphTest.java
{ "start": 1475, "end": 18822 }
class ____ { private Graph<Integer> graph; @BeforeEach public void setUp() { /* * Node 0 and 2 are both connected to node 1 and 3. There's a flow of 1 unit from 0 to 1 and 2 to * 3. The total cost in this case is 5. Min cost should be 2 by flowing 1 unit from 0 to 3 and 2 * to 1 */ graph = new Graph<>(); graph.addEdge(0, 1, 1, 3, 1); graph.addEdge(0, 3, 1, 1, 0); graph.addEdge(2, 1, 1, 1, 0); graph.addEdge(2, 3, 1, 2, 1); graph.addEdge(-1, 0, 1, 0, 1); graph.addEdge(-1, 2, 1, 0, 1); graph.addEdge(1, 99, 1, 0, 1); graph.addEdge(3, 99, 1, 0, 1); graph.setSourceNode(-1); graph.setSinkNode(99); } @Test public void testBasic() { final Set<Integer> nodes = graph.nodes(); assertEquals(6, nodes.size()); assertThat(nodes, contains(-1, 0, 1, 2, 3, 99)); Map<Integer, Graph<Integer>.Edge> edges = graph.edges(0); assertEquals(2, edges.size()); assertEquals(getEdge(1, 1, 3, 0, 1), edges.get(1)); assertEquals(getEdge(3, 1, 1, 1, 0), edges.get(3)); edges = graph.edges(2); assertEquals(2, edges.size()); assertEquals(getEdge(1, 1, 1, 1, 0), edges.get(1)); assertEquals(getEdge(3, 1, 2, 0, 1), edges.get(3)); edges = graph.edges(1); assertEquals(1, edges.size()); assertEquals(getEdge(99, 1, 0, 0, 1), edges.get(99)); edges = graph.edges(3); assertEquals(1, edges.size()); assertEquals(getEdge(99, 1, 0, 0, 1), edges.get(99)); edges = graph.edges(-1); assertEquals(2, edges.size()); assertEquals(getEdge(0, 1, 0, 0, 1), edges.get(0)); assertEquals(getEdge(2, 1, 0, 0, 1), edges.get(2)); edges = graph.edges(99); assertTrue(edges.isEmpty()); assertFalse(graph.isResidualGraph()); } @Test public void testResidualGraph() { final Graph<Integer> residualGraph = graph.residualGraph(); final Graph<Integer> residualGraph1 = residualGraph.residualGraph(); assertSame(residualGraph1, residualGraph); final Set<Integer> nodes = residualGraph.nodes(); assertEquals(6, nodes.size()); assertThat(nodes, contains(-1, 0, 1, 2, 3, 99)); Map<Integer, Graph<Integer>.Edge> edges = residualGraph.edges(0); 
assertEquals(3, edges.size()); assertEquals(getEdge(1, 1, 3, 0, 1), edges.get(1)); assertEquals(getEdge(3, 1, 1, 1, 0), edges.get(3)); assertEquals(getEdge(-1, 1, 0, 1, 0, false), edges.get(-1)); edges = residualGraph.edges(2); assertEquals(3, edges.size()); assertEquals(getEdge(1, 1, 1, 1, 0), edges.get(1)); assertEquals(getEdge(3, 1, 2, 0, 1), edges.get(3)); assertEquals(getEdge(-1, 1, 0, 1, 0, false), edges.get(-1)); edges = residualGraph.edges(1); assertEquals(3, edges.size()); assertEquals(getEdge(0, 1, -3, 1, 0, false), edges.get(0)); assertEquals(getEdge(2, 1, -1, 0, 0, false), edges.get(2)); assertEquals(getEdge(99, 1, 0, 0, 1), edges.get(99)); edges = residualGraph.edges(3); assertEquals(3, edges.size()); assertEquals(getEdge(0, 1, -1, 0, 0, false), edges.get(0)); assertEquals(getEdge(2, 1, -2, 1, 0, false), edges.get(2)); assertEquals(getEdge(99, 1, 0, 0, 1), edges.get(99)); assertTrue(residualGraph.isResidualGraph()); } @Test public void testInvalidOperation() { final Graph<Integer> graph1 = new Graph<>(); Exception exception = assertThrows(IllegalArgumentException.class, () -> graph1.addEdge(0, 1, -1, 0, 0)); assertEquals("Edge capacity cannot be negative", exception.getMessage()); exception = assertThrows(IllegalArgumentException.class, () -> graph1.addEdge(0, 1, 1, 0, 2)); assertEquals("Edge flow 2 cannot exceed capacity 1", exception.getMessage()); graph1.addEdge(0, 1, 1, 1, 1); exception = assertThrows(IllegalArgumentException.class, () -> graph1.addEdge(1, 0, 1, 0, 0)); assertEquals("There is already an edge from 0 to 1. 
Can not add an edge from 1 to 0 since " + "there will create a cycle between two nodes", exception.getMessage()); final Graph<Integer> residualGraph = graph1.residualGraph(); exception = assertThrows(IllegalStateException.class, residualGraph::solveMinCostFlow); assertEquals("Should not be residual graph to solve min cost flow", exception.getMessage()); } @Test public void testInvalidSource() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 0); graph1.addEdge(1, 2, 1, 1, 0); graph1.setSourceNode(1); graph1.setSinkNode(2); final Exception exception = assertThrows(IllegalStateException.class, graph1::solveMinCostFlow); assertEquals("Source node 1 shouldn't have input 0", exception.getMessage()); } @Test public void testInvalidSink() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 0); graph1.addEdge(1, 2, 1, 1, 0); graph1.setSourceNode(0); graph1.setSinkNode(1); final Exception exception = assertThrows(IllegalStateException.class, graph1::solveMinCostFlow); assertEquals("Sink node 1 shouldn't have output", exception.getMessage()); } @Test public void testInvalidFlow() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 1); graph1.addEdge(0, 2, 2, 1, 2); graph1.addEdge(1, 3, 1, 1, 1); graph1.addEdge(2, 3, 2, 1, 0); // Missing flow from 2 to 3 graph1.setSourceNode(0); graph1.setSinkNode(3); final Exception exception = assertThrows(IllegalStateException.class, graph1::solveMinCostFlow); assertEquals("Input flow for node 2 is 2 which doesn't match output flow 0", exception.getMessage()); } @Test public void testMissingSource() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 1); graph1.addEdge(0, 2, 2, 1, 2); graph1.addEdge(1, 3, 1, 1, 1); graph1.addEdge(2, 3, 2, 1, 2); graph1.setSinkNode(3); final Exception exception = assertThrows(IllegalStateException.class, graph1::solveMinCostFlow); assertEquals("Output flow for source null is null which doesn't match input flow 3 for 
sink 3", exception.getMessage()); } @Test public void testDisconnectedGraph() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 1); graph1.addEdge(2, 3, 2, 1, 2); graph1.setSourceNode(0); graph1.setSinkNode(1); final Exception exception = assertThrows(IllegalStateException.class, graph1::solveMinCostFlow); assertEquals("Input flow for node 3 is 2 which doesn't match output flow null", exception.getMessage()); } @Test public void testDisconnectedGraphCrossSourceSink() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 1); graph1.addEdge(2, 3, 2, 1, 2); graph1.setSourceNode(0); graph1.setSinkNode(3); final Exception exception = assertThrows(IllegalStateException.class, graph1::solveMinCostFlow); assertEquals("Input flow for node 1 is 1 which doesn't match output flow null", exception.getMessage()); } @Test public void testNullNode() { final Graph<Integer> graph1 = new Graph<>(); assertThrows(NullPointerException.class, () -> graph1.addEdge(null, 1, 1, 1, 1)); assertThrows(NullPointerException.class, () -> graph1.addEdge(1, null, 1, 1, 1)); } @Test public void testJustSourceSink() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 1, 1); graph1.setSourceNode(0); graph1.setSinkNode(1); graph1.solveMinCostFlow(); assertEquals(1, graph1.totalCost()); } @Test public void testMinCostFlow() { // Original graph, flow from 0 to 1 and 2 to 3 Map<Integer, Graph<Integer>.Edge> edges = graph.edges(0); Graph<Integer>.Edge edge = edges.get(1); assertEquals(1, edge.flow); assertEquals(0, edge.residualFlow); edge = edges.get(3); assertEquals(0, edge.flow); assertEquals(1, edge.residualFlow); edges = graph.edges(2); edge = edges.get(3); assertEquals(1, edge.flow); assertEquals(0, edge.residualFlow); edge = edges.get(1); assertEquals(0, edge.flow); assertEquals(1, edge.residualFlow); assertEquals(5, graph.totalCost()); graph.solveMinCostFlow(); assertEquals(2, graph.totalCost()); edges = graph.edges(0); assertEquals(2, 
edges.size()); // No flow from 0 to 1 edge = edges.get(1); assertEquals(0, edge.flow); assertEquals(1, edge.residualFlow); // Flow from 0 to 3 now edge = edges.get(3); assertEquals(1, edge.flow); assertEquals(0, edge.residualFlow); edges = graph.edges(2); assertEquals(2, edges.size()); // No flow from 2 to 3 edge = edges.get(3); assertEquals(0, edge.flow); assertEquals(1, edge.residualFlow); // Flow from 2 to 1 now edge = edges.get(1); assertEquals(1, edge.flow); assertEquals(0, edge.residualFlow); } @Test public void testMinCostDetectNodeNotInNegativeCycle() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(-1, 0, 1, 0, 1); graph1.addEdge(-1, 1, 1, 0, 1); graph1.addEdge(0, 2, 1, 1, 0); graph1.addEdge(0, 3, 1, 1, 0); graph1.addEdge(0, 4, 1, 10, 1); graph1.addEdge(1, 2, 1, 1, 0); graph1.addEdge(1, 3, 1, 10, 1); graph1.addEdge(1, 4, 1, 1, 0); graph1.addEdge(2, 99, 0, 0, 0); graph1.addEdge(3, 99, 1, 0, 1); graph1.addEdge(4, 99, 1, 0, 1); graph1.setSourceNode(-1); graph1.setSinkNode(99); assertEquals(20, graph1.totalCost()); // In this graph, the node we found for negative cycle is 2. However 2 isn't in the negative // cycle itself. 
Negative cycle is 1 -> 4 -> 0 -> 3 -> 1 graph1.solveMinCostFlow(); assertEquals(2, graph1.totalCost()); Map<Integer, Graph<Integer>.Edge> edges = graph1.edges(-1); assertEquals(getEdge(0, 1, 0, 0, 1), edges.get(0)); assertEquals(getEdge(1, 1, 0, 0, 1), edges.get(1)); edges = graph1.edges(0); assertEquals(getEdge(2, 1, 1, 1, 0), edges.get(2)); assertEquals(getEdge(3, 1, 1, 0, 1), edges.get(3)); assertEquals(getEdge(4, 1, 10, 1, 0), edges.get(4)); edges = graph1.edges(1); assertEquals(getEdge(2, 1, 1, 1, 0), edges.get(2)); assertEquals(getEdge(3, 1, 10, 1, 0), edges.get(3)); assertEquals(getEdge(4, 1, 1, 0, 1), edges.get(4)); edges = graph1.edges(2); assertEquals(getEdge(99, 0, 0, 0, 0), edges.get(99)); edges = graph1.edges(3); assertEquals(getEdge(99, 1, 0, 0, 1), edges.get(99)); edges = graph1.edges(4); assertEquals(getEdge(99, 1, 0, 0, 1), edges.get(99)); } @Test public void testDeterministic() { final List<TestEdge> edgeList = new ArrayList<>(); edgeList.add(new TestEdge(0, 1, 1, 2, 1)); edgeList.add(new TestEdge(0, 2, 1, 1, 0)); edgeList.add(new TestEdge(0, 3, 1, 1, 0)); edgeList.add(new TestEdge(0, 4, 1, 1, 0)); edgeList.add(new TestEdge(1, 5, 1, 1, 1)); edgeList.add(new TestEdge(2, 5, 1, 1, 0)); edgeList.add(new TestEdge(3, 5, 1, 1, 0)); edgeList.add(new TestEdge(4, 5, 1, 1, 0)); // Test no matter the order of adding edges, min cost flow flows from 0 to 2 and then from 2 to 5 for (int i = 0; i < 10; i++) { Collections.shuffle(edgeList); final Graph<Integer> graph1 = new Graph<>(); for (final TestEdge edge : edgeList) { graph1.addEdge(edge.source, edge.destination, edge.capacity, edge.cost, edge.flow); } graph1.setSourceNode(0); graph1.setSinkNode(5); assertEquals(3, graph1.totalCost()); graph1.solveMinCostFlow(); assertEquals(2, graph1.totalCost()); Map<Integer, Graph<Integer>.Edge> edges = graph1.edges(0); assertEquals(4, edges.size()); assertEquals(getEdge(1, 1, 2, 1, 0), edges.get(1)); assertEquals(getEdge(2, 1, 1, 0, 1), edges.get(2)); 
assertEquals(getEdge(3, 1, 1, 1, 0), edges.get(3)); assertEquals(getEdge(4, 1, 1, 1, 0), edges.get(4)); edges = graph1.edges(1); assertEquals(1, edges.size()); assertEquals(getEdge(5, 1, 1, 1, 0), edges.get(5)); edges = graph1.edges(2); assertEquals(1, edges.size()); assertEquals(getEdge(5, 1, 1, 0, 1), edges.get(5)); edges = graph1.edges(3); assertEquals(1, edges.size()); assertEquals(getEdge(5, 1, 1, 1, 0), edges.get(5)); edges = graph1.edges(4); assertEquals(1, edges.size()); assertEquals(getEdge(5, 1, 1, 1, 0), edges.get(5)); } } @Test public void testMaxFlowOnlySourceAndSink() { final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 100, 0, 0); graph1.setSourceNode(0); graph1.setSinkNode(1); final long maxFlow = graph1.calculateMaxFlow(); assertEquals(100, maxFlow); final Map<Integer, Graph<Integer>.Edge> edges = graph1.edges(0); assertEquals(100, edges.get(1).flow); assertEquals(0, edges.get(1).residualFlow); } @Test public void testMaxFlowBoundBySinkEdges() { // Edges connected to sink have less capacity final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 10, 0, 0); graph1.addEdge(0, 2, 10, 0, 0); graph1.addEdge(0, 3, 10, 0, 0); graph1.addEdge(1, 4, 5, 0, 0); graph1.addEdge(2, 5, 5, 0, 0); graph1.addEdge(3, 6, 5, 0, 0); graph1.addEdge(4, 7, 2, 0, 0); graph1.addEdge(5, 7, 2, 0, 0); graph1.addEdge(6, 7, 2, 0, 0); graph1.setSourceNode(0); graph1.setSinkNode(7); final long maxFlow = graph1.calculateMaxFlow(); assertEquals(6, maxFlow); Map<Integer, Graph<Integer>.Edge> edges = graph1.edges(0); assertEquals(2, edges.get(1).flow); assertEquals(2, edges.get(2).flow); assertEquals(2, edges.get(3).flow); edges = graph1.edges(1); assertEquals(2, edges.get(4).flow); edges = graph1.edges(2); assertEquals(2, edges.get(5).flow); edges = graph1.edges(3); assertEquals(2, edges.get(6).flow); edges = graph1.edges(4); assertEquals(2, edges.get(7).flow); edges = graph1.edges(5); assertEquals(2, edges.get(7).flow); edges = graph1.edges(6); assertEquals(2, 
edges.get(7).flow); } @Test public void testMaxFlowBoundBySourceEdges() { // Edges connected to source have less capacity final Graph<Integer> graph1 = new Graph<>(); graph1.addEdge(0, 1, 1, 0, 0); graph1.addEdge(0, 2, 2, 0, 0); graph1.addEdge(0, 3, 3, 0, 0); graph1.addEdge(1, 4, 5, 0, 0); graph1.addEdge(2, 5, 5, 0, 0); graph1.addEdge(3, 6, 5, 0, 0); graph1.addEdge(4, 7, 10, 0, 0); graph1.addEdge(5, 7, 10, 0, 0); graph1.addEdge(6, 7, 10, 0, 0); graph1.setSourceNode(0); graph1.setSinkNode(7); final long maxFlow = graph1.calculateMaxFlow(); assertEquals(6, maxFlow); Map<Integer, Graph<Integer>.Edge> edges = graph1.edges(0); assertEquals(1, edges.get(1).flow); assertEquals(2, edges.get(2).flow); assertEquals(3, edges.get(3).flow); edges = graph1.edges(1); assertEquals(1, edges.get(4).flow); edges = graph1.edges(2); assertEquals(2, edges.get(5).flow); edges = graph1.edges(3); assertEquals(3, edges.get(6).flow); edges = graph1.edges(4); assertEquals(1, edges.get(7).flow); edges = graph1.edges(5); assertEquals(2, edges.get(7).flow); edges = graph1.edges(6); assertEquals(3, edges.get(7).flow); } private static Graph<Integer>.Edge getEdge(final int destination, final int capacity, final int cost, final int residualFlow, final int flow) { return getEdge(destination, capacity, cost, residualFlow, flow, true); } private static Graph<Integer>.Edge getEdge(final int destination, final int capacity, final int cost, final int residualFlow, final int flow, final boolean forwardEdge) { return new Graph<Integer>().new Edge(destination, capacity, cost, residualFlow, flow, forwardEdge); } private static
GraphTest
java
apache__camel
components/camel-ignite/src/test/java/org/apache/camel/component/ignite/IgniteIdGenTest.java
{ "start": 1223, "end": 5370 }
class ____ extends AbstractIgniteTest { @Override protected String getScheme() { return "ignite-idgen"; } @Override protected AbstractIgniteComponent createComponent() { return IgniteIdGenComponent.fromConfiguration(createConfiguration()); } @Test public void testOperations() { Assertions .assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?initialValue=0&operation=GET", null, Long.class)) .isZero(); Assertions.assertThat(template .requestBody("ignite-idgen:" + resourceUid + "?initialValue=0&operation=GET_AND_INCREMENT", null, Long.class)) .isZero(); Assertions.assertThat(template .requestBody("ignite-idgen:" + resourceUid + "?initialValue=0&operation=INCREMENT_AND_GET", null, Long.class)) .isEqualTo(2); Assertions.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?initialValue=0&operation=ADD_AND_GET", 5, Long.class)) .isEqualTo(7); Assertions.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?initialValue=0&operation=GET_AND_ADD", 5, Long.class)) .isEqualTo(7); Assertions .assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?initialValue=0&operation=GET", 5, Long.class)) .isEqualTo(12); } @Test public void testInitialValue() { Assertions.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?operation=GET&initialValue=100", null, Long.class)) .isEqualTo(100); Assertions .assertThat(template.requestBody( "ignite-idgen:" + resourceUid + "?operation=GET_AND_INCREMENT&initialValue=100", null, Long.class)) .isEqualTo(100); Assertions .assertThat(template.requestBody( "ignite-idgen:" + resourceUid + "?operation=INCREMENT_AND_GET&initialValue=100", null, Long.class)) .isEqualTo(102); Assertions.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?operation=ADD_AND_GET&initialValue=100", 5, Long.class)) .isEqualTo(107); Assertions.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?operation=GET_AND_ADD&initialValue=100", 5, Long.class)) .isEqualTo(107); Assertions 
.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?operation=GET&initialValue=100", 5, Long.class)) .isEqualTo(112); } @Test public void testDifferentOperation() { Assertions.assertThat( template.requestBody("ignite-idgen:" + resourceUid + "?operation=GET&initialValue=100", null, Long.class)) .isEqualTo(100); Assertions .assertThat(template.requestBodyAndHeader( "ignite-idgen:" + resourceUid + "?operation=GET_AND_INCREMENT&initialValue=100", null, IgniteConstants.IGNITE_IDGEN_OPERATION, IgniteIdGenOperation.INCREMENT_AND_GET, Long.class)) .isEqualTo(101); } @Test public void testBatchSize() { IgniteIdGenEndpoint endpoint = context.getEndpoint( "ignite-idgen:" + resourceUid + "?operation=GET&initialValue=100&batchSize=100", IgniteIdGenEndpoint.class); Assertions.assertThat(template.requestBody(endpoint, null, Long.class)).isEqualTo(100); // Cannot test much here with a single Ignite instance, let's just test // that the parameter could be set. Assertions.assertThat(endpoint.getBatchSize()).isEqualTo(100); } @AfterEach public void deleteSets() { IgniteAtomicSequence seq = ignite().atomicSequence(resourceUid, 0, false); if (seq != null) { seq.close(); } } }
IgniteIdGenTest
java
apache__spark
sql/core/src/test/java/test/org/apache/spark/sql/JavaUDFSuite.java
{ "start": 1781, "end": 2684 }
class ____ implements Serializable { private transient SparkSession spark; @BeforeEach public void setUp() { spark = SparkSession.builder() .master("local[*]") .appName("testing") .getOrCreate(); } @AfterEach public void tearDown() { spark.stop(); spark = null; } @Test public void udf1Test() { spark.udf().register("stringLengthTest", (String str) -> str.length(), DataTypes.IntegerType); Row result = spark.sql("SELECT stringLengthTest('test')").head(); Assertions.assertEquals(4, result.getInt(0)); } @Test public void udf2Test() { spark.udf().register("stringLengthTest", (String str1, String str2) -> str1.length() + str2.length(), DataTypes.IntegerType); Row result = spark.sql("SELECT stringLengthTest('test', 'test2')").head(); Assertions.assertEquals(9, result.getInt(0)); } public static
JavaUDFSuite
java
processing__processing4
app/src/processing/app/platform/DefaultPlatform.java
{ "start": 5615, "end": 8584 }
class ____ extends Font { // public NonUIResourceFont(final Font font) { // super(font); // } // } /* // Rewritten from https://stackoverflow.com/a/7434935 static private void setUIFont(FontUIResource f) { for (Object key : UIManager.getLookAndFeelDefaults().keySet()) { Object value = UIManager.get(key); if (value instanceof FontUIResource) { UIManager.put(key, f); } } } */ public void setInterfaceZoom() throws Exception { // Specify font when scaling is active. if (!Preferences.getBoolean("editor.zoom.auto")) { for (String widgetName : FONT_SCALING_WIDGETS) { scaleDefaultFont(widgetName); } // Font defaultFont = Toolkit.getSansFont(14, Font.PLAIN); // UIManager.put("defaultFont", defaultFont); // String fontName = Preferences.get("ui.font.family"); // int fontSize = Preferences.getInteger("ui.font.size"); // FontUIResource uiFont = new FontUIResource(fontName, Font.PLAIN, Toolkit.zoom(fontSize)); // UIManager.put("Label.font", uiFont); // UIManager.put("TextField.font", uiFont); } } /** * Handle any platform-specific languages saving. This is necessary on OS X * because of how bundles are handled, but perhaps your platform would like * to Think Different too? * @param languageCode 2-digit lowercase ISO language code */ public void saveLanguage(String languageCode) { } /** * This function should throw an exception or return a value. * Do not return null. */ public File getSettingsFolder() throws Exception { File override = Base.getSettingsOverride(); if (override != null) { return override; } // If no subclass has a behavior, default to making a // ".processing" directory in the user's home directory. File home = new File(System.getProperty("user.home")); return new File(home, ".processing"); } /** * @return if not overridden, a folder named "sketchbook" in user.home. 
* @throws Exception so that subclasses can throw a fit */ public File getDefaultSketchbookFolder() throws Exception { return new File(System.getProperty("user.home"), "sketchbook"); } // TODO this should be openLink(), as in PApplet, but need to look // into what else it might break by changing it [fry 220202] public void openURL(String url) throws Exception { if (!ShimAWT.openLink(url)) { PApplet.launch(url); } } public boolean openFolderAvailable() { return Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.OPEN); } public void openFolder(File file) throws Exception { // TODO Looks like this should instead be Action.BROWSE_FILE_DIR, // which was added in Java 9. (Also update available method.) Desktop.getDesktop().open(file); } // . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . // public
NonUIResourceFont
java
apache__camel
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/infra/InfraList.java
{ "start": 1118, "end": 1354 }
class ____ extends InfraBaseCommand { public InfraList(CamelJBangMain main) { super(main); } @Override public Integer doCall() throws Exception { return listServices(rows -> { }); } }
InfraList
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/deser/enums/EnumSetDeserializer5203Test.java
{ "start": 881, "end": 925 }
enum ____ { FOO } static
MyEnum
java
apache__dubbo
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/impl/UnserializableBoxDemoServiceImpl.java
{ "start": 985, "end": 1202 }
class ____ implements DemoService { public String sayName(String name) { return "say:" + name; } public Box getBox() { return new UnserializableBox(); } }
UnserializableBoxDemoServiceImpl
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/LambdaFunctionalInterfaceTest.java
{ "start": 11618, "end": 12397 }
class ____ { private static <T extends Number> List<T> numToTFunction(Function<Double, T> converter) { List<T> namedNumberIntervals = new ArrayList<>(); T min = converter.apply(2.9); T max = converter.apply(5.6); namedNumberIntervals.add(min); namedNumberIntervals.add(max); return namedNumberIntervals; } public List<Integer> getIntList() { List<Integer> result = numToTFunction(num -> 2); return result; } public List<Double> getDoubleList() { List<Double> result = numToTFunction(num -> 3.2); return result; } } """) .addOutputLines( "out/NumbertoT.java", """ import java.util.ArrayList; import java.util.List; import java.util.function.DoubleFunction; import java.util.function.Function; public
NumbertoT
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/InstantiationException.java
{ "start": 165, "end": 219 }
class ____ runtime. * * @author Gavin King */ public
at
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/TruthContainsExactlyElementsInUsageTest.java
{ "start": 7487, "end": 8100 }
class ____ { void test() { assertThat(ImmutableList.of(1, 2, 3)).containsExactly(1, 2, 3).inOrder(); } } """) .doTest(); } @Test public void refactoringTruthContainsExactlyElementsInUsageWithStaticallyImportedAsList() { refactoringHelper .addInputLines( "ExampleClassTest.java", """ import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; import static java.util.Arrays.asList; public
ExampleClassTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
{ "start": 1430, "end": 1833 }
class ____ { static final String THREAD_POOL = "thread_pool"; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.THREAD_POOL); for (ThreadPool.Info info : infos) { info.toXContent(builder, params); } builder.endObject(); return builder; } }
Fields
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/IncreaseContainersResourceResponse.java
{ "start": 1630, "end": 3485 }
class ____ { @Private @Unstable public static IncreaseContainersResourceResponse newInstance( List<ContainerId> successfullyIncreasedContainers, Map<ContainerId, SerializedException> failedRequests) { IncreaseContainersResourceResponse response = Records.newRecord(IncreaseContainersResourceResponse.class); response.setSuccessfullyIncreasedContainers( successfullyIncreasedContainers); response.setFailedRequests(failedRequests); return response; } /** * Get the list of containerIds of containers whose resource * have been successfully increased. * * @return the list of containerIds of containers whose resource have * been successfully increased. */ @Public @Unstable public abstract List<ContainerId> getSuccessfullyIncreasedContainers(); /** * Set the list of containerIds of containers whose resource have * been successfully increased. * * @param succeedIncreasedContainers list of containerIds of containers whose resource have * been successfully increased. */ @Private @Unstable public abstract void setSuccessfullyIncreasedContainers( List<ContainerId> succeedIncreasedContainers); /** * Get the containerId-to-exception map in which the exception indicates * error from each container for failed requests. * @return map of containerId-to-exception */ @Public @Unstable public abstract Map<ContainerId, SerializedException> getFailedRequests(); /** * Set the containerId-to-exception map in which the exception indicates * error from each container for failed requests. * * @param failedRequests map of containerId-to-exception. */ @Private @Unstable public abstract void setFailedRequests( Map<ContainerId, SerializedException> failedRequests); }
IncreaseContainersResourceResponse
java
junit-team__junit5
junit-platform-reporting/src/testFixtures/java/org/junit/platform/reporting/testutil/FileUtils.java
{ "start": 491, "end": 988 }
class ____ { public static Path findPath(Path rootDir, String syntaxAndPattern) { var matcher = rootDir.getFileSystem().getPathMatcher(syntaxAndPattern); try (var files = Files.walk(rootDir)) { return files.filter(matcher::matches).findFirst() // .orElseThrow(() -> new AssertionError( "Failed to find file matching '%s' in %s".formatted(syntaxAndPattern, rootDir))); } catch (IOException e) { throw new UncheckedIOException(e); } } private FileUtils() { } }
FileUtils
java
grpc__grpc-java
s2a/src/test/java/io/grpc/s2a/internal/channel/S2AHandshakerServiceChannelTest.java
{ "start": 9987, "end": 10286 }
class ____ extends SimpleServiceGrpc.SimpleServiceImplBase { @Override public void unaryRpc(SimpleRequest request, StreamObserver<SimpleResponse> streamObserver) { streamObserver.onNext(SimpleResponse.getDefaultInstance()); streamObserver.onCompleted(); } } }
SimpleServiceImpl
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/ValidatingSubstitutorTests.java
{ "start": 419, "end": 3276 }
class ____ extends ESTestCase { public void testReplace() { var sub = new ValidatingSubstitutor(Map.of("key", "value", "key2", "value2"), "${", "}"); assertThat(sub.replace("super:${key}", "setting"), is("super:value")); assertThat(sub.replace("super, ${key}, ${key2}", "setting"), is("super, value, value2")); assertThat(sub.replace("super", "setting"), is("super")); } public void testReplace_MatchesComplexPlaceholder() { var sub = new ValidatingSubstitutor(Map.of("\t\b\f'\"\\key", "value"), "${", "}"); assertThat(sub.replace("super, ${\t\b\f'\"\\key}", "setting"), is("super, value")); } public void testReplace_IgnoresPlaceholdersWithNewlines() { var sub = new ValidatingSubstitutor(Map.of("key", "value", "key2", "value2"), "${", "}"); assertThat(sub.replace("super:${key\n}", "setting"), is("super:${key\n}")); assertThat(sub.replace("super:${\nkey}", "setting"), is("super:${\nkey}")); } public void testReplace_ThrowsException_WhenPlaceHolderStillExists() { { var sub = new ValidatingSubstitutor(Map.of("some_key", "value", "key2", "value2"), "${", "}"); var exception = expectThrows(IllegalStateException.class, () -> sub.replace("super:${key}", "setting")); assertThat( exception.getMessage(), is( "Found placeholder [${key}] in field [setting] after replacement call, " + "please check that all templates have a corresponding field definition." ) ); } // only reports the first placeholder pattern { var sub = new ValidatingSubstitutor(Map.of("some_key", "value", "some_key2", "value2"), "${", "}"); var exception = expectThrows(IllegalStateException.class, () -> sub.replace("super, ${key}, ${key2}", "setting")); assertThat( exception.getMessage(), is( "Found placeholder [${key}] in field [setting] after replacement call, " + "please check that all templates have a corresponding field definition." 
) ); } { var sub = new ValidatingSubstitutor(Map.of("some_key", "value", "key2", "value2"), "${", "}"); var exception = expectThrows(IllegalStateException.class, () -> sub.replace("super:${ \\/\tkey\"}", "setting")); assertThat( exception.getMessage(), is( "Found placeholder [${ \\/\tkey\"}] in field [setting] after replacement call," + " please check that all templates have a corresponding field definition." ) ); } } }
ValidatingSubstitutorTests
java
spring-projects__spring-boot
configuration-metadata/spring-boot-configuration-metadata/src/main/java/org/springframework/boot/configurationmetadata/ConfigurationMetadataRepository.java
{ "start": 810, "end": 1364 }
interface ____ { /** * Defines the name of the "root" group, that is the group that gathers all the * properties that aren't attached to a specific group. */ String ROOT_GROUP = "_ROOT_GROUP_"; /** * Return the groups, indexed by id. * @return all configuration meta-data groups */ Map<String, ConfigurationMetadataGroup> getAllGroups(); /** * Return the properties, indexed by id. * @return all configuration meta-data properties */ Map<String, ConfigurationMetadataProperty> getAllProperties(); }
ConfigurationMetadataRepository
java
apache__kafka
clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientsTestUtils.java
{ "start": 20650, "end": 21103 }
class ____ implements ConsumerRebalanceListener { public int callsToAssigned = 0; public int callsToRevoked = 0; @Override public void onPartitionsAssigned(Collection<TopicPartition> partitions) { callsToAssigned += 1; } @Override public void onPartitionsRevoked(Collection<TopicPartition> partitions) { callsToRevoked += 1; } } }
TestConsumerReassignmentListener
java
google__auto
common/src/main/java/com/google/auto/common/MoreElements.java
{ "start": 3519, "end": 5517 }
// Singleton visitor used by asType(Element) below to safely narrow an Element to a TypeElement.
class ____ extends CastingElementVisitor<TypeElement> {
    private static final TypeElementVisitor INSTANCE = new TypeElementVisitor();

    TypeElementVisitor() {
        super("type element");
    }

    @Override
    public TypeElement visitType(TypeElement e, Void ignore) {
        // Only type elements reach this overload; all other element kinds fall through to
        // the failure behavior inherited from CastingElementVisitor.
        return e;
    }
}

/**
 * Returns true if the given {@link Element} instance is a {@link TypeElement}.
 *
 * <p>This method is functionally equivalent to an {@code instanceof} check, but should always be
 * used over that idiom as instructed in the documentation for {@link Element}.
 *
 * @throws NullPointerException if {@code element} is {@code null}
 */
public static boolean isType(Element element) {
    // Checks the element's kind rather than its runtime class, per the Element javadoc.
    return element.getKind().isClass() || element.getKind().isInterface();
}

/**
 * Returns the given {@link Element} instance as {@link TypeElement}.
 *
 * <p>This method is functionally equivalent to an {@code instanceof} check and a cast, but should
 * always be used over that idiom as instructed in the documentation for {@link Element}.
 *
 * @throws NullPointerException if {@code element} is {@code null}
 * @throws IllegalArgumentException if {@code element} isn't a {@link TypeElement}.
 */
public static TypeElement asType(Element element) {
    return element.accept(TypeElementVisitor.INSTANCE, null);
}

/**
 * Returns the given {@link Element} instance as {@link TypeParameterElement}.
 *
 * <p>This method is functionally equivalent to an {@code instanceof} check and a cast, but should
 * always be used over that idiom as instructed in the documentation for {@link Element}.
 *
 * @throws NullPointerException if {@code element} is {@code null}
 * @throws IllegalArgumentException if {@code element} isn't a {@link TypeParameterElement}.
 */
public static TypeParameterElement asTypeParameter(Element element) {
    return element.accept(TypeParameterElementVisitor.INSTANCE, null);
}

private static final
TypeElementVisitor
java
google__dagger
javatests/dagger/functional/assisted/AssistedFactoryBindsTest.java
{ "start": 1732, "end": 1857 }
// Assisted factory that covariantly narrows FooFactory's create() to the concrete FooImpl type.
interface ____ extends FooFactory {
    @Override
    FooImpl create(AssistedDep assistedDep);
}

static final
FooFactoryImpl
java
apache__camel
components/camel-splunk/src/test/java/org/apache/camel/component/splunk/integration/SavedSearchManualTest.java
{ "start": 1359, "end": 2803 }
// Manual integration test for the Splunk saved-search consumer endpoint.
class ____ extends SplunkTest {

    // before run there should be created a saved search 'junit' in splunk
    @Test
    public void testSavedSearch() throws Exception {
        // The saved-search route is expected to deliver exactly one event to the mock.
        MockEndpoint searchMock = getMockEndpoint("mock:search-saved");
        searchMock.expectedMessageCount(1);

        MockEndpoint.assertIsSatisfied(context, 20, TimeUnit.SECONDS);

        // Verify the received event carries the expected key/value payload.
        SplunkEvent received = searchMock.getReceivedExchanges().get(0).getIn().getBody(SplunkEvent.class);
        assertNotNull(received);
        Map<String, String> data = received.getEventData();
        assertEquals("value1", data.get("key1"));
        assertEquals("value2", data.get("key2"));
        assertEquals("value3", data.get("key3"));
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // Producer route: submits events to the configured Splunk index.
                from("direct:submit")
                        .to("splunk://submit?username=" + SPLUNK_USERNAME + "&password=" + SPLUNK_PASSWORD
                            + "&index=" + INDEX + "&sourceType=testSource&source=test")
                        .to("mock:submit-result");

                // Consumer route: polls the pre-created 'junit' saved search every 5 seconds.
                from("splunk://savedsearch?delay=5000&username=" + SPLUNK_USERNAME + "&password=" + SPLUNK_PASSWORD
                     + "&initEarliestTime=-10s&latestTime=now" + "&savedSearch=junit")
                        .to("mock:search-saved");
            }
        };
    }
}
SavedSearchManualTest
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/MultiVariableDeclarationTest.java
{ "start": 2560, "end": 2992 }
class ____ { int a = 1; int x = 1; int y = 2; int b = 1; } """) .doTest(TEXT_MATCH); } @Test public void positiveWithNeighbouringScopes() { BugCheckerRefactoringTestHelper.newInstance(MultiVariableDeclaration.class, getClass()) .addInputLines( "in/A.java", """ package a; public
A