language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CanIgnoreReturnValueSuggesterTest.java | {
"start": 7851,
"end": 8374
} | class ____ {
public String method(String a, String b) {
if (System.currentTimeMillis() > 0) {
return a;
}
return b;
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void returnsInputParamWithMultipleReturns_oneReturnIsConstant() {
helper
.addInputLines(
"Client.java",
"""
package com.google.frobber;
public final | Client |
java | google__truth | core/src/test/java/com/google/common/truth/BigDecimalSubjectTest.java | {
"start": 1138,
"end": 4741
} | class ____ {
@SuppressWarnings("TruthSelfEquals")
@Test
public void isEqualTo() {
// make sure this still works
assertThat(TEN).isEqualTo(TEN);
}
@Test
public void isEquivalentAccordingToCompareTo() {
// make sure this still works
assertThat(TEN).isEquivalentAccordingToCompareTo(TEN);
}
@Test
public void isEqualToIgnoringScale_bigDecimal() {
assertThat(TEN).isEqualToIgnoringScale(TEN);
assertThat(TEN).isEqualToIgnoringScale(new BigDecimal(10));
AssertionError e =
expectFailure(
whenTesting -> whenTesting.that(TEN).isEqualToIgnoringScale(new BigDecimal(3)));
assertFailureKeys(e, "expected", "but was", "(scale is ignored)");
assertFailureValue(e, "expected", "3");
assertFailureValue(e, "but was", "10");
}
@Test
public void isEqualToIgnoringScale_int() {
assertThat(TEN).isEqualToIgnoringScale(10);
AssertionError e =
expectFailure(whenTesting -> whenTesting.that(TEN).isEqualToIgnoringScale(3));
assertFailureKeys(e, "expected", "but was", "(scale is ignored)");
assertFailureValue(e, "expected", "3");
assertFailureValue(e, "but was", "10");
}
@Test
public void isEqualToIgnoringScale_long() {
assertThat(TEN).isEqualToIgnoringScale(10L);
AssertionError e =
expectFailure(whenTesting -> whenTesting.that(TEN).isEqualToIgnoringScale(3L));
assertFailureKeys(e, "expected", "but was", "(scale is ignored)");
assertFailureValue(e, "expected", "3");
assertFailureValue(e, "but was", "10");
}
@Test
public void isEqualToIgnoringScale_string() {
assertThat(TEN).isEqualToIgnoringScale("10");
assertThat(TEN).isEqualToIgnoringScale("10.");
assertThat(TEN).isEqualToIgnoringScale("10.0");
assertThat(TEN).isEqualToIgnoringScale("10.00");
AssertionError e =
expectFailure(whenTesting -> whenTesting.that(TEN).isEqualToIgnoringScale("3"));
assertFailureKeys(e, "expected", "but was", "(scale is ignored)");
assertFailureValue(e, "expected", "3");
assertFailureValue(e, "but was", "10");
}
@Test
public void isEqualToIgnoringScale_stringWithDecimals() {
BigDecimal tenFour = new BigDecimal("10.4");
assertThat(tenFour).isEqualToIgnoringScale("10.4");
assertThat(tenFour).isEqualToIgnoringScale("10.4");
assertThat(tenFour).isEqualToIgnoringScale("10.40");
assertThat(tenFour).isEqualToIgnoringScale("10.400");
AssertionError e =
expectFailure(whenTesting -> whenTesting.that(tenFour).isEqualToIgnoringScale("3.4"));
assertFailureKeys(e, "expected", "but was", "(scale is ignored)");
assertFailureValue(e, "expected", "3.4");
assertFailureValue(e, "but was", "10.4");
}
@Test
public void isEqualToIgnoringScale_nullActual() {
AssertionError e =
expectFailure(
whenTesting -> whenTesting.that((BigDecimal) null).isEqualToIgnoringScale("3.4"));
assertFailureKeys(e, "expected", "but was");
assertFailureValue(e, "expected", "3.4");
assertFailureValue(e, "but was", "null");
}
@Test
public void isEqualToIgnoringScale_nullExpected() {
AssertionError e =
expectFailure(
whenTesting ->
whenTesting.that(new BigDecimal("3.4")).isEqualToIgnoringScale((BigDecimal) null));
assertFailureKeys(e, "expected", "but was");
assertFailureValue(e, "expected", "null");
assertFailureValue(e, "but was", "3.4");
}
@Test
public void isEqualToIgnoringScale_bothNull() {
assertThat((BigDecimal) null).isEqualToIgnoringScale((BigDecimal) null);
}
}
| BigDecimalSubjectTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatusWithBackoffMonitor.java | {
"start": 2019,
"end": 2106
} | class ____ provide the same set of
* tests for the backoff Monitor version.
*/
public | to |
java | quarkusio__quarkus | integration-tests/hibernate-reactive-panache/src/main/java/io/quarkus/it/panache/reactive/DogDto.java | {
"start": 207,
"end": 430
} | class ____ {
public String name;
public PersonDto owner;
public DogDto(String name, PersonDto owner) {
this.name = name;
this.owner = owner;
}
@NestedProjectedClass
public static | DogDto |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/htmlunit/WebRequestMatcher.java | {
"start": 989,
"end": 1273
} | interface ____ {
/**
* Whether this matcher matches on the supplied web request.
* @param request the {@link WebRequest} to attempt to match on
* @return {@code true} if this matcher matches on the {@code WebRequest}
*/
boolean matches(WebRequest request);
}
| WebRequestMatcher |
java | spring-projects__spring-boot | module/spring-boot-pulsar/src/main/java/org/springframework/boot/pulsar/autoconfigure/PulsarProperties.java | {
"start": 19316,
"end": 20283
} | class ____ {
/**
* SchemaType of the consumed messages.
*/
private @Nullable SchemaType schemaType;
/**
* Number of threads used by listener container.
*/
private @Nullable Integer concurrency;
/**
* Whether to record observations for when the Observations API is available and
* the client supports it.
*/
private boolean observationEnabled;
public @Nullable SchemaType getSchemaType() {
return this.schemaType;
}
public void setSchemaType(@Nullable SchemaType schemaType) {
this.schemaType = schemaType;
}
public @Nullable Integer getConcurrency() {
return this.concurrency;
}
public void setConcurrency(@Nullable Integer concurrency) {
this.concurrency = concurrency;
}
public boolean isObservationEnabled() {
return this.observationEnabled;
}
public void setObservationEnabled(boolean observationEnabled) {
this.observationEnabled = observationEnabled;
}
}
public static | Listener |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 10142,
"end": 10276
} | class ____ {}",
"",
" @Component(modules = { TestModule1.class, TestModule2.class })",
" | TestModule3 |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/extension/Wrapper.java | {
"start": 952,
"end": 1062
} | class ____ only work as a wrapper when the condition matches.
*/
@Retention(RetentionPolicy.RUNTIME)
public @ | will |
java | apache__hadoop | hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java | {
"start": 1040,
"end": 1922
} | class ____ extends RequestWithHandle {
private final String name;
private final SetAttr3 objAttr;
public static MKDIR3Request deserialize(XDR xdr) throws IOException {
FileHandle handle = readHandle(xdr);
String name = xdr.readString();
SetAttr3 objAttr = new SetAttr3();
objAttr.deserialize(xdr);
return new MKDIR3Request(handle, name, objAttr);
}
public MKDIR3Request(FileHandle handle, String name, SetAttr3 objAttr) {
super(handle);
this.name = name;
this.objAttr = objAttr;
}
public String getName() {
return name;
}
public SetAttr3 getObjAttr() {
return objAttr;
}
@Override
public void serialize(XDR xdr) {
handle.serialize(xdr);
xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
objAttr.serialize(xdr);
}
}
| MKDIR3Request |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/dagger/ProvidesNullTest.java | {
"start": 5522,
"end": 5731
} | class ____ {
@Provides
@Nullable
public Object providesObject() {
return null;
}
}
""")
.doTest();
}
}
| Test |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxSerializationTests.java | {
"start": 445,
"end": 866
} | class ____ extends AbstractExpressionSerializationTests<Max> {
@Override
protected Max createTestInstance() {
return new Max(randomSource(), randomChild());
}
@Override
protected Max mutateInstance(Max instance) throws IOException {
return new Max(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
}
}
| MaxSerializationTests |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChunkBytesRefEvaluator.java | {
"start": 1185,
"end": 4286
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ChunkBytesRefEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator str;
private final ChunkingSettings chunkingSettings;
private final DriverContext driverContext;
private Warnings warnings;
public ChunkBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator str,
ChunkingSettings chunkingSettings, DriverContext driverContext) {
this.source = source;
this.str = str;
this.chunkingSettings = chunkingSettings;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) {
BytesRefVector strVector = strBlock.asVector();
if (strVector == null) {
return eval(page.getPositionCount(), strBlock);
}
return eval(page.getPositionCount(), strVector);
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += str.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef strScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
switch (strBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
BytesRef str = strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch);
Chunk.process(result, str, this.chunkingSettings);
}
return result.build();
}
}
public BytesRefBlock eval(int positionCount, BytesRefVector strVector) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef strScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
BytesRef str = strVector.getBytesRef(p, strScratch);
Chunk.process(result, str, this.chunkingSettings);
}
return result.build();
}
}
@Override
public String toString() {
return "ChunkBytesRefEvaluator[" + "str=" + str + ", chunkingSettings=" + chunkingSettings + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(str);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | ChunkBytesRefEvaluator |
java | apache__camel | components/camel-aws/camel-aws2-ecs/src/test/java/org/apache/camel/component/aws2/ecs/ECS2ComponentConfigurationTest.java | {
"start": 1158,
"end": 4462
} | class ____ extends CamelTestSupport {
@Test
public void createEndpointWithComponentElements() throws Exception {
ECS2Component component = context.getComponent("aws2-ecs", ECS2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
ECS2Endpoint endpoint = (ECS2Endpoint) component.createEndpoint("aws2-ecs://label");
assertEquals("XXX", endpoint.getConfiguration().getAccessKey());
assertEquals("YYY", endpoint.getConfiguration().getSecretKey());
}
@Test
public void createEndpointWithComponentAndEndpointElements() throws Exception {
ECS2Component component = context.getComponent("aws2-ecs", ECS2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
ECS2Endpoint endpoint
= (ECS2Endpoint) component.createEndpoint("aws2-ecs://label?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
}
@Test
public void createEndpointWithComponentEndpointElementsAndProxy() throws Exception {
ECS2Component component = context.getComponent("aws2-ecs", ECS2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
ECS2Endpoint endpoint = (ECS2Endpoint) component
.createEndpoint(
"aws2-ecs://label?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1&proxyHost=localhost&proxyPort=9000&proxyProtocol=HTTP");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
assertEquals(Protocol.HTTP, endpoint.getConfiguration().getProxyProtocol());
assertEquals("localhost", endpoint.getConfiguration().getProxyHost());
assertEquals(Integer.valueOf(9000), endpoint.getConfiguration().getProxyPort());
}
@Test
public void createEndpointWithEndpointOverride() throws Exception {
ECS2Component component = context.getComponent("aws2-ecs", ECS2Component.class);
ECS2Endpoint endpoint
= (ECS2Endpoint) component.createEndpoint(
"aws2-ecs://label?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1&overrideEndpoint=true&uriEndpointOverride=http://localhost:9090");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
assertTrue(endpoint.getConfiguration().isOverrideEndpoint());
assertEquals("http://localhost:9090", endpoint.getConfiguration().getUriEndpointOverride());
}
}
| ECS2ComponentConfigurationTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/where/hbm/EagerToManyWhereTest.java | {
"start": 1036,
"end": 5426
} | class ____ {
@AfterEach
void dropTestData(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
@JiraKey( value = "HHH-13011" )
public void testAssociatedWhereClause(SessionFactoryScope factoryScope) {
var product = new Product();
var flowers = new Category();
flowers.setId( 1 );
flowers.setName( "flowers" );
flowers.setDescription( "FLOWERS" );
product.getCategoriesOneToMany().add( flowers );
product.getCategoriesWithDescOneToMany().add( flowers );
product.getCategoriesManyToMany().add( flowers );
product.getCategoriesWithDescManyToMany().add( flowers );
product.getCategoriesWithDescIdLt4ManyToMany().add( flowers );
var vegetables = new Category();
vegetables.setId( 2 );
vegetables.setName( "vegetables" );
vegetables.setDescription( "VEGETABLES" );
product.getCategoriesOneToMany().add( vegetables );
product.getCategoriesWithDescOneToMany().add( vegetables );
product.getCategoriesManyToMany().add( vegetables );
product.getCategoriesWithDescManyToMany().add( vegetables );
product.getCategoriesWithDescIdLt4ManyToMany().add( vegetables );
var dogs = new Category();
dogs.setId( 3 );
dogs.setName( "dogs" );
dogs.setDescription( null );
product.getCategoriesOneToMany().add( dogs );
product.getCategoriesWithDescOneToMany().add( dogs );
product.getCategoriesManyToMany().add( dogs );
product.getCategoriesWithDescManyToMany().add( dogs );
product.getCategoriesWithDescIdLt4ManyToMany().add( dogs );
var building = new Category();
building.setId( 4 );
building.setName( "building" );
building.setDescription( "BUILDING" );
product.getCategoriesOneToMany().add( building );
product.getCategoriesWithDescOneToMany().add( building );
product.getCategoriesManyToMany().add( building );
product.getCategoriesWithDescManyToMany().add( building );
product.getCategoriesWithDescIdLt4ManyToMany().add( building );
factoryScope.inTransaction( (session) -> {
session.persist( flowers );
session.persist( vegetables );
session.persist( dogs );
session.persist( building );
session.persist( product );
} );
factoryScope.inTransaction( (session) -> {
var p = session.find( Product.class, product.getId() );
assertNotNull( p );
assertEquals( 4, p.getCategoriesOneToMany().size() );
checkIds( p.getCategoriesOneToMany(), new Integer[] { 1, 2, 3, 4 } );
assertEquals( 3, p.getCategoriesWithDescOneToMany().size() );
checkIds( p.getCategoriesWithDescOneToMany(), new Integer[] { 1, 2, 4 } );
assertEquals( 4, p.getCategoriesManyToMany().size() );
checkIds( p.getCategoriesManyToMany(), new Integer[] { 1, 2, 3, 4 } );
assertEquals( 3, p.getCategoriesWithDescManyToMany().size() );
checkIds( p.getCategoriesWithDescManyToMany(), new Integer[] { 1, 2, 4 } );
assertEquals( 2, p.getCategoriesWithDescIdLt4ManyToMany().size() );
checkIds( p.getCategoriesWithDescIdLt4ManyToMany(), new Integer[] { 1, 2 } );
} );
factoryScope.inTransaction( (session) -> {
var c = session.find( Category.class, flowers.getId() );
assertNotNull( c );
c.setInactive( 1 );
} );
factoryScope.inTransaction( (session) -> {
var c = session.find( Category.class, flowers.getId() );
assertNull( c );
} );
factoryScope.inTransaction( (session) -> {
var p = session.find( Product.class, product.getId() );
assertNotNull( p );
assertEquals( 3, p.getCategoriesOneToMany().size() );
checkIds( p.getCategoriesOneToMany(), new Integer[] { 2, 3, 4 } );
assertEquals( 2, p.getCategoriesWithDescOneToMany().size() );
checkIds( p.getCategoriesWithDescOneToMany(), new Integer[] { 2, 4 } );
assertEquals( 3, p.getCategoriesManyToMany().size() );
checkIds( p.getCategoriesManyToMany(), new Integer[] { 2, 3, 4 } );
assertEquals( 2, p.getCategoriesWithDescManyToMany().size() );
checkIds( p.getCategoriesWithDescManyToMany(), new Integer[] { 2, 4 } );
assertEquals( 1, p.getCategoriesWithDescIdLt4ManyToMany().size() );
checkIds( p.getCategoriesWithDescIdLt4ManyToMany(), new Integer[] { 2 } );
} );
}
private void checkIds(Set<Category> categories, Integer[] expectedIds) {
final Set<Integer> expectedIdSet = new HashSet<>( Arrays.asList( expectedIds ) );
for ( Category category : categories ) {
expectedIdSet.remove( category.getId() );
}
assertTrue( expectedIdSet.isEmpty() );
}
}
| EagerToManyWhereTest |
java | google__truth | core/src/main/java/com/google/common/truth/FloatSubject.java | {
"start": 3094,
"end": 15090
} | interface ____ {
void compareAgainst(float other);
}
/**
* Prepares for a check that the actual value is a finite number within the given tolerance of an
* expected value that will be provided in the next call in the fluent chain.
*
* <p>The check will fail if either the actual value or the expected value is {@link
* Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY}, or {@link Float#NaN}. To check for
* those values, use {@link #isPositiveInfinity}, {@link #isNegativeInfinity}, {@link #isNaN}, or
* (with more generality) {@link #isEqualTo}.
*
* <p>The check will pass if both values are zero, even if one is {@code 0.0f} and the other is
* {@code -0.0f}. Use {@link #isEqualTo} to assert that a value is exactly {@code 0.0f} or that it
* is exactly {@code -0.0f}.
*
* <p>You can use a tolerance of {@code 0.0f} to assert the exact equality of finite floats, but
* often {@link #isEqualTo} is preferable (note the different behaviours around non-finite values
* and {@code -0.0f}). See the documentation on {@link #isEqualTo} for advice on when exact
* equality assertions are appropriate.
*
* @param tolerance an inclusive upper bound on the difference between the actual value and
* expected value allowed by the check, which must be a non-negative finite value, i.e. not
* {@link Float#NaN}, {@link Float#POSITIVE_INFINITY}, or negative, including {@code -0.0f}
*/
public TolerantFloatComparison isWithin(float tolerance) {
return TolerantFloatComparison.comparing(
other -> {
if (!Float.isFinite(tolerance)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was not"
+ " finite"),
numericFact("expected", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (Float.compare(tolerance, 0.0f) < 0) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was negative"),
numericFact("expected", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (!Float.isFinite(other)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because expected value was not"
+ " finite"),
numericFact("expected", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (actual == null || !Float.isFinite(actual)) {
failWithoutActual(
numericFact("expected a finite value near", other),
numericFact("but was", actual),
numericFact("tolerance", tolerance));
} else if (!equalWithinTolerance(actual, other, tolerance)) {
failWithoutActual(
numericFact("expected", other),
numericFact("but was", actual),
numericFact("outside tolerance", tolerance));
}
});
}
/**
* Prepares for a check that the actual value is a finite number not within the given tolerance of
* an expected value that will be provided in the next call in the fluent chain.
*
* <p>The check will fail if either the actual value or the expected value is {@link
* Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY}, or {@link Float#NaN}. See {@link
* #isFinite}, {@link #isNotNaN}, or {@link #isNotEqualTo} for checks with other behaviours.
*
* <p>The check will fail if both values are zero, even if one is {@code 0.0f} and the other is
* {@code -0.0f}. Use {@link #isNotEqualTo} for a test which fails for a value of exactly zero
* with one sign but passes for zero with the opposite sign.
*
* <p>You can use a tolerance of {@code 0.0f} to assert the exact non-equality of finite floats,
* but sometimes {@link #isNotEqualTo} is preferable (note the different behaviours around
* non-finite values and {@code -0.0f}).
*
* @param tolerance an exclusive lower bound on the difference between the actual value and
* expected value allowed by the check, which must be a non-negative finite value, i.e. not
* {@link Float#NaN}, {@link Float#POSITIVE_INFINITY}, or negative, including {@code -0.0f}
*/
public TolerantFloatComparison isNotWithin(float tolerance) {
return TolerantFloatComparison.comparing(
other -> {
if (!Float.isFinite(tolerance)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was not"
+ " finite"),
numericFact("expected not to be", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (Float.compare(tolerance, 0.0f) < 0) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because tolerance was negative"),
numericFact("expected not to be", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (!Float.isFinite(other)) {
failWithoutActual(
simpleFact(
"could not perform approximate-equality check because expected value was not"
+ " finite"),
numericFact("expected not to be", other),
numericFact("was", actual),
numericFact("tolerance", tolerance));
} else if (actual == null || !Float.isFinite(actual)) {
failWithoutActual(
numericFact("expected a finite value that is not near", other),
numericFact("but was", actual),
numericFact("tolerance", tolerance));
} else if (!notEqualWithinTolerance(actual, other, tolerance)) {
failWithoutActual(
numericFact("expected not to be", other),
numericFact("but was", actual),
numericFact("within tolerance", tolerance));
}
});
}
/**
* Asserts that the actual value is exactly equal to the given value, with equality defined as by
* {@link Float#equals}. This method is <i>not</i> recommended when the code under test is doing
* any kind of arithmetic: use {@link #isWithin} with a suitable tolerance in that case. (Remember
* that the exact result of floating point arithmetic is sensitive to apparently trivial changes
* such as replacing {@code (a + b) + c} with {@code a + (b + c)}.) This method is recommended
* when the code under test is specified as either copying a value without modification from its
* input or returning a well-defined literal or constant value.
*
* <p><b>Note:</b> The assertion {@code isEqualTo(0.0f)} fails for an input of {@code -0.0f}, and
* vice versa. For an assertion that passes for either {@code 0.0f} or {@code -0.0f}, use {@link
* #isZero}.
*/
@Override
public void isEqualTo(@Nullable Object expected) {
super.isEqualTo(expected);
}
/**
* Asserts that the actual value is not exactly equal to the given value, with equality defined as
* by {@link Float#equals}. See {@link #isEqualTo} for advice on when exact equality is
* recommended. Use {@link #isNotWithin} for an assertion with a tolerance.
*
* <p><b>Note:</b> The assertion {@code isNotEqualTo(0.0f)} passes for {@code -0.0f}, and vice
* versa. For an assertion that fails for either {@code 0.0f} or {@code -0.0f}, use {@link
* #isNonZero}.
*/
@Override
public void isNotEqualTo(@Nullable Object other) {
super.isNotEqualTo(other);
}
/**
* @deprecated Use {@link #isWithin} or {@link #isEqualTo} instead (see documentation for advice).
*/
@Override
@Deprecated
public void isEquivalentAccordingToCompareTo(@Nullable Float expected) {
super.isEquivalentAccordingToCompareTo(expected);
}
/** Asserts that the actual value is zero (i.e. it is either {@code 0.0f} or {@code -0.0f}). */
public void isZero() {
if (actual == null || actual != 0.0f) {
failWithActual(simpleFact("expected zero"));
}
}
/**
* Asserts that the actual value is a non-null value other than zero (i.e. it is not {@code 0.0f},
* {@code -0.0f} or {@code null}).
*/
public void isNonZero() {
if (actual == null) {
failWithActual(simpleFact("expected a float other than zero"));
} else if (actual == 0.0f) {
failWithActual(simpleFact("expected not to be zero"));
}
}
/** Asserts that the actual value is {@link Float#POSITIVE_INFINITY}. */
public void isPositiveInfinity() {
isEqualTo(POSITIVE_INFINITY);
}
/** Asserts that the actual value is {@link Float#NEGATIVE_INFINITY}. */
public void isNegativeInfinity() {
isEqualTo(NEGATIVE_INFINITY);
}
/** Asserts that the actual value is {@link Float#NaN}. */
public void isNaN() {
isEqualTo(NaN);
}
/**
* Asserts that the actual value is finite, i.e. not {@link Float#POSITIVE_INFINITY}, {@link
* Float#NEGATIVE_INFINITY}, or {@link Float#NaN}.
*/
public void isFinite() {
if (actual == null || actual.isNaN() || actual.isInfinite()) {
failWithActual(simpleFact("expected to be finite"));
}
}
/**
* Asserts that the actual value is a non-null value other than {@link Float#NaN} (but it may be
* {@link Float#POSITIVE_INFINITY} or {@link Float#NEGATIVE_INFINITY}).
*/
public void isNotNaN() {
if (actual == null) {
failWithActual(simpleFact("expected a float other than NaN"));
} else {
isNotEqualTo(NaN);
}
}
/**
* Checks that the actual value is greater than {@code other}.
*
* <p>To check that the actual value is greater than <i>or equal to</i> {@code other}, use {@link
* #isAtLeast}.
*/
public void isGreaterThan(int other) {
/*
* We must perform the comparison as a `double` in order to compare `float` to `int` without
* loss of precision.
*
* The only downside to delegating to `DoubleSubject` should be that we may display the actual
* value with greater precision than would be required to uniquely identify it as a `float`.
* (Similarly, we will display the `int` as a `double`. But that may be just as well for
* consistency reasons.)
*
* We could instead perform the comparison manually here, but it would require duplicating the
* code from `ComparableSubject.isGreaterThan`.
*/
asDouble.isGreaterThan(other);
}
/**
* Checks that the actual value is less than {@code other}.
*
* <p>To check that the actual value is less than <i>or equal to</i> {@code other}, use {@link
* #isAtMost} .
*/
public void isLessThan(int other) {
// For discussion of this delegation, see isGreaterThan.
asDouble.isLessThan(other);
}
/**
* Checks that the actual value is less than or equal to {@code other}.
*
* <p>To check that the actual value is <i>strictly</i> less than {@code other}, use {@link
* #isLessThan}.
*/
public void isAtMost(int other) {
// For discussion of this delegation, see isGreaterThan.
asDouble.isAtMost(other);
}
/**
* Checks that the actual value is greater than or equal to {@code other}.
*
* <p>To check that the actual value is <i>strictly</i> greater than {@code other}, use {@link
* #isGreaterThan}.
*/
public void isAtLeast(int other) {
// For discussion of this delegation, see isGreaterThan.
asDouble.isAtLeast(other);
}
static Factory<FloatSubject, Float> floats() {
return FloatSubject::new;
}
}
| FloatComparer |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesBeanTests.java | {
"start": 21944,
"end": 22222
} | class ____ implements ImportSelector {
@Override
public String[] selectImports(AnnotationMetadata importingClassMetadata) {
return new String[] { NonAnnotatedBeanConfiguration.class.getName() };
}
}
@ConfigurationProperties
| NonAnnotatedBeanConfigurationImportSelector |
java | quarkusio__quarkus | extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/vertx/EventBusInstrumenterVertxTracer.java | {
"start": 4399,
"end": 5852
} | enum ____ implements MessagingAttributesGetter<Message, Message> {
INSTANCE;
@Override
public String getSystem(final Message message) {
return "vert.x";
}
@Override
public String getDestination(final Message message) {
return message.address();
}
@Override
public String getDestinationTemplate(Message message) {
return "";
}
@Override
public boolean isTemporaryDestination(final Message message) {
return false;
}
@Override
public boolean isAnonymousDestination(Message message) {
return false;
}
@Override
public String getConversationId(final Message message) {
return message.replyAddress();
}
@Override
public Long getMessageBodySize(Message message) {
return 0L;
}
@Override
public Long getMessageEnvelopeSize(Message message) {
return 0L;
}
@Override
public String getMessageId(final Message message, final Message message2) {
return null;
}
@Override
public String getClientId(Message message) {
return "";
}
@Override
public Long getBatchMessageCount(Message message, Message message2) {
return 0L;
}
}
}
| EventBusAttributesGetter |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 97353,
"end": 97799
} | class ____ implements Procedure {
@ProcedureHint(input = @DataTypeHint("INT"))
public Integer[] call(Object procedureContext, Integer n) {
return null;
}
@ProcedureHint(input = @DataTypeHint("STRING"))
public Integer[] call(Object procedureContext, String n) {
return null;
}
}
@ProcedureHint(output = @DataTypeHint("INT"))
private static | GlobalOutputProcedureHint |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/completion/GoogleVertexAiChatCompletionServiceSettings.java | {
"start": 2404,
"end": 2509
} | class ____ the settings required to configure a Google Vertex AI chat completion service.
*/
public | contains |
java | elastic__elasticsearch | libs/lz4/src/test/java/org/elasticsearch/lz4/AbstractLZ4TestCase.java | {
"start": 1519,
"end": 1803
} | interface ____<T> {
T allocate(int length);
T copyOf(byte[] array);
byte[] copyOf(T data, int off, int len);
int maxCompressedLength(int len);
void fill(T instance, byte b);
// Modified to remove redundant modifiers
| TesterBase |
java | spring-projects__spring-security | docs/src/test/java/org/springframework/security/docs/servlet/test/testmethodmetaannotations/WithMockUserTests.java | {
"start": 1546,
"end": 1968
} | class ____ {
@Autowired
MessageService messageService;
@Test
// tag::snippet[]
@WithMockUser(username = "admin", roles = {"USER", "ADMIN"})
// end::snippet[]
void getMessageWithMockUserAdminRoles() {
String message = messageService.getMessage();
assertThat(message)
.contains("admin")
.contains("ROLE_ADMIN")
.contains("ROLE_USER");
}
@EnableMethodSecurity
@Configuration
static | WithMockUserTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1100/Issue969.java | {
"start": 397,
"end": 1311
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
JSONObject jsonObject = new JSONObject();
JSONArray jsonArray = new JSONArray();
jsonArray.add(new Model());
jsonObject.put("models", jsonArray);
List list = jsonObject.getObject("models", new TypeReference<List<Model>>(){});
assertEquals(1, list.size());
assertEquals(Model.class, list.get(0).getClass());
}
public void test_for_issue_1() throws Exception {
JSONObject jsonObject = new JSONObject();
JSONArray jsonArray = new JSONArray();
jsonArray.add(new Model());
jsonObject.put("models", jsonArray);
List list = jsonObject.getObject("models", new TypeReference<List<Model>>(){}.getType());
assertEquals(1, list.size());
assertEquals(Model.class, list.get(0).getClass());
}
public static | Issue969 |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractUniversalComparableAssert.java | {
"start": 957,
"end": 2086
} | class ____ better compatibility than {@link ComparableAssert} and related implementations, currently limited
* due to the upper bound of {@link ComparableAssert}'s type parameters.
* <p>
* Let's take an example with a class <code>Name</code> implementing <code>Comparable<Name></code>.
* <pre><code class='java'> Comparable<Name> name1 = new Name("abc");</code></pre>
* <p>
* The following does not compile or work as expected:
* <pre><code class='java'> // does not compile as assertThat(name1) resolves to Object assertions
* assertThat(name1).isLessThanOrEqualTo(name1);
*
* // compiles fine but requires a raw Comparable cast (assertThat resolves to AbstractComparableAssert)
* assertThat((Comparable)name1).isLessThanOrEqualTo(name1);
*
* // does not compile: Cannot infer type arguments for GenericComparableAssert<>
* new GenericComparableAssert<>(name1).isLessThanOrEqualTo(name3);
*
* // compiles fine with the concrete type (assertThat resolves to AbstractComparableAssert)
* Name name = name1;
* assertThat(name).isEqualByComparingTo(name);</code></pre>
* <p>
* This | offers |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/mixins/MixInRemovalTest.java | {
"start": 456,
"end": 597
} | class ____ {
@JSONField(name = "apple")
public int a;
@JSONField(name = "banana")
public int b;
}
| MixIn1 |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/NoVersionAttributeException.java | {
"start": 1229,
"end": 1600
} | class ____ extends PathIOException {
/**
* Constructs a NoVersionAttributeException.
*
* @param path the path accessed when the condition was detected
* @param message a message providing more details about the condition
*/
public NoVersionAttributeException(String path,
String message) {
super(path, message);
}
}
| NoVersionAttributeException |
java | quarkusio__quarkus | integration-tests/oidc-wiremock/src/main/java/io/quarkus/it/keycloak/CustomTenantResolver.java | {
"start": 235,
"end": 1765
} | class ____ implements TenantResolver {
@Override
public String resolve(RoutingContext context) {
String path = context.normalizedPath();
// `/hr-classic-perm-check` and '/hr-classic-and-jaxrs-perm-check'
// require policy checks which force an authentication before @Tenant is resolved
if (path.contains("/hr") && !path.contains("/hr-classic-perm-check")
&& !path.contains("/hr-classic-and-jaxrs-perm-check")) {
throw new RuntimeException("@Tenant annotation only must be used to set "
+ "a tenant id on the '" + path + "' request path");
}
if (context.get(OidcUtils.TENANT_ID_ATTRIBUTE) != null) {
if (context.get(OidcUtils.TENANT_ID_SET_BY_SESSION_COOKIE) == null
&& context.get(OidcUtils.TENANT_ID_SET_BY_STATE_COOKIE) == null) {
throw new RuntimeException("Tenant id must have been set by either the session or state cookie");
}
// Expect an already resolved tenant context be used
return null;
}
if (path.contains("recovered-no-discovery")) {
return "no-discovery";
}
if (path.endsWith("code-flow") || path.endsWith("code-flow/logout")) {
return "code-flow";
}
if (path.endsWith("code-flow-form-post") || path.endsWith("code-flow-form-post/front-channel-logout")) {
return "code-flow-form-post";
}
return null;
}
}
| CustomTenantResolver |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/fuse/LinearConfig.java | {
"start": 441,
"end": 659
} | enum ____ {
NONE,
L2_NORM,
MINMAX
}
public static String NORMALIZER = "normalizer";
public static LinearConfig DEFAULT_CONFIG = new LinearConfig(Normalizer.NONE, Map.of());
}
| Normalizer |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/QualifierAnnotationTests.java | {
"start": 11945,
"end": 12174
} | class ____ {
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Qualifier("special")
@SimpleValueQualifier("special")
private static | Person |
java | apache__camel | components/camel-braintree/src/generated/java/org/apache/camel/component/braintree/ClientTokenGatewayEndpointConfiguration.java | {
"start": 628,
"end": 912
} | class ____ not need to be instantiated directly",
apiMethods = {@ApiMethod(methodName = "generate", signatures={"String generate()", "String generate(com.braintreegateway.ClientTokenRequest request)"}), }, aliases = {})
@UriParams
@Configurer(extended = true)
public final | does |
java | apache__camel | components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithToolsIT.java | {
"start": 1924,
"end": 9605
} | class ____ extends CamelTestSupport {
private static final String USER_DB_NAME = "John Doe";
private static final String WEATHER_INFO = "sunny, 25°C";
private static final String WEATHER_INFO_1 = "sunny";
private static final String WEATHER_INFO_2 = "25";
protected ChatModel chatModel;
@RegisterExtension
static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
? null
: OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) : ModelHelper.loadFromEnv();
}
@Test
void testAgentWithUserDatabaseTools() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String response = template.requestBody(
"direct:agent-with-user-tools",
"What is the name of user ID 123?",
String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertTrue(response.contains(USER_DB_NAME),
"Response should contain the user name from the database tool");
}
@Test
void testAgentWithWeatherTools() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String response = template.requestBody(
"direct:agent-with-weather-tools",
"What's the weather like in New York?",
String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertTrue(response.toLowerCase().contains(WEATHER_INFO_1),
"Response should contain weather information from the weather tool");
assertTrue(response.toLowerCase().contains(WEATHER_INFO_2),
"Response should contain weather information from the weather tool");
}
@Test
void testAgentWithMultipleTagsAndChatMessages() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String systemMessage = "You are a helpful assistant that can access user database and weather information. " +
"Use the available tools to provide accurate information.";
String userMessage = "Can you tell me the name of user 123 and the weather in New York?";
AiAgentBody<?> aiAgentBody = new AiAgentBody<>(systemMessage, userMessage, null);
String response = template.requestBody(
"direct:agent-with-multiple-tools",
aiAgentBody,
String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertTrue(response.contains(USER_DB_NAME),
"Response should contain the user name from the database tool");
assertTrue(response.toLowerCase().contains(WEATHER_INFO_1),
"Response should contain weather information from the weather tool");
assertTrue(response.toLowerCase().contains(WEATHER_INFO_2),
"Response should contain weather information from the weather tool");
}
@Test
void testAgentWithConfiguredTags() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String response = template.requestBody(
"direct:agent-with-configured-tags",
"What's the weather in Paris?",
String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertTrue(response.toLowerCase().contains(WEATHER_INFO_1),
"Response should contain weather information from the weather tool");
assertTrue(response.toLowerCase().contains(WEATHER_INFO_2),
"Response should contain weather information from the weather tool");
}
@Test
void testAgentWithoutToolsNoTagsProvided() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String response = template.requestBody(
"direct:agent-without-tools",
"What is Apache Camel?",
String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertTrue(response.contains("Apache Camel"),
"Response should contain information about Apache Camel");
}
@Override
protected RouteBuilder createRouteBuilder() {
// Create agent configuration for tools testing (no memory, RAG, or guardrails)
AgentConfiguration configuration = new AgentConfiguration()
.withChatModel(chatModel)
.withInputGuardrailClasses(List.of())
.withOutputGuardrailClasses(List.of());
Agent agentWithTools = new AgentWithoutMemory(configuration);
// Register agent in the context
this.context.getRegistry().bind("agentWithTools", agentWithTools);
return new RouteBuilder() {
public void configure() {
from("direct:agent-with-user-tools")
.to("langchain4j-agent:test-agent?agent=#agentWithTools&tags=users")
.to("mock:agent-response");
from("direct:agent-with-weather-tools")
.to("langchain4j-agent:test-agent?agent=#agentWithTools&tags=weather")
.to("mock:agent-response");
from("direct:agent-with-multiple-tools")
.to("langchain4j-agent:test-agent?agent=#agentWithTools&tags=users,weather")
.to("mock:agent-response");
from("direct:agent-with-configured-tags")
.to("langchain4j-agent:test-agent?agent=#agentWithTools&tags=weather")
.to("mock:agent-response");
from("direct:agent-without-tools")
.to("langchain4j-agent:test-agent?agent=#agentWithTools")
.to("mock:agent-response");
from("direct:agent-check-no-tools")
.to("langchain4j-agent:test-agent?agent=#agentWithTools&tags=nonexistent")
.to("mock:check-no-tools");
from("langchain4j-tools:userDb?tags=users&description=Query user database by user ID¶meter.userId=integer")
.setBody(constant("{\"name\": \"" + USER_DB_NAME + "\", \"id\": \"123\"}"));
from("langchain4j-tools:weatherService?tags=weather&description=Get weather information for a city¶meter.city=string")
.setBody(constant("{\"weather\": \"" + WEATHER_INFO + "\", \"city\": \"New York\"}"));
from("langchain4j-tools:parisWeather?tags=weather&description=Get weather information for Paris¶meter.location=string")
.setBody(constant("{\"weather\": \"" + WEATHER_INFO + "\", \"city\": \"Paris\"}"));
}
};
}
}
| LangChain4jAgentWithToolsIT |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeEqualIgnoringWhitespace.java | {
"start": 851,
"end": 1625
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link org.assertj.core.error.ShouldNotBeEqualIgnoringWhitespace}</code>.
* @param actual the actual value in the failed assertion.
* @param expected the expected value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotBeEqualIgnoringWhitespace(CharSequence actual, CharSequence expected) {
return new ShouldNotBeEqualIgnoringWhitespace(actual, expected);
}
private ShouldNotBeEqualIgnoringWhitespace(CharSequence actual, CharSequence expected) {
super("%nExpecting actual:%n %s%nnot to be equal to:%n %s%nignoring whitespace differences", actual, expected);
}
}
| ShouldNotBeEqualIgnoringWhitespace |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemafilter/SchemaFilterTest.java | {
"start": 5250,
"end": 5918
} | class ____ implements SchemaFilter {
@Override
public boolean includeNamespace(Namespace namespace) {
// exclude schema "the_schema_2"
Identifier identifier = namespace.getName().schema();
if ( identifier != null ) {
return !"the_schema_2".equals( identifier.getText() );
}
return true;
}
@Override
public boolean includeTable(Table table) {
// exclude table "the_entity_2"
return !"the_entity_2".equals( table.getName() );
}
@Override
public boolean includeSequence(Sequence sequence) {
return true;
}
}
@Entity
@jakarta.persistence.Table(name = "the_entity_1", schema = "the_schema_1")
public static | TestSchemaFilter |
java | apache__maven | compat/maven-toolchain-builder/src/main/java/org/apache/maven/toolchain/building/DefaultToolchainsBuilder.java | {
"start": 2087,
"end": 7458
} | class ____ implements ToolchainsBuilder {
private MavenToolchainMerger toolchainsMerger = new MavenToolchainMerger();
@Inject
private ToolchainsWriter toolchainsWriter;
@Inject
private ToolchainsReader toolchainsReader;
@Override
public ToolchainsBuildingResult build(ToolchainsBuildingRequest request) throws ToolchainsBuildingException {
ProblemCollector problems = ProblemCollectorFactory.newInstance(null);
PersistedToolchains globalToolchains = readToolchains(request.getGlobalToolchainsSource(), request, problems);
PersistedToolchains userToolchains = readToolchains(request.getUserToolchainsSource(), request, problems);
toolchainsMerger.merge(userToolchains, globalToolchains, TrackableBase.GLOBAL_LEVEL);
problems.setSource("");
userToolchains = interpolate(userToolchains, problems);
if (hasErrors(problems.getProblems())) {
throw new ToolchainsBuildingException(problems.getProblems());
}
return new DefaultToolchainsBuildingResult(userToolchains, problems.getProblems());
}
private PersistedToolchains interpolate(PersistedToolchains toolchains, ProblemCollector problems) {
StringWriter stringWriter = new StringWriter(1024 * 4);
try {
toolchainsWriter.write(stringWriter, null, toolchains);
} catch (IOException e) {
throw new IllegalStateException("Failed to serialize toolchains to memory", e);
}
String serializedToolchains = stringWriter.toString();
RegexBasedInterpolator interpolator = new RegexBasedInterpolator();
try {
interpolator.addValueSource(new EnvarBasedValueSource());
} catch (IOException e) {
problems.add(
Problem.Severity.WARNING,
"Failed to use environment variables for interpolation: " + e.getMessage(),
-1,
-1,
e);
}
interpolator.addPostProcessor(new InterpolationPostProcessor() {
@Override
public Object execute(String expression, Object value) {
if (value != null) {
// we're going to parse this back in as XML so we need to escape XML markup
value = value.toString()
.replace("&", "&")
.replace("<", "<")
.replace(">", ">");
return value;
}
return null;
}
});
try {
serializedToolchains = interpolator.interpolate(serializedToolchains);
} catch (InterpolationException e) {
problems.add(Problem.Severity.ERROR, "Failed to interpolate toolchains: " + e.getMessage(), -1, -1, e);
return toolchains;
}
PersistedToolchains result;
try {
Map<String, ?> options = Collections.singletonMap(ToolchainsReader.IS_STRICT, Boolean.FALSE);
result = toolchainsReader.read(new StringReader(serializedToolchains), options);
} catch (IOException e) {
problems.add(Problem.Severity.ERROR, "Failed to interpolate toolchains: " + e.getMessage(), -1, -1, e);
return toolchains;
}
return result;
}
private PersistedToolchains readToolchains(
Source toolchainsSource, ToolchainsBuildingRequest request, ProblemCollector problems) {
if (toolchainsSource == null) {
return new PersistedToolchains();
}
PersistedToolchains toolchains;
try {
Map<String, ?> options = Collections.singletonMap(ToolchainsReader.IS_STRICT, Boolean.TRUE);
try {
toolchains = toolchainsReader.read(toolchainsSource.getInputStream(), options);
} catch (ToolchainsParseException e) {
options = Collections.singletonMap(ToolchainsReader.IS_STRICT, Boolean.FALSE);
toolchains = toolchainsReader.read(toolchainsSource.getInputStream(), options);
problems.add(Problem.Severity.WARNING, e.getMessage(), e.getLineNumber(), e.getColumnNumber(), e);
}
} catch (ToolchainsParseException e) {
problems.add(
Problem.Severity.FATAL,
"Non-parseable toolchains " + toolchainsSource.getLocation() + ": " + e.getMessage(),
e.getLineNumber(),
e.getColumnNumber(),
e);
return new PersistedToolchains();
} catch (IOException e) {
problems.add(
Problem.Severity.FATAL,
"Non-readable toolchains " + toolchainsSource.getLocation() + ": " + e.getMessage(),
-1,
-1,
e);
return new PersistedToolchains();
}
return toolchains;
}
private boolean hasErrors(List<Problem> problems) {
if (problems != null) {
for (Problem problem : problems) {
if (Problem.Severity.ERROR.compareTo(problem.getSeverity()) >= 0) {
return true;
}
}
}
return false;
}
}
| DefaultToolchainsBuilder |
java | elastic__elasticsearch | x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java | {
"start": 19855,
"end": 20066
} | class ____ extends LocalStateCompositeXPackPlugin {
public LocalStateEsqlAsync(final Settings settings, final Path configPath) {
super(settings, configPath);
}
}
}
| LocalStateEsqlAsync |
java | quarkusio__quarkus | extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/WithDuplicateAttributeFilter.java | {
"start": 243,
"end": 1670
} | class ____ {
@Funq
@CloudEventMapping(trigger = "listOfStrings", attributes = { @EventAttribute(name = "source", value = "test") })
public String toCommaSeparated(List<Identity> identityList) {
return identityList
.stream()
.map(Identity::getName)
.collect(Collectors.joining(","));
}
@Funq
@CloudEventMapping(trigger = "listOfStrings", attributes = { @EventAttribute(name = "source", value = "test") })
public String toSemicolonSeparated(List<Identity> identityList) {
return identityList
.stream()
.map(Identity::getName)
.collect(Collectors.joining(";"));
}
@Funq
@CloudEventMapping(trigger = "toDashSeparated", attributes = { @EventAttribute(name = "source", value = "test") })
public String toDashSeparated(List<Identity> identityList) {
return identityList
.stream()
.map(Identity::getName)
.collect(Collectors.joining("-"));
}
@Funq
@CloudEventMapping(trigger = "toDashSeparated", attributes = { @EventAttribute(name = "source", value = "test") })
public String toColonSeparated(List<Identity> identityList) {
return identityList
.stream()
.map(Identity::getName)
.collect(Collectors.joining(":"));
}
}
| WithDuplicateAttributeFilter |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/OpportunisticContainerAllocatorAMService.java | {
"start": 5198,
"end": 5771
} | class ____
extends ApplicationMasterService implements DistributedSchedulingAMProtocol,
EventHandler<SchedulerEvent> {
private static final Logger LOG =
LoggerFactory.getLogger(OpportunisticContainerAllocatorAMService.class);
private final NodeQueueLoadMonitor nodeMonitor;
private final OpportunisticContainerAllocator oppContainerAllocator;
private final int numNodes;
private final long cacheRefreshInterval;
private volatile List<RemoteNode> cachedNodes;
private volatile long lastCacheUpdateTime;
| OpportunisticContainerAllocatorAMService |
java | elastic__elasticsearch | test/external-modules/jvm-crash/src/javaRestTest/java/org/elasticsearch/test/jvm_crash/JvmCrashIT.java | {
"start": 2424,
"end": 7622
} | class ____ extends AbstractLocalClusterSpecBuilder<ElasticsearchCluster> {
private StdOutCatchingClusterBuilder() {
this.settings(new DefaultSettingsProvider());
this.environment(new DefaultEnvironmentProvider());
}
@Override
public ElasticsearchCluster build() {
// redirect stdout before the nodes start up
// they are referenced directly by ProcessUtils, so can't be changed afterwards
redirectStdout();
return new DefaultLocalElasticsearchCluster<>(
this::buildClusterSpec,
new DefaultLocalClusterFactory(
new LocalDistributionResolver(new SnapshotDistributionResolver(new ReleasedDistributionResolver()))
)
);
}
}
private static PrintStream originalOut;
private static ByteArrayOutputStream stdOutput;
private static void redirectStdout() {
if (originalOut == null) {
originalOut = System.out;
stdOutput = new ByteArrayOutputStream();
// this duplicates the crash messages, but not the log output. That's ok.
System.setOut(new TeePrintStream(originalOut, stdOutput));
}
}
@ClassRule
public static ElasticsearchCluster cluster = new StdOutCatchingClusterBuilder().distribution(DistributionType.INTEG_TEST)
.nodes(1)
.module("test-jvm-crash")
.setting("xpack.security.enabled", "false")
.jvmArg("-Djvm.crash=true")
.build();
@AfterClass
public static void resetStdout() {
if (originalOut != null) {
System.setOut(originalOut);
}
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
public void testJvmCrash() throws Exception {
final long pid = getElasticsearchPid();
assertJvmArgs(pid, containsString("-Djvm.crash=true"));
expectThrows(IOException.class, () -> client().performRequest(new Request("GET", "/_crash")));
// the Elasticsearch process should die
assertBusy(() -> assertJvmArgs(pid, not(containsString("-Djvm.crash=true"))));
// parse the logs and ensure that Elasticsearch died with the expected cause
assertThat(
stdOutput,
hasToString(
matchesRegex(
Pattern.compile(".*# A fatal error has been detected by the Java Runtime Environment:.*SIGSEGV.*", Pattern.DOTALL)
)
)
);
}
private Process startJcmd(long pid) throws IOException {
final String jcmdPath = PathUtils.get(System.getProperty("java.home"), "bin/jcmd").toString();
return new ProcessBuilder().command(jcmdPath, Long.toString(pid), "VM.command_line").redirectErrorStream(true).start();
}
private void assertJvmArgs(long pid, Matcher<String> matcher) throws IOException, InterruptedException {
Process jcmdProcess = startJcmd(pid);
if (Constants.WINDOWS) {
// jcmd on windows appears to have a subtle bug where if the process being connected to
// dies while jcmd is running, it can hang indefinitely. Here we detect this case by
// waiting a fixed amount of time, and then killing/retrying the process
boolean exited = jcmdProcess.waitFor(10, TimeUnit.SECONDS);
if (exited == false) {
logger.warn("jcmd hung, killing process and retrying");
jcmdProcess.destroyForcibly();
jcmdProcess = startJcmd(pid);
}
}
List<String> outputLines = readLines(jcmdProcess.getInputStream());
String jvmArgs = outputLines.stream().filter(l -> l.startsWith("jvm_args")).findAny().orElse(null);
try {
assertThat(jvmArgs, matcher);
} catch (AssertionError ae) {
logger.error("Failed matcher for jvm pid " + pid);
logger.error("jcmd output: " + String.join("\n", outputLines));
throw ae;
}
}
private long getElasticsearchPid() throws IOException {
Response response = client().performRequest(new Request("GET", "/_nodes/process"));
@SuppressWarnings("unchecked")
var nodesInfo = (Map<String, Object>) entityAsMap(response).get("nodes");
@SuppressWarnings("unchecked")
var nodeInfo = (Map<String, Object>) nodesInfo.values().iterator().next();
@SuppressWarnings("unchecked")
var processInfo = (Map<String, Object>) nodeInfo.get("process");
Object stringPid = processInfo.get("id");
return Long.parseLong(stringPid.toString());
}
private List<String> readLines(InputStream is) throws IOException {
try (BufferedReader in = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
return in.lines().toList();
}
}
@Override
protected boolean preserveClusterUponCompletion() {
// as the cluster is dead its state can not be wiped successfully so we have to bypass wiping the cluster
return true;
}
}
| StdOutCatchingClusterBuilder |
java | google__guava | android/guava-tests/test/com/google/common/math/BigIntegerMathTest.java | {
"start": 2493,
"end": 18608
} | class ____ extends TestCase {
public void testCeilingPowerOfTwo() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
BigInteger result = BigIntegerMath.ceilingPowerOfTwo(x);
assertTrue(BigIntegerMath.isPowerOfTwo(result));
assertThat(result).isAtLeast(x);
assertThat(result).isLessThan(x.add(x));
}
}
public void testFloorPowerOfTwo() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
BigInteger result = BigIntegerMath.floorPowerOfTwo(x);
assertTrue(BigIntegerMath.isPowerOfTwo(result));
assertThat(result).isAtMost(x);
assertThat(result.add(result)).isGreaterThan(x);
}
}
public void testCeilingPowerOfTwoNegative() {
for (BigInteger x : NEGATIVE_BIGINTEGER_CANDIDATES) {
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.ceilingPowerOfTwo(x));
}
}
public void testFloorPowerOfTwoNegative() {
for (BigInteger x : NEGATIVE_BIGINTEGER_CANDIDATES) {
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.floorPowerOfTwo(x));
}
}
public void testCeilingPowerOfTwoZero() {
assertThrows(
IllegalArgumentException.class, () -> BigIntegerMath.ceilingPowerOfTwo(BigInteger.ZERO));
}
public void testFloorPowerOfTwoZero() {
assertThrows(
IllegalArgumentException.class, () -> BigIntegerMath.floorPowerOfTwo(BigInteger.ZERO));
}
@GwtIncompatible // TODO
public void testConstantSqrt2PrecomputedBits() {
assertEquals(
BigIntegerMath.sqrt(
BigInteger.ZERO.setBit(2 * BigIntegerMath.SQRT2_PRECOMPUTE_THRESHOLD + 1), FLOOR),
BigIntegerMath.SQRT2_PRECOMPUTED_BITS);
}
public void testIsPowerOfTwo() {
for (BigInteger x : ALL_BIGINTEGER_CANDIDATES) {
// Checks for a single bit set.
boolean expected = x.signum() > 0 & x.and(x.subtract(ONE)).equals(ZERO);
assertEquals(expected, BigIntegerMath.isPowerOfTwo(x));
}
}
public void testLog2ZeroAlwaysThrows() {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.log2(ZERO, mode));
}
}
public void testLog2NegativeAlwaysThrows() {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertThrows(
IllegalArgumentException.class, () -> BigIntegerMath.log2(BigInteger.valueOf(-1), mode));
}
}
public void testLog2Floor() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : asList(FLOOR, DOWN)) {
int result = BigIntegerMath.log2(x, mode);
assertThat(ZERO.setBit(result)).isAtMost(x);
assertThat(ZERO.setBit(result + 1)).isGreaterThan(x);
}
}
}
public void testLog2Ceiling() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : asList(CEILING, UP)) {
int result = BigIntegerMath.log2(x, mode);
assertThat(ZERO.setBit(result)).isAtLeast(x);
assertTrue(result == 0 || ZERO.setBit(result - 1).compareTo(x) < 0);
}
}
}
// Relies on the correctness of isPowerOfTwo(BigInteger).
public void testLog2Exact() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
// We only expect an exception if x was not a power of 2.
boolean isPowerOf2 = BigIntegerMath.isPowerOfTwo(x);
try {
assertEquals(x, ZERO.setBit(BigIntegerMath.log2(x, UNNECESSARY)));
assertTrue(isPowerOf2);
} catch (ArithmeticException e) {
assertFalse(isPowerOf2);
}
}
}
public void testLog2HalfUp() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int result = BigIntegerMath.log2(x, HALF_UP);
BigInteger x2 = x.pow(2);
// x^2 < 2^(2 * result + 1), or else we would have rounded up
assertThat(ZERO.setBit(2 * result + 1)).isGreaterThan(x2);
// x^2 >= 2^(2 * result - 1), or else we would have rounded down
assertTrue(result == 0 || ZERO.setBit(2 * result - 1).compareTo(x2) <= 0);
}
}
public void testLog2HalfDown() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int result = BigIntegerMath.log2(x, HALF_DOWN);
BigInteger x2 = x.pow(2);
// x^2 <= 2^(2 * result + 1), or else we would have rounded up
assertThat(ZERO.setBit(2 * result + 1)).isAtLeast(x2);
// x^2 > 2^(2 * result - 1), or else we would have rounded down
assertTrue(result == 0 || ZERO.setBit(2 * result - 1).compareTo(x2) < 0);
}
}
// Relies on the correctness of log2(BigInteger, {HALF_UP,HALF_DOWN}).
public void testLog2HalfEven() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int halfEven = BigIntegerMath.log2(x, HALF_EVEN);
// Now figure out what rounding mode we should behave like (it depends if FLOOR was
// odd/even).
boolean floorWasEven = (BigIntegerMath.log2(x, FLOOR) & 1) == 0;
assertEquals(BigIntegerMath.log2(x, floorWasEven ? HALF_DOWN : HALF_UP), halfEven);
}
}
@GwtIncompatible // TODO
public void testLog10ZeroAlwaysThrows() {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.log10(ZERO, mode));
}
}
@GwtIncompatible // TODO
public void testLog10NegativeAlwaysThrows() {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertThrows(
IllegalArgumentException.class, () -> BigIntegerMath.log10(BigInteger.valueOf(-1), mode));
}
}
@GwtIncompatible // TODO
public void testLog10Floor() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : asList(FLOOR, DOWN)) {
int result = BigIntegerMath.log10(x, mode);
assertThat(TEN.pow(result)).isAtMost(x);
assertThat(TEN.pow(result + 1)).isGreaterThan(x);
}
}
}
@GwtIncompatible // TODO
public void testLog10Ceiling() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : asList(CEILING, UP)) {
int result = BigIntegerMath.log10(x, mode);
assertThat(TEN.pow(result)).isAtLeast(x);
assertTrue(result == 0 || TEN.pow(result - 1).compareTo(x) < 0);
}
}
}
// Relies on the correctness of log10(BigInteger, FLOOR).
@GwtIncompatible // TODO
public void testLog10Exact() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int logFloor = BigIntegerMath.log10(x, FLOOR);
boolean expectSuccess = TEN.pow(logFloor).equals(x);
try {
assertEquals(logFloor, BigIntegerMath.log10(x, UNNECESSARY));
assertTrue(expectSuccess);
} catch (ArithmeticException e) {
assertFalse(expectSuccess);
}
}
}
@GwtIncompatible // TODO
public void testLog10HalfUp() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int result = BigIntegerMath.log10(x, HALF_UP);
BigInteger x2 = x.pow(2);
// x^2 < 10^(2 * result + 1), or else we would have rounded up
assertThat(TEN.pow(2 * result + 1)).isGreaterThan(x2);
// x^2 >= 10^(2 * result - 1), or else we would have rounded down
assertTrue(result == 0 || TEN.pow(2 * result - 1).compareTo(x2) <= 0);
}
}
@GwtIncompatible // TODO
public void testLog10HalfDown() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int result = BigIntegerMath.log10(x, HALF_DOWN);
BigInteger x2 = x.pow(2);
// x^2 <= 10^(2 * result + 1), or else we would have rounded up
assertThat(TEN.pow(2 * result + 1)).isAtLeast(x2);
// x^2 > 10^(2 * result - 1), or else we would have rounded down
assertTrue(result == 0 || TEN.pow(2 * result - 1).compareTo(x2) < 0);
}
}
// Relies on the correctness of log10(BigInteger, {HALF_UP,HALF_DOWN}).
@GwtIncompatible // TODO
public void testLog10HalfEven() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
int halfEven = BigIntegerMath.log10(x, HALF_EVEN);
// Now figure out what rounding mode we should behave like (it depends if FLOOR was
// odd/even).
boolean floorWasEven = (BigIntegerMath.log10(x, FLOOR) & 1) == 0;
assertEquals(BigIntegerMath.log10(x, floorWasEven ? HALF_DOWN : HALF_UP), halfEven);
}
}
@GwtIncompatible // TODO
public void testLog10TrivialOnPowerOf10() {
BigInteger x = BigInteger.TEN.pow(100);
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertEquals(100, BigIntegerMath.log10(x, mode));
}
}
@GwtIncompatible // TODO
public void testSqrtZeroAlwaysZero() {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertEquals(ZERO, BigIntegerMath.sqrt(ZERO, mode));
}
}
@GwtIncompatible // TODO
public void testSqrtNegativeAlwaysThrows() {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertThrows(
IllegalArgumentException.class, () -> BigIntegerMath.sqrt(BigInteger.valueOf(-1), mode));
}
}
@GwtIncompatible // TODO
public void testSqrtFloor() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : asList(FLOOR, DOWN)) {
BigInteger result = BigIntegerMath.sqrt(x, mode);
assertThat(result).isGreaterThan(ZERO);
assertThat(result.pow(2)).isAtMost(x);
assertThat(result.add(ONE).pow(2)).isGreaterThan(x);
}
}
}
@GwtIncompatible // TODO
public void testSqrtCeiling() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : asList(CEILING, UP)) {
BigInteger result = BigIntegerMath.sqrt(x, mode);
assertThat(result).isGreaterThan(ZERO);
assertThat(result.pow(2)).isAtLeast(x);
assertTrue(result.signum() == 0 || result.subtract(ONE).pow(2).compareTo(x) < 0);
}
}
}
// Relies on the correctness of sqrt(BigInteger, FLOOR).
@GwtIncompatible // TODO
public void testSqrtExact() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
BigInteger floor = BigIntegerMath.sqrt(x, FLOOR);
// We only expect an exception if x was not a perfect square.
boolean isPerfectSquare = floor.pow(2).equals(x);
try {
assertEquals(floor, BigIntegerMath.sqrt(x, UNNECESSARY));
assertTrue(isPerfectSquare);
} catch (ArithmeticException e) {
assertFalse(isPerfectSquare);
}
}
}
@GwtIncompatible // TODO
public void testSqrtHalfUp() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
BigInteger result = BigIntegerMath.sqrt(x, HALF_UP);
BigInteger plusHalfSquared = result.pow(2).add(result).shiftLeft(2).add(ONE);
BigInteger x4 = x.shiftLeft(2);
// sqrt(x) < result + 0.5, so 4 * x < (result + 0.5)^2 * 4
// (result + 0.5)^2 * 4 = (result^2 + result)*4 + 1
assertThat(plusHalfSquared).isGreaterThan(x4);
BigInteger minusHalfSquared = result.pow(2).subtract(result).shiftLeft(2).add(ONE);
// sqrt(x) > result - 0.5, so 4 * x > (result - 0.5)^2 * 4
// (result - 0.5)^2 * 4 = (result^2 - result)*4 + 1
assertTrue(result.equals(ZERO) || x4.compareTo(minusHalfSquared) >= 0);
}
}
@GwtIncompatible // TODO
public void testSqrtHalfDown() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
BigInteger result = BigIntegerMath.sqrt(x, HALF_DOWN);
BigInteger plusHalfSquared = result.pow(2).add(result).shiftLeft(2).add(ONE);
BigInteger x4 = x.shiftLeft(2);
// sqrt(x) <= result + 0.5, so 4 * x <= (result + 0.5)^2 * 4
// (result + 0.5)^2 * 4 = (result^2 + result)*4 + 1
assertThat(plusHalfSquared).isAtLeast(x4);
BigInteger minusHalfSquared = result.pow(2).subtract(result).shiftLeft(2).add(ONE);
// sqrt(x) > result - 0.5, so 4 * x > (result - 0.5)^2 * 4
// (result - 0.5)^2 * 4 = (result^2 - result)*4 + 1
assertTrue(result.equals(ZERO) || x4.compareTo(minusHalfSquared) > 0);
}
}
// Relies on the correctness of sqrt(BigInteger, {HALF_UP,HALF_DOWN}).
@GwtIncompatible // TODO
public void testSqrtHalfEven() {
for (BigInteger x : POSITIVE_BIGINTEGER_CANDIDATES) {
BigInteger halfEven = BigIntegerMath.sqrt(x, HALF_EVEN);
// Now figure out what rounding mode we should behave like (it depends if FLOOR was
// odd/even).
boolean floorWasOdd = BigIntegerMath.sqrt(x, FLOOR).testBit(0);
assertEquals(BigIntegerMath.sqrt(x, floorWasOdd ? HALF_UP : HALF_DOWN), halfEven);
}
}
@GwtIncompatible // TODO
@AndroidIncompatible // slow
public void testDivNonZero() {
for (BigInteger p : NONZERO_BIGINTEGER_CANDIDATES) {
for (BigInteger q : NONZERO_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) {
BigInteger expected =
new BigDecimal(p).divide(new BigDecimal(q), 0, mode).toBigIntegerExact();
assertEquals(expected, BigIntegerMath.divide(p, q, mode));
}
}
}
}
private static final BigInteger BAD_FOR_ANDROID_P = new BigInteger("-9223372036854775808");
private static final BigInteger BAD_FOR_ANDROID_Q = new BigInteger("-1");
@GwtIncompatible // TODO
@AndroidIncompatible // slow
public void testDivNonZeroExact() {
String runtimeName = System.getProperty("java.runtime.name");
boolean isAndroid = runtimeName != null && runtimeName.contains("Android");
for (BigInteger p : NONZERO_BIGINTEGER_CANDIDATES) {
for (BigInteger q : NONZERO_BIGINTEGER_CANDIDATES) {
if (isAndroid && p.equals(BAD_FOR_ANDROID_P) && q.equals(BAD_FOR_ANDROID_Q)) {
// https://issuetracker.google.com/issues/37074172
continue;
}
boolean dividesEvenly = p.remainder(q).equals(ZERO);
try {
BigInteger quotient = BigIntegerMath.divide(p, q, UNNECESSARY);
BigInteger undone = quotient.multiply(q);
if (!p.equals(undone)) {
failFormat("expected %s.multiply(%s) = %s; got %s", quotient, q, p, undone);
}
assertTrue(dividesEvenly);
} catch (ArithmeticException e) {
assertFalse(dividesEvenly);
}
}
}
}
@GwtIncompatible // TODO
public void testZeroDivIsAlwaysZero() {
for (BigInteger q : NONZERO_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertEquals(ZERO, BigIntegerMath.divide(ZERO, q, mode));
}
}
}
@GwtIncompatible // TODO
public void testDivByZeroAlwaysFails() {
for (BigInteger p : ALL_BIGINTEGER_CANDIDATES) {
for (RoundingMode mode : ALL_ROUNDING_MODES) {
assertThrows(ArithmeticException.class, () -> BigIntegerMath.divide(p, ZERO, mode));
}
}
}
public void testFactorial() {
BigInteger expected = BigInteger.ONE;
for (int i = 1; i <= 200; i++) {
expected = expected.multiply(BigInteger.valueOf(i));
assertEquals(expected, BigIntegerMath.factorial(i));
}
}
public void testFactorial0() {
assertEquals(BigInteger.ONE, BigIntegerMath.factorial(0));
}
public void testFactorialNegative() {
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.factorial(-1));
}
public void testBinomialSmall() {
runBinomialTest(0, 30);
}
@GwtIncompatible // too slow
public void testBinomialLarge() {
runBinomialTest(31, 100);
}
// Depends on the correctness of BigIntegerMath.factorial
private static void runBinomialTest(int firstN, int lastN) {
for (int n = firstN; n <= lastN; n++) {
for (int k = 0; k <= n; k++) {
BigInteger expected =
BigIntegerMath.factorial(n)
.divide(BigIntegerMath.factorial(k))
.divide(BigIntegerMath.factorial(n - k));
assertEquals(expected, BigIntegerMath.binomial(n, k));
}
}
}
public void testBinomialOutside() {
for (int i = 0; i <= 50; i++) {
int n = i;
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.binomial(n, -1));
assertThrows(IllegalArgumentException.class, () -> BigIntegerMath.binomial(n, n + 1));
}
}
@J2ktIncompatible
@GwtIncompatible // EnumSet.complementOf
private static final | BigIntegerMathTest |
java | quarkusio__quarkus | integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/TestRecordRecorderTest.java | {
"start": 256,
"end": 462
} | class ____ {
@Test
public void test() {
assertEquals("foo", TestRecordRecorder.testRecord.name());
assertEquals(100, TestRecordRecorder.testRecord.age());
}
}
| TestRecordRecorderTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java | {
"start": 1277,
"end": 1602
} | class ____ {
public static void registerCommands(CommandFactory factory) {
factory.addClass(MoveFromLocal.class, "-moveFromLocal");
factory.addClass(MoveToLocal.class, "-moveToLocal");
factory.addClass(Rename.class, "-mv");
}
/**
* Move local files to a remote filesystem
*/
public static | MoveCommands |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResolveIndexActionTests.java | {
"start": 1310,
"end": 2875
} | class ____ extends ESTestCase {
public void testAddResolveCrossProjectBasedOnSettingValue() throws Exception {
final boolean cpsEnabled = randomBoolean();
final Settings settings = Settings.builder().put("serverless.cross_project.enabled", cpsEnabled).build();
final var action = new RestResolveIndexAction(settings);
final var request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("/_resolve/index/foo")
.build();
final NodeClient nodeClient = new NodeClient(settings, mock(ThreadPool.class), TestProjectResolvers.DEFAULT_PROJECT_ONLY) {
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
final var resolveIndexRequest = asInstanceOf(ResolveIndexAction.Request.class, request);
assertThat(resolveIndexRequest.indicesOptions().resolveCrossProjectIndexExpression(), equalTo(cpsEnabled));
listener.onResponse((Response) new ResolveIndexAction.Response(List.of(), List.of(), List.of()));
}
};
final var restChannel = new FakeRestChannel(request, true, 1);
action.handleRequest(request, restChannel, nodeClient);
assertThat(restChannel.responses().get(), equalTo(1));
}
}
| RestResolveIndexActionTests |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/access/IpAddressAuthorizationManager.java | {
"start": 1420,
"end": 2402
} | class ____ implements AuthorizationManager<RequestAuthorizationContext> {
private final IpAddressMatcher ipAddressMatcher;
IpAddressAuthorizationManager(String ipAddress) {
this.ipAddressMatcher = new IpAddressMatcher(ipAddress);
}
/**
* Creates an instance of {@link IpAddressAuthorizationManager} with the provided IP
* address.
* @param ipAddress the address or range of addresses from which the request must
* @return the new instance
*/
public static IpAddressAuthorizationManager hasIpAddress(String ipAddress) {
Assert.notNull(ipAddress, "ipAddress cannot be null");
return new IpAddressAuthorizationManager(ipAddress);
}
@Override
public AuthorizationResult authorize(Supplier<? extends @Nullable Authentication> authentication,
RequestAuthorizationContext requestAuthorizationContext) {
return new AuthorizationDecision(
this.ipAddressMatcher.matcher(requestAuthorizationContext.getRequest()).isMatch());
}
}
| IpAddressAuthorizationManager |
java | elastic__elasticsearch | distribution/tools/keystore-cli/src/main/java/org/elasticsearch/cli/keystore/HasPasswordKeyStoreCommand.java | {
"start": 859,
"end": 2129
} | class ____ extends KeyStoreAwareCommand {
static final int NO_PASSWORD_EXIT_CODE = 1;
HasPasswordKeyStoreCommand() {
super(
"Succeeds if the keystore exists and is password-protected, " + "fails with exit code " + NO_PASSWORD_EXIT_CODE + " otherwise."
);
}
@Override
public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception {
final Path configFile = env.configDir();
final KeyStoreWrapper keyStore = KeyStoreWrapper.load(configFile);
// We handle error printing here so we can respect the "--silent" flag
// We have to throw an exception to get a nonzero exit code
if (keyStore == null) {
terminal.errorPrintln(Terminal.Verbosity.NORMAL, "ERROR: Elasticsearch keystore not found");
throw new UserException(NO_PASSWORD_EXIT_CODE, null);
}
if (keyStore.hasPassword() == false) {
terminal.errorPrintln(Terminal.Verbosity.NORMAL, "ERROR: Keystore is not password-protected");
throw new UserException(NO_PASSWORD_EXIT_CODE, null);
}
terminal.println(Terminal.Verbosity.NORMAL, "Keystore is password-protected");
}
}
| HasPasswordKeyStoreCommand |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 114700,
"end": 115801
} | interface ____ {",
" Builder blam(String x);",
" Builder whut();",
" Baz build();",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor())
.compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining(
"Method without arguments should be a build method returning foo.bar.Baz, or a getter"
+ " method with the same name and type as a property method of foo.bar.Baz, or"
+ " fooBuilder() where foo() or getFoo() is a property method of foo.bar.Baz")
.inFile(javaFileObject)
.onLineContaining("Builder whut()");
}
@Test
public void autoValueBuilderAlienMethod1() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract | Builder |
java | apache__camel | components/camel-zendesk/src/main/java/org/apache/camel/component/zendesk/internal/ZendeskHelper.java | {
"start": 1063,
"end": 1140
} | class ____ {
private ZendeskHelper() {
// hide utility | ZendeskHelper |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/CollectionIncompatibleTypeTest.java | {
"start": 8421,
"end": 8697
} | class ____ extends ArrayList<Integer> {}
public void methodArgumentIsSubclassWithDifferentTypeParameters(
Collection<String> collection, MyArrayList myArrayList) {
// BUG: Diagnostic contains:
collection.containsAll(myArrayList);
}
private static | MyArrayList |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/custom/registration/CustomTypeRegistrationTests.java | {
"start": 820,
"end": 1799
} | class ____ {
@Test
public void verifyModel(DomainModelScope scope) {
scope.withHierarchy( User.class, (userDescriptor) -> {
final Property emailAddressesProperty = userDescriptor.getProperty( "emailAddresses" );
final Collection emailAddressesMapping = (Collection) emailAddressesProperty.getValue();
assertThat( emailAddressesMapping.getCollectionSemantics().getCollectionClassification() ).isEqualTo( LIST );
assertThat( emailAddressesMapping.getCollectionSemantics() ).isInstanceOf( CustomCollectionTypeSemantics.class );
final CustomCollectionTypeSemantics semantics = (CustomCollectionTypeSemantics) emailAddressesMapping.getCollectionSemantics();
assertThat( semantics.getCollectionType() ).isInstanceOf( CustomCollectionType.class );
final CustomCollectionType collectionType = (CustomCollectionType) semantics.getCollectionType();
assertThat( collectionType.getUserType() ).isInstanceOf( MyListType.class );
} );
}
}
| CustomTypeRegistrationTests |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/access/method/MapBasedMethodSecurityMetadataSource.java | {
"start": 1912,
"end": 4860
} | class ____ extends AbstractFallbackMethodSecurityMetadataSource
implements BeanClassLoaderAware {
@SuppressWarnings("NullAway")
private @Nullable ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
/**
* Map from RegisteredMethod to ConfigAttribute list
*/
protected final Map<RegisteredMethod, List<ConfigAttribute>> methodMap = new HashMap<>();
/**
* Map from RegisteredMethod to name pattern used for registration
*/
private final Map<RegisteredMethod, String> nameMap = new HashMap<>();
public MapBasedMethodSecurityMetadataSource() {
}
/**
* Creates the <tt>MapBasedMethodSecurityMetadataSource</tt> from a
* @param methodMap map of method names to <tt>ConfigAttribute</tt>s.
*/
public MapBasedMethodSecurityMetadataSource(Map<String, List<ConfigAttribute>> methodMap) {
for (Map.Entry<String, List<ConfigAttribute>> entry : methodMap.entrySet()) {
addSecureMethod(entry.getKey(), entry.getValue());
}
}
/**
* Implementation does not support class-level attributes.
*/
@Override
protected @Nullable Collection<ConfigAttribute> findAttributes(Class<?> clazz) {
return null;
}
/**
* Will walk the method inheritance tree to find the most specific declaration
* applicable.
*/
@Override
protected @Nullable Collection<ConfigAttribute> findAttributes(Method method, Class<?> targetClass) {
if (targetClass == null) {
return null;
}
return findAttributesSpecifiedAgainst(method, targetClass);
}
private @Nullable List<ConfigAttribute> findAttributesSpecifiedAgainst(Method method, Class<?> clazz) {
RegisteredMethod registeredMethod = new RegisteredMethod(method, clazz);
if (this.methodMap.containsKey(registeredMethod)) {
return this.methodMap.get(registeredMethod);
}
// Search superclass
if (clazz.getSuperclass() != null) {
return findAttributesSpecifiedAgainst(method, clazz.getSuperclass());
}
return null;
}
/**
* Add configuration attributes for a secure method. Method names can end or start
* with <code>*</code> for matching multiple methods.
* @param name type and method name, separated by a dot
* @param attr the security attributes associated with the method
*/
private void addSecureMethod(String name, List<ConfigAttribute> attr) {
int lastDotIndex = name.lastIndexOf(".");
Assert.isTrue(lastDotIndex != -1, () -> "'" + name + "' is not a valid method name: format is FQN.methodName");
String methodName = name.substring(lastDotIndex + 1);
Assert.hasText(methodName, () -> "Method not found for '" + name + "'");
String typeName = name.substring(0, lastDotIndex);
Class<?> type = ClassUtils.resolveClassName(typeName, this.beanClassLoader);
addSecureMethod(type, methodName, attr);
}
/**
* Add configuration attributes for a secure method. Mapped method names can end or
* start with <code>*</code> for matching multiple methods.
* @param javaType target | MapBasedMethodSecurityMetadataSource |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestProtosLegacy.java | {
"start": 319323,
"end": 329342
} | class ____ extends
com.google.protobuf.GeneratedMessage
implements SleepRequestProto2OrBuilder {
// Use SleepRequestProto2.newBuilder() to construct.
private SleepRequestProto2(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SleepRequestProto2(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SleepRequestProto2 defaultInstance;
public static SleepRequestProto2 getDefaultInstance() {
return defaultInstance;
}
public SleepRequestProto2 getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SleepRequestProto2(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
sleepTime_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.Builder.class);
}
public static com.google.protobuf.Parser<SleepRequestProto2> PARSER =
new com.google.protobuf.AbstractParser<SleepRequestProto2>() {
public SleepRequestProto2 parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SleepRequestProto2(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SleepRequestProto2> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional int64 sleep_time = 1;
public static final int SLEEP_TIME_FIELD_NUMBER = 1;
private long sleepTime_;
/**
* <code>optional int64 sleep_time = 1;</code>
*/
public boolean hasSleepTime() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 sleep_time = 1;</code>
*/
public long getSleepTime() {
return sleepTime_;
}
private void initFields() {
sleepTime_ = 0L;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt64(1, sleepTime_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, sleepTime_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2) obj;
boolean result = true;
result = result && (hasSleepTime() == other.hasSleepTime());
if (hasSleepTime()) {
result = result && (getSleepTime()
== other.getSleepTime());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSleepTime()) {
hash = (37 * hash) + SLEEP_TIME_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getSleepTime());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.SleepRequestProto2}
*/
public static final | SleepRequestProto2 |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java | {
"start": 1129,
"end": 3841
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SpatialContainsGeoSourceAndSourceEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator left;
private final EvalOperator.ExpressionEvaluator right;
private final DriverContext driverContext;
private Warnings warnings;
public SpatialContainsGeoSourceAndSourceEvaluator(Source source,
EvalOperator.ExpressionEvaluator left, EvalOperator.ExpressionEvaluator right,
DriverContext driverContext) {
this.source = source;
this.left = left;
this.right = right;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (BytesRefBlock leftBlock = (BytesRefBlock) left.eval(page)) {
try (BytesRefBlock rightBlock = (BytesRefBlock) right.eval(page)) {
return eval(page.getPositionCount(), leftBlock, rightBlock);
}
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += left.baseRamBytesUsed();
baseRamBytesUsed += right.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BooleanBlock eval(int positionCount, BytesRefBlock leftBlock, BytesRefBlock rightBlock) {
try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
boolean allBlocksAreNulls = true;
if (!leftBlock.isNull(p)) {
allBlocksAreNulls = false;
}
if (!rightBlock.isNull(p)) {
allBlocksAreNulls = false;
}
if (allBlocksAreNulls) {
result.appendNull();
continue position;
}
try {
SpatialContains.processGeoSourceAndSource(result, p, leftBlock, rightBlock);
} catch (IllegalArgumentException | IOException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
@Override
public String toString() {
return "SpatialContainsGeoSourceAndSourceEvaluator[" + "left=" + left + ", right=" + right + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(left, right);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | SpatialContainsGeoSourceAndSourceEvaluator |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/additional-source-set-as-dependency/src/main/java/org/acme/GreetingResource.java | {
"start": 195,
"end": 355
} | class ____ implements GreetApi {
@GET
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
return "Hello from Quarkus REST...";
}
}
| GreetingResource |
java | spring-projects__spring-framework | spring-web/src/jmh/java/org/springframework/http/codec/json/Jackson2JsonEncoderBenchmark.java | {
"start": 1679,
"end": 1949
} | class ____ {
/**
* Benchmark data holding {@link Project} to be serialized by the JSON Encoder.
* A {@code projectCount} parameter can be used to grow the size of the object graph to serialize.
*/
@State(Scope.Benchmark)
public static | Jackson2JsonEncoderBenchmark |
java | google__dagger | javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java | {
"start": 47908,
"end": 48528
} | class ____ {",
" @Inject String s;",
" @Inject AllInjections(String s) {}",
" @Inject void s(String s) {}",
"}");
daggerCompiler(file)
.compile(
subject -> {
subject.hasErrorCount(0);
assertSourceMatchesGolden(subject, "test/AllInjections_Factory");
});
}
@Test
public void wildcardDependency() {
Source file =
CompilerTests.javaSource("test.InjectConstructor",
"package test;",
"",
"import java.util.List;",
"import javax.inject.Inject;",
"",
" | AllInjections |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/client/ServiceDiscoveryRegistryTest.java | {
"start": 2821,
"end": 8006
} | class ____ {
public static final String APP_NAME1 = "app1";
public static final String APP_NAME2 = "app2";
public static final String APP_NAME3 = "app3";
private static AbstractServiceNameMapping mapping = mock(AbstractServiceNameMapping.class);
private static Lock lock = new ReentrantLock();
private static URL registryURL =
URL.valueOf("zookeeper://127.0.0.1:2181/org.apache.dubbo.registry.RegistryService");
private static URL url =
URL.valueOf("consumer://127.0.0.1/TestService?interface=TestService1&check=false&protocol=dubbo");
private static NotifyListener testServiceListener = mock(NotifyListener.class);
private static List<ServiceInstance> instanceList1 = new ArrayList<>();
private static List<ServiceInstance> instanceList2 = new ArrayList<>();
private ServiceDiscoveryRegistry serviceDiscoveryRegistry;
private ServiceDiscovery serviceDiscovery;
private MockServiceInstancesChangedListener instanceListener;
private ServiceNameMapping serviceNameMapping;
@BeforeAll
public static void setUp() {
instanceList1.add(new DefaultServiceInstance());
instanceList1.add(new DefaultServiceInstance());
instanceList1.add(new DefaultServiceInstance());
instanceList2.add(new DefaultServiceInstance());
instanceList2.add(new DefaultServiceInstance());
}
@AfterEach
public void teardown() {
FrameworkModel.destroyAll();
}
@BeforeEach
public void init() {
serviceDiscovery = mock(ServiceDiscovery.class);
instanceListener = spy(new MockServiceInstancesChangedListener(Collections.emptySet(), serviceDiscovery));
doNothing().when(instanceListener).onEvent(any());
when(serviceDiscovery.createListener(any())).thenReturn(instanceListener);
when(serviceDiscovery.getInstances(any())).thenReturn(Collections.emptyList());
when(serviceDiscovery.getUrl()).thenReturn(url);
ApplicationModel applicationModel = spy(ApplicationModel.defaultModel());
when(applicationModel.getDefaultExtension(ServiceNameMapping.class)).thenReturn(mapping);
registryURL = registryURL.setScopeModel(applicationModel);
serviceDiscoveryRegistry = new ServiceDiscoveryRegistry(registryURL, serviceDiscovery, mapping);
when(mapping.getMappingLock(any())).thenReturn(lock);
when(testServiceListener.getConsumerUrl()).thenReturn(url);
}
/**
* Test subscribe
* - Normal case
* - Exceptional case
* - check=true
* - check=false
*/
@Test
void testDoSubscribe() {
ApplicationModel applicationModel = spy(ApplicationModel.defaultModel());
when(applicationModel.getDefaultExtension(ServiceNameMapping.class)).thenReturn(mapping);
// Exceptional case, no interface-app mapping found
when(mapping.getAndListen(any(), any(), any())).thenReturn(Collections.emptySet());
// when check = false
try {
registryURL = registryURL.setScopeModel(applicationModel);
serviceDiscoveryRegistry = new ServiceDiscoveryRegistry(registryURL, serviceDiscovery, mapping);
serviceDiscoveryRegistry.doSubscribe(url, testServiceListener);
} finally {
registryURL = registryURL.setScopeModel(null);
serviceDiscoveryRegistry.unsubscribe(url, testServiceListener);
}
// // when check = true
URL checkURL = url.addParameter(CHECK_KEY, true);
checkURL.setScopeModel(url.getApplicationModel());
// Exception exceptionShouldHappen = null;
// try {
// serviceDiscoveryRegistry.doSubscribe(checkURL, testServiceListener);
// } catch (IllegalStateException e) {
// exceptionShouldHappen = e;
// } finally {
// serviceDiscoveryRegistry.unsubscribe(checkURL, testServiceListener);
// }
// if (exceptionShouldHappen == null) {
// fail();
// }
// Normal case
Set<String> singleApp = new HashSet<>();
singleApp.add(APP_NAME1);
when(mapping.getAndListen(any(), any(), any())).thenReturn(singleApp);
try {
serviceDiscoveryRegistry.doSubscribe(checkURL, testServiceListener);
} finally {
serviceDiscoveryRegistry.unsubscribe(checkURL, testServiceListener);
}
// test provider case
checkURL = url.addParameter(PROVIDED_BY, APP_NAME1);
try {
serviceDiscoveryRegistry.doSubscribe(checkURL, testServiceListener);
} finally {
serviceDiscoveryRegistry.unsubscribe(checkURL, testServiceListener);
}
}
/**
* Test instance listener registration
* - one app
* - multi apps
* - repeat same multi apps, instance listener shared
* - protocol included in key
* - instance listener gets notified
* - instance listener and service listener rightly mapped
*/
@Test
void testSubscribeURLs() {
// | ServiceDiscoveryRegistryTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/graphs/LoadEntityGraphWithCompositeKeyCollectionsTest.java | {
"start": 9210,
"end": 9694
} | class ____ {
@EmbeddedId
private ActivityDocumentId activityDocumentId;
private String name;
public ActivityDocument() {
}
public ActivityDocument(ActivityDocumentId activityDocumentId, String name) {
this.activityDocumentId = activityDocumentId;
this.name = name;
}
public ActivityDocument(Activity activity, String questionId, String name) {
this( new ActivityDocumentId( activity, questionId ), name );
}
}
@Embeddable
public static | ActivityDocument |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/type/TypeFactoryTest.java | {
"start": 14396,
"end": 19678
} | class
____ superType = subtype.getSuperClass();
assertNotNull(superType);
assertEquals(AbstractList.class, superType.getRawClass());
}
// for [databind#3876]
@SuppressWarnings("rawtypes")
@Test
public void testCollectionsHashCode()
{
TypeFactory tf = newTypeFactory();
JavaType listOfCollection = tf.constructType(new TypeReference<List<Collection>>() { });
JavaType collectionOfList = tf.constructType(new TypeReference<Collection<List>>() { });
assertNotEquals(listOfCollection, collectionOfList);
assertNotEquals(listOfCollection.hashCode(), collectionOfList.hashCode());
}
/*
/**********************************************************
/* Unit tests: map type parameter resolution
/**********************************************************
*/
@Test
public void testMaps()
{
TypeFactory tf = newTypeFactory();
// Ok, first: let's test what happens when we pass 'raw' Map:
JavaType t = tf.constructType(HashMap.class);
assertEquals(MapType.class, t.getClass());
assertSame(HashMap.class, t.getRawClass());
assertEqualsAndHash(t, tf.constructType(HashMap.class));
// Then explicit construction
t = tf.constructMapType(TreeMap.class, String.class, Integer.class);
assertEquals(MapType.class, t.getClass());
assertSame(String.class, ((MapType) t).getKeyType().getRawClass());
assertSame(Integer.class, ((MapType) t).getContentType().getRawClass());
assertEqualsAndHash(t, tf.constructMapType(TreeMap.class, String.class, Integer.class));
// And then with TypeReference
t = tf.constructType(new TypeReference<HashMap<String,Integer>>() { });
assertEquals(MapType.class, t.getClass());
assertSame(HashMap.class, t.getRawClass());
MapType mt = (MapType) t;
assertEquals(tf.constructType(String.class), mt.getKeyType());
assertEquals(tf.constructType(Integer.class), mt.getContentType());
assertEqualsAndHash(t, tf.constructType(new TypeReference<HashMap<String,Integer>>() { }));
t = tf.constructType(new TypeReference<LongValuedMap<Boolean>>() { });
assertEquals(MapType.class, t.getClass());
assertSame(LongValuedMap.class, t.getRawClass());
mt = (MapType) t;
assertEquals(tf.constructType(Boolean.class), mt.getKeyType());
assertEquals(tf.constructType(Long.class), mt.getContentType());
assertEqualsAndHash(t, tf.constructType(new TypeReference<LongValuedMap<Boolean>>() { }));
JavaType type = tf.constructType(new TypeReference<Map<String,Boolean>>() { });
MapType mapType = (MapType) type;
assertEquals(tf.constructType(String.class), mapType.getKeyType());
assertEquals(tf.constructType(Boolean.class), mapType.getContentType());
assertEqualsAndHash(type, tf.constructType(new TypeReference<Map<String,Boolean>>() { }));
}
// for [databind#3876]
@Test
public void testMapsHashCode()
{
TypeFactory tf = newTypeFactory();
JavaType mapStringInt = tf.constructType(new TypeReference<Map<String,Integer>>() {});
JavaType mapIntString = tf.constructType(new TypeReference<Map<Integer,String>>() {});
assertNotEquals(mapStringInt, mapIntString);
assertNotEquals(
mapStringInt.hashCode(),
mapIntString.hashCode(),
"hashCode should depend on parameter order");
JavaType mapStringString = tf.constructType(new TypeReference<Map<String,String>>() {});
JavaType mapIntInt = tf.constructType(new TypeReference<Map<Integer,Integer>>() {});
assertNotEquals(mapStringString, mapIntInt);
assertNotEquals(mapStringString.hashCode(), mapIntInt.hashCode());
}
// since 2.7
@Test
public void testMapTypesRefined()
{
TypeFactory tf = newTypeFactory();
JavaType type = tf.constructType(new TypeReference<Map<String,List<Integer>>>() { });
assertEquals(MapType.class, type.getClass());
MapType mapType = (MapType) type;
assertEquals(Map.class, mapType.getRawClass());
assertEquals(String.class, mapType.getKeyType().getRawClass());
assertEquals(List.class, mapType.getContentType().getRawClass());
assertEquals(Integer.class, mapType.getContentType().getContentType().getRawClass());
// No super-class, since it's an interface:
assertNull(type.getSuperClass());
assertEqualsAndHash(type, tf.constructType(new TypeReference<Map<String,List<Integer>>>() { }));
// But then refine to reflect sub-classing
JavaType subtype = tf.constructSpecializedType(type, LinkedHashMap.class);
assertEquals(LinkedHashMap.class, subtype.getRawClass());
assertEquals(String.class, subtype.getKeyType().getRawClass());
assertEquals(List.class, subtype.getContentType().getRawClass());
assertEquals(Integer.class, subtype.getContentType().getContentType().getRawClass());
assertEqualsAndHash(subtype, tf.constructSpecializedType(type, LinkedHashMap.class));
// but with refinement, should have non-null super | JavaType |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/VersionedRecordIterator.java | {
"start": 911,
"end": 1257
} | interface ____ {@link VersionedRecord VersionedRecord<V>}.
* <p>
* Users must call its {@code close} method explicitly upon completeness to release resources,
* or use try-with-resources statement (available since JDK7) for this {@link Closeable} class.
* Note that {@code remove()} is not supported.
*
* @param <V> Type of values
*/
public | of |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutorRegistrationSuccess.java | {
"start": 1334,
"end": 3579
} | class ____ extends RegistrationResponse.Success
implements Serializable {
private static final long serialVersionUID = 1L;
private final InstanceID registrationId;
private final ResourceID resourceManagerResourceId;
private final ClusterInformation clusterInformation;
@Nullable private final byte[] initialTokens;
/**
* Create a new {@code TaskExecutorRegistrationSuccess} message.
*
* @param registrationId The ID that the ResourceManager assigned the registration.
* @param resourceManagerResourceId The unique ID that identifies the ResourceManager.
* @param clusterInformation information about the cluster
* @param initialTokens initial tokens for the TaskExecutor
*/
public TaskExecutorRegistrationSuccess(
InstanceID registrationId,
ResourceID resourceManagerResourceId,
ClusterInformation clusterInformation,
@Nullable byte[] initialTokens) {
this.registrationId = Preconditions.checkNotNull(registrationId);
this.resourceManagerResourceId = Preconditions.checkNotNull(resourceManagerResourceId);
this.clusterInformation = Preconditions.checkNotNull(clusterInformation);
this.initialTokens = initialTokens;
}
/** Gets the ID that the ResourceManager assigned the registration. */
public InstanceID getRegistrationId() {
return registrationId;
}
/** Gets the unique ID that identifies the ResourceManager. */
public ResourceID getResourceManagerId() {
return resourceManagerResourceId;
}
/** Gets the cluster information. */
public ClusterInformation getClusterInformation() {
return clusterInformation;
}
/** Gets the initial tokens. */
public byte[] getInitialTokens() {
return initialTokens;
}
@Override
public String toString() {
return "TaskExecutorRegistrationSuccess{"
+ "registrationId="
+ registrationId
+ ", resourceManagerResourceId="
+ resourceManagerResourceId
+ ", clusterInformation="
+ clusterInformation
+ '}';
}
}
| TaskExecutorRegistrationSuccess |
java | apache__camel | catalog/camel-route-parser/src/test/java/org/apache/camel/parser/java/MyBasePortRouteBuilder.java | {
"start": 905,
"end": 1019
} | class ____ extends RouteBuilder {
public int getNextPort() {
return 8080;
}
}
| MyBasePortRouteBuilder |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/naming/NacosNamingMaintainService.java | {
"start": 2368,
"end": 8341
} | class ____ implements NamingMaintainService {
private String namespace;
private NamingHttpClientProxy serverProxy;
private NamingServerListManager serverListManager;
private SecurityProxy securityProxy;
private ScheduledExecutorService executorService;
public NacosNamingMaintainService(String serverList) throws NacosException {
Properties properties = new Properties();
properties.setProperty(PropertyKeyConst.SERVER_ADDR, serverList);
init(properties);
}
public NacosNamingMaintainService(Properties properties) throws NacosException {
init(properties);
}
private void init(Properties properties) throws NacosException {
final NacosClientProperties nacosClientProperties = NacosClientProperties.PROTOTYPE.derive(properties);
ValidatorUtils.checkInitParam(nacosClientProperties);
namespace = InitUtils.initNamespaceForNaming(nacosClientProperties);
InitUtils.initSerialization();
InitUtils.initWebRootContext(nacosClientProperties);
serverListManager = new NamingServerListManager(nacosClientProperties, namespace);
serverListManager.start();
securityProxy = new SecurityProxy(serverListManager,
NamingHttpClientManager.getInstance().getNacosRestTemplate());
initSecurityProxy(properties);
serverProxy = new NamingHttpClientProxy(namespace, securityProxy, serverListManager, nacosClientProperties);
}
private void initSecurityProxy(Properties properties) {
this.executorService = new ScheduledThreadPoolExecutor(1,
new NameThreadFactory("com.alibaba.nacos.client.naming.maintainService.security"));
this.securityProxy.login(properties);
this.executorService
.scheduleWithFixedDelay(() -> securityProxy.login(properties), 0, SECURITY_INFO_REFRESH_INTERVAL_MILLS,
TimeUnit.MILLISECONDS);
}
@Override
public void updateInstance(String serviceName, Instance instance) throws NacosException {
updateInstance(serviceName, Constants.DEFAULT_GROUP, instance);
}
@Override
public void updateInstance(String serviceName, String groupName, Instance instance) throws NacosException {
serverProxy.updateInstance(serviceName, groupName, instance);
}
@Override
public Service queryService(String serviceName) throws NacosException {
return queryService(serviceName, Constants.DEFAULT_GROUP);
}
@Override
public Service queryService(String serviceName, String groupName) throws NacosException {
return serverProxy.queryService(serviceName, groupName);
}
@Override
public void createService(String serviceName) throws NacosException {
createService(serviceName, Constants.DEFAULT_GROUP);
}
@Override
public void createService(String serviceName, String groupName) throws NacosException {
createService(serviceName, groupName, Constants.DEFAULT_PROTECT_THRESHOLD);
}
@Override
public void createService(String serviceName, String groupName, float protectThreshold) throws NacosException {
Service service = new Service();
service.setName(serviceName);
service.setGroupName(groupName);
service.setProtectThreshold(protectThreshold);
createService(service, new NoneSelector());
}
@Override
public void createService(String serviceName, String groupName, float protectThreshold, String expression)
throws NacosException {
Service service = new Service();
service.setName(serviceName);
service.setGroupName(groupName);
service.setProtectThreshold(protectThreshold);
ExpressionSelector selector = new ExpressionSelector();
selector.setExpression(expression);
createService(service, selector);
}
@Override
public void createService(Service service, AbstractSelector selector) throws NacosException {
serverProxy.createService(service, selector);
}
@Override
public boolean deleteService(String serviceName) throws NacosException {
return deleteService(serviceName, Constants.DEFAULT_GROUP);
}
@Override
public boolean deleteService(String serviceName, String groupName) throws NacosException {
return serverProxy.deleteService(serviceName, groupName);
}
@Override
public void updateService(String serviceName, String groupName, float protectThreshold) throws NacosException {
Service service = new Service();
service.setName(serviceName);
service.setGroupName(groupName);
service.setProtectThreshold(protectThreshold);
updateService(service, new NoneSelector());
}
@Override
public void updateService(String serviceName, String groupName, float protectThreshold,
Map<String, String> metadata) throws NacosException {
Service service = new Service();
service.setName(serviceName);
service.setGroupName(groupName);
service.setProtectThreshold(protectThreshold);
service.setMetadata(metadata);
updateService(service, new NoneSelector());
}
@Override
public void updateService(Service service, AbstractSelector selector) throws NacosException {
serverProxy.updateService(service, selector);
}
@Override
public void shutDown() throws NacosException {
String className = this.getClass().getName();
NAMING_LOGGER.info("{} do shutdown begin", className);
serverListManager.shutdown();
serverProxy.shutdown();
ThreadUtils.shutdownThreadPool(executorService, NAMING_LOGGER);
NAMING_LOGGER.info("{} do shutdown stop", className);
}
}
| NacosNamingMaintainService |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/FieldSerializer.java | {
"start": 1306,
"end": 1396
} | class ____ to be called from
* these readObject/writeObject methods.
*/
@Internal
public | are |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/password/HaveIBeenPwnedRestApiPasswordChecker.java | {
"start": 1800,
"end": 4091
} | class ____ implements CompromisedPasswordChecker {
private static final String API_URL = "https://api.pwnedpasswords.com/range/";
private static final int PREFIX_LENGTH = 5;
private final Log logger = LogFactory.getLog(getClass());
private final MessageDigest sha1Digest;
private RestClient restClient = RestClient.builder().baseUrl(API_URL).build();
public HaveIBeenPwnedRestApiPasswordChecker() {
this.sha1Digest = getSha1Digest();
}
@Override
public CompromisedPasswordDecision check(@Nullable String password) {
if (password == null) {
return new CompromisedPasswordDecision(false);
}
byte[] hash = this.sha1Digest.digest(password.getBytes(StandardCharsets.UTF_8));
String encoded = new String(Hex.encode(hash)).toUpperCase(Locale.ROOT);
String prefix = encoded.substring(0, PREFIX_LENGTH);
String suffix = encoded.substring(PREFIX_LENGTH);
List<String> passwords = getLeakedPasswordsForPrefix(prefix);
boolean isLeaked = findLeakedPassword(passwords, suffix);
return new CompromisedPasswordDecision(isLeaked);
}
/**
* Sets the {@link RestClient} to use when making requests to Have I Been Pwned REST
* API. By default, a {@link RestClient} with a base URL of {@link #API_URL} is used.
* @param restClient the {@link RestClient} to use
*/
public void setRestClient(RestClient restClient) {
Assert.notNull(restClient, "restClient cannot be null");
this.restClient = restClient;
}
private boolean findLeakedPassword(List<String> passwords, String suffix) {
for (String pw : passwords) {
if (pw.startsWith(suffix)) {
return true;
}
}
return false;
}
private List<String> getLeakedPasswordsForPrefix(String prefix) {
try {
String response = this.restClient.get().uri(prefix).retrieve().body(String.class);
if (!StringUtils.hasText(response)) {
return Collections.emptyList();
}
return response.lines().toList();
}
catch (RestClientException ex) {
this.logger.error("Request for leaked passwords failed", ex);
return Collections.emptyList();
}
}
private static MessageDigest getSha1Digest() {
try {
return MessageDigest.getInstance("SHA-1");
}
catch (NoSuchAlgorithmException ex) {
throw new RuntimeException(ex.getMessage());
}
}
}
| HaveIBeenPwnedRestApiPasswordChecker |
java | google__guava | android/guava/src/com/google/common/collect/TreeTraverser.java | {
"start": 4592,
"end": 6197
} | class ____ extends UnmodifiableIterator<T> {
private final Deque<Iterator<T>> stack;
PreOrderIterator(T root) {
this.stack = new ArrayDeque<>();
stack.addLast(singletonIterator(checkNotNull(root)));
}
@Override
public boolean hasNext() {
return !stack.isEmpty();
}
@Override
public T next() {
Iterator<T> itr = stack.getLast(); // throws NSEE if empty
T result = checkNotNull(itr.next());
if (!itr.hasNext()) {
stack.removeLast();
}
Iterator<T> childItr = children(result).iterator();
if (childItr.hasNext()) {
stack.addLast(childItr);
}
return result;
}
}
/**
* Returns an unmodifiable iterable over the nodes in a tree structure, using post-order
* traversal. That is, each node's subtrees are traversed before the node itself is returned.
*
* <p>No guarantees are made about the behavior of the traversal when nodes change while iteration
* is in progress or when the iterators generated by {@link #children} are advanced.
*
* @deprecated Use {@link com.google.common.graph.Traverser#depthFirstPostOrder} instead, which
* has the same behavior.
*/
@Deprecated
public final FluentIterable<T> postOrderTraversal(T root) {
checkNotNull(root);
return new FluentIterable<T>() {
@Override
public UnmodifiableIterator<T> iterator() {
return postOrderIterator(root);
}
};
}
UnmodifiableIterator<T> postOrderIterator(T root) {
return new PostOrderIterator(root);
}
private static final | PreOrderIterator |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/ml/JavaLogisticRegressionWithElasticNetExample.java | {
"start": 1123,
"end": 2534
} | class ____ {
public static void main(String[] args) {
SparkSession spark = SparkSession
.builder()
.appName("JavaLogisticRegressionWithElasticNetExample")
.getOrCreate();
// $example on$
// Load training data
Dataset<Row> training = spark.read().format("libsvm")
.load("data/mllib/sample_libsvm_data.txt");
LogisticRegression lr = new LogisticRegression()
.setMaxIter(10)
.setRegParam(0.3)
.setElasticNetParam(0.8);
// Fit the model
LogisticRegressionModel lrModel = lr.fit(training);
// Print the coefficients and intercept for logistic regression
System.out.println("Coefficients: "
+ lrModel.coefficients() + " Intercept: " + lrModel.intercept());
// We can also use the multinomial family for binary classification
LogisticRegression mlr = new LogisticRegression()
.setMaxIter(10)
.setRegParam(0.3)
.setElasticNetParam(0.8)
.setFamily("multinomial");
// Fit the model
LogisticRegressionModel mlrModel = mlr.fit(training);
// Print the coefficients and intercepts for logistic regression with multinomial family
System.out.println("Multinomial coefficients: " + lrModel.coefficientMatrix()
+ "\nMultinomial intercepts: " + mlrModel.interceptVector());
// $example off$
spark.stop();
}
}
| JavaLogisticRegressionWithElasticNetExample |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/TlsSyslogAppenderTest.java | {
"start": 1729,
"end": 5038
} | class ____ extends SyslogAppenderTest {
private SSLServerSocketFactory serverSocketFactory;
private SslConfiguration sslConfiguration;
public TlsSyslogAppenderTest() throws StoreConfigurationException {
initServerSocketFactory();
root = ctx.getLogger("TLSSyslogAppenderTest");
}
@Test
void sendLargeLegacyBsdMessageOverTls() throws IOException, InterruptedException {
final String prefix = "BEGIN";
initTlsTestEnvironment(1, TlsSyslogMessageFormat.LEGACY_BSD);
final char[] msg = new char[2 * 1024 * 2014 + prefix.length()];
Arrays.fill(msg, 'a');
System.arraycopy(prefix.toCharArray(), 0, msg, 0, prefix.length());
sendAndCheckLegacyBsdMessage(new String(msg));
}
@Test
void sendLegacyBsdMessagesOverTls() throws IOException, InterruptedException {
final int numberOfMessages = 100;
initTlsTestEnvironment(numberOfMessages, TlsSyslogMessageFormat.LEGACY_BSD);
final List<String> generatedMessages =
TlsSyslogTestUtil.generateMessages(numberOfMessages, TlsSyslogMessageFormat.LEGACY_BSD);
sendAndCheckLegacyBsdMessages(generatedMessages);
}
@Test
void sendStructuredMessageOverTls() throws InterruptedException, IOException {
initTlsTestEnvironment(1, TlsSyslogMessageFormat.SYSLOG);
sendAndCheckStructuredMessage();
}
@Test
void sendStructuredMessagesOverTls() throws IOException, InterruptedException {
final int numberOfMessages = 100;
initTlsTestEnvironment(numberOfMessages, TlsSyslogMessageFormat.SYSLOG);
sendAndCheckStructuredMessages(numberOfMessages);
}
private void initServerSocketFactory() throws StoreConfigurationException {
final KeyStoreConfiguration ksc = new KeyStoreConfiguration(
SslKeyStoreConstants.KEYSTORE_LOCATION, SslKeyStoreConstants::KEYSTORE_PWD, null, null);
final TrustStoreConfiguration tsc = new TrustStoreConfiguration(
SslKeyStoreConstants.TRUSTSTORE_LOCATION, SslKeyStoreConstants::TRUSTSTORE_PWD, null, null);
sslConfiguration = SslConfiguration.createSSLConfiguration(null, ksc, tsc);
serverSocketFactory = sslConfiguration.getSslContext() != null
? sslConfiguration.getSslContext().getServerSocketFactory()
: (SSLServerSocketFactory) SSLServerSocketFactory.getDefault();
}
private void initTlsTestEnvironment(final int numberOfMessages, final TlsSyslogMessageFormat messageFormat)
throws IOException {
final SSLServerSocket sslServerSocket = (SSLServerSocket) serverSocketFactory.createServerSocket(0);
syslogServer = MockSyslogServerFactory.createTLSSyslogServer(numberOfMessages, messageFormat, sslServerSocket);
syslogServer.start();
initAppender(Protocol.SSL, messageFormat, syslogServer.getLocalPort());
}
@Override
protected Builder<?> newSyslogAppenderBuilder(
final Protocol protocol, final TlsSyslogMessageFormat format, final boolean newLine, final int port) {
return super.newSyslogAppenderBuilder(protocol, format, newLine, port)
.setSslConfiguration(protocol == Protocol.SSL ? sslConfiguration : null);
}
}
| TlsSyslogAppenderTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/co/KeyedCoProcessOperator.java | {
"start": 4735,
"end": 5971
} | class ____<K, IN1, IN2, OUT>
extends KeyedCoProcessFunction<K, IN1, IN2, OUT>.Context {
private final TimerService timerService;
private StreamRecord<?> element;
ContextImpl(KeyedCoProcessFunction<K, IN1, IN2, OUT> function, TimerService timerService) {
function.super();
this.timerService = checkNotNull(timerService);
}
@Override
public Long timestamp() {
checkState(element != null);
if (element.hasTimestamp()) {
return element.getTimestamp();
} else {
return null;
}
}
@Override
public TimerService timerService() {
return timerService;
}
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
if (outputTag == null) {
throw new IllegalArgumentException("OutputTag must not be null.");
}
output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
@Override
public K getCurrentKey() {
return (K) KeyedCoProcessOperator.this.getCurrentKey();
}
}
private | ContextImpl |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/server/authentication/ott/ServerGenerateOneTimeTokenRequestResolver.java | {
"start": 1036,
"end": 1361
} | interface ____ {
/**
* Resolves {@link GenerateOneTimeTokenRequest} from {@link ServerWebExchange}
* @param exchange {@link ServerWebExchange} to resolve
* @return {@link GenerateOneTimeTokenRequest}
*/
Mono<GenerateOneTimeTokenRequest> resolve(ServerWebExchange exchange);
}
| ServerGenerateOneTimeTokenRequestResolver |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorActionTests.java | {
"start": 716,
"end": 1505
} | class ____ extends ESTestCase {
private RestUpdateConnectorErrorAction action;
@Override
public void setUp() throws Exception {
super.setUp();
action = new RestUpdateConnectorErrorAction();
}
public void testPrepareRequest_emptyPayload_badRequestError() {
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT)
.withPath("/_connector/123/_error")
.build();
final ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> action.prepareRequest(request, mock(NodeClient.class))
);
assertThat(e, hasToString(containsString("request body is required")));
}
}
| RestUpdateConnectorErrorActionTests |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ZooKeeperEndpointBuilderFactory.java | {
"start": 21489,
"end": 21801
} | interface ____
extends
AdvancedZooKeeperEndpointConsumerBuilder,
AdvancedZooKeeperEndpointProducerBuilder {
default ZooKeeperEndpointBuilder basic() {
return (ZooKeeperEndpointBuilder) this;
}
}
public | AdvancedZooKeeperEndpointBuilder |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/web/webmvc/mvcconfig/mvcconfigstaticresources/VersionedConfiguration.java | {
"start": 1039,
"end": 1394
} | class ____ implements WebMvcConfigurer {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/resources/**")
.addResourceLocations("/public/")
.resourceChain(true)
.addResolver(new VersionResourceResolver().addContentVersionStrategy("/**"));
}
}
// end::snippet[]
| VersionedConfiguration |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/TrustedChannelResolver.java | {
"start": 1259,
"end": 1374
} | class ____ be overridden to provide custom logic to determine
* whether a channel is trusted or not.
* The custom | can |
java | elastic__elasticsearch | x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/AuthorizationTaskExecutorMultipleNodesIT.java | {
"start": 2480,
"end": 8560
} | class ____ extends ESIntegTestCase {
private static final int NUM_DATA_NODES = 2;
private static final int NUM_MASTER_NODES = 2;
private static final String AUTH_TASK_ACTION = AuthorizationPoller.TASK_NAME + "[c]";
private static final MockWebServer webServer = new MockWebServer();
private static String gatewayUrl;
@BeforeClass
public static void initClass() throws IOException {
webServer.start();
gatewayUrl = getUrl(webServer);
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(EMPTY_AUTH_RESPONSE));
}
@Before
public void startNodes() {
// Ensure we have multiple master and data nodes so we have somewhere to place the inference indices and so that we can safely
// shut down the node that is running the authorization task. If there is only one master and it is running the task,
// we'll get an error that we can't shut down the only eligible master node
internalCluster().startMasterOnlyNodes(NUM_MASTER_NODES);
internalCluster().ensureAtLeastNumDataNodes(NUM_DATA_NODES);
ensureStableCluster(NUM_MASTER_NODES + NUM_DATA_NODES);
}
@AfterClass
public static void cleanUpClass() {
webServer.close();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(LocalStateInferencePlugin.class);
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
// Disable CCM to ensure that only the authorization task executor is initialized in the inference plugin when it is created
.put(CCMSettings.CCM_SUPPORTED_ENVIRONMENT.getKey(), false)
.put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial")
.put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), gatewayUrl)
.put(ElasticInferenceServiceSettings.PERIODIC_AUTHORIZATION_ENABLED.getKey(), false)
.build();
}
public void testCancellingAuthorizationTaskRestartsIt() throws Exception {
cancelAuthorizationTask(admin());
}
    /**
     * Verifies that when the node running the EIS authorization task is stopped, the task is
     * relocated to a different node and, once relocated, the queued authorization response is
     * consumed so that exactly one EIS chat-completion endpoint becomes available.
     */
    public void testAuthorizationTaskGetsRelocatedToAnotherNode_WhenTheNodeThatIsRunningItShutsDown() throws Exception {
        // Map raw task-API node ids to the framework's "node_#" names so we can stop the right node.
        var nodeNameMapping = getNodeNames(internalCluster().getNodeNames());
        var pollerTask = waitForTask(AUTH_TASK_ACTION, admin());
        var endpoints = getAllEndpoints();
        // Precondition: nothing has been authorized yet, so no EIS endpoints exist.
        assertTrue(
            "expected no authorized EIS endpoints",
            endpoints.getEndpoints().stream().noneMatch(endpoint -> endpoint.getService().equals(ElasticInferenceService.NAME))
        );
        // queue a response that authorizes one model
        webServer.enqueue(new MockResponse().setResponseCode(200).setBody(AUTHORIZED_RAINBOW_SPRINKLES_RESPONSE));
        // Stop the node currently hosting the authorization task to force relocation.
        assertTrue("expected the node to shutdown properly", internalCluster().stopNode(nodeNameMapping.get(pollerTask.node())));
        // Wait for the task to reappear on a different node.
        assertBusy(() -> {
            var relocatedPollerTask = waitForTask(AUTH_TASK_ACTION, admin());
            assertThat(relocatedPollerTask.node(), not(is(pollerTask.node())));
        });
        // Wait for the relocated task to consume the queued authorization response and
        // register the single authorized chat-completion endpoint.
        assertBusy(() -> {
            var allEndpoints = getAllEndpoints();
            var eisEndpoints = allEndpoints.getEndpoints()
                .stream()
                .filter(endpoint -> endpoint.getService().equals(ElasticInferenceService.NAME))
                .toList();
            assertThat(eisEndpoints.size(), is(1));
            var rainbowSprinklesEndpoint = eisEndpoints.get(0);
            assertThat(rainbowSprinklesEndpoint.getService(), is(ElasticInferenceService.NAME));
            assertThat(
                rainbowSprinklesEndpoint.getInferenceEntityId(),
                is(InternalPreconfiguredEndpoints.DEFAULT_CHAT_COMPLETION_ENDPOINT_ID_V1)
            );
            assertThat(rainbowSprinklesEndpoint.getTaskType(), is(TaskType.CHAT_COMPLETION));
        });
    }
private record NodeNameMapping(Map<String, String> nodeNamesMap) {
public String get(String rawNodeName) {
var nodeName = nodeNamesMap.get(rawNodeName);
if (nodeName == null) {
throw new IllegalArgumentException("No node name found for raw node name: " + rawNodeName);
}
return nodeName;
}
}
/**
* The node names created by the integration test framework take the form of "node_#", but the task api gives a raw node name
* like 02PT2SBzRxC3cG-9mKCigQ, so we need to map between them to be able to act on a node that the task is currently running on.
*/
private static NodeNameMapping getNodeNames(String[] nodes) {
var nodeNamesMap = new HashMap<String, String>();
for (var node : nodes) {
var nodeTasks = admin().cluster().prepareListTasks(node).get();
assertThat(nodeTasks.getTasks().size(), greaterThanOrEqualTo(1));
nodeNamesMap.put(nodeTasks.getTasks().getFirst().node(), node);
}
return new NodeNameMapping(nodeNamesMap);
}
    /**
     * Retrieves all inference endpoints (any task type, including defaults), retrying via
     * {@code assertBusy} until the request succeeds — for example while the inference
     * indices are still initializing.
     */
    private GetInferenceModelAction.Response getAllEndpoints() throws Exception {
        var getAllEndpointsRequest = new GetInferenceModelAction.Request("*", TaskType.ANY, true);
        var allEndpointsRef = new AtomicReference<GetInferenceModelAction.Response>();
        assertBusy(() -> {
            try {
                allEndpointsRef.set(
                    internalCluster().masterClient().execute(GetInferenceModelAction.INSTANCE, getAllEndpointsRequest).actionGet()
                );
            } catch (Exception e) {
                // We probably got an all shards failed exception because the indices aren't ready yet, we'll just try again
                logger.warn("Failed to retrieve endpoints", e);
                // fail() throws AssertionError, which assertBusy treats as "not yet satisfied",
                // so this converts the exception into another retry attempt rather than aborting.
                fail("Failed to retrieve endpoints");
            }
        });
        // assertBusy either returned after a successful set(...) or threw, so the ref is populated here.
        return allEndpointsRef.get();
    }
}
| AuthorizationTaskExecutorMultipleNodesIT |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/TestTypedDeserialization.java | {
"start": 927,
"end": 1067
} | class ____ {
public String name;
protected Animal(String n) { name = n; }
}
@JsonTypeName("doggie")
static | Animal |
java | spring-projects__spring-boot | system-test/spring-boot-image-system-tests/src/systemTest/java/org/springframework/boot/image/assertions/ImageAssert.java | {
"start": 2456,
"end": 4289
} | class ____ extends AbstractAssert<LayerContentAssert, Layer> {
public LayerContentAssert(Layer layer) {
super(layer, LayerContentAssert.class);
}
public ListAssert<String> entries() {
List<String> entryNames = new ArrayList<>();
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
this.actual.writeTo(out);
try (TarArchiveInputStream in = new TarArchiveInputStream(
new ByteArrayInputStream(out.toByteArray()))) {
TarArchiveEntry entry = in.getNextEntry();
while (entry != null) {
if (!entry.isDirectory()) {
entryNames.add(entry.getName().replaceFirst("^/workspace/", ""));
}
entry = in.getNextEntry();
}
}
}
catch (IOException ex) {
failWithMessage("IOException while reading image layer archive: '%s'", ex.getMessage());
}
return Assertions.assertThat(entryNames);
}
public void jsonEntry(String name, Consumer<JsonContentAssert> assertConsumer) {
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
this.actual.writeTo(out);
try (TarArchiveInputStream in = new TarArchiveInputStream(
new ByteArrayInputStream(out.toByteArray()))) {
TarArchiveEntry entry = in.getNextEntry();
while (entry != null) {
if (entry.getName().equals(name)) {
ByteArrayOutputStream entryOut = new ByteArrayOutputStream();
StreamUtils.copy(in, entryOut);
assertConsumer.accept(new JsonContentAssert(LayerContentAssert.class, entryOut.toString()));
return;
}
entry = in.getNextEntry();
}
}
failWithMessage("Expected JSON entry '%s' in layer with digest '%s'", name, this.actual.getId());
}
catch (IOException ex) {
failWithMessage("IOException while reading image layer archive: '%s'", ex.getMessage());
}
}
}
}
| LayerContentAssert |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/simple/JdbcClient.java | {
"start": 5276,
"end": 11673
} | interface ____ {
/**
* Apply the given fetch size to any subsequent query statement.
* @param fetchSize the fetch size
* @since 7.0
* @see org.springframework.jdbc.core.JdbcTemplate#setFetchSize
*/
StatementSpec withFetchSize(int fetchSize);
/**
* Apply the given maximum number of rows to any subsequent query statement.
* @param maxRows the maximum number of rows
* @since 7.0
* @see org.springframework.jdbc.core.JdbcTemplate#setMaxRows
*/
StatementSpec withMaxRows(int maxRows);
/**
* Apply the given query timeout to any subsequent query statement.
* @param queryTimeout the query timeout in seconds
* @since 7.0
* @see org.springframework.jdbc.core.JdbcTemplate#setQueryTimeout
*/
StatementSpec withQueryTimeout(int queryTimeout);
/**
* Bind a positional JDBC statement parameter for "?" placeholder resolution
* by implicit order of parameter value registration.
* <p>This is primarily intended for statements with a single parameter
* or very few parameters, registering each parameter value in the order
* of the parameter's occurrence in the SQL statement.
* @param value the parameter value to bind
* @return this statement specification (for chaining)
* @see java.sql.PreparedStatement#setObject(int, Object)
*/
StatementSpec param(@Nullable Object value);
/**
* Bind a positional JDBC statement parameter for "?" placeholder resolution
* by explicit JDBC statement parameter index.
* @param jdbcIndex the JDBC-style index (starting with 1)
* @param value the parameter value to bind
* @return this statement specification (for chaining)
* @see java.sql.PreparedStatement#setObject(int, Object)
*/
StatementSpec param(int jdbcIndex, @Nullable Object value);
/**
* Bind a positional JDBC statement parameter for "?" placeholder resolution
* by explicit JDBC statement parameter index.
* @param jdbcIndex the JDBC-style index (starting with 1)
* @param value the parameter value to bind
* @param sqlType the associated SQL type (see {@link java.sql.Types})
* @return this statement specification (for chaining)
* @see java.sql.PreparedStatement#setObject(int, Object, int)
*/
StatementSpec param(int jdbcIndex, @Nullable Object value, int sqlType);
/**
* Bind a named statement parameter for ":x" placeholder resolution,
* with each "x" name matching a ":x" placeholder in the SQL statement.
* @param name the parameter name
* @param value the parameter value to bind
* @return this statement specification (for chaining)
* @see org.springframework.jdbc.core.namedparam.MapSqlParameterSource#addValue(String, Object)
*/
StatementSpec param(String name, @Nullable Object value);
/**
* Bind a named statement parameter for ":x" placeholder resolution,
* with each "x" name matching a ":x" placeholder in the SQL statement.
* @param name the parameter name
* @param value the parameter value to bind
* @param sqlType the associated SQL type (see {@link java.sql.Types})
* @return this statement specification (for chaining)
* @see org.springframework.jdbc.core.namedparam.MapSqlParameterSource#addValue(String, Object, int)
*/
StatementSpec param(String name, @Nullable Object value, int sqlType);
/**
* Bind a var-args list of positional parameters for "?" placeholder resolution.
* <p>The given list will be added to existing positional parameters, if any.
* Each element from the complete list will be bound as a JDBC positional
* parameter with a corresponding JDBC index (i.e. list index + 1).
* @param values the parameter values to bind
* @return this statement specification (for chaining)
* @see #param(Object)
* @see #params(List)
*/
StatementSpec params(Object... values);
/**
* Bind a list of positional parameters for "?" placeholder resolution.
* <p>The given list will be added to existing positional parameters, if any.
* Each element from the complete list will be bound as a JDBC positional
* parameter with a corresponding JDBC index (i.e. list index + 1).
* @param values the parameter values to bind
* @return this statement specification (for chaining)
* @see #param(Object)
*/
StatementSpec params(List<?> values);
/**
* Bind named statement parameters for ":x" placeholder resolution.
* <p>The given map will be merged into existing named parameters, if any.
* @param paramMap a map of names and parameter values to bind
* @return this statement specification (for chaining)
* @see #param(String, Object)
*/
StatementSpec params(Map<String, ?> paramMap);
/**
* Bind named statement parameters for ":x" placeholder resolution.
* <p>The given parameter object will define all named parameters
* based on its JavaBean properties, record components, or raw fields.
* A Map instance can be provided as a complete parameter source as well.
* @param namedParamObject a custom parameter object (for example, a JavaBean,
* record class, or field holder) with named properties serving as
* statement parameters
* @return this statement specification (for chaining)
* @see #paramSource(SqlParameterSource)
* @see org.springframework.jdbc.core.namedparam.MapSqlParameterSource
* @see org.springframework.jdbc.core.namedparam.SimplePropertySqlParameterSource
*/
StatementSpec paramSource(Object namedParamObject);
/**
* Bind named statement parameters for ":x" placeholder resolution.
* <p>The given parameter source will define all named parameters,
* possibly associating specific SQL types with each value.
* @param namedParamSource a custom {@link SqlParameterSource} instance
* @return this statement specification (for chaining)
* @see org.springframework.jdbc.core.namedparam.AbstractSqlParameterSource#registerSqlType
*/
StatementSpec paramSource(SqlParameterSource namedParamSource);
/**
* Proceed towards execution of a query, with several result options
* available in the returned query specification.
* @return the result query specification
* @see java.sql.PreparedStatement#executeQuery()
*/
ResultQuerySpec query();
/**
* Proceed towards execution of a mapped query, with several options
* available in the returned query specification.
* @param mappedClass the target | StatementSpec |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ContextCustomizerFactories.java | {
"start": 4352,
"end": 4816
} | enum ____ {
/**
* Indicates that locally declared factories should be merged with the
* default factories.
* <p>The merging algorithm ensures that duplicates are removed from the
* list and that locally declared factories are appended to the list of
* default factories when merged.
*/
MERGE_WITH_DEFAULTS,
/**
* Indicates that locally declared factories should replace the default
* factories.
*/
REPLACE_DEFAULTS
}
}
| MergeMode |
java | spring-projects__spring-boot | module/spring-boot-security-oauth2-client/src/test/java/org/springframework/boot/security/oauth2/client/autoconfigure/OAuth2ClientAutoConfigurationTests.java | {
"start": 1522,
"end": 4210
} | class ____ {
private static final String REGISTRATION_PREFIX = "spring.security.oauth2.client.registration";
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(OAuth2ClientAutoConfiguration.class));
@Test
void beansShouldNotBeCreatedWhenPropertiesAbsent() {
this.contextRunner.run((context) -> assertThat(context).doesNotHaveBean(ClientRegistrationRepository.class)
.doesNotHaveBean(OAuth2AuthorizedClientService.class));
}
@Test
void beansAreCreatedWhenPropertiesPresent() {
this.contextRunner
.withPropertyValues(REGISTRATION_PREFIX + ".foo.client-id=abcd",
REGISTRATION_PREFIX + ".foo.client-secret=secret", REGISTRATION_PREFIX + ".foo.provider=github")
.run((context) -> {
assertThat(context).hasSingleBean(ClientRegistrationRepository.class);
assertThat(context).hasSingleBean(OAuth2AuthorizedClientService.class);
ClientRegistrationRepository repository = context.getBean(ClientRegistrationRepository.class);
ClientRegistration registration = repository.findByRegistrationId("foo");
assertThat(registration).isNotNull();
assertThat(registration.getClientSecret()).isEqualTo("secret");
});
}
@Test
void clientServiceBeanIsConditionalOnMissingBean() {
this.contextRunner
.withBean("testAuthorizedClientService", OAuth2AuthorizedClientService.class,
() -> mock(OAuth2AuthorizedClientService.class))
.run((context) -> {
assertThat(context).hasSingleBean(OAuth2AuthorizedClientService.class);
assertThat(context).hasBean("testAuthorizedClientService");
});
}
@Test
void clientServiceBeanIsCreatedWithUserDefinedClientRegistrationRepository() {
this.contextRunner
.withBean(ClientRegistrationRepository.class,
() -> new InMemoryClientRegistrationRepository(getClientRegistration("test", "test")))
.run((context) -> assertThat(context).hasSingleBean(OAuth2AuthorizedClientService.class));
}
private ClientRegistration getClientRegistration(String id, String userInfoUri) {
ClientRegistration.Builder builder = ClientRegistration.withRegistrationId(id);
builder.clientName("foo")
.clientId("foo")
.clientAuthenticationMethod(
org.springframework.security.oauth2.core.ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.scope("read")
.clientSecret("secret")
.redirectUri("https://redirect-uri.com")
.authorizationUri("https://authorization-uri.com")
.tokenUri("https://token-uri.com")
.userInfoUri(userInfoUri)
.userNameAttributeName("login");
return builder.build();
}
}
| OAuth2ClientAutoConfigurationTests |
java | apache__rocketmq | container/src/main/java/org/apache/rocketmq/container/ContainerClientHouseKeepingService.java | {
"start": 1023,
"end": 3797
} | class ____ implements ChannelEventListener {
private final IBrokerContainer brokerContainer;
public ContainerClientHouseKeepingService(final IBrokerContainer brokerContainer) {
this.brokerContainer = brokerContainer;
}
@Override
public void onChannelConnect(String remoteAddr, Channel channel) {
onChannelOperation(CallbackCode.CONNECT, remoteAddr, channel);
}
@Override
public void onChannelClose(String remoteAddr, Channel channel) {
onChannelOperation(CallbackCode.CLOSE, remoteAddr, channel);
}
@Override
public void onChannelException(String remoteAddr, Channel channel) {
onChannelOperation(CallbackCode.EXCEPTION, remoteAddr, channel);
}
@Override
public void onChannelIdle(String remoteAddr, Channel channel) {
onChannelOperation(CallbackCode.IDLE, remoteAddr, channel);
}
@Override
public void onChannelActive(String remoteAddr, Channel channel) {
onChannelOperation(CallbackCode.ACTIVE, remoteAddr, channel);
}
private void onChannelOperation(CallbackCode callbackCode, String remoteAddr, Channel channel) {
Collection<InnerBrokerController> masterBrokers = this.brokerContainer.getMasterBrokers();
Collection<InnerSalveBrokerController> slaveBrokers = this.brokerContainer.getSlaveBrokers();
for (BrokerController masterBroker : masterBrokers) {
brokerOperation(masterBroker, callbackCode, remoteAddr, channel);
}
for (InnerSalveBrokerController slaveBroker : slaveBrokers) {
brokerOperation(slaveBroker, callbackCode, remoteAddr, channel);
}
}
private void brokerOperation(BrokerController brokerController, CallbackCode callbackCode, String remoteAddr,
Channel channel) {
if (callbackCode == CallbackCode.CONNECT) {
brokerController.getBrokerStatsManager().incChannelConnectNum();
return;
}
boolean removed = brokerController.getProducerManager().doChannelCloseEvent(remoteAddr, channel);
removed &= brokerController.getConsumerManager().doChannelCloseEvent(remoteAddr, channel);
if (removed) {
switch (callbackCode) {
case CLOSE:
brokerController.getBrokerStatsManager().incChannelCloseNum();
break;
case EXCEPTION:
brokerController.getBrokerStatsManager().incChannelExceptionNum();
break;
case IDLE:
brokerController.getBrokerStatsManager().incChannelIdleNum();
break;
default:
break;
}
}
}
public | ContainerClientHouseKeepingService |
java | grpc__grpc-java | rls/src/test/java/io/grpc/rls/ChildLbResolvedAddressFactoryTest.java | {
"start": 1035,
"end": 2025
} | class ____ {
@Test
public void create() {
List<EquivalentAddressGroup> addrs = new ArrayList<>();
addrs.add(new EquivalentAddressGroup(mock(SocketAddress.class)));
Attributes attr = Attributes.newBuilder().build();
ChildLbResolvedAddressFactory factory = new ChildLbResolvedAddressFactory(addrs, attr);
Object config1 = new Object();
ResolvedAddresses resolvedAddress = factory.create(config1);
assertThat(resolvedAddress.getAddresses()).isEqualTo(addrs);
assertThat(resolvedAddress.getAttributes()).isEqualTo(attr);
assertThat(resolvedAddress.getLoadBalancingPolicyConfig()).isEqualTo(config1);
Object config2 = "different object";
resolvedAddress = factory.create(config2);
assertThat(resolvedAddress.getAddresses()).isEqualTo(addrs);
assertThat(resolvedAddress.getAttributes()).isEqualTo(attr);
assertThat(resolvedAddress.getLoadBalancingPolicyConfig()).isEqualTo(config2);
}
}
| ChildLbResolvedAddressFactoryTest |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/uri/UriTemplate.java | {
"start": 21511,
"end": 43667
} | enum ____ {
TEXT, // raw text
VAR_START, // the start of a URI variable i.e. {
VAR_CONTENT, // within a URI variable. i.e. {var}
VAR_NEXT, // within the next variable in a URI variable declaration i.e. {var, var2}
VAR_MODIFIER, // within a variable modifier i.e. {var:1}
VAR_NEXT_MODIFIER // within a variable modifier of a next variable i.e. {var, var2:1}
}
State state = State.TEXT;
char operator = OPERATOR_NONE; // zero means no operator
char modifier = OPERATOR_NONE; // zero means no modifier
String varDelimiter = null;
boolean isQuerySegment = false;
char[] chars = templateText.toCharArray();
var buff = new StringBuilder();
var modBuff = new StringBuilder();
int varCount = 0;
for (char c : chars) {
switch (state) {
case TEXT:
if (c == VAR_START) {
if (!buff.isEmpty()) {
String val = buff.toString();
addRawContentSegment(segments, val, isQuerySegment);
}
buff.delete(0, buff.length());
state = State.VAR_START;
} else {
if (c == QUERY_OPERATOR || c == HASH_OPERATOR) {
isQuerySegment = true;
}
buff.append(c);
}
continue;
case VAR_MODIFIER:
case VAR_NEXT_MODIFIER:
if (c == ' ') {
continue;
}
// fall through
case VAR_NEXT:
case VAR_CONTENT:
switch (c) {
case ':':
case EXPAND_MODIFIER: // arrived to expansion modifier
if (state == State.VAR_MODIFIER || state == State.VAR_NEXT_MODIFIER) {
modBuff.append(c);
continue;
}
modifier = c;
state = state == State.VAR_NEXT ? State.VAR_NEXT_MODIFIER : State.VAR_MODIFIER;
continue;
case ',': // arrived to new variable
state = State.VAR_NEXT;
// fall through
case VAR_END: // arrived to variable end
if (!buff.isEmpty()) {
String val = buff.toString();
final String prefix;
final String delimiter;
final boolean encode;
final boolean repeatPrefix;
switch (operator) {
case '+':
encode = false;
prefix = null;
delimiter = ",";
repeatPrefix = varCount < 1;
break;
case HASH_OPERATOR:
encode = false;
repeatPrefix = varCount < 1;
prefix = String.valueOf(operator);
delimiter = ",";
break;
case DOT_OPERATOR:
case SLASH_OPERATOR:
encode = true;
repeatPrefix = varCount < 1;
prefix = String.valueOf(operator);
delimiter = modifier == EXPAND_MODIFIER ? prefix : ",";
break;
case ';':
encode = true;
repeatPrefix = true;
prefix = operator + val + '=';
delimiter = modifier == EXPAND_MODIFIER ? prefix : ",";
break;
case QUERY_OPERATOR:
case AND_OPERATOR:
encode = true;
repeatPrefix = true;
prefix = varCount < 1 ? operator + val + '=' : val + "=";
delimiter = modifier == EXPAND_MODIFIER ? AND_OPERATOR + val + '=' : ",";
break;
default:
repeatPrefix = varCount < 1;
encode = true;
prefix = null;
delimiter = ",";
}
String modifierStr = modBuff.toString();
String previous = state == State.VAR_NEXT || state == State.VAR_NEXT_MODIFIER ? varDelimiter : null;
addVariableSegment(segments, val, prefix, delimiter, encode, repeatPrefix, modifierStr, modifier, operator, previous, isQuerySegment);
}
boolean hasAnotherVar = state == State.VAR_NEXT && c != VAR_END;
if (hasAnotherVar) {
varDelimiter = switch (operator) {
case ';' -> null;
case QUERY_OPERATOR, AND_OPERATOR -> "&";
case DOT_OPERATOR, SLASH_OPERATOR -> String.valueOf(operator);
default -> ",";
};
varCount++;
} else {
varCount = 0;
}
state = hasAnotherVar ? State.VAR_NEXT : State.TEXT;
modBuff.delete(0, modBuff.length());
buff.delete(0, buff.length());
modifier = OPERATOR_NONE;
if (!hasAnotherVar) {
operator = OPERATOR_NONE;
}
continue;
default:
switch (modifier) {
case EXPAND_MODIFIER:
throw new IllegalStateException("Expansion modifier * must be immediately followed by a closing brace '}'");
case ':':
modBuff.append(c);
continue;
default:
buff.append(c);
continue;
}
}
case VAR_START:
switch (c) {
case ' ':
continue;
case ';':
case QUERY_OPERATOR:
case AND_OPERATOR:
case HASH_OPERATOR:
isQuerySegment = true;
// fall through
case '+':
case DOT_OPERATOR:
case SLASH_OPERATOR:
operator = c;
state = State.VAR_CONTENT;
continue;
default:
state = State.VAR_CONTENT;
buff.append(c);
continue;
}
default:
// no-op
}
}
if (state == State.TEXT && !buff.isEmpty()) {
String val = buff.toString();
addRawContentSegment(segments, val, isQuerySegment);
}
}
/**
* Adds a raw content segment.
*
* @param segments The segments
* @param value The value
* @param isQuerySegment Whether is a query segment
*/
protected void addRawContentSegment(List<PathSegment> segments, String value, boolean isQuerySegment) {
segments.add(new RawPathSegment(isQuerySegment, value));
}
/**
* Adds a new variable segment.
*
* @param segments The segments to augment
* @param variable The variable
* @param prefix The prefix to use when expanding the variable
* @param delimiter The delimiter to use when expanding the variable
* @param encode Whether to URL encode the variable
* @param repeatPrefix Whether to repeat the prefix for each expanded variable
* @param modifierStr The modifier string
* @param modifierChar The modifier as char
* @param operator The currently active operator
* @param previousDelimiter The delimiter to use if a variable appeared before this variable
* @param isQuerySegment Whether is a query segment
*/
protected void addVariableSegment(List<PathSegment> segments,
String variable,
String prefix,
String delimiter,
boolean encode,
boolean repeatPrefix,
String modifierStr,
char modifierChar,
char operator,
String previousDelimiter, boolean isQuerySegment) {
segments.add(new VariablePathSegment(isQuerySegment, variable, prefix, delimiter, encode, modifierChar, operator, modifierStr, previousDelimiter, repeatPrefix));
}
private record RawPathSegment(boolean isQuerySegment, String value) implements PathSegment {
@Override
public String expand(Map<String, Object> parameters, boolean previousHasContent, boolean anyPreviousHasOperator) {
return value;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RawPathSegment that = (RawPathSegment) o;
if (isQuerySegment != that.isQuerySegment) {
return false;
}
return Objects.equals(value, that.value);
}
@Override
public int hashCode() {
return ObjectUtils.hash(isQuerySegment, value);
}
@Override
public int length() {
return value.length();
}
@Override
public char charAt(int index) {
return value.charAt(index);
}
@Override
public CharSequence subSequence(int start, int end) {
return value.subSequence(start, end);
}
@Override
public String toString() {
return value;
}
}
private record VariablePathSegment(boolean isQuerySegment, String variable, String prefix,
String delimiter, boolean encode, char modifierChar,
char operator, String modifierStr,
String previousDelimiter,
boolean repeatPrefix) implements PathSegment {
@Override
public Optional<String> getVariable() {
return Optional.of(variable);
}
@Override
public int length() {
return toString().length();
}
@Override
public char charAt(int index) {
return toString().charAt(index);
}
@Override
public CharSequence subSequence(int start, int end) {
return toString().subSequence(start, end);
}
@Override
public String toString() {
var builder = new StringBuilder();
builder.append(variable);
if (modifierChar != OPERATOR_NONE) {
builder.append(modifierChar);
if (null != modifierStr) {
builder.append(modifierStr);
}
}
return builder.toString();
}
private String escape(String v) {
return v.replace("%", "%25").replaceAll("\\s", "%20");
}
@Override
public String expand(Map<String, Object> parameters, boolean previousHasContent, boolean anyPreviousHasOperator) {
Object found = parameters.get(variable);
boolean isOptional = found instanceof Optional;
if (found != null && !(isOptional && ((Optional<?>) found).isEmpty())) {
if (isOptional) {
found = ((Optional<?>) found).get();
}
String prefixToUse = prefix;
if (operator == QUERY_OPERATOR && !anyPreviousHasOperator && prefix != null && !prefix.startsWith(String.valueOf(operator))) {
prefixToUse = operator + prefix;
}
String result;
if (found.getClass().isArray()) {
found = Arrays.asList((Object[]) found);
}
boolean isQuery = operator == QUERY_OPERATOR;
if (modifierChar == EXPAND_MODIFIER) {
found = expandPOJO(found); // Turn POJO into a Map
}
if (found instanceof Iterable<?> iterable) {
if (iterable instanceof Collection<?> collection && collection.isEmpty()) {
return "";
}
var joiner = new StringJoiner(delimiter);
for (Object o : iterable) {
if (o != null) {
String v = o.toString();
joiner.add(encode ? encode(v, isQuery) : escape(v));
}
}
result = joiner.toString();
} else if (found instanceof Map<?, ?> map) {
if (map.isEmpty()) {
return StringUtils.EMPTY_STRING;
}
final StringJoiner joiner;
if (modifierChar == EXPAND_MODIFIER) {
joiner = switch (operator) {
case AND_OPERATOR, QUERY_OPERATOR -> {
prefixToUse = String.valueOf(anyPreviousHasOperator ? AND_OPERATOR : operator);
yield new StringJoiner(String.valueOf(AND_OPERATOR));
}
case ';' -> {
prefixToUse = String.valueOf(operator);
yield new StringJoiner(prefixToUse);
}
default -> new StringJoiner(delimiter);
};
} else {
joiner = new StringJoiner(delimiter);
}
map.forEach((key, some) -> {
if (some == null) {
return;
}
String ks = key.toString();
Iterable<?> values = (some instanceof Iterable<?> i) ? i : Collections.singletonList(some);
for (Object value : values) {
if (value == null) {
continue;
}
String vs = value.toString();
String ek = encode ? encode(ks, isQuery) : escape(ks);
String ev = encode ? encode(vs, isQuery) : escape(vs);
if (modifierChar == EXPAND_MODIFIER) {
String finalValue = ek + '=' + ev;
joiner.add(finalValue);
} else {
joiner.add(ek);
joiner.add(ev);
}
}
});
if (joiner.length() == 0) {
// only null entries
return StringUtils.EMPTY_STRING;
} else {
result = joiner.toString();
}
} else {
String str = found.toString();
str = applyModifier(modifierStr, modifierChar, str, str.length());
result = encode ? encode(str, isQuery) : escape(str);
}
int len = result.length();
var finalResult = new StringBuilder(previousHasContent && previousDelimiter != null ? previousDelimiter : StringUtils.EMPTY_STRING);
if (len == 0) {
switch (operator) {
case SLASH_OPERATOR:
break;
case ';':
if (prefixToUse != null && prefixToUse.endsWith("=")) {
finalResult.append(prefixToUse, 0, prefixToUse.length() - 1).append(result);
break;
}
// fall through
default:
if (prefixToUse != null) {
finalResult.append(prefixToUse).append(result);
} else {
finalResult.append(result);
}
}
} else if (prefixToUse != null && repeatPrefix) {
finalResult.append(prefixToUse).append(result);
} else {
finalResult.append(result);
}
return finalResult.toString();
} else {
if (operator == SLASH_OPERATOR) {
return null;
}
return StringUtils.EMPTY_STRING;
}
}
private String applyModifier(String modifierStr, char modifierChar, String result, int len) {
if (modifierChar == ':' && !modifierStr.isEmpty() && Character.isDigit(modifierStr.charAt(0))) {
try {
int subResult = Integer.parseInt(modifierStr.trim(), 10);
if (subResult < len) {
result = result.substring(0, subResult);
}
} catch (NumberFormatException e) {
result = ":" + modifierStr;
}
}
return result;
}
private String encode(String str, boolean query) {
String encoded = URLEncoder.encode(str, StandardCharsets.UTF_8);
return query ? encoded : encoded.replace("+", "%20");
}
private Object expandPOJO(Object found) {
// Check for common expanded types, such as list or Map
if (found instanceof Iterable || found instanceof Map) {
return found;
}
// If a simple value, just use that
if (found == null || ClassUtils.isJavaLangType(found.getClass())) {
return found;
}
// Otherwise, expand the object into properties (after all, the user asked for an expanded parameter)
return BeanMap.of(found);
}
}
}
}
| State |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/deployment/application/JarManifestParser.java | {
"start": 1384,
"end": 2069
} | class ____ {
private final File jarFile;
private final String entryClass;
private JarFileWithEntryClass(File jarFile, String entryClass) {
this.jarFile = requireNonNull(jarFile, "jarFile");
this.entryClass = requireNonNull(entryClass, "entryClass");
}
File getJarFile() {
return jarFile;
}
String getEntryClass() {
return entryClass;
}
@Override
public String toString() {
return String.format("%s (entry class: %s)", jarFile.getAbsolutePath(), entryClass);
}
}
/**
* Returns a JAR file with its entry | JarFileWithEntryClass |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/mapping/Filterable.java | {
"start": 278,
"end": 497
} | interface ____ {
void addFilter(
String name, String condition, boolean autoAliasInjection,
Map<String,String> aliasTableMap, Map<String,String> aliasEntityMap);
List<FilterConfiguration> getFilters();
}
| Filterable |
java | processing__processing4 | java/src/processing/mode/java/preproc/PreprocessorResult.java | {
"start": 966,
"end": 2107
} | class ____ {
private final int headerOffset;
private final String className;
private final List<ImportStatement> importStatements;
private final PdePreprocessor.Mode programType;
private final List<TextTransform.Edit> edits;
private final List<PdePreprocessIssue> preprocessIssues;
private final String sketchWidth;
private final String sketchHeight;
private final String sketchRenderer;
/**
* Create a new PreprocessorResult indicating that there were issues in preprocessing.
*
* @param newPreprocessIssues The list of issues encoutnered.
* @return New preprocessor result.
*/
public static PreprocessorResult reportPreprocessIssues(
List<PdePreprocessIssue> newPreprocessIssues) {
assert newPreprocessIssues.size() > 0;
return new PreprocessorResult(newPreprocessIssues);
}
/**
* Create a new preprocessing result.
*
* @param newProgramType The type of program that has be preprocessed.
* @param newHeaderOffset The offset (in number of chars) from the start of the program at which
* the header finishes.
* @param newClassName The name of the | PreprocessorResult |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/callbacks/returning/NumberMapperWithContext.java | {
"start": 463,
"end": 913
} | class ____ {
public static final NumberMapperWithContext INSTANCE = Mappers.getMapper( NumberMapperWithContext.class );
public abstract Number integerToNumber(Integer number);
public abstract void integerToNumber(Integer number, @MappingTarget Number target);
public abstract Map<String, Integer> longMapToIntegerMap(Map<String, Long> target);
public abstract List<String> setToList(Set<Integer> target);
}
| NumberMapperWithContext |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/resolution/deepresultmap/DeepResultMapTest.java | {
"start": 1140,
"end": 2067
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
// create an SqlSessionFactory
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/resolution/deepresultmap/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/resolution/CreateDB.sql");
}
@Test
void shouldGetAUser() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
MapperA mapper = sqlSession.getMapper(MapperA.class);
User user = mapper.getUser(1);
Assertions.assertEquals(Integer.valueOf(1), user.getId());
Assertions.assertEquals("User1", user.getName());
}
}
}
| DeepResultMapTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/createTable/OracleCreateTableTest37.java | {
"start": 1067,
"end": 4719
} | class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = //
"CREATE TABLE hash_products "
+ " ( product_id NUMBER(6) PRIMARY KEY"
+ " , product_name VARCHAR2(50) "
+ " , product_description VARCHAR2(2000) "
+ " , category_id NUMBER(2) "
+ " , weight_class NUMBER(1) "
+ " , warranty_period INTERVAL YEAR TO MONTH "
+ " , supplier_id NUMBER(6) "
+ " , product_status VARCHAR2(20) "
+ " , list_price NUMBER(8,2) "
+ " , min_price NUMBER(8,2) "
+ " , catalog_url VARCHAR2(50) "
+ " , CONSTRAINT product_status_lov_demo "
+ " CHECK (product_status in ('orderable' "
+ " ,'planned' "
+ " ,'under development' "
+ " ,'obsolete') "
+ ") ) "
+ " PARTITION BY HASH (product_id) "
+ " PARTITIONS 4 "
+ " STORE IN (tbs_01, tbs_02, tbs_03, tbs_04);";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE TABLE hash_products ("
+ "\n\tproduct_id NUMBER(6) PRIMARY KEY,"
+ "\n\tproduct_name VARCHAR2(50),"
+ "\n\tproduct_description VARCHAR2(2000),"
+ "\n\tcategory_id NUMBER(2),"
+ "\n\tweight_class NUMBER(1),"
+ "\n\twarranty_period INTERVAL YEAR TO MONTH,"
+ "\n\tsupplier_id NUMBER(6),"
+ "\n\tproduct_status VARCHAR2(20),"
+ "\n\tlist_price NUMBER(8, 2),"
+ "\n\tmin_price NUMBER(8, 2),"
+ "\n\tcatalog_url VARCHAR2(50),"
+ "\n\tCONSTRAINT product_status_lov_demo CHECK (product_status IN ('orderable', 'planned', 'under development', 'obsolete'))"
+ "\n)"
+ "\nPARTITION BY HASH (product_id) PARTITIONS 4"
+ "\nSTORE IN (tbs_01, tbs_02, tbs_03, tbs_04);",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(11, visitor.getColumns().size());
assertTrue(visitor.getColumns().contains(new TableStat.Column("hash_products", "product_id")));
}
}
| OracleCreateTableTest37 |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/condition/AnyOf_toString_Test.java | {
"start": 1055,
"end": 2160
} | class ____ {
@Test
void should_implement_toString_showing_descriptions_of_inner_Conditions() {
// GIVEN
TestCondition<Object> condition1 = new TestCondition<>("Condition 1");
TestCondition<Object> condition2 = new TestCondition<>("Condition 2");
Condition<Object> anyOf = anyOf(condition1, condition2);
// THEN
then(anyOf).hasToString(format("any of:[%n" +
" Condition 1,%n" +
" Condition 2%n" +
"]"));
}
@Test
void should_implement_toString_showing_descriptions_of_inner_Conditions_list() {
// GIVEN
TestCondition<Object> condition1 = new TestCondition<>("Condition 1");
TestCondition<Object> condition2 = new TestCondition<>("Condition 2");
Condition<Object> anyOf = anyOf(list(condition1, condition2));
// THEN
then(anyOf).hasToString(format("any of:[%n" +
" Condition 1,%n" +
" Condition 2%n" +
"]"));
}
}
| AnyOf_toString_Test |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticBeanWithStereotypeTest.java | {
"start": 2732,
"end": 2829
} | interface ____ {
}
@Interceptor
@Priority(1)
@SimpleBinding
static | SimpleBinding |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/support/ServiceSupportTest.java | {
"start": 4357,
"end": 4727
} | class ____ extends ServiceSupport {
public ServiceSupportTestExOnStart() {
// just for testing force it to not be stopped
status = SUSPENDED;
}
@Override
protected void doStart() {
throw new RuntimeException("This service throws an exception when starting");
}
}
}
| ServiceSupportTestExOnStart |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/usertype/internal/ZonedDateTimeCompositeUserType.java | {
"start": 440,
"end": 1290
} | class ____ extends AbstractTimeZoneStorageCompositeUserType<ZonedDateTime> {
@Override
public Object getPropertyValue(ZonedDateTime component, int property) throws HibernateException {
return switch ( property ) {
case 0 -> component.toInstant();
case 1 -> component.getOffset();
default -> null;
};
}
@Override
public ZonedDateTime instantiate(ValueAccess values) {
final Instant instant = values.getValue( 0, Instant.class );
final ZoneOffset zoneOffset = values.getValue( 1, ZoneOffset.class );
return instant == null || zoneOffset == null ? null : ZonedDateTime.ofInstant( instant, zoneOffset );
}
@Override
public Class<?> embeddable() {
return ZonedDateTimeEmbeddable.class;
}
@Override
public Class<ZonedDateTime> returnedClass() {
return ZonedDateTime.class;
}
public static | ZonedDateTimeCompositeUserType |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/registration/RegistrationResponse.java | {
"start": 1050,
"end": 1263
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
// ----------------------------------------------------------------------------
/**
* Base | RegistrationResponse |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_comparingOnlyFields_Test.java | {
"start": 16306,
"end": 17174
} | class ____ {
String name;
String subject;
int rollNo;
Student(String name, String subject, int rollNo) {
this.name = name;
this.subject = subject;
this.rollNo = rollNo;
}
@Override
public String toString() {
return "Student[name=%s, subject=%s, rollNo=%s]".formatted(this.name, this.subject, this.rollNo);
}
}
@Test
void should_only_check_compared_fields_existence_at_the_root_level() {
// GIVEN
Collection<Name> names = newHashSet(new Name("john", "doe"), new Name("jane", "smith"));
WithNames actual = new WithNames(newHashSet(names));
WithNames expected = new WithNames(newHashSet(names));
// WHEN/THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.comparingOnlyFields("names")
.isEqualTo(expected);
}
static | Student |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java | {
"start": 18182,
"end": 18420
} | class ____ configure the ZooKeeper client connection.
* @param zkPrincipal Optional.
*/
public HadoopZookeeperFactory(String zkPrincipal) {
this(zkPrincipal, null, null);
}
/**
* Constructor for the helper | to |
java | quarkusio__quarkus | extensions/funqy/funqy-google-cloud-functions/runtime/src/main/java/io/quarkus/funqy/gcp/functions/FunqyBackgroundFunction.java | {
"start": 247,
"end": 2376
} | class ____ implements RawBackgroundFunction {
protected static final String deploymentStatus;
protected static boolean started = false;
static {
StringWriter error = new StringWriter();
PrintWriter errorWriter = new PrintWriter(error, true);
if (Application.currentApplication() == null) { // were we already bootstrapped? Needed for mock unit testing.
// For GCP functions, we need to set the TCCL to the QuarkusHttpFunction classloader then restore it.
// Without this, we have a lot of classloading issues (ClassNotFoundException on existing classes)
// during static init.
ClassLoader currentCl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(FunqyBackgroundFunction.class.getClassLoader());
Class<?> appClass = Class.forName("io.quarkus.runner.ApplicationImpl");
String[] args = {};
Application app = (Application) appClass.getConstructor().newInstance();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
app.stop();
}
});
app.start(args);
errorWriter.println("Quarkus bootstrapped successfully.");
started = true;
} catch (Exception ex) {
errorWriter.println("Quarkus bootstrap failed.");
ex.printStackTrace(errorWriter);
} finally {
Thread.currentThread().setContextClassLoader(currentCl);
}
} else {
errorWriter.println("Quarkus bootstrapped successfully.");
started = true;
}
deploymentStatus = error.toString();
}
@Override
public void accept(String event, Context context) {
if (!started) {
throw new RuntimeException(deploymentStatus);
}
FunqyCloudFunctionsBindingRecorder.handle(event, context);
}
}
| FunqyBackgroundFunction |
java | apache__camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedProcessMBean.java | {
"start": 916,
"end": 1141
} | interface ____ extends ManagedProcessorMBean {
@ManagedAttribute(description = "Reference to the Processor to lookup in the registry to use")
String getRef();
@ManagedAttribute(description = "The | ManagedProcessMBean |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/server/DefaultPathContainer.java | {
"start": 6424,
"end": 8443
} | class ____ implements PathSegment {
private static final MultiValueMap<String, String> EMPTY_PARAMS =
CollectionUtils.unmodifiableMultiValueMap(new LinkedMultiValueMap<>());
private final String value;
private final String valueToMatch;
private final MultiValueMap<String, String> parameters;
/**
* Factory for segments without decoding and parsing.
*/
static DefaultPathSegment from(String value, DefaultSeparator separator) {
String valueToMatch = value.contains(separator.encodedSequence()) ?
value.replaceAll(separator.encodedSequence(), separator.value()) : value;
return from(value, valueToMatch);
}
/**
* Factory for decoded and parsed segments.
*/
static DefaultPathSegment from(String value, String valueToMatch) {
return new DefaultPathSegment(value, valueToMatch, EMPTY_PARAMS);
}
/**
* Factory for decoded and parsed segments.
*/
static DefaultPathSegment from(String value, String valueToMatch, MultiValueMap<String, String> params) {
return new DefaultPathSegment(value, valueToMatch, CollectionUtils.unmodifiableMultiValueMap(params));
}
private DefaultPathSegment(String value, String valueToMatch, MultiValueMap<String, String> params) {
this.value = value;
this.valueToMatch = valueToMatch;
this.parameters = params;
}
@Override
public String value() {
return this.value;
}
@Override
public String valueToMatch() {
return this.valueToMatch;
}
@Override
public char[] valueToMatchAsChars() {
return this.valueToMatch.toCharArray();
}
@Override
public MultiValueMap<String, String> parameters() {
return this.parameters;
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof PathSegment that && this.value.equals(that.value())));
}
@Override
public int hashCode() {
return this.value.hashCode();
}
@Override
public String toString() {
return "[value='" + this.value + "']";
}
}
}
| DefaultPathSegment |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/JerseyEndpointRequestIntegrationTests.java | {
"start": 5393,
"end": 5621
} | class ____ {
@Bean
TomcatServletWebServerFactory tomcat() {
return new TomcatServletWebServerFactory(0);
}
@Bean
ResourceConfig resourceConfig() {
return new ResourceConfig();
}
}
}
| JerseyEndpointConfiguration |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-uses-wagon/src/main/java/org/apache/maven/plugin/coreit/LoadResourceMojo.java | {
"start": 2148,
"end": 2335
} | class ____.
*/
@Parameter(property = "wagon.wagonClassLoaderOutput")
private File wagonClassLoaderOutput;
/**
* The role hint for the wagon provider to load. The | loader |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java | {
"start": 1991,
"end": 2120
} | class ____ a defined factory.
*
* @param c input c.
* @param conf input configuration.
* @return a new instance of a | with |
java | elastic__elasticsearch | x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java | {
"start": 2944,
"end": 30307
} | class ____ extends ESTestCase {
private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(
"index",
Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build()
);
private static final ByteSizeValue PART_SIZE = ByteSizeValue.ofBytes(Long.MAX_VALUE);
private static final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1);
private String shardHistoryUUID;
private final AtomicLong clock = new AtomicLong();
@Before
public void setUpHistoryUUID() {
shardHistoryUUID = UUIDs.randomBase64UUID();
}
public void testOnlyUsesSourceFilesWhenUseSnapshotsFlagIsFalse() throws Exception {
createStore(store -> {
Store.MetadataSnapshot targetMetadataSnapshot = generateRandomTargetState(store);
writeRandomDocs(store, randomIntBetween(10, 100));
Store.MetadataSnapshot sourceMetadata = store.getMetadata(null);
long startingSeqNo = randomNonNegativeLong();
int translogOps = randomIntBetween(1, 100);
ShardRecoveryPlan shardRecoveryPlan = computeShardRecoveryPlan(
randomBoolean() ? randomAlphaOfLength(10) : null,
sourceMetadata,
targetMetadataSnapshot,
startingSeqNo,
translogOps,
new ShardSnapshotsService(null, null, null, null) {
@Override
public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener<Optional<ShardSnapshot>> listener) {
assert false : "Unexpected call";
}
},
false,
randomBoolean()
);
assertPlanIsValid(shardRecoveryPlan, sourceMetadata);
assertAllSourceFilesAreAvailableInSource(shardRecoveryPlan, sourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetMetadataSnapshot);
assertThat(shardRecoveryPlan.getSnapshotFilesToRecover(), is(equalTo(ShardRecoveryPlan.SnapshotFilesToRecover.EMPTY)));
assertThat(shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode(), is(equalTo(true)));
assertThat(shardRecoveryPlan.getStartingSeqNo(), equalTo(startingSeqNo));
assertThat(shardRecoveryPlan.getTranslogOps(), equalTo(translogOps));
});
}
public void testFallbacksToRegularPlanIfThereAreNotAvailableSnapshotsOrThereIsAFailureDuringFetch() throws Exception {
createStore(store -> {
Store.MetadataSnapshot targetMetadataSnapshot = generateRandomTargetState(store);
writeRandomDocs(store, randomIntBetween(10, 100));
final Store.MetadataSnapshot sourceMetadata = store.getMetadata(null);
long startingSeqNo = randomNonNegativeLong();
int translogOps = randomIntBetween(1, 100);
ShardRecoveryPlan shardRecoveryPlan = computeShardRecoveryPlan(
null,
sourceMetadata,
targetMetadataSnapshot,
startingSeqNo,
translogOps,
new ShardSnapshotsService(null, null, null, null) {
@Override
public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener<Optional<ShardSnapshot>> listener) {
if (randomBoolean()) {
listener.onResponse(Optional.empty());
} else {
listener.onFailure(new IOException("Boom!"));
}
}
},
true,
randomBoolean()
);
assertPlanIsValid(shardRecoveryPlan, sourceMetadata);
assertAllSourceFilesAreAvailableInSource(shardRecoveryPlan, sourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetMetadataSnapshot);
assertThat(shardRecoveryPlan.getSnapshotFilesToRecover(), is(equalTo(ShardRecoveryPlan.SnapshotFilesToRecover.EMPTY)));
assertThat(shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode(), is(equalTo(true)));
assertThat(shardRecoveryPlan.getStartingSeqNo(), equalTo(startingSeqNo));
assertThat(shardRecoveryPlan.getTranslogOps(), equalTo(translogOps));
});
}
public void testLogicallyEquivalentSnapshotIsUsed() throws Exception {
createStore(store -> {
Store.MetadataSnapshot targetSourceMetadata = generateRandomTargetState(store);
writeRandomDocs(store, randomIntBetween(10, 100));
Store.MetadataSnapshot sourceMetadata = store.getMetadata(null);
ShardSnapshot shardSnapshotData = createShardSnapshotThatSharesSegmentFiles(store, "repo");
// The shardStateIdentifier is shared with the latest snapshot,
// meaning that the current shard and the snapshot are logically equivalent
String shardStateIdentifier = shardSnapshotData.getShardStateIdentifier();
long startingSeqNo = randomNonNegativeLong();
int translogOps = randomIntBetween(1, 100);
ShardRecoveryPlan shardRecoveryPlan = computeShardRecoveryPlan(
shardStateIdentifier,
sourceMetadata,
targetSourceMetadata,
startingSeqNo,
translogOps,
new ShardSnapshotsService(null, null, null, null) {
@Override
public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener<Optional<ShardSnapshot>> listener) {
listener.onResponse(Optional.of(shardSnapshotData));
}
},
true,
randomBoolean()
);
assertPlanIsValid(shardRecoveryPlan, sourceMetadata);
assertAllSourceFilesAreAvailableInSource(shardRecoveryPlan, sourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetSourceMetadata);
assertUsesExpectedSnapshot(shardRecoveryPlan, shardSnapshotData);
assertThat(shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode(), is(equalTo(true)));
assertThat(shardRecoveryPlan.getStartingSeqNo(), equalTo(startingSeqNo));
assertThat(shardRecoveryPlan.getTranslogOps(), equalTo(translogOps));
});
}
public void testLogicallyEquivalentSnapshotIsUsedEvenIfFilesAreDifferent() throws Exception {
createStore(store -> {
boolean shareFilesWithSource = randomBoolean();
Store.MetadataSnapshot targetSourceMetadata = generateRandomTargetState(store, shareFilesWithSource);
writeRandomDocs(store, randomIntBetween(10, 100));
Store.MetadataSnapshot sourceMetadata = store.getMetadata(null);
boolean compatibleVersion = randomBoolean();
final IndexVersion snapshotVersion;
final Version luceneVersion;
if (compatibleVersion) {
snapshotVersion = IndexVersionUtils.randomCompatibleVersion(random());
luceneVersion = snapshotVersion.luceneVersion();
} else {
snapshotVersion = IndexVersion.fromId(Integer.MAX_VALUE);
luceneVersion = org.apache.lucene.util.Version.parse("255.255.255");
}
// The snapshot shardStateIdentifier is the same as the source, but the files are different.
// This can happen after a primary fail-over.
ShardSnapshot latestSnapshot = createShardSnapshotThatDoNotShareSegmentFiles("repo", snapshotVersion, luceneVersion);
String shardStateIdentifier = latestSnapshot.getShardStateIdentifier();
long startingSeqNo = randomNonNegativeLong();
int translogOps = randomIntBetween(1, 100);
ShardRecoveryPlan shardRecoveryPlan = computeShardRecoveryPlan(
shardStateIdentifier,
sourceMetadata,
targetSourceMetadata,
startingSeqNo,
translogOps,
new ShardSnapshotsService(null, null, null, null) {
@Override
public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener<Optional<ShardSnapshot>> listener) {
listener.onResponse(Optional.of(latestSnapshot));
}
},
true,
randomBoolean()
);
if (shareFilesWithSource || compatibleVersion == false) {
assertPlanIsValid(shardRecoveryPlan, sourceMetadata);
assertAllSourceFilesAreAvailableInSource(shardRecoveryPlan, sourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetSourceMetadata);
assertThat(shardRecoveryPlan.getSnapshotFilesToRecover(), is(equalTo(ShardRecoveryPlan.SnapshotFilesToRecover.EMPTY)));
} else {
assertPlanIsValid(shardRecoveryPlan, latestSnapshot.getMetadataSnapshot());
assertUsesExpectedSnapshot(shardRecoveryPlan, latestSnapshot);
assertThat(shardRecoveryPlan.getSourceFilesToRecover(), is(empty()));
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetSourceMetadata);
assertThat(shardRecoveryPlan.getStartingSeqNo(), equalTo(startingSeqNo));
assertThat(shardRecoveryPlan.getTranslogOps(), equalTo(translogOps));
assertThat(shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode(), is(equalTo(false)));
ShardRecoveryPlan fallbackPlan = shardRecoveryPlan.getFallbackPlan();
assertThat(fallbackPlan, is(notNullValue()));
assertPlanIsValid(fallbackPlan, sourceMetadata);
assertAllSourceFilesAreAvailableInSource(fallbackPlan, sourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(fallbackPlan, targetSourceMetadata);
assertThat(fallbackPlan.getSnapshotFilesToRecover(), is(equalTo(ShardRecoveryPlan.SnapshotFilesToRecover.EMPTY)));
}
});
}
public void testPlannerTriesToUseMostFilesFromSnapshots() throws Exception {
createStore(store -> {
Store.MetadataSnapshot targetMetadataSnapshot = generateRandomTargetState(store);
List<ShardSnapshot> availableSnapshots = new ArrayList<>();
int numberOfStaleSnapshots = randomIntBetween(0, 5);
for (int i = 0; i < numberOfStaleSnapshots; i++) {
availableSnapshots.add(createShardSnapshotThatDoNotShareSegmentFiles("stale-repo-" + i));
}
int numberOfValidSnapshots = randomIntBetween(0, 10);
for (int i = 0; i < numberOfValidSnapshots; i++) {
writeRandomDocs(store, randomIntBetween(10, 100));
availableSnapshots.add(createShardSnapshotThatSharesSegmentFiles(store, "repo-" + i));
}
// Write new segments
writeRandomDocs(store, randomIntBetween(20, 50));
Store.MetadataSnapshot latestSourceMetadata = store.getMetadata(null);
String latestShardIdentifier = randomAlphaOfLength(10);
long startingSeqNo = randomNonNegativeLong();
int translogOps = randomIntBetween(0, 100);
ShardRecoveryPlan shardRecoveryPlan = computeShardRecoveryPlan(
latestShardIdentifier,
latestSourceMetadata,
targetMetadataSnapshot,
startingSeqNo,
translogOps,
new ShardSnapshotsService(null, null, null, null) {
@Override
public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener<Optional<ShardSnapshot>> listener) {
if (availableSnapshots.isEmpty()) {
listener.onResponse(Optional.empty());
} else {
listener.onResponse(Optional.of(availableSnapshots.get(availableSnapshots.size() - 1)));
}
}
},
true,
randomBoolean()
);
assertPlanIsValid(shardRecoveryPlan, latestSourceMetadata);
assertAllSourceFilesAreAvailableInSource(shardRecoveryPlan, latestSourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetMetadataSnapshot);
assertThat(shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode(), is(equalTo(true)));
if (numberOfValidSnapshots > 0) {
ShardSnapshot latestValidSnapshot = availableSnapshots.get(availableSnapshots.size() - 1);
assertUsesExpectedSnapshot(shardRecoveryPlan, latestValidSnapshot);
} else {
assertThat(shardRecoveryPlan.getSnapshotFilesToRecover(), is(equalTo(ShardRecoveryPlan.SnapshotFilesToRecover.EMPTY)));
}
assertThat(shardRecoveryPlan.getStartingSeqNo(), equalTo(startingSeqNo));
assertThat(shardRecoveryPlan.getTranslogOps(), equalTo(translogOps));
});
}
public void testSnapshotsWithADifferentHistoryUUIDAreUsedIfFilesAreShared() throws Exception {
createStore(store -> {
Store.MetadataSnapshot targetMetadataSnapshot = generateRandomTargetState(store);
List<ShardSnapshot> availableSnapshots = new ArrayList<>();
int numberOfValidSnapshots = randomIntBetween(1, 4);
for (int i = 0; i < numberOfValidSnapshots; i++) {
writeRandomDocs(store, randomIntBetween(10, 100));
availableSnapshots.add(createShardSnapshotThatSharesSegmentFiles(store, "repo-" + i));
}
// Simulate a restore/stale primary allocation
shardHistoryUUID = UUIDs.randomBase64UUID();
String latestShardIdentifier = randomAlphaOfLength(10);
// Write new segments
writeRandomDocs(store, randomIntBetween(20, 50));
Store.MetadataSnapshot latestSourceMetadata = store.getMetadata(null);
long startingSeqNo = randomNonNegativeLong();
int translogOps = randomIntBetween(0, 100);
ShardRecoveryPlan shardRecoveryPlan = computeShardRecoveryPlan(
latestShardIdentifier,
latestSourceMetadata,
targetMetadataSnapshot,
startingSeqNo,
translogOps,
new ShardSnapshotsService(null, null, null, null) {
@Override
public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener<Optional<ShardSnapshot>> listener) {
listener.onResponse(Optional.of(availableSnapshots.get(availableSnapshots.size() - 1)));
}
},
true,
randomBoolean()
);
assertPlanIsValid(shardRecoveryPlan, latestSourceMetadata);
assertAllSourceFilesAreAvailableInSource(shardRecoveryPlan, latestSourceMetadata);
assertAllIdenticalFilesAreAvailableInTarget(shardRecoveryPlan, targetMetadataSnapshot);
assertUsesExpectedSnapshot(shardRecoveryPlan, availableSnapshots.get(availableSnapshots.size() - 1));
assertThat(shardRecoveryPlan.canRecoverSnapshotFilesFromSourceNode(), is(equalTo(true)));
assertThat(shardRecoveryPlan.getStartingSeqNo(), equalTo(startingSeqNo));
assertThat(shardRecoveryPlan.getTranslogOps(), equalTo(translogOps));
});
}
private ShardRecoveryPlan computeShardRecoveryPlan(
String shardIdentifier,
Store.MetadataSnapshot sourceMetadataSnapshot,
Store.MetadataSnapshot targetMetadataSnapshot,
long startingSeqNo,
int translogOps,
ShardSnapshotsService shardSnapshotsService,
boolean snapshotRecoveriesEnabled,
boolean primaryRelocation
) throws Exception {
return computeShardRecoveryPlan(
shardIdentifier,
sourceMetadataSnapshot,
targetMetadataSnapshot,
startingSeqNo,
translogOps,
shardSnapshotsService,
snapshotRecoveriesEnabled,
IndexVersion.current(),
primaryRelocation
);
}
private ShardRecoveryPlan computeShardRecoveryPlan(
String shardIdentifier,
Store.MetadataSnapshot sourceMetadataSnapshot,
Store.MetadataSnapshot targetMetadataSnapshot,
long startingSeqNo,
int translogOps,
ShardSnapshotsService shardSnapshotsService,
boolean snapshotRecoveriesEnabled,
IndexVersion version,
boolean primaryRelocation
) throws Exception {
SnapshotsRecoveryPlannerService recoveryPlannerService = new SnapshotsRecoveryPlannerService(shardSnapshotsService, () -> true);
PlainActionFuture<ShardRecoveryPlan> planFuture = new PlainActionFuture<>();
recoveryPlannerService.computeRecoveryPlan(
shardId,
shardIdentifier,
sourceMetadataSnapshot,
targetMetadataSnapshot,
startingSeqNo,
translogOps,
version,
snapshotRecoveriesEnabled,
primaryRelocation,
planFuture
);
final ShardRecoveryPlan shardRecoveryPlan = planFuture.get();
assertThat(shardRecoveryPlan, notNullValue());
return shardRecoveryPlan;
}
private void assertPlanIsValid(ShardRecoveryPlan shardRecoveryPlan, Store.MetadataSnapshot expectedMetadataSnapshot) {
List<StoreFileMetadata> planFiles = new ArrayList<>();
planFiles.addAll(shardRecoveryPlan.getFilesPresentInTarget());
planFiles.addAll(shardRecoveryPlan.getSourceFilesToRecover());
for (BlobStoreIndexShardSnapshot.FileInfo fileInfo : shardRecoveryPlan.getSnapshotFilesToRecover()) {
planFiles.add(fileInfo.metadata());
}
final ArrayList<StoreFileMetadata> storeFileMetadata = iterableAsArrayList(expectedMetadataSnapshot);
List<StoreFileMetadata> missingFiles = storeFileMetadata.stream()
.filter(f -> containsFile(planFiles, f) == false)
.collect(Collectors.toList());
List<StoreFileMetadata> unexpectedFiles = planFiles.stream()
.filter(f -> containsFile(storeFileMetadata, f) == false)
.collect(Collectors.toList());
assertThat(missingFiles, is(empty()));
assertThat(unexpectedFiles, is(empty()));
assertThat(planFiles.size(), is(equalTo(storeFileMetadata.size())));
Store.MetadataSnapshot sourceMetadataSnapshot = shardRecoveryPlan.getSourceMetadataSnapshot();
assertThat(sourceMetadataSnapshot.size(), equalTo(expectedMetadataSnapshot.size()));
assertThat(sourceMetadataSnapshot.getHistoryUUID(), equalTo(expectedMetadataSnapshot.getHistoryUUID()));
}
private void assertAllSourceFilesAreAvailableInSource(
ShardRecoveryPlan shardRecoveryPlan,
Store.MetadataSnapshot sourceMetadataSnapshot
) {
for (StoreFileMetadata sourceFile : shardRecoveryPlan.getSourceFilesToRecover()) {
final StoreFileMetadata actual = sourceMetadataSnapshot.get(sourceFile.name());
assertThat(actual, is(notNullValue()));
assertThat(actual.isSame(sourceFile), is(equalTo(true)));
}
}
private void assertAllIdenticalFilesAreAvailableInTarget(
ShardRecoveryPlan shardRecoveryPlan,
Store.MetadataSnapshot targetMetadataSnapshot
) {
for (StoreFileMetadata identicalFile : shardRecoveryPlan.getFilesPresentInTarget()) {
final StoreFileMetadata targetFile = targetMetadataSnapshot.get(identicalFile.name());
assertThat(targetFile, notNullValue());
assertThat(targetFile.isSame(identicalFile), is(equalTo(true)));
}
}
private void assertUsesExpectedSnapshot(ShardRecoveryPlan shardRecoveryPlan, ShardSnapshot expectedSnapshotToUse) {
assertThat(shardRecoveryPlan.getSnapshotFilesToRecover().indexId(), equalTo(expectedSnapshotToUse.getIndexId()));
assertThat(shardRecoveryPlan.getSnapshotFilesToRecover().repository(), equalTo(expectedSnapshotToUse.getRepository()));
final Store.MetadataSnapshot shardSnapshotMetadataSnapshot = expectedSnapshotToUse.getMetadataSnapshot();
for (BlobStoreIndexShardSnapshot.FileInfo fileInfo : shardRecoveryPlan.getSnapshotFilesToRecover()) {
final StoreFileMetadata snapshotFile = shardSnapshotMetadataSnapshot.get(fileInfo.metadata().name());
assertThat(snapshotFile, is(notNullValue()));
assertThat(snapshotFile.isSame(fileInfo.metadata()), is(equalTo(true)));
}
}
// StoreFileMetadata doesn't implement #equals, we rely on StoreFileMetadata#isSame for equality checks
private boolean containsFile(List<StoreFileMetadata> files, StoreFileMetadata fileMetadata) {
for (StoreFileMetadata file : files) {
if (fileMetadata.name().equals(file.name()) && file.isSame(fileMetadata)) {
return true;
}
}
return false;
}
private void createStore(CheckedConsumer<Store, Exception> testBody) throws Exception {
BaseDirectoryWrapper baseDirectoryWrapper = newFSDirectory(createTempDir());
Store store = new Store(shardId, INDEX_SETTINGS, baseDirectoryWrapper, new DummyShardLock(shardId));
try {
testBody.accept(store);
} finally {
IOUtils.close(store);
}
}
private Store.MetadataSnapshot generateRandomTargetState(Store store) throws IOException {
return generateRandomTargetState(store, randomBoolean());
}
private Store.MetadataSnapshot generateRandomTargetState(Store store, boolean shareFilesWithSource) throws IOException {
final Store.MetadataSnapshot targetMetadataSnapshot;
if (shareFilesWithSource) {
// The target can share some files with the source
writeRandomDocs(store, randomIntBetween(20, 50));
targetMetadataSnapshot = store.getMetadata(null);
} else {
if (randomBoolean()) {
targetMetadataSnapshot = Store.MetadataSnapshot.EMPTY;
} else {
// None of the files in the target would match
final int filesInTargetCount = randomIntBetween(1, 20);
Map<String, StoreFileMetadata> filesInTarget = IntStream.range(0, filesInTargetCount)
.mapToObj(i -> randomStoreFileMetadata())
.collect(Collectors.toMap(StoreFileMetadata::name, Function.identity()));
targetMetadataSnapshot = new Store.MetadataSnapshot(filesInTarget, Collections.emptyMap(), 0);
}
}
return targetMetadataSnapshot;
}
private void writeRandomDocs(Store store, int numDocs) throws IOException {
Directory dir = store.directory();
// Disable merges to control the files that are used in this tests
IndexWriterConfig indexWriterConfig = new IndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE)
.setMergeScheduler(NoMergeScheduler.INSTANCE);
IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
for (int i = 0; i < numDocs; i++) {
Document document = new Document();
document.add(new StringField("id", Integer.toString(i), Field.Store.YES));
document.add(newField("field", randomUnicodeOfCodepointLengthBetween(1, 10), TextField.TYPE_STORED));
writer.addDocument(document);
}
Map<String, String> userData = new HashMap<>();
userData.put(HISTORY_UUID_KEY, shardHistoryUUID);
writer.setLiveCommitData(userData.entrySet());
writer.commit();
writer.close();
}
private ShardSnapshot createShardSnapshotThatDoNotShareSegmentFiles(String repoName) {
return createShardSnapshotThatDoNotShareSegmentFiles(repoName, IndexVersion.current(), IndexVersion.current().luceneVersion());
}
private ShardSnapshot createShardSnapshotThatDoNotShareSegmentFiles(
String repoName,
IndexVersion version,
org.apache.lucene.util.Version luceneVersion
) {
List<BlobStoreIndexShardSnapshot.FileInfo> snapshotFiles = randomList(10, 20, () -> {
StoreFileMetadata storeFileMetadata = randomStoreFileMetadata();
return new BlobStoreIndexShardSnapshot.FileInfo(randomAlphaOfLength(10), storeFileMetadata, PART_SIZE);
});
return createShardSnapshot(repoName, snapshotFiles, version, luceneVersion);
}
private ShardSnapshot createShardSnapshotThatSharesSegmentFiles(Store store, String repository) throws Exception {
Store.MetadataSnapshot sourceMetadata = store.getMetadata(null);
assertThat(sourceMetadata.size(), is(greaterThan(1)));
List<BlobStoreIndexShardSnapshot.FileInfo> snapshotFiles = new ArrayList<>(sourceMetadata.size());
for (StoreFileMetadata storeFileMetadata : sourceMetadata) {
BlobStoreIndexShardSnapshot.FileInfo fileInfo = new BlobStoreIndexShardSnapshot.FileInfo(
randomAlphaOfLength(10),
storeFileMetadata,
PART_SIZE
);
snapshotFiles.add(fileInfo);
}
return createShardSnapshot(repository, snapshotFiles, IndexVersion.current(), IndexVersion.current().luceneVersion());
}
private ShardSnapshot createShardSnapshot(
String repoName,
List<BlobStoreIndexShardSnapshot.FileInfo> snapshotFiles,
IndexVersion version,
org.apache.lucene.util.Version luceneVersion
) {
String shardIdentifier = randomAlphaOfLength(10);
Snapshot snapshot = new Snapshot(repoName, new SnapshotId("snap", UUIDs.randomBase64UUID(random())));
IndexId indexId = randomIndexId();
ShardSnapshotInfo shardSnapshotInfo = new ShardSnapshotInfo(
indexId,
shardId,
snapshot,
randomAlphaOfLength(10),
shardIdentifier,
clock.incrementAndGet()
);
Map<String, String> luceneCommitUserData = version == null
? Collections.emptyMap()
: Collections.singletonMap(ES_VERSION, version.toString());
return new ShardSnapshot(shardSnapshotInfo, snapshotFiles, luceneCommitUserData, luceneVersion);
}
private StoreFileMetadata randomStoreFileMetadata() {
return new StoreFileMetadata(
"_" + randomAlphaOfLength(10),
randomLongBetween(1, 100),
randomAlphaOfLength(10),
IndexVersion.current().luceneVersion().toString()
);
}
private IndexId randomIndexId() {
return new IndexId(shardId.getIndexName(), randomAlphaOfLength(10));
}
}
| SnapshotsRecoveryPlannerServiceTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.