language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/commands/ListCommandIntegrationTests.java | {
"start": 1953,
"end": 13651
} | class ____ extends TestSupport {
private final RedisCommands<String, String> redis;
@Inject
protected ListCommandIntegrationTests(RedisCommands<String, String> redis) {
this.redis = redis;
}
@BeforeEach
void setUp() {
this.redis.flushall();
}
@Test
void blpop() {
redis.rpush("two", "2", "3");
assertThat(redis.blpop(1, "one", "two")).isEqualTo(kv("two", "2"));
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void blpopDoubleTimeout() {
redis.rpush("two", "2", "3");
assertThat(redis.blpop(0.1, "one", "two")).isEqualTo(kv("two", "2"));
assertThat(redis.blpop(0.1, "one", "two")).isEqualTo(kv("two", "3"));
assertThat(redis.blpop(0.1, "one", "two")).isNull();
}
@Test
@EnabledOnCommand("BLMPOP") // Redis 7.0
void blmpop() {
redis.rpush("{0}two", "1", "2", "3");
LMPopArgs args = LMPopArgs.Builder.left().count(2);
assertThat(redis.blmpop(0.1, args, "{0}one", "{0}two")).isEqualTo(kv("{0}two", Arrays.asList("1", "2")));
assertThat(redis.blmpop(0.1, args, "{0}one", "{0}two")).isEqualTo(kv("{0}two", Collections.singletonList("3")));
assertThat(redis.blmpop(0.1, args, "{0}one", "{0}two")).isNull();
}
@Test
void blpopTimeout() {
redis.setTimeout(Duration.ofSeconds(10));
assertThat(redis.blpop(1, key)).isNull();
}
@Test
void brpop() {
redis.rpush("two", "2", "3");
assertThat(redis.brpop(1, "one", "two")).isEqualTo(kv("two", "3"));
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void brpopDoubleTimeout() {
redis.rpush("two", "2", "3");
assertThat(redis.brpop(0.5, "one", "two")).isEqualTo(kv("two", "3"));
}
@Test
void brpoplpush() {
redis.rpush("one", "1", "2");
redis.rpush("two", "3", "4");
assertThat(redis.brpoplpush(1, "one", "two")).isEqualTo("2");
assertThat(redis.lrange("one", 0, -1)).isEqualTo(list("1"));
assertThat(redis.lrange("two", 0, -1)).isEqualTo(list("2", "3", "4"));
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void brpoplpushDoubleTimeout() {
redis.rpush("one", "1", "2");
redis.rpush("two", "3", "4");
assertThat(redis.brpoplpush(0.5, "one", "two")).isEqualTo("2");
assertThat(redis.lrange("one", 0, -1)).isEqualTo(list("1"));
assertThat(redis.lrange("two", 0, -1)).isEqualTo(list("2", "3", "4"));
}
@Test
void lindex() {
assertThat(redis.lindex(key, 0)).isNull();
redis.rpush(key, "one");
assertThat(redis.lindex(key, 0)).isEqualTo("one");
}
@Test
void linsert() {
assertThat(redis.linsert(key, false, "one", "two")).isEqualTo(0);
redis.rpush(key, "one");
redis.rpush(key, "three");
assertThat(redis.linsert(key, true, "three", "two")).isEqualTo(3);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two", "three"));
}
@Test
void llen() {
assertThat((long) redis.llen(key)).isEqualTo(0);
redis.lpush(key, "one");
assertThat((long) redis.llen(key)).isEqualTo(1);
}
@Test
@EnabledOnCommand("LMPOP") // Redis 7.0
void lmpop() {
redis.rpush("{0}two", "1", "2", "3", "4");
redis.rpush("{0}one", "1");
assertThat(redis.lmpop(LMPopArgs.Builder.left().count(2), "{0}two")).isEqualTo(kv("{0}two", Arrays.asList("1", "2")));
assertThat(redis.lmpop(LMPopArgs.Builder.right().count(1), "{0}two"))
.isEqualTo(kv("{0}two", Collections.singletonList("4")));
assertThat(redis.lmpop(LMPopArgs.Builder.right().count(2), "{0}two", "{0}one"))
.isEqualTo(kv("{0}two", Collections.singletonList("3")));
assertThat(redis.lmpop(LMPopArgs.Builder.right().count(1), "{0}two")).isNull();
}
@Test
void lpop() {
assertThat(redis.lpop(key)).isNull();
redis.rpush(key, "one", "two");
assertThat(redis.lpop(key)).isEqualTo("one");
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("two"));
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void lpopCount() {
assertThat(redis.lpop(key, 1)).isEqualTo(list());
redis.rpush(key, "one", "two");
assertThat(redis.lpop(key, 3)).isEqualTo(list("one", "two"));
}
@Test
@EnabledOnCommand("LPOS")
void lpos() {
redis.rpush(key, "a", "b", "c", "1", "2", "3", "c", "c");
assertThat(redis.lpos(key, "a")).isEqualTo(0);
assertThat(redis.lpos(key, "c")).isEqualTo(2);
assertThat(redis.lpos(key, "c", LPosArgs.Builder.rank(1))).isEqualTo(2);
assertThat(redis.lpos(key, "c", LPosArgs.Builder.rank(2))).isEqualTo(6);
assertThat(redis.lpos(key, "c", LPosArgs.Builder.rank(4))).isNull();
assertThat(redis.lpos(key, "c", 0)).contains(2L, 6L, 7L);
assertThat(redis.lpos(key, "c", 0, LPosArgs.Builder.maxlen(1))).isEmpty();
}
@Test
void lpush() {
assertThat((long) redis.lpush(key, "two")).isEqualTo(1);
assertThat((long) redis.lpush(key, "one")).isEqualTo(2);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two"));
assertThat((long) redis.lpush(key, "three", "four")).isEqualTo(4);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("four", "three", "one", "two"));
}
@Test
void lpushx() {
assertThat((long) redis.lpushx(key, "two")).isEqualTo(0);
redis.lpush(key, "two");
assertThat((long) redis.lpushx(key, "one")).isEqualTo(2);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two"));
}
@Test
void lpushxVariadic() {
assumeTrue(RedisConditions.of(redis).hasCommandArity("LPUSHX", -3));
assertThat((long) redis.lpushx(key, "one", "two")).isEqualTo(0);
redis.lpush(key, "two");
assertThat((long) redis.lpushx(key, "one", "zero")).isEqualTo(3);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("zero", "one", "two"));
}
@Test
void lrange() {
assertThat(redis.lrange(key, 0, 10).isEmpty()).isTrue();
redis.rpush(key, "one", "two", "three");
List<String> range = redis.lrange(key, 0, 1);
assertThat(range).hasSize(2);
assertThat(range.get(0)).isEqualTo("one");
assertThat(range.get(1)).isEqualTo("two");
assertThat(redis.lrange(key, 0, -1)).hasSize(3);
}
@Test
void lrangeStreaming() {
assertThat(redis.lrange(key, 0, 10).isEmpty()).isTrue();
redis.rpush(key, "one", "two", "three");
ListStreamingAdapter<String> adapter = new ListStreamingAdapter<>();
Long count = redis.lrange(adapter, key, 0, 1);
assertThat(count.longValue()).isEqualTo(2);
List<String> range = adapter.getList();
assertThat(range).hasSize(2);
assertThat(range.get(0)).isEqualTo("one");
assertThat(range.get(1)).isEqualTo("two");
assertThat(redis.lrange(key, 0, -1)).hasSize(3);
}
@Test
void lrem() {
assertThat(redis.lrem(key, 0, value)).isEqualTo(0);
redis.rpush(key, "1", "2", "1", "2", "1");
assertThat((long) redis.lrem(key, 1, "1")).isEqualTo(1);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("2", "1", "2", "1"));
redis.lpush(key, "1");
assertThat((long) redis.lrem(key, -1, "1")).isEqualTo(1);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("1", "2", "1", "2"));
redis.lpush(key, "1");
assertThat((long) redis.lrem(key, 0, "1")).isEqualTo(3);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("2", "2"));
}
@Test
void lset() {
redis.rpush(key, "one", "two", "three");
assertThat(redis.lset(key, 2, "san")).isEqualTo("OK");
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two", "san"));
}
@Test
void ltrim() {
redis.rpush(key, "1", "2", "3", "4", "5", "6");
assertThat(redis.ltrim(key, 0, 3)).isEqualTo("OK");
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("1", "2", "3", "4"));
assertThat(redis.ltrim(key, -2, -1)).isEqualTo("OK");
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("3", "4"));
}
@Test
void rpop() {
assertThat(redis.rpop(key)).isNull();
redis.rpush(key, "one", "two");
assertThat(redis.rpop(key)).isEqualTo("two");
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one"));
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void rpopCount() {
assertThat(redis.rpop(key, 1)).isEqualTo(list());
redis.rpush(key, "one", "two");
assertThat(redis.rpop(key, 3)).isEqualTo(list("two", "one"));
}
@Test
void rpoplpush() {
assertThat(redis.rpoplpush("one", "two")).isNull();
redis.rpush("one", "1", "2");
redis.rpush("two", "3", "4");
assertThat(redis.rpoplpush("one", "two")).isEqualTo("2");
assertThat(redis.lrange("one", 0, -1)).isEqualTo(list("1"));
assertThat(redis.lrange("two", 0, -1)).isEqualTo(list("2", "3", "4"));
}
@Test
void rpush() {
assertThat((long) redis.rpush(key, "one")).isEqualTo(1);
assertThat((long) redis.rpush(key, "two")).isEqualTo(2);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two"));
assertThat((long) redis.rpush(key, "three", "four")).isEqualTo(4);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two", "three", "four"));
}
@Test
void rpushx() {
assertThat((long) redis.rpushx(key, "one")).isEqualTo(0);
redis.rpush(key, "one");
assertThat((long) redis.rpushx(key, "two")).isEqualTo(2);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two"));
}
@Test
void rpushxVariadic() {
assumeTrue(RedisConditions.of(redis).hasCommandArity("RPUSHX", -3));
assertThat((long) redis.rpushx(key, "two", "three")).isEqualTo(0);
redis.rpush(key, "one");
assertThat((long) redis.rpushx(key, "two", "three")).isEqualTo(3);
assertThat(redis.lrange(key, 0, -1)).isEqualTo(list("one", "two", "three"));
}
@Test
@EnabledOnCommand("LMOVE") // Redis 6.2
void lmove() {
String list1 = key;
String list2 = "38o54"; // yields in same slot as "key"
redis.rpush(list1, "one", "two", "three");
redis.lmove(list1, list2, LMoveArgs.Builder.rightLeft());
assertThat(redis.lrange(list1, 0, -1)).containsExactly("one", "two");
assertThat(redis.lrange(list2, 0, -1)).containsOnly("three");
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void blmove() {
String list1 = key;
String list2 = "38o54"; // yields in same slot as "key"
redis.rpush(list1, "one", "two", "three");
redis.blmove(list1, list2, LMoveArgs.Builder.leftRight(), 1000);
assertThat(redis.lrange(list1, 0, -1)).containsExactly("two", "three");
assertThat(redis.lrange(list2, 0, -1)).containsOnly("one");
}
@Test
@EnabledOnCommand("BLMOVE") // Redis 6.2
void blmoveDoubleTimeout() {
String list1 = key;
String list2 = "38o54"; // yields in same slot as "key"
redis.rpush(list1, "one", "two", "three");
redis.blmove(list1, list2, LMoveArgs.Builder.leftRight(), 1.5);
assertThat(redis.lrange(list1, 0, -1)).containsExactly("two", "three");
assertThat(redis.lrange(list2, 0, -1)).containsOnly("one");
}
}
| ListCommandIntegrationTests |
java | apache__camel | components/camel-micrometer-observability/src/test/java/org/apache/camel/micrometer/observability/DisableEndpointTest.java | {
"start": 1523,
"end": 3780
} | class ____ extends MicrometerObservabilityTracerPropagationTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
tst.setTraceProcessors(true);
tst.setExcludePatterns("log*,to*,setVariable*");
return super.createCamelContext();
}
@Test
void testProcessorsTraceRequest() throws IOException {
template.sendBody("direct:start", "my-body");
Map<String, OtelTrace> traces = otelExtension.getTraces();
assertEquals(1, traces.size());
checkTrace(traces.values().iterator().next());
}
@Test
void testExcludedVariableIsPresent() throws InterruptedException {
MockEndpoint endpoint = context().getEndpoint("mock:variable", MockEndpoint.class);
endpoint.expectedMessageCount(1);
template.sendBody("direct:variable", "Test Message");
endpoint.assertIsSatisfied();
Exchange first = endpoint.getReceivedExchanges().get(0);
String myVar = first.getVariable("myVar", String.class);
Assertions.assertEquals("testValue", myVar);
}
private void checkTrace(OtelTrace trace) {
List<SpanData> spans = trace.getSpans();
assertEquals(2, spans.size());
SpanData testProducer = spans.get(0);
SpanData direct = spans.get(1);
// Validate span completion
assertTrue(testProducer.hasEnded());
assertTrue(direct.hasEnded());
// Validate same trace
assertEquals(testProducer.getTraceId(), direct.getTraceId());
// Validate hierarchy
assertEquals(SpanId.getInvalid(), testProducer.getParentSpanId());
assertEquals(testProducer.getSpanId(), direct.getParentSpanId());
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.routeId("start")
.log("A message")
.to("log:info");
from("direct:variable")
.setVariable("myVar", constant("testValue"))
.to("mock:variable");
}
};
}
}
| DisableEndpointTest |
java | spring-projects__spring-boot | module/spring-boot-liquibase/src/test/java/org/springframework/boot/liquibase/autoconfigure/LiquibaseAutoConfigurationTests.java | {
"start": 26230,
"end": 26693
} | class ____ {
@Bean
@Primary
DataSource normalDataSource() {
return DataSourceBuilder.create().url("jdbc:h2:mem:normal" + UUID.randomUUID()).username("sa").build();
}
@LiquibaseDataSource
@Bean
DataSource liquibaseDataSource() {
return DataSourceBuilder.create()
.url("jdbc:h2:mem:liquibasetest" + UUID.randomUUID())
.username("sa")
.build();
}
}
@Configuration(proxyBeanMethods = false)
static | LiquibaseDataSourceConfiguration |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/condition/config/model/ConditionRuleParser.java | {
"start": 1686,
"end": 3205
} | class ____ {
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(ConditionRuleParser.class);
public static AbstractRouterRule parse(String rawRule) {
AbstractRouterRule rule;
Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()));
Map<String, Object> map = yaml.load(rawRule);
String confVersion = (String) map.get(CONFIG_VERSION_KEY);
if (confVersion != null && confVersion.toLowerCase().startsWith(RULE_VERSION_V31)) {
rule = MultiDestConditionRouterRule.parseFromMap(map);
if (CollectionUtils.isEmpty(((MultiDestConditionRouterRule) rule).getConditions())) {
rule.setValid(false);
}
} else if (confVersion != null && confVersion.compareToIgnoreCase(RULE_VERSION_V31) > 0) {
logger.warn(
CLUSTER_FAILED_RULE_PARSING,
"Invalid condition config version number.",
"",
"Ignore this configuration. Only " + RULE_VERSION_V31 + " and below are supported in this release");
rule = null;
} else {
// for under v3.1
rule = ConditionRouterRule.parseFromMap(map);
if (CollectionUtils.isEmpty(((ConditionRouterRule) rule).getConditions())) {
rule.setValid(false);
}
}
if (rule != null) {
rule.setRawRule(rawRule);
}
return rule;
}
}
| ConditionRuleParser |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/WithAssertions_delegation_Test.java | {
"start": 3914,
"end": 5745
} | class ____ {
private final String name;
private final String value;
public TestItem(final String name, final String value) {
super();
this.name = name;
this.value = value;
}
@SuppressWarnings("unused")
public String getName() {
return name;
}
@SuppressWarnings("unused")
public String getValue() {
return value;
}
}
private static final TestItem[] ITEMS = { new TestItem("n1", "v1"), new TestItem("n2", "v2") };
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_filter_array_Test() {
assertThat(filter(ITEMS).with("name").equalsTo("n1").get()).containsExactly(ITEMS[0]);
}
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_filter_iterable_Test() {
assertThat(filter(Arrays.asList(ITEMS)).with("name").equalsTo("n1").get()).containsExactly(ITEMS[0]);
}
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_fail_Test() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> fail("Failed"));
}
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_fail_with_throwable_Test() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> fail("Failed", new RuntimeException("expected")));
}
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_not_Test() {
assertThat("Solo").is(not(JEDI));
}
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_assertThat_object_Test() {
assertThat(ITEMS[0]).isNotNull();
}
/**
* Test that the delegate method is called.
*/
@Test
void withAssertions_assertThat_Test() {
assertThat(ITEMS[0]).isNotNull();
}
private static | TestItem |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/PartitionsConsensusImpl.java | {
"start": 400,
"end": 560
} | class ____ {
/**
* Votes for {@link Partitions} that contains the most known (previously existing) nodes.
*/
static final | PartitionsConsensusImpl |
java | playframework__playframework | core/play/src/test/java/play/libs/concurrent/FuturesTest.java | {
"start": 581,
"end": 971
} | class ____ {
private ActorSystem system;
private Futures futures;
@Before
public void setup() {
system = ActorSystem.create();
futures = new DefaultFutures(new play.api.libs.concurrent.DefaultFutures(system));
}
@After
public void teardown() {
system.terminate();
futures = null;
}
@Test
public void successfulTimeout() throws Exception {
| FuturesTest |
java | elastic__elasticsearch | modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorTests.java | {
"start": 1014,
"end": 3342
} | class ____ extends AggregationTestCase {
public void testTooManyFilters() {
int maxFilters = IndexSearcher.getMaxClauseCount();
int maxFiltersPlusOne = maxFilters + 1;
Map<String, QueryBuilder> filters = Maps.newMapWithExpectedSize(maxFilters);
for (int i = 0; i < maxFiltersPlusOne; i++) {
filters.put("filter" + i, new MatchAllQueryBuilder());
}
AdjacencyMatrixAggregationBuilder tooBig = new AdjacencyMatrixAggregationBuilder("dummy", filters);
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> testCase(iw -> {}, r -> {}, new AggTestConfig(tooBig))
);
assertThat(
ex.getMessage(),
startsWith(
"Number of filters is too large, must be less than or equal to: [" + maxFilters + "] but was [" + maxFiltersPlusOne + "]."
)
);
}
public void testNoFilters() throws IOException {
AdjacencyMatrixAggregationBuilder aggregationBuilder = new AdjacencyMatrixAggregationBuilder("dummy", Map.of());
testCase(iw -> iw.addDocument(List.of()), r -> {
InternalAdjacencyMatrix result = (InternalAdjacencyMatrix) r;
assertThat(result.getBuckets(), equalTo(List.of()));
}, new AggTestConfig(aggregationBuilder));
}
public void testAFewFilters() throws IOException {
AdjacencyMatrixAggregationBuilder aggregationBuilder = new AdjacencyMatrixAggregationBuilder(
"dummy",
Map.of("a", new MatchAllQueryBuilder(), "b", new MatchAllQueryBuilder())
);
testCase(iw -> iw.addDocument(List.of()), r -> {
InternalAdjacencyMatrix result = (InternalAdjacencyMatrix) r;
assertThat(result.getBuckets(), hasSize(3));
InternalAdjacencyMatrix.InternalBucket a = result.getBucketByKey("a");
InternalAdjacencyMatrix.InternalBucket b = result.getBucketByKey("b");
InternalAdjacencyMatrix.InternalBucket ab = result.getBucketByKey("a&b");
assertThat(a.getDocCount(), equalTo(1L));
assertThat(b.getDocCount(), equalTo(1L));
assertThat(ab.getDocCount(), equalTo(1L));
}, new AggTestConfig(aggregationBuilder));
}
}
| AdjacencyMatrixAggregatorTests |
java | apache__flink | flink-kubernetes/src/test/java/org/apache/flink/kubernetes/artifact/DummyFs.java | {
"start": 1112,
"end": 1954
} | class ____ extends LocalFileSystem {
static final URI FS_URI = URI.create("dummyfs:///");
private int existsCallCounter;
private int createCallCounter;
@Override
public URI getUri() {
return FS_URI;
}
@Override
public boolean exists(Path f) throws IOException {
++existsCallCounter;
return super.exists(f);
}
@Override
public FSDataOutputStream create(Path filePath, WriteMode overwrite) throws IOException {
++createCallCounter;
return super.create(filePath, overwrite);
}
public void resetCallCounters() {
createCallCounter = 0;
existsCallCounter = 0;
}
public int getExistsCallCounter() {
return existsCallCounter;
}
public int getCreateCallCounter() {
return createCallCounter;
}
}
| DummyFs |
java | quarkusio__quarkus | integration-tests/oidc-tenancy/src/main/java/io/quarkus/it/keycloak/AcrValueValidator.java | {
"start": 571,
"end": 1214
} | class ____ implements Validator {
@Override
public String validate(JwtContext jwtContext) throws MalformedClaimException {
var jwtClaims = jwtContext.getJwtClaims();
if (jwtClaims.hasClaim(acr.name())) {
var acrClaim = jwtClaims.getStringListClaimValue(acr.name());
if (acrClaim.contains("delta") && acrClaim.contains("epsilon") && acrClaim.contains("zeta")) {
return null;
}
}
String requiredAcrValues = "delta,epsilon,zeta";
throw new AuthenticationFailedException(Map.of(OidcConstants.ACR_VALUES, requiredAcrValues));
}
}
| AcrValueValidator |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java | {
"start": 902,
"end": 5807
} | class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("top", ElementType.BOOLEAN) );
private final DriverContext driverContext;
private final TopBooleanAggregator.SingleState state;
private final List<Integer> channels;
private final int limit;
private final boolean ascending;
public TopBooleanAggregatorFunction(DriverContext driverContext, List<Integer> channels,
TopBooleanAggregator.SingleState state, int limit, boolean ascending) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
this.limit = limit;
this.ascending = ascending;
}
public static TopBooleanAggregatorFunction create(DriverContext driverContext,
List<Integer> channels, int limit, boolean ascending) {
return new TopBooleanAggregatorFunction(driverContext, channels, TopBooleanAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
BooleanBlock vBlock = page.getBlock(channels.get(0));
BooleanVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, mask);
return;
}
addRawVector(vVector, mask);
}
private void addRawInputNotMasked(Page page) {
BooleanBlock vBlock = page.getBlock(channels.get(0));
BooleanVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock);
return;
}
addRawVector(vVector);
}
private void addRawVector(BooleanVector vVector) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
boolean vValue = vVector.getBoolean(valuesPosition);
TopBooleanAggregator.combine(state, vValue);
}
}
private void addRawVector(BooleanVector vVector, BooleanVector mask) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
boolean vValue = vVector.getBoolean(valuesPosition);
TopBooleanAggregator.combine(state, vValue);
}
}
private void addRawBlock(BooleanBlock vBlock) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
boolean vValue = vBlock.getBoolean(vOffset);
TopBooleanAggregator.combine(state, vValue);
}
}
}
private void addRawBlock(BooleanBlock vBlock, BooleanVector mask) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
boolean vValue = vBlock.getBoolean(vOffset);
TopBooleanAggregator.combine(state, vValue);
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block topUncast = page.getBlock(channels.get(0));
if (topUncast.areAllValuesNull()) {
return;
}
BooleanBlock top = (BooleanBlock) topUncast;
assert top.getPositionCount() == 1;
TopBooleanAggregator.combineIntermediate(state, top);
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = TopBooleanAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| TopBooleanAggregatorFunction |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/test/java/org/apache/hadoop/yarn/service/client/TestSecureApiServiceClient.java | {
"start": 2975,
"end": 6399
} | class ____ extends HttpServlet {
private static boolean headerFound = false;
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
Enumeration<String> headers = req.getHeaderNames();
while(headers.hasMoreElements()) {
String header = headers.nextElement();
LOG.info(header);
}
if (req.getHeader("Authorization")!=null) {
headerFound = true;
resp.setStatus(HttpServletResponse.SC_OK);
} else {
headerFound = false;
resp.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
resp.setStatus(HttpServletResponse.SC_OK);
}
@Override
protected void doPut(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
resp.setStatus(HttpServletResponse.SC_OK);
}
@Override
protected void doDelete(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
resp.setStatus(HttpServletResponse.SC_OK);
}
public static boolean isHeaderExist() {
return headerFound;
}
}
@BeforeEach
public void setUp() throws Exception {
startMiniKdc();
keytabFile = new File(getWorkDir(), "keytab");
getKdc().createPrincipal(keytabFile, clientPrincipal, server1Principal,
server2Principal);
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS,
testConf);
UserGroupInformation.setConfiguration(testConf);
UserGroupInformation.setShouldRenewImmediatelyForTests(true);
props = new HashMap<String, String>();
props.put(Sasl.QOP, QualityOfProtection.AUTHENTICATION.saslQop);
server = new Server(8088);
((QueuedThreadPool)server.getThreadPool()).setMaxThreads(20);
ServletContextHandler context = new ServletContextHandler();
context.setContextPath("/app");
server.setHandler(context);
context.addServlet(new ServletHolder(TestServlet.class), "/*");
((ServerConnector)server.getConnectors()[0]).setHost("localhost");
server.start();
List<String> rmServers = new ArrayList<String>();
rmServers.add("localhost:8088");
testConf.set("yarn.resourcemanager.webapp.address",
"localhost:8088");
testConf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
asc = new ApiServiceClient() {
@Override
List<String> getRMHAWebAddresses(Configuration conf) {
return rmServers;
}
};
asc.serviceInit(testConf);
}
@AfterEach
public void tearDown() throws Exception {
server.stop();
stopMiniKdc();
}
@Test
void testHttpSpnegoChallenge() throws Exception {
UserGroupInformation.loginUserFromKeytab(clientPrincipal, keytabFile
.getCanonicalPath());
String challenge = YarnClientUtils.generateToken("localhost");
assertNotNull(challenge);
}
@Test
void testAuthorizationHeader() throws Exception {
UserGroupInformation.loginUserFromKeytab(clientPrincipal, keytabFile
.getCanonicalPath());
String rmAddress = asc.getRMWebAddress();
if (TestServlet.isHeaderExist()) {
assertEquals(rmAddress, "http://localhost:8088");
} else {
fail("Did not see Authorization header.");
}
}
}
| TestServlet |
java | apache__logging-log4j2 | log4j-iostreams/src/test/java/org/apache/logging/log4j/io/LoggerWriterTest.java | {
"start": 901,
"end": 1200
} | class ____ extends AbstractLoggerWriterTest {
@Override
protected StringWriter createWriter() {
return null;
}
@Override
protected Writer createWriterWrapper() {
return IoBuilder.forLogger(getExtendedLogger()).setLevel(LEVEL).buildWriter();
}
}
| LoggerWriterTest |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 2799,
"end": 3544
} | interface ____ {",
" B getB();",
" }",
"}");
CompilerTests.daggerCompiler(component)
.withProcessingOptions(
ImmutableMap.<String, String>builder()
.putAll(fullBindingGraphValidationOption())
.buildOrThrow())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
message(
"Outer.A is bound multiple times:",
" @Provides Outer.A Outer.AModule.provideA(String)",
" Outer.A Outer.Parent.getA()"))
.onSource(component)
.onLineContaining(" | Child |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/data/TestTimeConversions.java | {
"start": 11782,
"end": 11918
} | class
____ model = new ReflectData();
model.addLogicalTypeConversion(conversion);
return model.getSchema(cls);
}
}
| ReflectData |
java | google__error-prone | annotations/src/main/java/com/google/errorprone/annotations/RestrictedApi.java | {
"start": 1975,
"end": 3634
} | class ____ {
* @RestrictedApi(
* explanation="You could shoot yourself in the foot with Foo.bar if you aren't careful",
* link="https://edsger.dijkstra/foo_bar_consider_harmful.html",
* allowedOnPath="testsuite/.*", // Unsafe behavior in tests is ok.
* allowlistAnnotations = {ReviewedFooBar.class},
* allowlistWithWarningAnnotations = {LegacyUnsafeFooBar.class})
* public void bar() {
* if (complicatedCondition) {
* shoot_your_foot();
* } else {
* solve_your_problem();
* }
* }
* boolean complicatedCondition = true;
*
* @ReviewedFooBar(
* reviewer="bangert",
* comments="Makes sure complicatedCondition isn't true, so bar is safe!"
* )
* public void safeBar() {
* if (!complicatedCondition) {
* bar();
* }
* }
*
* @LegacyUnsafeFooBar
* public void someOldCode() {
* // ...
* bar()
* // ...
* }
* }
* }</pre>
*
* <p>The {@code @RestrictedApi} annotation can also be used on a record's component to restrict the
* visibility of the record's accessor methods. For example:
*
* <pre>{@code
* public record User(
* String name,
* @RestrictedApi(
* explanation = "Only allow safe accessors to the password",
* allowlistAnnotations = {ReviewedFooBar.class},
* link = "")
* String password) {}
* }</pre>
*
* <p>All users will be able to call the record's constructor, but only users annotated with
* {@code @ReviewedFooBar} will be able to call the {@code password()} accessor method.
*/
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
public @ | Foo |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/convert/UpdateViaObjectReaderTest.java | {
"start": 1171,
"end": 1200
} | class ____ {}
public | NumView |
java | spring-projects__spring-framework | spring-core-test/src/main/java/org/springframework/core/test/tools/DynamicFile.java | {
"start": 995,
"end": 2439
} | class ____ permits SourceFile, ResourceFile {
private final String path;
private final String content;
protected DynamicFile(String path, String content) {
Assert.hasText(path, "'path' must not be empty");
Assert.hasText(content, "'content' must not be empty");
this.path = path;
this.content = content;
}
protected static String toString(WritableContent writableContent) {
try {
StringBuilder stringBuilder = new StringBuilder();
writableContent.writeTo(stringBuilder);
return stringBuilder.toString();
}
catch (IOException ex) {
throw new IllegalStateException("Unable to read content", ex);
}
}
/**
* Return the contents of the file as a byte array.
* @return the file contents as a byte array
*/
public byte[] getBytes() {
return this.content.getBytes(StandardCharsets.UTF_8);
}
/**
* Return the contents of the file.
* @return the file contents
*/
public String getContent() {
return this.content;
}
/**
* Return the relative path of the file.
* @return the file path
*/
public String getPath() {
return this.path;
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof DynamicFile that &&
this.path.equals(that.path) && this.content.equals(that.content)));
}
@Override
public int hashCode() {
return this.path.hashCode();
}
@Override
public String toString() {
return this.path;
}
}
| DynamicFile |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/session/DefaultMockitoSessionBuilder.java | {
"start": 602,
"end": 2898
} | class ____ implements MockitoSessionBuilder {
private final List<Object> testClassInstances = new ArrayList<Object>();
private String name;
private Strictness strictness;
private MockitoSessionLogger logger;
@Override
public MockitoSessionBuilder initMocks(Object testClassInstance) {
if (testClassInstance != null) {
this.testClassInstances.add(testClassInstance);
}
return this;
}
@Override
public MockitoSessionBuilder initMocks(Object... testClassInstances) {
if (testClassInstances != null) {
for (Object instance : testClassInstances) {
initMocks(instance);
}
}
return this;
}
@Override
public MockitoSessionBuilder name(String name) {
this.name = name;
return this;
}
@Override
public MockitoSessionBuilder strictness(Strictness strictness) {
this.strictness = strictness;
return this;
}
@Override
public MockitoSessionBuilder logger(MockitoSessionLogger logger) {
this.logger = logger;
return this;
}
@Override
public MockitoSession startMocking() {
// Configure default values
List<Object> effectiveTestClassInstances;
String effectiveName;
if (testClassInstances.isEmpty()) {
effectiveTestClassInstances = emptyList();
effectiveName = this.name == null ? "<Unnamed Session>" : this.name;
} else {
effectiveTestClassInstances = new ArrayList<>(testClassInstances);
Object lastTestClassInstance = testClassInstances.get(testClassInstances.size() - 1);
effectiveName =
this.name == null ? lastTestClassInstance.getClass().getName() : this.name;
}
Strictness effectiveStrictness =
this.strictness == null ? Strictness.STRICT_STUBS : this.strictness;
MockitoLogger logger =
this.logger == null
? Plugins.getMockitoLogger()
: new MockitoLoggerAdapter(this.logger);
return new DefaultMockitoSession(
effectiveTestClassInstances, effectiveName, effectiveStrictness, logger);
}
}
| DefaultMockitoSessionBuilder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DistinctVarargsCheckerTest.java | {
"start": 1016,
"end": 1691
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(DistinctVarargsChecker.class, getClass());
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(DistinctVarargsChecker.class, getClass());
@Test
public void distinctVarargsChecker_sameVariableInFuturesVaragsMethods_shouldFlag() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
public | DistinctVarargsCheckerTest |
java | google__guava | android/guava/src/com/google/common/hash/Crc32cHashFunction.java | {
"start": 917,
"end": 1263
} | class ____ extends AbstractHashFunction {
static final HashFunction CRC_32_C = new Crc32cHashFunction();
@Override
public int bits() {
return 32;
}
@Override
public Hasher newHasher() {
return new Crc32cHasher();
}
@Override
public String toString() {
return "Hashing.crc32c()";
}
static final | Crc32cHashFunction |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/protocol/TJSONProtocol.java | {
"start": 7907,
"end": 28526
} | class ____ {
private boolean hasData_;
private final byte[] data_ = new byte[1];
// Return and consume the next byte to be read, either taking it from the
// data buffer if present or getting it from the transport otherwise.
protected byte read() throws TException {
if (hasData_) {
hasData_ = false;
} else {
trans_.readAll(data_, 0, 1);
}
return data_[0];
}
// Return the next byte to be read without consuming, filling the data
// buffer if it has not been filled already.
protected byte peek() throws TException {
if (!hasData_) {
trans_.readAll(data_, 0, 1);
}
hasData_ = true;
return data_[0];
}
}
// Stack of nested contexts that we may be in
private final Stack<JSONBaseContext> contextStack_ = new Stack<>();
// Current context that we are in
private JSONBaseContext context_ = new JSONBaseContext();
// Reader that manages a 1-byte buffer
private LookaheadReader reader_ = new LookaheadReader();
// Write out the TField names as a string instead of the default integer value
private boolean fieldNamesAsString_ = false;
// Push a new JSON context onto the stack.
private void pushContext(JSONBaseContext c) {
contextStack_.push(context_);
context_ = c;
}
// Pop the last JSON context off the stack
private void popContext() {
context_ = contextStack_.pop();
}
// Reset the context stack to its initial state
private void resetContext() {
while (!contextStack_.isEmpty()) {
popContext();
}
}
/** Constructor */
public TJSONProtocol(TTransport trans) {
super(trans);
}
public TJSONProtocol(TTransport trans, boolean fieldNamesAsString) {
super(trans);
fieldNamesAsString_ = fieldNamesAsString;
}
@Override
public void reset() {
contextStack_.clear();
context_ = new JSONBaseContext();
reader_ = new LookaheadReader();
}
// Temporary buffer used by several methods
private final byte[] tmpbuf_ = new byte[4];
// Read a byte that must match b[0]; otherwise an exception is thrown.
// Marked protected to avoid synthetic accessor in JSONListContext.read
// and JSONPairContext.read
protected void readJSONSyntaxChar(byte[] b) throws TException {
byte ch = reader_.read();
if (ch != b[0]) {
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Unexpected character:" + (char) ch);
}
}
// Convert a byte containing a hex char ('0'-'9' or 'a'-'f') into its
// corresponding hex value
private static byte hexVal(byte ch) throws TException {
if ((ch >= '0') && (ch <= '9')) {
return (byte) ((char) ch - '0');
} else if ((ch >= 'a') && (ch <= 'f')) {
return (byte) ((char) ch - 'a' + 10);
} else {
throw new TProtocolException(TProtocolException.INVALID_DATA, "Expected hex character");
}
}
// Convert a byte containing a hex value to its corresponding hex character
private static byte hexChar(byte val) {
val &= 0x0F;
if (val < 10) {
return (byte) ((char) val + '0');
} else {
return (byte) ((char) (val - 10) + 'a');
}
}
// Write the bytes in array buf as a JSON characters, escaping as needed
private void writeJSONString(byte[] b) throws TException {
context_.write();
trans_.write(QUOTE);
int len = b.length;
for (int i = 0; i < len; i++) {
if ((b[i] & 0x00FF) >= 0x30) {
if (b[i] == BACKSLASH[0]) {
trans_.write(BACKSLASH);
trans_.write(BACKSLASH);
} else {
trans_.write(b, i, 1);
}
} else {
tmpbuf_[0] = JSON_CHAR_TABLE[b[i]];
if (tmpbuf_[0] == 1) {
trans_.write(b, i, 1);
} else if (tmpbuf_[0] > 1) {
trans_.write(BACKSLASH);
trans_.write(tmpbuf_, 0, 1);
} else {
trans_.write(ESCSEQ);
tmpbuf_[0] = hexChar((byte) (b[i] >> 4));
tmpbuf_[1] = hexChar(b[i]);
trans_.write(tmpbuf_, 0, 2);
}
}
}
trans_.write(QUOTE);
}
// Write out number as a JSON value. If the context dictates so, it will be
// wrapped in quotes to output as a JSON string.
private void writeJSONInteger(long num) throws TException {
context_.write();
String str = Long.toString(num);
boolean escapeNum = context_.escapeNum();
if (escapeNum) {
trans_.write(QUOTE);
}
byte[] buf = str.getBytes(StandardCharsets.UTF_8);
trans_.write(buf);
if (escapeNum) {
trans_.write(QUOTE);
}
}
// Write out a double as a JSON value. If it is NaN or infinity or if the
// context dictates escaping, write out as JSON string.
private void writeJSONDouble(double num) throws TException {
context_.write();
String str = Double.toString(num);
boolean special = false;
switch (str.charAt(0)) {
case 'N': // NaN
case 'I': // Infinity
special = true;
break;
case '-':
if (str.charAt(1) == 'I') { // -Infinity
special = true;
}
break;
default:
break;
}
boolean escapeNum = special || context_.escapeNum();
if (escapeNum) {
trans_.write(QUOTE);
}
byte[] b = str.getBytes(StandardCharsets.UTF_8);
trans_.write(b, 0, b.length);
if (escapeNum) {
trans_.write(QUOTE);
}
}
// Write out contents of byte array b as a JSON string with base-64 encoded
// data
private void writeJSONBase64(byte[] b, int offset, int length) throws TException {
context_.write();
trans_.write(QUOTE);
int len = length;
int off = offset;
while (len >= 3) {
// Encode 3 bytes at a time
TBase64Utils.encode(b, off, 3, tmpbuf_, 0);
trans_.write(tmpbuf_, 0, 4);
off += 3;
len -= 3;
}
if (len > 0) {
// Encode remainder
TBase64Utils.encode(b, off, len, tmpbuf_, 0);
trans_.write(tmpbuf_, 0, len + 1);
}
trans_.write(QUOTE);
}
private void writeJSONObjectStart() throws TException {
context_.write();
trans_.write(LBRACE);
pushContext(new JSONPairContext());
}
private void writeJSONObjectEnd() throws TException {
popContext();
trans_.write(RBRACE);
}
private void writeJSONArrayStart() throws TException {
context_.write();
trans_.write(LBRACKET);
pushContext(new JSONListContext());
}
private void writeJSONArrayEnd() throws TException {
popContext();
trans_.write(RBRACKET);
}
@Override
public void writeMessageBegin(TMessage message) throws TException {
resetContext(); // THRIFT-3743
writeJSONArrayStart();
writeJSONInteger(VERSION);
byte[] b = message.name.getBytes(StandardCharsets.UTF_8);
writeJSONString(b);
writeJSONInteger(message.type);
writeJSONInteger(message.seqid);
}
@Override
public void writeMessageEnd() throws TException {
writeJSONArrayEnd();
}
@Override
public void writeStructBegin(TStruct struct) throws TException {
writeJSONObjectStart();
}
@Override
public void writeStructEnd() throws TException {
writeJSONObjectEnd();
}
@Override
public void writeFieldBegin(TField field) throws TException {
if (fieldNamesAsString_) {
writeString(field.name);
} else {
writeJSONInteger(field.id);
}
writeJSONObjectStart();
writeJSONString(getTypeNameForTypeID(field.type));
}
@Override
public void writeFieldEnd() throws TException {
writeJSONObjectEnd();
}
@Override
public void writeFieldStop() {}
@Override
public void writeMapBegin(TMap map) throws TException {
writeJSONArrayStart();
writeJSONString(getTypeNameForTypeID(map.keyType));
writeJSONString(getTypeNameForTypeID(map.valueType));
writeJSONInteger(map.size);
writeJSONObjectStart();
}
@Override
public void writeMapEnd() throws TException {
writeJSONObjectEnd();
writeJSONArrayEnd();
}
@Override
public void writeListBegin(TList list) throws TException {
writeJSONArrayStart();
writeJSONString(getTypeNameForTypeID(list.elemType));
writeJSONInteger(list.size);
}
@Override
public void writeListEnd() throws TException {
writeJSONArrayEnd();
}
@Override
public void writeSetBegin(TSet set) throws TException {
writeJSONArrayStart();
writeJSONString(getTypeNameForTypeID(set.elemType));
writeJSONInteger(set.size);
}
@Override
public void writeSetEnd() throws TException {
writeJSONArrayEnd();
}
@Override
public void writeBool(boolean b) throws TException {
writeJSONInteger(b ? (long) 1 : (long) 0);
}
@Override
public void writeByte(byte b) throws TException {
writeJSONInteger(b);
}
@Override
public void writeI16(short i16) throws TException {
writeJSONInteger(i16);
}
@Override
public void writeI32(int i32) throws TException {
writeJSONInteger(i32);
}
@Override
public void writeI64(long i64) throws TException {
writeJSONInteger(i64);
}
@Override
public void writeUuid(UUID uuid) throws TException {
writeJSONString(uuid.toString().getBytes(StandardCharsets.UTF_8));
}
@Override
public void writeDouble(double dub) throws TException {
writeJSONDouble(dub);
}
@Override
public void writeString(String str) throws TException {
byte[] b = str.getBytes(StandardCharsets.UTF_8);
writeJSONString(b);
}
@Override
public void writeBinary(ByteBuffer bin) throws TException {
writeJSONBase64(
bin.array(),
bin.position() + bin.arrayOffset(),
bin.limit() - bin.position() - bin.arrayOffset());
}
/** Reading methods. */
// Read in a JSON string, unescaping as appropriate.. Skip reading from the
// context if skipContext is true.
private TByteArrayOutputStream readJSONString(boolean skipContext) throws TException {
TByteArrayOutputStream arr = new TByteArrayOutputStream(DEF_STRING_SIZE);
ArrayList<Character> codeunits = new ArrayList<Character>();
if (!skipContext) {
context_.read();
}
readJSONSyntaxChar(QUOTE);
while (true) {
byte ch = reader_.read();
if (ch == QUOTE[0]) {
break;
}
if (ch == ESCSEQ[0]) {
ch = reader_.read();
if (ch == ESCSEQ[1]) {
trans_.readAll(tmpbuf_, 0, 4);
short cu =
(short)
(((short) hexVal(tmpbuf_[0]) << 12)
+ ((short) hexVal(tmpbuf_[1]) << 8)
+ ((short) hexVal(tmpbuf_[2]) << 4)
+ (short) hexVal(tmpbuf_[3]));
try {
if (Character.isHighSurrogate((char) cu)) {
if (codeunits.size() > 0) {
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Expected low surrogate char");
}
codeunits.add((char) cu);
} else if (Character.isLowSurrogate((char) cu)) {
if (codeunits.size() == 0) {
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Expected high surrogate char");
}
codeunits.add((char) cu);
arr.write(
(new String(new int[] {codeunits.get(0), codeunits.get(1)}, 0, 2))
.getBytes(StandardCharsets.UTF_8));
codeunits.clear();
} else {
arr.write((new String(new int[] {cu}, 0, 1)).getBytes(StandardCharsets.UTF_8));
}
continue;
} catch (IOException ex) {
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Invalid unicode sequence");
}
} else {
int off = ESCAPE_CHARS.indexOf(ch);
if (off == -1) {
throw new TProtocolException(TProtocolException.INVALID_DATA, "Expected control char");
}
ch = ESCAPE_CHAR_VALS[off];
}
}
arr.write(ch);
}
return arr;
}
// Return true if the given byte could be a valid part of a JSON number.
private boolean isJSONNumeric(byte b) {
switch (b) {
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'E':
case 'e':
return true;
}
return false;
}
// Read in a sequence of characters that are all valid in JSON numbers. Does
// not do a complete regex check to validate that this is actually a number.
private String readJSONNumericChars() throws TException {
StringBuilder strbld = new StringBuilder();
while (true) {
byte ch = reader_.peek();
if (!isJSONNumeric(ch)) {
break;
}
strbld.append((char) reader_.read());
}
return strbld.toString();
}
// Read in a JSON number. If the context dictates, read in enclosing quotes.
private long readJSONInteger() throws TException {
context_.read();
if (context_.escapeNum()) {
readJSONSyntaxChar(QUOTE);
}
String str = readJSONNumericChars();
if (context_.escapeNum()) {
readJSONSyntaxChar(QUOTE);
}
try {
return Long.parseLong(str);
} catch (NumberFormatException ex) {
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Bad data encounted in numeric data");
}
}
// Read in a JSON double value. Throw if the value is not wrapped in quotes
// when expected or if wrapped in quotes when not expected.
private double readJSONDouble() throws TException {
context_.read();
if (reader_.peek() == QUOTE[0]) {
TByteArrayOutputStream arr = readJSONString(true);
double dub = Double.parseDouble(arr.toString(StandardCharsets.UTF_8));
if (!context_.escapeNum() && !Double.isNaN(dub) && !Double.isInfinite(dub)) {
// Throw exception -- we should not be in a string in this case
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Numeric data unexpectedly quoted");
}
return dub;
} else {
if (context_.escapeNum()) {
// This will throw - we should have had a quote if escapeNum == true
readJSONSyntaxChar(QUOTE);
}
try {
return Double.parseDouble(readJSONNumericChars());
} catch (NumberFormatException ex) {
throw new TProtocolException(
TProtocolException.INVALID_DATA, "Bad data encounted in numeric data");
}
}
}
// Read in a JSON string containing base-64 encoded data and decode it.
private byte[] readJSONBase64() throws TException {
TByteArrayOutputStream arr = readJSONString(false);
byte[] b = arr.get();
int len = arr.len();
int off = 0;
int size = 0;
// Ignore padding
int bound = len >= 2 ? len - 2 : 0;
for (int i = len - 1; i >= bound && b[i] == '='; --i) {
--len;
}
while (len >= 4) {
// Decode 4 bytes at a time
TBase64Utils.decode(b, off, 4, b, size); // NB: decoded in place
off += 4;
len -= 4;
size += 3;
}
// Don't decode if we hit the end or got a single leftover byte (invalid
// base64 but legal for skip of regular string type)
if (len > 1) {
// Decode remainder
TBase64Utils.decode(b, off, len, b, size); // NB: decoded in place
size += len - 1;
}
// Sadly we must copy the byte[] (any way around this?)
byte[] result = new byte[size];
System.arraycopy(b, 0, result, 0, size);
return result;
}
private void readJSONObjectStart() throws TException {
context_.read();
readJSONSyntaxChar(LBRACE);
pushContext(new JSONPairContext());
}
private void readJSONObjectEnd() throws TException {
readJSONSyntaxChar(RBRACE);
popContext();
}
private void readJSONArrayStart() throws TException {
context_.read();
readJSONSyntaxChar(LBRACKET);
pushContext(new JSONListContext());
}
private void readJSONArrayEnd() throws TException {
readJSONSyntaxChar(RBRACKET);
popContext();
}
@Override
public TMessage readMessageBegin() throws TException {
resetContext(); // THRIFT-3743
readJSONArrayStart();
if (readJSONInteger() != VERSION) {
throw new TProtocolException(
TProtocolException.BAD_VERSION, "Message contained bad version.");
}
String name = readJSONString(false).toString(StandardCharsets.UTF_8);
byte type = (byte) readJSONInteger();
int seqid = (int) readJSONInteger();
return new TMessage(name, type, seqid);
}
@Override
public void readMessageEnd() throws TException {
readJSONArrayEnd();
}
@Override
public TStruct readStructBegin() throws TException {
readJSONObjectStart();
return ANONYMOUS_STRUCT;
}
@Override
public void readStructEnd() throws TException {
readJSONObjectEnd();
}
@Override
public TField readFieldBegin() throws TException {
byte ch = reader_.peek();
byte type;
short id = 0;
if (ch == RBRACE[0]) {
type = TType.STOP;
} else {
id = (short) readJSONInteger();
readJSONObjectStart();
type = getTypeIDForTypeName(readJSONString(false).get());
}
return new TField("", type, id);
}
@Override
public void readFieldEnd() throws TException {
readJSONObjectEnd();
}
@Override
public TMap readMapBegin() throws TException {
readJSONArrayStart();
byte keyType = getTypeIDForTypeName(readJSONString(false).get());
byte valueType = getTypeIDForTypeName(readJSONString(false).get());
int size = (int) readJSONInteger();
readJSONObjectStart();
TMap map = new TMap(keyType, valueType, size);
checkReadBytesAvailable(map);
return map;
}
@Override
public void readMapEnd() throws TException {
readJSONObjectEnd();
readJSONArrayEnd();
}
@Override
public TList readListBegin() throws TException {
readJSONArrayStart();
byte elemType = getTypeIDForTypeName(readJSONString(false).get());
int size = (int) readJSONInteger();
TList list = new TList(elemType, size);
checkReadBytesAvailable(list);
return list;
}
@Override
public void readListEnd() throws TException {
readJSONArrayEnd();
}
@Override
public TSet readSetBegin() throws TException {
readJSONArrayStart();
byte elemType = getTypeIDForTypeName(readJSONString(false).get());
int size = (int) readJSONInteger();
TSet set = new TSet(elemType, size);
checkReadBytesAvailable(set);
return set;
}
@Override
public void readSetEnd() throws TException {
readJSONArrayEnd();
}
@Override
public boolean readBool() throws TException {
return (readJSONInteger() != 0);
}
@Override
public byte readByte() throws TException {
return (byte) readJSONInteger();
}
@Override
public short readI16() throws TException {
return (short) readJSONInteger();
}
@Override
public int readI32() throws TException {
return (int) readJSONInteger();
}
@Override
public long readI64() throws TException {
return readJSONInteger();
}
@Override
public UUID readUuid() throws TException {
return UUID.fromString(readString());
}
@Override
public double readDouble() throws TException {
return readJSONDouble();
}
@Override
public String readString() throws TException {
return readJSONString(false).toString(StandardCharsets.UTF_8);
}
@Override
public ByteBuffer readBinary() throws TException {
return ByteBuffer.wrap(readJSONBase64());
}
/** Return the minimum number of bytes a type will consume on the wire */
@Override
public int getMinSerializedSize(byte type) throws TTransportException {
switch (type) {
case 0:
return 1; // Stop - T_STOP needs to count itself
case 1:
return 1; // Void - T_VOID needs to count itself
case 2:
return 1; // Bool
case 3:
return 1; // Byte
case 4:
return 1; // Double
case 6:
return 1; // I16
case 8:
return 1; // I32
case 10:
return 1; // I64
case 11:
return 2; // string length
case 12:
return 2; // empty struct
case 13:
return 2; // element count Map
case 14:
return 2; // element count Set
case 15:
return 2; // element count List
default:
throw new TTransportException(TTransportException.UNKNOWN, "unrecognized type code");
}
}
}
| LookaheadReader |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/HadoopTestBase.java | {
"start": 1192,
"end": 1487
} | class ____ JUnit4 tests that sets a default timeout for all tests
* that subclass this test.
*
* Threads are named to the method being executed, for ease of diagnostics
* in logs and thread dumps.
*/
@Timeout(value = TEST_DEFAULT_TIMEOUT_VALUE, unit = TimeUnit.MILLISECONDS)
public abstract | for |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/eventbus/ClusteredEventBusTest.java | {
"start": 1790,
"end": 18626
} | class ____ extends ClusteredEventBusTestBase {
@Test
public void testLocalHandlerNotVisibleRemotely() {
startNodes(2);
vertices[1].eventBus().localConsumer(ADDRESS1).handler(msg -> {
fail("Should not receive message");
});
vertices[0].eventBus().send(ADDRESS1, "foo");
vertices[0].eventBus().publish(ADDRESS1, "foo");
vertices[0].setTimer(1000, id -> testComplete());
await();
}
@Test
public void testLocalHandlerClusteredSend() throws Exception {
startNodes(2);
waitFor(2);
vertices[1].eventBus().consumer(ADDRESS1, msg -> complete()).completion().onComplete(v1 -> {
vertices[0].eventBus().localConsumer(ADDRESS1, msg -> complete()).completion().onComplete(v2 -> {
vertices[0].eventBus().send(ADDRESS1, "foo");
vertices[0].eventBus().send(ADDRESS1, "foo");
});
});
await();
}
@Test
public void testLocalHandlerClusteredPublish() throws Exception {
startNodes(2);
waitFor(2);
vertices[1].eventBus().consumer(ADDRESS1, msg -> complete()).completion().onComplete(v1 -> {
vertices[0].eventBus().localConsumer(ADDRESS1, msg -> complete()).completion().onComplete(v2 -> {
vertices[0].eventBus().publish(ADDRESS1, "foo");
});
});
await();
}
@Test
public void testDecoderSendAsymmetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder1();
vertices[0].eventBus().registerCodec(codec);
vertices[1].eventBus().registerCodec(codec);
String str = TestUtils.randomAlphaString(100);
testSend(new MyPOJO(str), str, null, new DeliveryOptions().setCodecName(codec.name()));
}
@Test
public void testDecoderReplyAsymmetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder1();
vertices[0].eventBus().registerCodec(codec);
vertices[1].eventBus().registerCodec(codec);
String str = TestUtils.randomAlphaString(100);
testReply(new MyPOJO(str), str, null, new DeliveryOptions().setCodecName(codec.name()));
}
@Test
public void testDecoderSendSymmetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder2();
vertices[0].eventBus().registerCodec(codec);
vertices[1].eventBus().registerCodec(codec);
String str = TestUtils.randomAlphaString(100);
MyPOJO pojo = new MyPOJO(str);
testSend(pojo, pojo, null, new DeliveryOptions().setCodecName(codec.name()));
}
@Test
public void testDecoderReplySymmetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder2();
vertices[0].eventBus().registerCodec(codec);
vertices[1].eventBus().registerCodec(codec);
String str = TestUtils.randomAlphaString(100);
MyPOJO pojo = new MyPOJO(str);
testReply(pojo, pojo, null, new DeliveryOptions().setCodecName(codec.name()));
}
@Test
public void testDefaultDecoderSendAsymmetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder1();
vertices[0].eventBus().registerDefaultCodec(MyPOJO.class, codec);
vertices[1].eventBus().registerDefaultCodec(MyPOJO.class, codec);
String str = TestUtils.randomAlphaString(100);
testSend(new MyPOJO(str), str, null, null);
}
@Test
public void testDefaultDecoderReplyAsymmetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder1();
vertices[0].eventBus().registerDefaultCodec(MyPOJO.class, codec);
vertices[1].eventBus().registerDefaultCodec(MyPOJO.class, codec);
String str = TestUtils.randomAlphaString(100);
testReply(new MyPOJO(str), str, null, null);
}
@Test
public void testDefaultDecoderSendSymetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder2();
vertices[0].eventBus().registerDefaultCodec(MyPOJO.class, codec);
vertices[1].eventBus().registerDefaultCodec(MyPOJO.class, codec);
String str = TestUtils.randomAlphaString(100);
MyPOJO pojo = new MyPOJO(str);
testSend(pojo, pojo, null, null);
}
@Test
public void testDefaultDecoderReplySymetric() throws Exception {
startNodes(2);
MessageCodec codec = new MyPOJOEncoder2();
vertices[0].eventBus().registerDefaultCodec(MyPOJO.class, codec);
vertices[1].eventBus().registerDefaultCodec(MyPOJO.class, codec);
String str = TestUtils.randomAlphaString(100);
MyPOJO pojo = new MyPOJO(str);
testReply(pojo, pojo, null, null);
}
@Test
public void testDefaultCodecReplyExceptionSubclass() throws Exception {
startNodes(2);
MyReplyException myReplyException = new MyReplyException(23, "my exception");
MyReplyExceptionMessageCodec codec = new MyReplyExceptionMessageCodec();
vertices[0].eventBus().registerDefaultCodec(MyReplyException.class, codec);
vertices[1].eventBus().registerDefaultCodec(MyReplyException.class, codec);
MessageConsumer<ReplyException> reg = vertices[0].eventBus().<ReplyException>consumer(ADDRESS1, msg -> {
assertTrue(msg.body() instanceof MyReplyException);
testComplete();
});
reg.completion().onComplete(ar -> {
vertices[1].eventBus().send(ADDRESS1, myReplyException);
});
await();
}
// Make sure ping/pong works ok
@Test
public void testClusteredPong() throws Exception {
VertxOptions options = new VertxOptions();
options.getEventBusOptions().setClusterPingInterval(500).setClusterPingReplyInterval(500);
startNodes(2, options);
AtomicBoolean sending = new AtomicBoolean();
MessageConsumer<String> consumer = vertices[0].eventBus().<String>consumer("foobar").handler(msg -> {
if (!sending.get()) {
sending.set(true);
vertices[1].setTimer(4000, id -> {
vertices[1].eventBus().send("foobar", "whatever2");
});
} else {
testComplete();
}
});
consumer.completion().onComplete(ar -> {
assertTrue(ar.succeeded());
vertices[1].eventBus().send("foobar", "whatever");
});
await();
}
@Test
public void testConsumerHandlesCompletionAsynchronously1() {
startNodes(2);
MessageConsumer<Object> consumer = vertices[0].eventBus().consumer(ADDRESS1);
ThreadLocal<Object> stack = new ThreadLocal<>();
stack.set(true);
consumer.completion().onComplete(v -> {
assertTrue(Vertx.currentContext().isEventLoopContext());
assertNull(stack.get());
testComplete();
});
consumer.handler(msg -> {});
await();
}
@Test
public void testConsumerHandlesCompletionAsynchronously2() {
startNodes(2);
MessageConsumer<Object> consumer = vertices[0].eventBus().consumer(ADDRESS1);
consumer.handler(msg -> {
});
ThreadLocal<Object> stack = new ThreadLocal<>();
stack.set(true);
consumer.completion().onComplete(v -> {
assertTrue(Vertx.currentContext().isEventLoopContext());
assertNull(stack.get());
testComplete();
});
await();
}
@Test
public void testSubsRemovedForClosedNode() throws Exception {
testSubsRemoved(latch -> {
vertices[1].close().onComplete(onSuccess(v -> {
latch.countDown();
}));
});
}
@Test
public void testSubsRemovedForKilledNode() throws Exception {
testSubsRemoved(latch -> {
VertxInternal vi = (VertxInternal) vertices[1];
Promise<Void> promise = vi.getOrCreateContext().promise();
vi.clusterManager().leave(promise);
promise.future().onComplete(onSuccess(v -> {
latch.countDown();
}));
});
}
private void testSubsRemoved(Consumer<CountDownLatch> action) throws Exception {
startNodes(3);
CountDownLatch regLatch = new CountDownLatch(1);
AtomicInteger cnt = new AtomicInteger();
vertices[0].eventBus().consumer(ADDRESS1, msg -> {
int c = cnt.getAndIncrement();
assertEquals(msg.body(), "foo" + c);
if (c == 9) {
testComplete();
}
if (c > 9) {
fail("too many messages");
}
}).completion().onComplete(onSuccess(v -> {
vertices[1].eventBus().consumer(ADDRESS1, msg -> {
fail("shouldn't get message");
}).completion().onComplete(onSuccess(v2 -> {
regLatch.countDown();
}));
}));
awaitLatch(regLatch);
CountDownLatch closeLatch = new CountDownLatch(1);
action.accept(closeLatch);
awaitLatch(closeLatch);
// Allow time for kill to be propagate
Thread.sleep(2000);
vertices[2].runOnContext(v -> {
// Now send some messages from node 2 - they should ALL go to node 0
EventBus ebSender = vertices[2].eventBus();
for (int i = 0; i < 10; i++) {
ebSender.send(ADDRESS1, "foo" + i);
}
});
await();
}
@Test
public void sendNoContext() throws Exception {
int size = 1000;
List<Integer> expected = Stream.iterate(0, i -> i + 1).limit(size).collect(Collectors.toList());
ConcurrentLinkedDeque<Integer> obtained = new ConcurrentLinkedDeque<>();
startNodes(2);
CountDownLatch latch = new CountDownLatch(1);
vertices[1].eventBus().<Integer>consumer(ADDRESS1, msg -> {
obtained.add(msg.body());
if (obtained.size() == expected.size()) {
assertEquals(expected, new ArrayList<>(obtained));
testComplete();
}
}).completion().onComplete(ar -> {
assertTrue(ar.succeeded());
latch.countDown();
});
latch.await();
EventBus bus = vertices[0].eventBus();
expected.forEach(val -> bus.send(ADDRESS1, val));
await();
}
@Test
public void testSendLocalOnly() {
testDeliveryOptionsLocalOnly(true);
}
@Test
public void testPublishLocalOnly() {
testDeliveryOptionsLocalOnly(false);
}
private void testDeliveryOptionsLocalOnly(boolean send) {
waitFor(30);
startNodes(2);
AtomicLong localConsumer0 = new AtomicLong();
vertices[0].eventBus().localConsumer(ADDRESS1).handler(msg -> {
localConsumer0.incrementAndGet();
complete();
});
AtomicLong consumer1 = new AtomicLong();
vertices[1].eventBus().consumer(ADDRESS1).handler(msg -> {
consumer1.incrementAndGet();
}).completion().onComplete(onSuccess(v -> {
for (int i = 0; i < 30; i++) {
if (send) {
vertices[0].eventBus().send(ADDRESS1, "msg", new DeliveryOptions().setLocalOnly(true));
} else {
vertices[0].eventBus().publish(ADDRESS1, "msg", new DeliveryOptions().setLocalOnly(true));
}
}
}));
await();
assertEquals(30, localConsumer0.get());
assertEquals(0, consumer1.get());
}
@Test
public void testLocalOnlyDoesNotApplyToReplies() {
startNodes(2);
vertices[1].eventBus().consumer(ADDRESS1).handler(msg -> {
msg.reply("pong", new DeliveryOptions().setLocalOnly(true));
}).completion().onComplete(onSuccess(v -> {
vertices[0].eventBus().request(ADDRESS1, "ping", new DeliveryOptions().setSendTimeout(500)).onComplete(onSuccess(msg -> testComplete()));
}));
await();
}
@Test
public void testImmediateUnregistration() {
startNodes(1);
MessageConsumer<Object> consumer = vertices[0].eventBus().consumer(ADDRESS1);
AtomicInteger completionCount = new AtomicInteger();
consumer.completion().onComplete(v -> {
// Do not assert success because the handler could be unregistered locally
// before the registration was propagated to the cluster manager
int val = completionCount.getAndIncrement();
assertEquals(0, val);
});
consumer.handler(msg -> {});
consumer.unregister().onComplete(onSuccess(v -> {
int val = completionCount.getAndIncrement();
assertEquals(1, val);
testComplete();
}));
await();
}
@Test
public void testSendWriteHandler() throws Exception {
CountDownLatch updateLatch = new CountDownLatch(3);
startNodes(2, () -> new WrappedClusterManager(getClusterManager()) {
@Override
public void registrationsUpdated(RegistrationUpdateEvent event) {
super.registrationsUpdated(event);
if (event.address().equals(ADDRESS1) && event.registrations().size() == 1) {
updateLatch.countDown();
}
}
@Override
public boolean wantsUpdatesFor(String address) {
return true;
}
});
waitFor(2);
vertices[1]
.eventBus()
.consumer(ADDRESS1, msg -> complete())
.completion().onComplete(onSuccess(v1 -> updateLatch.countDown()));
awaitLatch(updateLatch);
MessageProducer<String> producer = vertices[0].eventBus().sender(ADDRESS1);
producer.write("body").onComplete(onSuccess(v2 -> complete()));
await();
}
@Test
public void testSendWriteHandlerNoConsumer() {
startNodes(2);
MessageProducer<String> producer = vertices[0].eventBus().sender(ADDRESS1);
producer.write("body").onComplete(onFailure(err -> {
assertTrue(err instanceof ReplyException);
ReplyException replyException = (ReplyException) err;
assertEquals(-1, replyException.failureCode());
testComplete();
}));
await();
}
@Test
public void testPublishWriteHandler() {
startNodes(2);
waitFor(2);
vertices[1]
.eventBus()
.consumer(ADDRESS1, msg -> complete())
.completion().onComplete(onSuccess(v1 -> {
MessageProducer<String> producer = vertices[0].eventBus().publisher(ADDRESS1);
producer.write("body").onComplete(onSuccess(v -> complete()));
}));
await();
}
@Test
public void testPublishWriteHandlerNoConsumer() {
startNodes(2);
MessageProducer<String> producer = vertices[0].eventBus().publisher(ADDRESS1);
producer.write("body").onComplete(onFailure(err -> {
assertTrue(err instanceof ReplyException);
ReplyException replyException = (ReplyException) err;
assertEquals(-1, replyException.failureCode());
testComplete();
}));
await();
}
@Test
public void testWriteHandlerConnectFailure() {
VertxOptions options = getOptions();
options.getEventBusOptions()
.setSsl(true)
.setTrustAll(false)
.setKeyCertOptions(Cert.SERVER_JKS.get());
startNodes(2, options);
vertices[1]
.eventBus()
.consumer(ADDRESS1, msg -> {})
.completion().onComplete(onSuccess(v1 -> {
MessageProducer<String> producer = vertices[0].eventBus().sender(ADDRESS1);
producer.write("body").onComplete(onFailure(err -> {
testComplete();
}));
}));
await();
}
@Test
public void testSelectorWantsUpdates() {
WrappedClusterManager wrapped = new WrappedClusterManager(getClusterManager());
startNodes(1, () -> wrapped);
vertices[0].eventBus().consumer(ADDRESS1, msg -> {
assertTrue(wrapped.wantsUpdatesFor(ADDRESS1));
testComplete();
}).completion().onComplete(onSuccess(v -> vertices[0].eventBus().send(ADDRESS1, "foo")));
await();
}
@Test
public void testSelectorDoesNotWantUpdates() {
WrappedClusterManager wrapped = new WrappedClusterManager(getClusterManager());
startNodes(1, () -> wrapped);
assertFalse(wrapped.wantsUpdatesFor(ADDRESS1));
}
@Test
public void testPublisherCanReceiveNoHandlersFailure() {
startNodes(2);
vertices[0].eventBus().publisher("foo").write("bar").onComplete(onFailure(t -> {
if (t instanceof ReplyException) {
ReplyException replyException = (ReplyException) t;
assertEquals(ReplyFailure.NO_HANDLERS, replyException.failureType());
testComplete();
} else {
fail();
}
}));
await();
}
@Test
public void testLocalConsumerNeverGetsMessagePublishedFromRemote() throws Exception {
startNodes(2);
waitFor(3);
CountDownLatch completionLatch = new CountDownLatch(4);
EventBus eb0 = vertices[0].eventBus();
String firstAddress = "foo";
eb0.localConsumer(firstAddress, message -> fail()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
eb0.consumer(firstAddress, message -> complete()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
String secondAddress = "bar";
eb0.consumer(secondAddress, message -> complete()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
eb0.localConsumer(secondAddress, message -> fail()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
awaitLatch(completionLatch);
EventBus eb1 = vertices[1].eventBus();
eb1.publish(firstAddress, "content");
eb1.publish(secondAddress, "content");
vertx.setTimer(500, l -> complete()); // some delay to make sure no msg has been received by local consumers
await();
}
@Test
public void testLocalConsumerNeverGetsMessageSentFromRemote() throws Exception {
startNodes(2);
int maxMessages = 50;
waitFor(4 * maxMessages);
| ClusteredEventBusTest |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/PropertiesBeanDefinitionReader.java | {
"start": 5995,
"end": 6146
} | class ____ still override this.
* <p>Strictly speaking, the rule that a default parent setting does
* not apply to a bean definition that carries a | can |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java | {
"start": 2012,
"end": 10056
} | class ____ extends ESTestCase {
private Client client;
private OriginSettingClient originSettingClient;
private List<DeleteByQueryRequest> capturedDeleteByQueryRequests;
private ActionListener<Boolean> listener;
@Before
@SuppressWarnings("unchecked")
public void setUpTests() {
capturedDeleteByQueryRequests = new ArrayList<>();
client = mock(Client.class);
originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN);
listener = mock(ActionListener.class);
when(listener.delegateFailureAndWrap(any())).thenCallRealMethod();
}
public void testRemove_GivenNoJobs() {
givenDBQRequestsSucceed();
createExpiredAnnotationsRemover(Collections.emptyIterator()).remove(1.0f, listener, () -> false);
verify(listener).onResponse(true);
}
public void testRemove_GivenJobsWithoutRetentionPolicy() {
givenDBQRequestsSucceed();
List<Job> jobs = Arrays.asList(JobTests.buildJobBuilder("foo").build(), JobTests.buildJobBuilder("bar").build());
createExpiredAnnotationsRemover(jobs.iterator()).remove(1.0f, listener, () -> false);
verify(listener).onResponse(true);
}
public void testRemove_GivenJobsWithAndWithoutRetentionPolicy() {
givenDBQRequestsSucceed();
givenBucket(new Bucket("id_not_important", new Date(), 60));
List<Job> jobs = Arrays.asList(
JobTests.buildJobBuilder("none").build(),
JobTests.buildJobBuilder("annotations-1").setResultsRetentionDays(10L).build(),
JobTests.buildJobBuilder("annotations-2").setResultsRetentionDays(20L).build()
);
createExpiredAnnotationsRemover(jobs.iterator()).remove(1.0f, listener, () -> false);
assertThat(capturedDeleteByQueryRequests.size(), equalTo(2));
DeleteByQueryRequest dbqRequest = capturedDeleteByQueryRequests.get(0);
assertThat(dbqRequest.indices(), equalTo(new String[] { AnnotationIndex.READ_ALIAS_NAME }));
dbqRequest = capturedDeleteByQueryRequests.get(1);
assertThat(dbqRequest.indices(), equalTo(new String[] { AnnotationIndex.READ_ALIAS_NAME }));
verify(listener).onResponse(true);
}
public void testRemove_GivenTimeout() {
givenDBQRequestsSucceed();
givenBucket(new Bucket("id_not_important", new Date(), 60));
List<Job> jobs = Arrays.asList(
JobTests.buildJobBuilder("annotations-1").setResultsRetentionDays(10L).build(),
JobTests.buildJobBuilder("annotations-2").setResultsRetentionDays(20L).build()
);
final int timeoutAfter = randomIntBetween(0, 1);
AtomicInteger attemptsLeft = new AtomicInteger(timeoutAfter);
createExpiredAnnotationsRemover(jobs.iterator()).remove(1.0f, listener, () -> (attemptsLeft.getAndDecrement() <= 0));
assertThat(capturedDeleteByQueryRequests.size(), equalTo(timeoutAfter));
verify(listener).onResponse(false);
}
public void testRemove_GivenClientRequestsFailed() {
givenDBQRequestsFailed();
givenBucket(new Bucket("id_not_important", new Date(), 60));
List<Job> jobs = Arrays.asList(
JobTests.buildJobBuilder("none").build(),
JobTests.buildJobBuilder("annotations-1").setResultsRetentionDays(10L).build(),
JobTests.buildJobBuilder("annotations-2").setResultsRetentionDays(20L).build()
);
createExpiredAnnotationsRemover(jobs.iterator()).remove(1.0f, listener, () -> false);
assertThat(capturedDeleteByQueryRequests.size(), equalTo(1));
DeleteByQueryRequest dbqRequest = capturedDeleteByQueryRequests.get(0);
assertThat(dbqRequest.indices(), equalTo(new String[] { AnnotationIndex.READ_ALIAS_NAME }));
verify(listener).onFailure(any());
}
@SuppressWarnings("unchecked")
public void testCalcCutoffEpochMs() {
String jobId = "calc-cutoff";
Date latest = new Date();
givenBucket(new Bucket(jobId, latest, 60));
List<Job> jobs = Collections.singletonList(JobTests.buildJobBuilder(jobId).setResultsRetentionDays(1L).build());
ActionListener<AbstractExpiredJobDataRemover.CutoffDetails> cutoffListener = mock(ActionListener.class);
when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod();
createExpiredAnnotationsRemover(jobs.iterator()).calcCutoffEpochMs(jobId, 1L, cutoffListener);
long dayInMills = 60 * 60 * 24 * 1000;
long expectedCutoffTime = latest.getTime() - dayInMills;
verify(cutoffListener).onResponse(eq(new AbstractExpiredJobDataRemover.CutoffDetails(latest.getTime(), expectedCutoffTime)));
}
public void testRemove_GivenIndexNotWritable_ShouldHandleGracefully() {
givenBucket(new Bucket("id_not_important", new Date(), 60));
List<Job> jobs = Arrays.asList(
JobTests.buildJobBuilder("annotations-1").setResultsRetentionDays(10L).build(),
JobTests.buildJobBuilder("annotations-2").setResultsRetentionDays(20L).build()
);
// annotationIndexWritable = false
createExpiredAnnotationsRemover(jobs.iterator(), false).remove(1.0f, listener, () -> false);
// No DBQ requests should be made, but listener should still be called with true
assertThat(capturedDeleteByQueryRequests.size(), equalTo(0));
verify(listener).onResponse(true);
}
private void givenDBQRequestsSucceed() {
givenDBQRequest(true);
}
private void givenDBQRequestsFailed() {
givenDBQRequest(false);
}
@SuppressWarnings("unchecked")
private void givenDBQRequest(boolean shouldSucceed) {
doAnswer(invocationOnMock -> {
capturedDeleteByQueryRequests.add((DeleteByQueryRequest) invocationOnMock.getArguments()[1]);
ActionListener<BulkByScrollResponse> listener = (ActionListener<BulkByScrollResponse>) invocationOnMock.getArguments()[2];
if (shouldSucceed) {
BulkByScrollResponse bulkByScrollResponse = mock(BulkByScrollResponse.class);
when(bulkByScrollResponse.getDeleted()).thenReturn(42L);
listener.onResponse(bulkByScrollResponse);
} else {
listener.onFailure(new RuntimeException("failed"));
}
return null;
}).when(client).execute(same(DeleteByQueryAction.INSTANCE), any(), any());
}
@SuppressWarnings("unchecked")
private void givenBucket(Bucket bucket) {
doAnswer(invocationOnMock -> {
ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];
listener.onResponse(AbstractExpiredJobDataRemoverTests.createSearchResponse(Collections.singletonList(bucket)));
return null;
}).when(client).execute(eq(TransportSearchAction.TYPE), any(), any());
}
private ExpiredAnnotationsRemover createExpiredAnnotationsRemover(Iterator<Job> jobIterator, boolean annotationIndexWritable) {
ThreadPool threadPool = mock(ThreadPool.class);
ExecutorService executor = mock(ExecutorService.class);
when(threadPool.executor(eq(MachineLearning.UTILITY_THREAD_POOL_NAME))).thenReturn(executor);
doAnswer(invocationOnMock -> {
Runnable run = (Runnable) invocationOnMock.getArguments()[0];
run.run();
return null;
}).when(executor).execute(any());
MockWritableIndexExpander.create(annotationIndexWritable);
return new ExpiredAnnotationsRemover(
originSettingClient,
jobIterator,
new TaskId("test", 0L),
mock(AnomalyDetectionAuditor.class),
threadPool
);
}
private ExpiredAnnotationsRemover createExpiredAnnotationsRemover(Iterator<Job> jobIterator) {
return createExpiredAnnotationsRemover(jobIterator, true);
}
}
| ExpiredAnnotationsRemoverTests |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedRouteLoadstatisticsTest.java | {
"start": 1541,
"end": 4458
} | class ____ extends ManagementTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testLoadStatisticsAreDisabledByDefault() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// must be a bit more than 1 sec
from("direct:start").to("log:foo").delay(1200).to("mock:result");
}
});
context.start();
boolean load = context.getManagementStrategy().getManagementAgent().getLoadStatisticsEnabled() != null
&& context.getManagementStrategy().getManagementAgent().getLoadStatisticsEnabled();
assertFalse(load);
// get the stats for the route
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getCamelObjectName(TYPE_ROUTE, context.getRoutes().get(0).getRouteId());
getMockEndpoint("mock:result").expectedMessageCount(1);
template.asyncSendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
String load01 = (String) mbeanServer.getAttribute(on, "Load01");
String load05 = (String) mbeanServer.getAttribute(on, "Load05");
String load15 = (String) mbeanServer.getAttribute(on, "Load15");
assertEquals("", load01);
assertEquals("", load05);
assertEquals("", load15);
}
@Test
public void testEnableLoadStatistics() throws Exception {
context.getManagementStrategy().getManagementAgent().setLoadStatisticsEnabled(true);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// must be a bit more than 1 sec
from("direct:start").to("log:foo").delay(1200).to("mock:result");
}
});
context.start();
// get the stats for the route
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getCamelObjectName(TYPE_ROUTE, context.getRoutes().get(0).getRouteId());
getMockEndpoint("mock:result").expectedMessageCount(1);
template.asyncSendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
await().atMost(2, TimeUnit.SECONDS).untilAsserted(() -> {
String load01 = (String) mbeanServer.getAttribute(on, "Load01");
String load05 = (String) mbeanServer.getAttribute(on, "Load05");
String load15 = (String) mbeanServer.getAttribute(on, "Load15");
assertNotNull(load01);
assertNotNull(load05);
assertNotNull(load15);
assertTrue(Double.parseDouble(load01.replace(',', '.')) >= 0);
assertTrue(Double.parseDouble(load05.replace(',', '.')) >= 0);
assertTrue(Double.parseDouble(load15.replace(',', '.')) >= 0);
});
}
}
| ManagedRouteLoadstatisticsTest |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/internal/impl/Lifecycles.java | {
"start": 7035,
"end": 8151
} | class ____ implements Lifecycle.Phase {
private final String name;
private final List<Plugin> plugins;
private final Collection<Lifecycle.Link> links;
private final List<Lifecycle.Phase> phases;
DefaultPhase(
String name, List<Plugin> plugins, Collection<Lifecycle.Link> links, List<Lifecycle.Phase> phases) {
this.name = name;
this.plugins = plugins;
this.links = links;
this.phases = phases;
}
@Override
public String name() {
return name;
}
@Override
public List<Plugin> plugins() {
return plugins;
}
@Override
public Collection<Lifecycle.Link> links() {
return links;
}
@Override
public List<Lifecycle.Phase> phases() {
return phases;
}
@Override
public Stream<Lifecycle.Phase> allPhases() {
return Stream.concat(Stream.of(this), phases().stream().flatMap(Lifecycle.Phase::allPhases));
}
}
static | DefaultPhase |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/operators/python/AbstractStatelessFunctionOperator.java | {
"start": 1948,
"end": 6301
} | class ____<IN, OUT, UDFIN>
extends AbstractOneInputPythonFunctionOperator<IN, OUT> {
private static final long serialVersionUID = 1L;
/** The input logical type. */
protected final RowType inputType;
/** The user-defined function input logical type. */
protected final RowType udfInputType;
/** The user-defined function output logical type. */
protected final RowType udfOutputType;
/**
* The queue holding the input elements for which the execution results have not been received.
*/
protected transient LinkedList<IN> forwardedInputQueue;
/** Reusable InputStream used to holding the execution results to be deserialized. */
protected transient ByteArrayInputStreamWithPos bais;
/** InputStream Wrapper. */
protected transient DataInputViewStreamWrapper baisWrapper;
/** Reusable OutputStream used to holding the serialized input elements. */
protected transient ByteArrayOutputStreamWithPos baos;
/** OutputStream Wrapper. */
protected transient DataOutputViewStreamWrapper baosWrapper;
public AbstractStatelessFunctionOperator(
Configuration config, RowType inputType, RowType udfInputType, RowType udfOutputType) {
super(config);
this.inputType = Preconditions.checkNotNull(inputType);
this.udfInputType = Preconditions.checkNotNull(udfInputType);
this.udfOutputType = Preconditions.checkNotNull(udfOutputType);
}
@Override
public void open() throws Exception {
forwardedInputQueue = new LinkedList<>();
bais = new ByteArrayInputStreamWithPos();
baisWrapper = new DataInputViewStreamWrapper(bais);
baos = new ByteArrayOutputStreamWithPos();
baosWrapper = new DataOutputViewStreamWrapper(baos);
super.open();
}
@Override
public void processElement(StreamRecord<IN> element) throws Exception {
IN value = element.getValue();
bufferInput(value);
processElementInternal(value);
elementCount++;
checkInvokeFinishBundleByCount();
emitResults();
}
@Override
public PythonFunctionRunner createPythonFunctionRunner() throws IOException {
return BeamTablePythonFunctionRunner.stateless(
getContainingTask().getEnvironment(),
getRuntimeContext().getTaskInfo().getTaskName(),
createPythonEnvironmentManager(),
getFunctionUrn(),
createUserDefinedFunctionsProto(),
getFlinkMetricContainer(),
getContainingTask().getEnvironment().getMemoryManager(),
getOperatorConfig()
.getManagedMemoryFractionOperatorUseCaseOfSlot(
ManagedMemoryUseCase.PYTHON,
getContainingTask().getJobConfiguration(),
getContainingTask()
.getEnvironment()
.getTaskManagerInfo()
.getConfiguration(),
getContainingTask()
.getEnvironment()
.getUserCodeClassLoader()
.asClassLoader()),
createInputCoderInfoDescriptor(udfInputType),
createOutputCoderInfoDescriptor(udfOutputType));
}
/**
* Buffers the specified input, it will be used to construct the operator result together with
* the user-defined function execution result.
*/
public abstract void bufferInput(IN input) throws Exception;
public abstract UDFIN getFunctionInput(IN element);
/** Gets the proto representation of the Python user-defined functions to be executed. */
public abstract FlinkFnApi.UserDefinedFunctions createUserDefinedFunctionsProto();
public abstract String getFunctionUrn();
public abstract FlinkFnApi.CoderInfoDescriptor createInputCoderInfoDescriptor(
RowType runnerInputType);
public abstract FlinkFnApi.CoderInfoDescriptor createOutputCoderInfoDescriptor(
RowType runnerOutType);
public abstract void processElementInternal(IN value) throws Exception;
}
| AbstractStatelessFunctionOperator |
java | google__auto | common/src/main/java/com/google/auto/common/MoreTypes.java | {
"start": 7651,
"end": 15936
} | class ____ extends SimpleTypeVisitor8<Boolean, EqualVisitorParam> {
private static final EqualVisitor INSTANCE = new EqualVisitor();
@Override
protected Boolean defaultAction(TypeMirror a, EqualVisitorParam p) {
return a.getKind().equals(p.type.getKind());
}
@Override
public Boolean visitArray(ArrayType a, EqualVisitorParam p) {
if (p.type.getKind().equals(ARRAY)) {
ArrayType b = (ArrayType) p.type;
return equal(a.getComponentType(), b.getComponentType(), p.visiting);
}
return false;
}
@Override
public Boolean visitDeclared(DeclaredType a, EqualVisitorParam p) {
if (p.type.getKind().equals(DECLARED)) {
DeclaredType b = (DeclaredType) p.type;
Element aElement = a.asElement();
Element bElement = b.asElement();
Set<ComparedElements> newVisiting =
visitingSetPlus(
p.visiting, aElement, a.getTypeArguments(), bElement, b.getTypeArguments());
if (newVisiting.equals(p.visiting)) {
// We're already visiting this pair of elements.
// This can happen for example with Enum in Enum<E extends Enum<E>>. Return a
// provisional true value since if the Elements are not in fact equal the original
// visitor of Enum will discover that. We have to check both Elements being compared
// though to avoid missing the fact that one of the types being compared
// differs at exactly this point.
return true;
}
return aElement.equals(bElement)
&& equal(enclosingType(a), enclosingType(b), newVisiting)
&& equalLists(a.getTypeArguments(), b.getTypeArguments(), newVisiting);
}
return false;
}
@Override
@SuppressWarnings("TypeEquals")
public Boolean visitError(ErrorType a, EqualVisitorParam p) {
return a.equals(p.type);
}
@Override
public Boolean visitExecutable(ExecutableType a, EqualVisitorParam p) {
if (p.type.getKind().equals(EXECUTABLE)) {
ExecutableType b = (ExecutableType) p.type;
return equalLists(a.getParameterTypes(), b.getParameterTypes(), p.visiting)
&& equal(a.getReturnType(), b.getReturnType(), p.visiting)
&& equalLists(a.getThrownTypes(), b.getThrownTypes(), p.visiting)
&& equalLists(a.getTypeVariables(), b.getTypeVariables(), p.visiting);
}
return false;
}
@Override
public Boolean visitIntersection(IntersectionType a, EqualVisitorParam p) {
if (p.type.getKind().equals(INTERSECTION)) {
IntersectionType b = (IntersectionType) p.type;
return equalLists(a.getBounds(), b.getBounds(), p.visiting);
}
return false;
}
@Override
public Boolean visitTypeVariable(TypeVariable a, EqualVisitorParam p) {
if (p.type.getKind().equals(TYPEVAR)) {
TypeVariable b = (TypeVariable) p.type;
TypeParameterElement aElement = (TypeParameterElement) a.asElement();
TypeParameterElement bElement = (TypeParameterElement) b.asElement();
Set<ComparedElements> newVisiting = visitingSetPlus(p.visiting, aElement, bElement);
if (newVisiting.equals(p.visiting)) {
// We're already visiting this pair of elements.
// This can happen with our friend Eclipse when looking at <T extends Comparable<T>>.
// It incorrectly reports the upper bound of T as T itself.
return true;
}
// We use aElement.getBounds() instead of a.getUpperBound() to avoid having to deal with
// the different way intersection types (like <T extends Number & Comparable<T>>) are
// represented before and after Java 8. We do have an issue that this code may consider
// that <T extends Foo & Bar> is different from <T extends Bar & Foo>, but it's very
// hard to avoid that, and not likely to be much of a problem in practice.
return equalLists(aElement.getBounds(), bElement.getBounds(), newVisiting)
&& equal(a.getLowerBound(), b.getLowerBound(), newVisiting)
&& a.asElement().getSimpleName().equals(b.asElement().getSimpleName());
}
return false;
}
@Override
public Boolean visitWildcard(WildcardType a, EqualVisitorParam p) {
if (p.type.getKind().equals(WILDCARD)) {
WildcardType b = (WildcardType) p.type;
return equal(a.getExtendsBound(), b.getExtendsBound(), p.visiting)
&& equal(a.getSuperBound(), b.getSuperBound(), p.visiting);
}
return false;
}
@Override
public Boolean visitUnknown(TypeMirror a, EqualVisitorParam p) {
throw new UnsupportedOperationException();
}
private Set<ComparedElements> visitingSetPlus(
Set<ComparedElements> visiting, Element a, Element b) {
ImmutableList<TypeMirror> noArguments = ImmutableList.of();
return visitingSetPlus(visiting, a, noArguments, b, noArguments);
}
private Set<ComparedElements> visitingSetPlus(
Set<ComparedElements> visiting,
Element a,
List<? extends TypeMirror> aArguments,
Element b,
List<? extends TypeMirror> bArguments) {
ComparedElements comparedElements =
new ComparedElements(
a, ImmutableList.<TypeMirror>copyOf(aArguments),
b, ImmutableList.<TypeMirror>copyOf(bArguments));
Set<ComparedElements> newVisiting = new HashSet<ComparedElements>(visiting);
newVisiting.add(comparedElements);
return newVisiting;
}
}
@SuppressWarnings("TypeEquals")
private static boolean equal(
@Nullable TypeMirror a, @Nullable TypeMirror b, Set<ComparedElements> visiting) {
if (a == b) {
return true;
}
if (a == null || b == null) {
return false;
}
// TypeMirror.equals is not guaranteed to return true for types that are equal, but we can
// assume that if it does return true then the types are equal. This check also avoids getting
// stuck in infinite recursion when Eclipse decrees that the upper bound of the second K in
// <K extends Comparable<K>> is a distinct but equal K.
// The javac implementation of ExecutableType, at least in some versions, does not take thrown
// exceptions into account in its equals implementation, so avoid this optimization for
// ExecutableType.
@SuppressWarnings("TypesEquals")
boolean equal = a.equals(b);
if (equal && a.getKind() != TypeKind.EXECUTABLE) {
return true;
}
EqualVisitorParam p = new EqualVisitorParam();
p.type = b;
p.visiting = visiting;
return a.accept(EqualVisitor.INSTANCE, p);
}
/**
* Returns the type of the innermost enclosing instance, or null if there is none. This is the
* same as {@link DeclaredType#getEnclosingType()} except that it returns null rather than NoType
* for a static type. We need this because of <a
* href="https://bugs.eclipse.org/bugs/show_bug.cgi?id=508222">this bug</a> whereby the Eclipse
* compiler returns a value for static classes that is not NoType.
*/
private static @Nullable TypeMirror enclosingType(DeclaredType t) {
TypeMirror enclosing = t.getEnclosingType();
if (enclosing.getKind().equals(TypeKind.NONE)
|| t.asElement().getModifiers().contains(Modifier.STATIC)) {
return null;
}
return enclosing;
}
private static boolean equalLists(
List<? extends TypeMirror> a, List<? extends TypeMirror> b, Set<ComparedElements> visiting) {
int size = a.size();
if (size != b.size()) {
return false;
}
// Use iterators in case the Lists aren't RandomAccess
Iterator<? extends TypeMirror> aIterator = a.iterator();
Iterator<? extends TypeMirror> bIterator = b.iterator();
while (aIterator.hasNext()) {
// We checked that the lists have the same size, so we know that bIterator.hasNext() too.
TypeMirror nextMirrorA = aIterator.next();
TypeMirror nextMirrorB = bIterator.next();
if (!equal(nextMirrorA, nextMirrorB, visiting)) {
return false;
}
}
return true;
}
private static final int HASH_SEED = 17;
private static final int HASH_MULTIPLIER = 31;
private static final | EqualVisitor |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySQLCreateMaterializedViewTest0.java | {
"start": 1003,
"end": 2843
} | class ____ extends MysqlTest {
public void test_types() throws Exception {
String sql = //
"CREATE MATERIALIZED VIEW mymv (\n" +
" default_col varchar,\n" +
" PRIMARY KEY(id)\n" +
")\n" +
"DISTRIBUTED BY HASH (id)\n" +
"REFRESH FAST ON DEMAND\n" +
"ENABLE QUERY REWRITE\n" +
"AS SELECT id FROM base;";
SQLStatement stmt = SQLUtils.parseSingleMysqlStatement(sql);
assertEquals("CREATE MATERIALIZED VIEW mymv (\n" +
"\tdefault_col varchar,\n" +
"\tPRIMARY KEY (id)\n" +
")\n" +
"DISTRIBUTED BY HASH(id)\n" +
"REFRESH FAST ON DEMAND\n" +
"ENABLE QUERY REWRITE\n" +
"AS\n" +
"SELECT id\n" +
"FROM base;",
SQLUtils.toSQLString(stmt, DbType.mysql, null, VisitorFeature.OutputDistributedLiteralInCreateTableStmt));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertTrue(visitor.getColumns().contains(new TableStat.Column("base", "id")));
}
}
| MySQLCreateMaterializedViewTest0 |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InterruptionTest.java | {
"start": 3828,
"end": 4355
} | class ____ {
void f(Thread t) {
Thread.currentThread().interrupt();
}
}
""")
.doTest();
}
@Test
public void negativeInTestonlyCode() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.concurrent.Future;
@RunWith(JUnit4.class)
| Test |
java | google__gson | gson/src/test/java/com/google/gson/functional/ObjectTest.java | {
"start": 22281,
"end": 23045
} | class ____ {@code static} field.
*
* <p>Important: It is not documented that this is officially supported; this test just checks the
* current behavior.
*/
@Test
public void testStaticFieldSerialization() {
// By default Gson should ignore static fields
assertThat(gson.toJson(new ClassWithStaticField())).isEqualTo("{}");
Gson gson =
new GsonBuilder()
// Include static fields
.excludeFieldsWithModifiers(0)
.create();
String json = gson.toJson(new ClassWithStaticField());
assertThat(json).isEqualTo("{\"s\":\"initial\"}");
json = gson.toJson(new ClassWithStaticFinalField());
assertThat(json).isEqualTo("{\"s\":\"initial\"}");
}
/**
* Tests deserialization of a | with |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/MetadataCollectorTests.java | {
"start": 1668,
"end": 6264
} | class ____ {
private static final Predicate<ItemMetadata> NO_MERGE = (metadata) -> false;
private static final ConfigurationMetadata SINGLE_ITEM_METADATA = readMetadata("""
{
"properties": [
{ "name": "name", "type": "java.lang.String" }
]
}
""");
@Test
void addSingleItemMetadata() {
MetadataCollector collector = createSimpleCollector();
collector.add(SINGLE_ITEM_METADATA.getItems().get(0));
assertThat(collector.getMetadata()).has(Metadata.withProperty("name", String.class));
}
@Test
void addIfAbsentAddsPropertyIfItDoesNotExist() {
MetadataCollector collector = createSimpleCollector();
collector.addIfAbsent(SINGLE_ITEM_METADATA.getItems().get(0));
ConfigurationMetadata metadata = collector.getMetadata();
assertThat(metadata).has(Metadata.withProperty("name", String.class));
assertThat(metadata.getItems()).hasSize(1);
}
@Test
void addIfAbsentIgnoresExistingProperty() {
MetadataCollector collector = createSimpleCollector();
collector.addIfAbsent(SINGLE_ITEM_METADATA.getItems().get(0));
collector.addIfAbsent(SINGLE_ITEM_METADATA.getItems().get(0));
collector.addIfAbsent(SINGLE_ITEM_METADATA.getItems().get(0));
ConfigurationMetadata metadata = collector.getMetadata();
assertThat(metadata).has(Metadata.withProperty("name", String.class));
assertThat(metadata.getItems()).hasSize(1);
}
@Test
@SuppressWarnings("unchecked")
void addNewMetadataDoesNotInvokeConflictResolution() {
MetadataCollector collector = createSimpleCollector();
Consumer<ItemMetadata> conflictResolution = mock(Consumer.class);
collector.add(SINGLE_ITEM_METADATA.getItems().get(0), conflictResolution);
then(conflictResolution).shouldHaveNoInteractions();
}
@SuppressWarnings("unchecked")
@Test
void addMetadataWithExistingInstanceInvokesConflictResolution() {
MetadataCollector collector = createSimpleCollector();
ItemMetadata metadata = SINGLE_ITEM_METADATA.getItems().get(0);
collector.add(metadata);
Consumer<ItemMetadata> conflictResolution = mock(Consumer.class);
collector.add(metadata, conflictResolution);
then(conflictResolution).should().accept(metadata);
}
@Test
void addSingleItemHint() {
MetadataCollector collector = createSimpleCollector();
collector.add(SINGLE_ITEM_METADATA.getItems().get(0));
ValueHint firstValueHint = new ValueHint("one", "First.");
ValueHint secondValueHint = new ValueHint("two", "Second.");
ItemHint itemHint = new ItemHint("name", List.of(firstValueHint, secondValueHint), Collections.emptyList());
collector.add(itemHint);
assertThat(collector.getMetadata())
.has(Metadata.withHint("name").withValue(0, "one", "First.").withValue(1, "two", "Second."));
}
@Test
@SuppressWarnings("unchecked")
void getMetadataDoesNotInvokeMergeFunctionIfPreviousMetadataIsNull() {
Predicate<ItemMetadata> mergedRequired = mock(Predicate.class);
MetadataCollector collector = new MetadataCollector(mergedRequired, null);
collector.add(SINGLE_ITEM_METADATA.getItems().get(0));
collector.getMetadata();
then(mergedRequired).shouldHaveNoInteractions();
}
@Test
@SuppressWarnings("unchecked")
void getMetadataAddPreviousItemIfMergeFunctionReturnsTrue() {
Predicate<ItemMetadata> mergedRequired = mock(Predicate.class);
ItemMetadata itemMetadata = SINGLE_ITEM_METADATA.getItems().get(0);
given(mergedRequired.test(itemMetadata)).willReturn(true);
MetadataCollector collector = new MetadataCollector(mergedRequired, SINGLE_ITEM_METADATA);
assertThat(collector.getMetadata()).has(Metadata.withProperty("name", String.class));
then(mergedRequired).should().test(itemMetadata);
}
@Test
@SuppressWarnings("unchecked")
void getMetadataDoesNotAddPreviousItemIfMergeFunctionReturnsFalse() {
Predicate<ItemMetadata> mergedRequired = mock(Predicate.class);
ItemMetadata itemMetadata = SINGLE_ITEM_METADATA.getItems().get(0);
given(mergedRequired.test(itemMetadata)).willReturn(false);
MetadataCollector collector = new MetadataCollector(mergedRequired, SINGLE_ITEM_METADATA);
assertThat(collector.getMetadata().getItems()).isEmpty();
then(mergedRequired).should().test(itemMetadata);
}
private MetadataCollector createSimpleCollector() {
return new MetadataCollector(NO_MERGE, null);
}
private static ConfigurationMetadata readMetadata(String json) {
try {
JsonMarshaller marshaller = new JsonMarshaller();
return marshaller.read(new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));
}
catch (Exception ex) {
throw new IllegalStateException("Invalid JSON: " + json, ex);
}
}
}
| MetadataCollectorTests |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RestEndpointBuilderFactory.java | {
"start": 1444,
"end": 1573
} | interface ____ {
/**
* Builder for endpoint consumers for the REST component.
*/
public | RestEndpointBuilderFactory |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/ClassDescriptor.java | {
"start": 750,
"end": 2723
} | class ____ this descriptor.
*
* @return the class; never {@code null}
*/
Class<?> getTestClass();
/**
* Get the display name for this descriptor's {@link #getTestClass() class}.
*
* @return the display name for this descriptor's class; never {@code null}
* or blank
*/
String getDisplayName();
/**
* Determine if an annotation of {@code annotationType} is either
* <em>present</em> or <em>meta-present</em> on the {@link Class} for
* this descriptor.
*
* @param annotationType the annotation type to search for; never {@code null}
* @return {@code true} if the annotation is present or meta-present
* @see #findAnnotation(Class)
* @see #findRepeatableAnnotations(Class)
*/
boolean isAnnotated(Class<? extends Annotation> annotationType);
/**
* Find the first annotation of {@code annotationType} that is either
* <em>present</em> or <em>meta-present</em> on the {@link Class} for
* this descriptor.
*
* @param <A> the annotation type
* @param annotationType the annotation type to search for; never {@code null}
* @return an {@code Optional} containing the annotation; never {@code null} but
* potentially empty
* @see #isAnnotated(Class)
* @see #findRepeatableAnnotations(Class)
*/
<A extends Annotation> Optional<A> findAnnotation(Class<A> annotationType);
/**
* Find all <em>repeatable</em> {@linkplain Annotation annotations} of
* {@code annotationType} that are either <em>present</em> or
* <em>meta-present</em> on the {@link Class} for this descriptor.
*
* @param <A> the annotation type
* @param annotationType the repeatable annotation type to search for; never
* {@code null}
* @return the list of all such annotations found; neither {@code null} nor
* mutable, but potentially empty
* @see #isAnnotated(Class)
* @see #findAnnotation(Class)
* @see java.lang.annotation.Repeatable
*/
<A extends Annotation> List<A> findRepeatableAnnotations(Class<A> annotationType);
}
| for |
java | jhy__jsoup | src/main/java11/org/jsoup/helper/HttpClientExecutor.java | {
"start": 6384,
"end": 7462
} | class ____ extends ProxySelector {
// empty list for no proxy:
static final List<Proxy> NoProxy = new ArrayList<>(0);
@Override
public List<Proxy> select(URI uri) {
Proxy proxy = perRequestProxy.get();
if (proxy != null) {
return Collections.singletonList(proxy);
}
ProxySelector defaultSelector = ProxySelector.getDefault();
if (defaultSelector != null && defaultSelector != this) { // avoid recursion if we were set as default
return defaultSelector.select(uri);
}
return NoProxy;
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
if (perRequestProxy.get() != null) {
return; // no-op
}
ProxySelector defaultSelector = ProxySelector.getDefault();
if (defaultSelector != null && defaultSelector != this) {
defaultSelector.connectFailed(uri, sa, ioe);
}
}
}
}
| ProxyWrap |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/tool/CommandUtils.java | {
"start": 386,
"end": 1855
} | class ____ {
static final SecureRandom SECURE_RANDOM = new SecureRandom();
/**
* Generates a password of a given length from a set of predefined allowed chars.
* @param passwordLength the length of the password
* @return the char array with the password
*/
public static char[] generatePassword(int passwordLength) {
final char[] passwordChars = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789*-_=+").toCharArray();
char[] characters = new char[passwordLength];
for (int i = 0; i < passwordLength; ++i) {
characters[i] = passwordChars[SECURE_RANDOM.nextInt(passwordChars.length)];
}
return characters;
}
/**
* Generates a string that can be used as a username, possibly consisting of a chosen prefix and suffix
*/
protected static String generateUsername(@Nullable String prefix, @Nullable String suffix, int length) {
final char[] usernameChars = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789").toCharArray();
final String prefixString = null == prefix ? "" : prefix;
final String suffixString = null == suffix ? "" : prefix;
char[] characters = new char[length];
for (int i = 0; i < length; ++i) {
characters[i] = usernameChars[SECURE_RANDOM.nextInt(usernameChars.length)];
}
return prefixString + new String(characters) + suffixString;
}
}
| CommandUtils |
java | apache__camel | test-infra/camel-test-infra-openai-mock/src/main/java/org/apache/camel/test/infra/openai/mock/ToolExecutionStep.java | {
"start": 1078,
"end": 2023
} | class ____ {
private final List<ToolCallDefinition> toolCalls;
public ToolExecutionStep() {
this.toolCalls = new ArrayList<>();
}
public void addToolCall(ToolCallDefinition toolCall) {
this.toolCalls.add(Objects.requireNonNull(toolCall, "Tool call cannot be null"));
}
public List<ToolCallDefinition> getToolCalls() {
return new ArrayList<>(toolCalls); // Return defensive copy
}
public boolean isEmpty() {
return toolCalls.isEmpty();
}
public int size() {
return toolCalls.size();
}
public ToolCallDefinition getLastToolCall() {
if (toolCalls.isEmpty()) {
throw new IllegalStateException("No tool calls in this step");
}
return toolCalls.get(toolCalls.size() - 1);
}
@Override
public String toString() {
return String.format("ToolExecutionStep{toolCalls=%s}", toolCalls);
}
}
| ToolExecutionStep |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/FileType.java | {
"start": 932,
"end": 1133
} | enum ____ {
/** The log file type for taskmanager. */
LOG,
/** The stdout file type for taskmanager. */
STDOUT,
/** The profiler file type for taskmanager. */
PROFILER,
}
| FileType |
java | quarkusio__quarkus | extensions/panache/mongodb-rest-data-panache/runtime/src/main/java/io/quarkus/mongodb/rest/data/panache/PanacheMongoRepositoryResource.java | {
"start": 891,
"end": 1053
} | interface ____<Repository extends PanacheMongoRepositoryBase<Entity, ID>, Entity, ID>
extends RestDataResource<Entity, ID> {
}
| PanacheMongoRepositoryResource |
java | netty__netty | common/src/main/java/io/netty/util/internal/ClassInitializerUtil.java | {
"start": 767,
"end": 1496
} | class ____ {
private ClassInitializerUtil() { }
/**
* Preload the given classes and so ensure the {@link ClassLoader} has these loaded after this method call.
*
* @param loadingClass the {@link Class} that wants to load the classes.
* @param classes the classes to load.
*/
public static void tryLoadClasses(Class<?> loadingClass, Class<?>... classes) {
ClassLoader loader = PlatformDependent.getClassLoader(loadingClass);
for (Class<?> clazz: classes) {
tryLoadClass(loader, clazz.getName());
}
}
private static void tryLoadClass(ClassLoader classLoader, String className) {
try {
// Load the | ClassInitializerUtil |
java | grpc__grpc-java | examples/src/main/java/io/grpc/examples/deadline/DeadlineServer.java | {
"start": 3115,
"end": 3939
} | class ____ extends GreeterGrpc.GreeterImplBase {
private GreeterGrpc.GreeterBlockingStub clientStub;
void setClientStub(GreeterGrpc.GreeterBlockingStub clientStub) {
this.clientStub = clientStub;
}
@Override
public void sayHello(HelloRequest req, StreamObserver<HelloReply> responseObserver) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
if (req.getName().contains("propagate")) {
clientStub.sayHello(HelloRequest.newBuilder().setName("Server").build());
}
HelloReply reply = HelloReply.newBuilder().setMessage("Hello " + req.getName()).build();
responseObserver.onNext(reply);
responseObserver.onCompleted();
}
}
}
| SlowGreeter |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/Grant.java | {
"start": 881,
"end": 6859
} | class ____ implements Writeable {
public static final String PASSWORD_GRANT_TYPE = "password";
public static final String ACCESS_TOKEN_GRANT_TYPE = "access_token";
private String type;
private String username;
private SecureString password;
private SecureString accessToken;
private String runAsUsername;
private ClientAuthentication clientAuthentication;
public record ClientAuthentication(String scheme, SecureString value) implements Writeable {
public ClientAuthentication(SecureString value) {
this(JwtRealmSettings.HEADER_SHARED_SECRET_AUTHENTICATION_SCHEME, value);
}
ClientAuthentication(StreamInput in) throws IOException {
this(in.readString(), in.readSecureString());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(scheme);
out.writeSecureString(value);
}
}
public Grant() {}
public Grant(StreamInput in) throws IOException {
this.type = in.readString();
this.username = in.readOptionalString();
this.password = in.readOptionalSecureString();
this.accessToken = in.readOptionalSecureString();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
this.runAsUsername = in.readOptionalString();
} else {
this.runAsUsername = null;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
this.clientAuthentication = in.readOptionalWriteable(ClientAuthentication::new);
} else {
this.clientAuthentication = null;
}
}
public void writeTo(StreamOutput out) throws IOException {
out.writeString(type);
out.writeOptionalString(username);
out.writeOptionalSecureString(password);
out.writeOptionalSecureString(accessToken);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
out.writeOptionalString(runAsUsername);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
out.writeOptionalWriteable(clientAuthentication);
}
}
public String getType() {
return type;
}
public String getUsername() {
return username;
}
public SecureString getPassword() {
return password;
}
public SecureString getAccessToken() {
return accessToken;
}
public String getRunAsUsername() {
return runAsUsername;
}
public ClientAuthentication getClientAuthentication() {
return clientAuthentication;
}
public void setType(String type) {
this.type = type;
}
public void setUsername(String username) {
this.username = username;
}
public void setPassword(SecureString password) {
this.password = password;
}
public void setAccessToken(SecureString accessToken) {
this.accessToken = accessToken;
}
public void setRunAsUsername(String runAsUsername) {
this.runAsUsername = runAsUsername;
}
public void setClientAuthentication(ClientAuthentication clientAuthentication) {
this.clientAuthentication = clientAuthentication;
}
public ActionRequestValidationException validate(ActionRequestValidationException validationException) {
if (type == null) {
validationException = addValidationError("[grant_type] is required", validationException);
} else if (type.equals(PASSWORD_GRANT_TYPE)) {
validationException = validateRequiredField("username", username, validationException);
validationException = validateRequiredField("password", password, validationException);
validationException = validateUnsupportedField("access_token", accessToken, validationException);
if (clientAuthentication != null) {
return addValidationError("[client_authentication] is not supported for grant_type [" + type + "]", validationException);
}
} else if (type.equals(ACCESS_TOKEN_GRANT_TYPE)) {
validationException = validateRequiredField("access_token", accessToken, validationException);
validationException = validateUnsupportedField("username", username, validationException);
validationException = validateUnsupportedField("password", password, validationException);
if (clientAuthentication != null
&& JwtRealmSettings.HEADER_SHARED_SECRET_AUTHENTICATION_SCHEME.equals(clientAuthentication.scheme.trim()) == false) {
return addValidationError(
"[client_authentication.scheme] must be set to [" + JwtRealmSettings.HEADER_SHARED_SECRET_AUTHENTICATION_SCHEME + "]",
validationException
);
}
} else {
validationException = addValidationError("grant_type [" + type + "] is not supported", validationException);
}
return validationException;
}
private ActionRequestValidationException validateRequiredField(
String fieldName,
CharSequence fieldValue,
ActionRequestValidationException validationException
) {
if (fieldValue == null || fieldValue.length() == 0) {
return addValidationError("[" + fieldName + "] is required for grant_type [" + type + "]", validationException);
}
return validationException;
}
private ActionRequestValidationException validateUnsupportedField(
String fieldName,
CharSequence fieldValue,
ActionRequestValidationException validationException
) {
if (fieldValue != null && fieldValue.length() > 0) {
return addValidationError("[" + fieldName + "] is not supported for grant_type [" + type + "]", validationException);
}
return validationException;
}
}
| Grant |
java | quarkusio__quarkus | extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/DisabledOidcClientException.java | {
"start": 76,
"end": 506
} | class ____ extends RuntimeException {
public DisabledOidcClientException() {
}
public DisabledOidcClientException(String errorMessage) {
this(errorMessage, null);
}
public DisabledOidcClientException(Throwable cause) {
this(null, cause);
}
public DisabledOidcClientException(String errorMessage, Throwable cause) {
super(errorMessage, cause);
}
}
| DisabledOidcClientException |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/federation/base/Foo.java | {
"start": 315,
"end": 389
} | class ____ {
@External
public int id;
public String name;
}
| Foo |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/ParentPk.java | {
"start": 251,
"end": 1086
} | class ____ implements Serializable {
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
private String firstName;
@Column(name = "p_lname")
private String lastName;
public boolean equals(Object o) {
if ( this == o ) return true;
if ( !( o instanceof ParentPk ) ) return false;
final ParentPk parentPk = (ParentPk) o;
if ( !firstName.equals( parentPk.firstName ) ) return false;
if ( !lastName.equals( parentPk.lastName ) ) return false;
return true;
}
public int hashCode() {
int result;
result = firstName.hashCode();
result = 29 * result + lastName.hashCode();
return result;
}
}
| ParentPk |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAccessType.java | {
"start": 1046,
"end": 1447
} | enum ____ {
/**
* Access-type representing 'viewing' application. ACLs against this type
* dictate who can 'view' some or all of the application related details.
*/
VIEW_APP,
/**
* Access-type representing 'modifying' application. ACLs against this type
* dictate who can 'modify' the application for e.g., by killing the
* application
*/
MODIFY_APP;
}
| ApplicationAccessType |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/typeserializerupgrade/PojoSerializerUpgradeTest.java | {
"start": 8650,
"end": 16807
} | class ____ { "
+ "private long a; "
+ "public long getA() { return a;} "
+ "public void setA(long value) { a = value; }"
+ "@Override public boolean equals(Object obj) { if (obj instanceof Pojo) { Pojo other = (Pojo) obj; return a == other.a;} else { return false; }}"
+ "@Override public int hashCode() { return Objects.hash(a); } "
+ "@Override public String toString() {return \"(\" + a + \")\";}}";
/** We should be able to handle a changed field order of a POJO as keyed state. */
@Test
public void testChangedFieldOrderWithKeyedState() throws Exception {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_B, true, true);
}
/** We should be able to handle a changed field order of a POJO as operator state. */
@Test
public void testChangedFieldOrderWithOperatorState() throws Exception {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_B, true, false);
}
/** Changing field types of a POJO as keyed state should require a state migration. */
@Test
public void testChangedFieldTypesWithKeyedState() throws Exception {
try {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_C, true, true);
fail("Expected a state migration exception.");
} catch (Exception e) {
if (CommonTestUtils.containsCause(e, StateMigrationException.class)) {
// StateMigrationException expected
} else {
throw e;
}
}
}
/** Changing field types of a POJO as operator state should require a state migration. */
@Test
public void testChangedFieldTypesWithOperatorState() throws Exception {
try {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_C, true, false);
fail("Expected a state migration exception.");
} catch (Exception e) {
if (CommonTestUtils.containsCause(e, StateMigrationException.class)) {
// StateMigrationException expected
} else {
throw e;
}
}
}
/** Adding fields to a POJO as keyed state should succeed. */
@Test
public void testAdditionalFieldWithKeyedState() throws Exception {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_D, true, true);
}
/** Adding fields to a POJO as operator state should succeed. */
@Test
public void testAdditionalFieldWithOperatorState() throws Exception {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_D, true, false);
}
/** Removing fields from a POJO as keyed state should succeed. */
@Test
public void testMissingFieldWithKeyedState() throws Exception {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_E, false, true);
}
/** Removing fields from a POJO as operator state should succeed. */
@Test
public void testMissingFieldWithOperatorState() throws Exception {
testPojoSerializerUpgrade(SOURCE_A, SOURCE_E, false, false);
}
private void testPojoSerializerUpgrade(
String classSourceA, String classSourceB, boolean hasBField, boolean isKeyedState)
throws Exception {
final Configuration taskConfiguration = new Configuration();
final ExecutionConfig executionConfig = new ExecutionConfig();
final KeySelector<Long, Long> keySelector = new IdentityKeySelector<>();
final Collection<Long> inputs = Arrays.asList(1L, 2L, 45L, 67L, 1337L);
// run the program with classSourceA
File rootPath = temporaryFolder.newFolder();
File sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceA);
compileClass(sourceFile);
final ClassLoader classLoader =
URLClassLoader.newInstance(
new URL[] {rootPath.toURI().toURL()},
Thread.currentThread().getContextClassLoader());
OperatorSubtaskState stateHandles =
runOperator(
taskConfiguration,
executionConfig,
new StreamMap<>(new StatefulMapper(isKeyedState, false, hasBField)),
keySelector,
isKeyedState,
stateBackend,
classLoader,
null,
inputs);
// run the program with classSourceB
rootPath = temporaryFolder.newFolder();
sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceB);
compileClass(sourceFile);
final ClassLoader classLoaderB =
URLClassLoader.newInstance(
new URL[] {rootPath.toURI().toURL()},
Thread.currentThread().getContextClassLoader());
runOperator(
taskConfiguration,
executionConfig,
new StreamMap<>(new StatefulMapper(isKeyedState, true, hasBField)),
keySelector,
isKeyedState,
stateBackend,
classLoaderB,
stateHandles,
inputs);
}
private OperatorSubtaskState runOperator(
Configuration taskConfiguration,
ExecutionConfig executionConfig,
OneInputStreamOperator<Long, Long> operator,
KeySelector<Long, Long> keySelector,
boolean isKeyedState,
StateBackend stateBackend,
ClassLoader classLoader,
OperatorSubtaskState operatorSubtaskState,
Iterable<Long> input)
throws Exception {
try (final MockEnvironment environment =
new MockEnvironmentBuilder()
.setTaskName("test task")
.setManagedMemorySize(32 * 1024)
.setInputSplitProvider(new MockInputSplitProvider())
.setBufferSize(256)
.setTaskConfiguration(taskConfiguration)
.setExecutionConfig(executionConfig)
.setMaxParallelism(16)
.setUserCodeClassLoader(classLoader)
.build()) {
OneInputStreamOperatorTestHarness<Long, Long> harness = null;
try {
if (isKeyedState) {
harness =
new KeyedOneInputStreamOperatorTestHarness<>(
operator,
keySelector,
BasicTypeInfo.LONG_TYPE_INFO,
environment);
} else {
harness =
new OneInputStreamOperatorTestHarness<>(
operator, LongSerializer.INSTANCE, environment);
}
harness.setStateBackend(stateBackend);
harness.setup();
harness.initializeState(operatorSubtaskState);
harness.open();
long timestamp = 0L;
for (Long value : input) {
harness.processElement(value, timestamp++);
}
long checkpointId = 1L;
long checkpointTimestamp = timestamp + 1L;
return harness.snapshot(checkpointId, checkpointTimestamp);
} finally {
IOUtils.closeQuietly(harness);
}
}
}
private static File writeSourceFile(File root, String name, String source) throws IOException {
File sourceFile = new File(root, name);
sourceFile.getParentFile().mkdirs();
try (FileWriter writer = new FileWriter(sourceFile)) {
writer.write(source);
}
return sourceFile;
}
private static int compileClass(File sourceFile) {
JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
return compiler.run(null, null, null, "-proc:none", sourceFile.getPath());
}
private static final | Pojo |
java | google__guava | android/guava/src/com/google/common/reflect/ClassPath.java | {
"start": 14489,
"end": 15305
} | class ____.
throw new IllegalStateException(e);
}
}
@Override
public String toString() {
return className;
}
}
/**
* Returns all locations that {@code classloader} and parent loaders load classes and resources
* from. Callers can {@linkplain LocationInfo#scanResources scan} individual locations selectively
* or even in parallel.
*/
static ImmutableSet<LocationInfo> locationsFrom(ClassLoader classloader) {
ImmutableSet.Builder<LocationInfo> builder = ImmutableSet.builder();
for (Map.Entry<File, ClassLoader> entry : getClassPathEntries(classloader).entrySet()) {
builder.add(new LocationInfo(entry.getKey(), entry.getValue()));
}
return builder.build();
}
/**
* Represents a single location (a directory or a jar file) in the | path |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationBuilder.java | {
"start": 885,
"end": 1384
} | interface ____ {
/**
* The name of the type of aggregation built by this builder.
*/
String getType();
/**
* Set the aggregation's metadata. Returns {@code this} for chaining.
*/
BaseAggregationBuilder setMetadata(Map<String, Object> metadata);
/**
* Set the sub aggregations if this aggregation supports sub aggregations. Returns {@code this} for chaining.
*/
BaseAggregationBuilder subAggregations(Builder subFactories);
}
| BaseAggregationBuilder |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/UnalignedCheckpointRescaleITCase.java | {
"start": 35118,
"end": 35459
} | class ____ implements CoMapFunction<Long, Long, Long> {
@Override
public Long map1(Long value) throws Exception {
return checkHeader(value);
}
@Override
public Long map2(Long value) throws Exception {
return checkHeader(value);
}
}
private static | UnionLikeCoGroup |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInvokeWithNullBodyTest.java | {
"start": 1016,
"end": 2371
} | class ____ extends ContextTestSupport {
@Test
public void testWithHelloWorld() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testWithEmptyBody() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("");
template.sendBody("direct:start", "");
assertMockEndpointsSatisfied();
}
@Test
public void testWithNullBody() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").message(0).body().isNull();
template.sendBody("direct:start", null);
assertMockEndpointsSatisfied();
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("foo", new MyNullFooBean());
return jndi;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("bean:foo").to("mock:result");
}
};
}
@SuppressWarnings("unused")
private static | BeanInvokeWithNullBodyTest |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/composite/ReferenceGenerator.java | {
"start": 1445,
"end": 1813
} | interface ____ {
/**
* Generates unique, within a request, reference identifier for the given object. Reference identifier must start
* with an alphanumeric.
*
* @param object object to generate reference identifier for
* @return generated reference identifier
*/
String nextReferenceFor(Object object);
}
| ReferenceGenerator |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1700/Issue1764_bean_biginteger.java | {
"start": 1419,
"end": 1609
} | class ____ {
public BigInteger value;
public Model() {
}
public Model(long value) {
this.value = BigInteger.valueOf(value);
}
}
}
| Model |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/paths/Paths_assertHasFileName_Test.java | {
"start": 1117,
"end": 3210
} | class ____ extends PathsBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Path actual = null;
String filename = "actual";
// WHEN
var error = expectAssertionError(() -> underTest.assertHasFileName(INFO, actual, filename));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_filename_is_null() {
// GIVEN
Path actual = tempDir.resolve("actual");
String filename = null;
// WHEN
Throwable thrown = catchThrowable(() -> underTest.assertHasFileName(INFO, actual, filename));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage("expected fileName should not be null");
}
@Test
void should_fail_if_actual_does_not_have_given_filename() {
// GIVEN
Path actual = tempDir.resolve("actual");
String filename = "filename";
// WHEN
var error = expectAssertionError(() -> underTest.assertHasFileName(INFO, null, filename));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_pass_with_non_existing_path() {
// GIVEN
Path actual = tempDir.resolve("actual");
String filename = "actual";
// WHEN/THEN
underTest.assertHasFileName(INFO, actual, filename);
}
@Test
void should_pass_with_existing_regular_file() throws IOException {
// GIVEN
Path actual = createFile(tempDir.resolve("actual"));
String filename = "actual";
// WHEN/THEN
underTest.assertHasFileName(INFO, actual, filename);
}
@Test
void should_pass_with_existing_directory() throws IOException {
// GIVEN
Path actual = createDirectory(tempDir.resolve("actual"));
String filename = "actual";
// WHEN/THEN
underTest.assertHasFileName(INFO, actual, filename);
}
@Test
void should_pass_with_existing_symbolic_link() throws IOException {
// GIVEN
Path actual = tryToCreateSymbolicLink(tempDir.resolve("actual"), tempDir);
String filename = "actual";
// WHEN/THEN
underTest.assertHasFileName(INFO, actual, filename);
}
}
| Paths_assertHasFileName_Test |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/util/ConsoleMockitoLogger.java | {
"start": 206,
"end": 443
} | class ____ implements MockitoLogger {
@Override
public void log(Object what) {
System.out.println(what);
}
@Override
public void warn(Object what) {
System.err.println(what);
}
}
| ConsoleMockitoLogger |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1435/InObject.java | {
"start": 198,
"end": 375
} | class ____ {
private final String name;
public InObject(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
| InObject |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/LoadAndFetchGraphTest.java | {
"start": 9739,
"end": 10999
} | class ____ {
@Id
private Integer id;
private String label;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "A_ID")
private AEntity a;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "B_ID")
private BEntity b;
@ManyToOne(fetch = FetchType.LAZY)
private CEntity c;
@OneToMany(
fetch = FetchType.LAZY,
mappedBy = "c",
cascade = CascadeType.ALL,
orphanRemoval = true
)
private List<DEntity> dList = new ArrayList<>();
public void addD(DEntity d) {
dList.add( d );
d.setC( this );
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getLabel() {
return label;
}
public void setLabel(String label) {
this.label = label;
}
public AEntity getA() {
return a;
}
public void setA(AEntity a) {
this.a = a;
}
public BEntity getB() {
return b;
}
public void setB(BEntity b) {
this.b = b;
}
public CEntity getC() {
return c;
}
public void setC(CEntity c) {
this.c = c;
}
public List<DEntity> getDList() {
return dList;
}
public void setDList(List<DEntity> dList) {
this.dList = dList;
}
}
@Entity(name = "DEntity")
@Table(name = "D")
public static | CEntity |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java | {
"start": 67319,
"end": 68296
} | class ____
extends AlwaysTimeoutWithDefaultValueAsyncFunction {
private final transient CountDownLatch latch = new CountDownLatch(1);
@Override
public void asyncInvoke(Integer input, ResultFuture<Integer> resultFuture) {
tryCounts.merge(input, 1, Integer::sum);
CompletableFuture.runAsync(
() -> {
try {
latch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
}
@Override
public void timeout(Integer input, ResultFuture<Integer> resultFuture) {
// simulate the case reported in https://issues.apache.org/jira/browse/FLINK-38082
resultFuture.completeExceptionally(new TimeoutException("Dummy timeout error"));
}
}
private static | AlwaysTimeoutAsyncFunction |
java | playframework__playframework | core/play-integration-test/src/test/java/play/it/http/MultipleRepeatableOnTypeAndActionController.java | {
"start": 404,
"end": 637
} | class ____ extends MockController {
@SomeRepeatable // again runs two actions
@SomeRepeatable // plus two more
public Result action(Http.Request request) {
return Results.ok();
}
}
| MultipleRepeatableOnTypeAndActionController |
java | redisson__redisson | redisson/src/main/java/org/redisson/mapreduce/RedissonCollectionMapReduce.java | {
"start": 1192,
"end": 2636
} | class ____<VIn, KOut, VOut> extends MapReduceExecutor<RCollectionMapper<VIn, KOut, VOut>, VIn, KOut, VOut>
implements RCollectionMapReduce<VIn, KOut, VOut> {
public RedissonCollectionMapReduce(RObject object, RedissonClient redisson, CommandAsyncExecutor commandExecutor) {
super(object, redisson, commandExecutor);
}
@Override
public RCollectionMapReduce<VIn, KOut, VOut> timeout(long timeout, TimeUnit unit) {
this.timeout = unit.toMillis(timeout);
return this;
}
@Override
public RCollectionMapReduce<VIn, KOut, VOut> mapper(RCollectionMapper<VIn, KOut, VOut> mapper) {
check(mapper);
this.mapper = mapper;
return this;
}
@Override
public RCollectionMapReduce<VIn, KOut, VOut> reducer(RReducer<KOut, VOut> reducer) {
check(reducer);
this.reducer = reducer;
return this;
}
@Override
protected Callable<Object> createTask(String resultMapName, RCollator<KOut, VOut, Object> collator) {
CollectionMapperTask<VIn, KOut, VOut> mapperTask = new CollectionMapperTask<VIn, KOut, VOut>(mapper, objectClass, objectCodec.getClass());
return new CoordinatorTask<KOut, VOut>(mapperTask, reducer, objectName, resultMapName, objectCodec.getClass(), objectClass, collator, timeout, System.currentTimeMillis());
}
}
| RedissonCollectionMapReduce |
java | apache__flink | flink-queryable-state/flink-queryable-state-client-java/src/main/java/org/apache/flink/queryablestate/messages/KvStateResponse.java | {
"start": 2041,
"end": 2616
} | class ____
implements MessageDeserializer<KvStateResponse> {
@Override
public KvStateResponse deserializeMessage(ByteBuf buf) {
int length = buf.readInt();
Preconditions.checkArgument(
length >= 0,
"Negative length for state content. "
+ "This indicates a serialization error.");
byte[] content = new byte[length];
buf.readBytes(content);
return new KvStateResponse(content);
}
}
}
| KvStateResponseDeserializer |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationListItem.java | {
"start": 950,
"end": 4343
} | class ____ implements Writeable, ToXContentObject {
public static final ParseField NAME_FIELD = new ParseField("name");
public static final ParseField ANALYTICS_COLLECTION_NAME_FIELD = new ParseField("analytics_collection_name");
public static final ParseField UPDATED_AT_MILLIS_FIELD = new ParseField("updated_at_millis");
private final String name;
private final String analyticsCollectionName;
private final long updatedAtMillis;
/**
* Constructs a SearchApplicationListItem.
*
* @param name The name of the search application
* @param analyticsCollectionName The analytics collection associated with this application if one exists
* @param updatedAtMillis The timestamp in milliseconds when this search application was last updated.
*/
public SearchApplicationListItem(String name, @Nullable String analyticsCollectionName, long updatedAtMillis) {
Objects.requireNonNull(name, "Name cannot be null on a SearchApplicationListItem");
this.name = name;
this.analyticsCollectionName = analyticsCollectionName;
this.updatedAtMillis = updatedAtMillis;
}
public SearchApplicationListItem(StreamInput in) throws IOException {
this.name = in.readString();
this.analyticsCollectionName = in.readOptionalString();
this.updatedAtMillis = in.readLong();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME_FIELD.getPreferredName(), name);
if (analyticsCollectionName != null) {
builder.field(ANALYTICS_COLLECTION_NAME_FIELD.getPreferredName(), analyticsCollectionName);
}
builder.field(UPDATED_AT_MILLIS_FIELD.getPreferredName(), updatedAtMillis);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeOptionalString(analyticsCollectionName);
out.writeLong(updatedAtMillis);
}
/**
* Returns the name of the {@link SearchApplicationListItem}.
*
* @return the name.
*/
public String name() {
return name;
}
/**
* Returns the analytics collection associated with the {@link SearchApplicationListItem} if one exists.
*
* @return the analytics collection.
*/
public String analyticsCollectionName() {
return analyticsCollectionName;
}
/**
* Returns the timestamp in milliseconds when the {@link SearchApplicationListItem} was last modified.
*
* @return the last updated timestamp in milliseconds.
*/
public long updatedAtMillis() {
return updatedAtMillis;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SearchApplicationListItem item = (SearchApplicationListItem) o;
return name.equals(item.name)
&& Objects.equals(analyticsCollectionName, item.analyticsCollectionName)
&& updatedAtMillis == item.updatedAtMillis;
}
@Override
public int hashCode() {
return Objects.hash(name, analyticsCollectionName, updatedAtMillis);
}
}
| SearchApplicationListItem |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/HamletSpec.java | {
"start": 14696,
"end": 15541
} | interface ____ extends _Child {
// BIG omitted cf. http://www.w3.org/TR/html5-diff/
/**
* Add a SMALL (small print) element
* @return a new SMALL element builder
*/
SMALL small();
/**
* Add a complete small (small print) element.
* Shortcut of: small().__(cdata).__();
* @param cdata the content of the element
* @return the current element builder
*/
_FontSize small(String cdata);
/**
* Add a complete small (small print) element.
* Shortcut of: small().$id(id).$class(class).__(cdata).__();
* @param selector css selector in the form of (#id)?(.class)*
* @param cdata the content of the element
* @return the current element builder
*/
_FontSize small(String selector, String cdata);
}
/** %fontstyle -(%pre.exclusion) */
public | _FontSize |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapOutputFile.java | {
"start": 1602,
"end": 4818
} | class ____ implements Configurable {
private Configuration conf;
static final String MAP_OUTPUT_FILENAME_STRING = "file.out";
static final String MAP_OUTPUT_INDEX_SUFFIX_STRING = ".index";
static final String REDUCE_INPUT_FILE_FORMAT_STRING = "%s/map_%d.out";
public MapOutputFile() {
}
/**
* Return the path to local map output file created earlier
*
* @return path
* @throws IOException
*/
public abstract Path getOutputFile() throws IOException;
/**
* Create a local map output file name.
*
* @param size the size of the file
* @return path
* @throws IOException
*/
public abstract Path getOutputFileForWrite(long size) throws IOException;
/**
* Create a local map output file name on the same volume.
*/
public abstract Path getOutputFileForWriteInVolume(Path existing);
/**
* Return the path to a local map output index file created earlier
*
* @return path
* @throws IOException
*/
public abstract Path getOutputIndexFile() throws IOException;
/**
* Create a local map output index file name.
*
* @param size the size of the file
* @return path
* @throws IOException
*/
public abstract Path getOutputIndexFileForWrite(long size) throws IOException;
/**
* Create a local map output index file name on the same volume.
*/
public abstract Path getOutputIndexFileForWriteInVolume(Path existing);
/**
* Return a local map spill file created earlier.
*
* @param spillNumber the number
* @return path
* @throws IOException
*/
public abstract Path getSpillFile(int spillNumber) throws IOException;
/**
* Create a local map spill file name.
*
* @param spillNumber the number
* @param size the size of the file
* @return path
* @throws IOException
*/
public abstract Path getSpillFileForWrite(int spillNumber, long size)
throws IOException;
/**
* Return a local map spill index file created earlier
*
* @param spillNumber the number
* @return path
* @throws IOException
*/
public abstract Path getSpillIndexFile(int spillNumber) throws IOException;
/**
* Create a local map spill index file name.
*
* @param spillNumber the number
* @param size the size of the file
* @return path
* @throws IOException
*/
public abstract Path getSpillIndexFileForWrite(int spillNumber, long size)
throws IOException;
/**
* Return a local reduce input file created earlier
*
* @param mapId a map task id
* @return path
* @throws IOException
*/
public abstract Path getInputFile(int mapId) throws IOException;
/**
* Create a local reduce input file name.
*
* @param mapId a map task id
* @param size the size of the file
* @return path
* @throws IOException
*/
public abstract Path getInputFileForWrite(
org.apache.hadoop.mapreduce.TaskID mapId, long size) throws IOException;
/** Removes all of the files related to a task. */
public abstract void removeAll() throws IOException;
@Override
public void setConf(Configuration conf) {
this.conf = conf;
}
@Override
public Configuration getConf() {
return conf;
}
}
| MapOutputFile |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/ResolvableType.java | {
"start": 44918,
"end": 46429
} | class ____ declares the field includes generic
* parameter variables that are satisfied by the implementation class.
* @param field the source field
* @param nestingLevel the nesting level (1 for the outer level; 2 for a nested
* generic type; etc)
* @param implementationClass the implementation class
* @return a {@code ResolvableType} for the specified field
* @see #forField(Field)
*/
public static ResolvableType forField(Field field, int nestingLevel, @Nullable Class<?> implementationClass) {
Assert.notNull(field, "Field must not be null");
ResolvableType owner = forType(implementationClass).as(field.getDeclaringClass());
return forType(null, new FieldTypeProvider(field), owner.asVariableResolver()).getNested(nestingLevel);
}
/**
* Return a {@code ResolvableType} for the specified {@link Constructor} parameter.
* @param constructor the source constructor (must not be {@code null})
* @param parameterIndex the parameter index
* @return a {@code ResolvableType} for the specified constructor parameter
* @see #forConstructorParameter(Constructor, int, Class)
*/
public static ResolvableType forConstructorParameter(Constructor<?> constructor, int parameterIndex) {
Assert.notNull(constructor, "Constructor must not be null");
return forMethodParameter(new MethodParameter(constructor, parameterIndex));
}
/**
* Return a {@code ResolvableType} for the specified {@link Constructor} parameter
* with a given implementation. Use this variant when the | that |
java | apache__logging-log4j2 | log4j-appserver/src/main/java/org/apache/logging/log4j/appserver/jetty/Log4j2Logger.java | {
"start": 2016,
"end": 2119
} | class ____ be used
* as the parent to locate the caller's ClassLoader.
*/
private static | must |
java | mapstruct__mapstruct | processor/src/test/resources/fixtures/21/org/mapstruct/ap/test/bugs/_913/DomainDtoWithNvmsNullMapperImpl.java | {
"start": 540,
"end": 8260
} | class ____ implements DomainDtoWithNvmsNullMapper {
private final Helper helper = new Helper();
@Override
public Domain create(Dto source) {
if ( source == null ) {
return null;
}
Domain domain = createNullDomain();
List<String> list = source.getStrings();
if ( list != null ) {
domain.setStrings( new LinkedHashSet<String>( list ) );
}
domain.setLongs( stringListToLongSet( source.getStrings() ) );
List<String> list1 = source.getStringsInitialized();
if ( list1 != null ) {
domain.setStringsInitialized( new LinkedHashSet<String>( list1 ) );
}
domain.setLongsInitialized( stringListToLongSet( source.getStringsInitialized() ) );
List<String> list2 = source.getStringsWithDefault();
if ( list2 != null ) {
domain.setStringsWithDefault( new ArrayList<String>( list2 ) );
}
else {
domain.setStringsWithDefault( helper.toList( "3" ) );
}
return domain;
}
@Override
public void update(Dto source, Domain target) {
if ( source == null ) {
return;
}
if ( target.getStrings() != null ) {
List<String> list = source.getStrings();
if ( list != null ) {
target.getStrings().clear();
target.getStrings().addAll( list );
}
else {
target.setStrings( null );
}
}
else {
List<String> list = source.getStrings();
if ( list != null ) {
target.setStrings( new LinkedHashSet<String>( list ) );
}
}
if ( target.getLongs() != null ) {
Set<Long> set = stringListToLongSet( source.getStrings() );
if ( set != null ) {
target.getLongs().clear();
target.getLongs().addAll( set );
}
else {
target.setLongs( null );
}
}
else {
Set<Long> set = stringListToLongSet( source.getStrings() );
if ( set != null ) {
target.setLongs( set );
}
}
if ( target.getStringsInitialized() != null ) {
List<String> list1 = source.getStringsInitialized();
if ( list1 != null ) {
target.getStringsInitialized().clear();
target.getStringsInitialized().addAll( list1 );
}
else {
target.setStringsInitialized( null );
}
}
else {
List<String> list1 = source.getStringsInitialized();
if ( list1 != null ) {
target.setStringsInitialized( new LinkedHashSet<String>( list1 ) );
}
}
if ( target.getLongsInitialized() != null ) {
Set<Long> set1 = stringListToLongSet( source.getStringsInitialized() );
if ( set1 != null ) {
target.getLongsInitialized().clear();
target.getLongsInitialized().addAll( set1 );
}
else {
target.setLongsInitialized( null );
}
}
else {
Set<Long> set1 = stringListToLongSet( source.getStringsInitialized() );
if ( set1 != null ) {
target.setLongsInitialized( set1 );
}
}
if ( target.getStringsWithDefault() != null ) {
List<String> list2 = source.getStringsWithDefault();
if ( list2 != null ) {
target.getStringsWithDefault().clear();
target.getStringsWithDefault().addAll( list2 );
}
else {
target.setStringsWithDefault( helper.toList( "3" ) );
}
}
else {
List<String> list2 = source.getStringsWithDefault();
if ( list2 != null ) {
target.setStringsWithDefault( new ArrayList<String>( list2 ) );
}
else {
target.setStringsWithDefault( helper.toList( "3" ) );
}
}
}
@Override
public Domain updateWithReturn(Dto source, Domain target) {
if ( source == null ) {
return target;
}
if ( target.getStrings() != null ) {
List<String> list = source.getStrings();
if ( list != null ) {
target.getStrings().clear();
target.getStrings().addAll( list );
}
else {
target.setStrings( null );
}
}
else {
List<String> list = source.getStrings();
if ( list != null ) {
target.setStrings( new LinkedHashSet<String>( list ) );
}
}
if ( target.getLongs() != null ) {
Set<Long> set = stringListToLongSet( source.getStrings() );
if ( set != null ) {
target.getLongs().clear();
target.getLongs().addAll( set );
}
else {
target.setLongs( null );
}
}
else {
Set<Long> set = stringListToLongSet( source.getStrings() );
if ( set != null ) {
target.setLongs( set );
}
}
if ( target.getStringsInitialized() != null ) {
List<String> list1 = source.getStringsInitialized();
if ( list1 != null ) {
target.getStringsInitialized().clear();
target.getStringsInitialized().addAll( list1 );
}
else {
target.setStringsInitialized( null );
}
}
else {
List<String> list1 = source.getStringsInitialized();
if ( list1 != null ) {
target.setStringsInitialized( new LinkedHashSet<String>( list1 ) );
}
}
if ( target.getLongsInitialized() != null ) {
Set<Long> set1 = stringListToLongSet( source.getStringsInitialized() );
if ( set1 != null ) {
target.getLongsInitialized().clear();
target.getLongsInitialized().addAll( set1 );
}
else {
target.setLongsInitialized( null );
}
}
else {
Set<Long> set1 = stringListToLongSet( source.getStringsInitialized() );
if ( set1 != null ) {
target.setLongsInitialized( set1 );
}
}
if ( target.getStringsWithDefault() != null ) {
List<String> list2 = source.getStringsWithDefault();
if ( list2 != null ) {
target.getStringsWithDefault().clear();
target.getStringsWithDefault().addAll( list2 );
}
else {
target.setStringsWithDefault( helper.toList( "3" ) );
}
}
else {
List<String> list2 = source.getStringsWithDefault();
if ( list2 != null ) {
target.setStringsWithDefault( new ArrayList<String>( list2 ) );
}
else {
target.setStringsWithDefault( helper.toList( "3" ) );
}
}
return target;
}
protected Set<Long> stringListToLongSet(List<String> list) {
if ( list == null ) {
return null;
}
Set<Long> set = LinkedHashSet.newLinkedHashSet( list.size() );
for ( String string : list ) {
set.add( Long.parseLong( string ) );
}
return set;
}
}
| DomainDtoWithNvmsNullMapperImpl |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/fs/local/LocalRecoverableFsDataOutputStream.java | {
"start": 4750,
"end": 7848
} | class ____ implements Committer {
private final LocalRecoverable recoverable;
LocalCommitter(LocalRecoverable recoverable) {
this.recoverable = checkNotNull(recoverable);
}
@Override
public void commit() throws IOException {
final File src = recoverable.tempFile();
final File dest = recoverable.targetFile();
// sanity check
if (src.length() != recoverable.offset()) {
// something was done to this file since the committer was created.
// this is not the "clean" case
throw new IOException("Cannot clean commit: File has trailing junk data.");
}
// rather than fall into default recovery, handle errors explicitly
// in order to improve error messages
try {
Files.move(src.toPath(), dest.toPath(), StandardCopyOption.ATOMIC_MOVE);
} catch (UnsupportedOperationException | AtomicMoveNotSupportedException e) {
if (!src.renameTo(dest)) {
throw new IOException(
"Committing file failed, could not rename " + src + " -> " + dest);
}
} catch (FileAlreadyExistsException e) {
throw new IOException(
"Committing file failed. Target file already exists: " + dest);
}
}
@Override
public void commitAfterRecovery() throws IOException {
final File src = recoverable.tempFile();
final File dest = recoverable.targetFile();
final long expectedLength = recoverable.offset();
if (src.exists()) {
if (src.length() > expectedLength) {
// can happen if we co from persist to recovering for commit directly
// truncate the trailing junk away
try (FileOutputStream fos = new FileOutputStream(src, true)) {
fos.getChannel().truncate(expectedLength);
}
} else if (src.length() < expectedLength) {
throw new IOException("Missing data in tmp file: " + src);
}
// source still exists, so no renaming happened yet. do it!
Files.move(src.toPath(), dest.toPath(), StandardCopyOption.ATOMIC_MOVE);
} else if (!dest.exists()) {
// neither exists - that can be a sign of
// - (1) a serious problem (file system loss of data)
// - (2) a recovery of a savepoint that is some time old and the users
// removed the files in the meantime.
// TODO how to handle this?
// We probably need an option for users whether this should log,
// or result in an exception or unrecoverable exception
}
}
@Override
public CommitRecoverable getRecoverable() {
return recoverable;
}
}
}
| LocalCommitter |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlStatsAction.java | {
"start": 351,
"end": 623
} | class ____ extends ActionType<SqlStatsResponse> {
public static final SqlStatsAction INSTANCE = new SqlStatsAction();
public static final String NAME = "cluster:monitor/xpack/sql/stats/dist";
private SqlStatsAction() {
super(NAME);
}
}
| SqlStatsAction |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/internals/ModernStrategy.java | {
"start": 2080,
"end": 2863
} | class ____'t deprecated or removed, so reference it as an argument type.
// This allows for mocking out the method implementation while still accepting Subject instances as arguments.
callAs = subject.getDeclaredMethod("callAs", Subject.class, Callable.class);
}
@Override
public <T> T doPrivileged(PrivilegedAction<T> action) {
// This is intentionally a pass-through
return action.run();
}
@Override
public Subject current() {
return (Subject) ReflectiveStrategy.invoke(current, null);
}
@Override
public <T> T callAs(Subject subject, Callable<T> action) throws CompletionException {
return (T) ReflectiveStrategy.invokeChecked(callAs, CompletionException.class, null, subject, action);
}
}
| isn |
java | apache__hadoop | hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMDefinitionSLS.java | {
"start": 3665,
"end": 6159
} | class ____ extends AmDefinitionBuilder {
private final Map<String, String> jsonJob;
private Builder(Map<String, String> jsonJob) {
this.jsonJob = jsonJob;
}
public static Builder create(Map<String, String> jsonJob) {
return new Builder(jsonJob);
}
public Builder withAmType(String key) {
if (jsonJob.containsKey(key)) {
String amType = jsonJob.get(key);
if (amType != null) {
this.amType = amType;
}
}
return this;
}
public Builder withUser(String key) {
if (jsonJob.containsKey(key)) {
String user = jsonJob.get(key);
if (user != null) {
this.user = user;
}
}
return this;
}
public Builder withQueue(String key) {
if (jsonJob.containsKey(key)) {
this.queue = jsonJob.get(key);
}
return this;
}
public Builder withJobId(String key) {
if (jsonJob.containsKey(key)) {
this.jobId = jsonJob.get(key);
}
return this;
}
public Builder withJobCount(String key) {
if (jsonJob.containsKey(key)) {
jobCount = Integer.parseInt(jsonJob.get(key));
jobCount = Math.max(jobCount, 1);
}
return this;
}
public Builder withJobStartTime(String key) {
if (jsonJob.containsKey(key)) {
this.jobStartTime = Long.parseLong(jsonJob.get(key));
}
return this;
}
public Builder withJobFinishTime(String key) {
if (jsonJob.containsKey(key)) {
this.jobFinishTime = Long.parseLong(jsonJob.get(key));
}
return this;
}
public Builder withLabelExpression(String key) {
if (jsonJob.containsKey(key)) {
this.labelExpression = jsonJob.get(key);
}
return this;
}
public AMDefinitionSLS.Builder withTaskContainers(
List<ContainerSimulator> taskContainers) {
this.taskContainers = taskContainers;
return this;
}
public AMDefinitionSLS.Builder withAmResource(Resource amResource) {
this.amResource = amResource;
return this;
}
public AMDefinitionSLS build() {
AMDefinitionSLS amDef = new AMDefinitionSLS(this);
// Job id is generated automatically if this job configuration allows
// multiple job instances
if (jobCount > 1) {
amDef.oldAppId = null;
} else {
amDef.oldAppId = jobId;
}
amDef.jobCount = jobCount;
return amDef;
}
}
}
| Builder |
java | apache__camel | components/camel-opentelemetry-metrics/src/test/java/org/apache/camel/opentelemetry/metrics/eventnotifier/OpenTelemetryExchangeEventNotifierTimeUnitTest.java | {
"start": 2063,
"end": 5946
} | class ____ extends AbstractOpenTelemetryTest {
private static final Long DELAY = 1100L;
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
OpenTelemetryExchangeEventNotifier eventNotifier = new OpenTelemetryExchangeEventNotifier();
eventNotifier.setMeter(otelExtension.getOpenTelemetry().getMeter("meterTest"));
// override default time unit from milliseconds to seconds
eventNotifier.setTimeUnit(TimeUnit.SECONDS);
eventNotifier.setLastExchangeTimeUnit(TimeUnit.SECONDS);
context.getManagementStrategy().addEventNotifier(eventNotifier);
eventNotifier.init();
return context;
}
@Test
public void testElapsedTimerEvents() throws Exception {
int count = 3;
MockEndpoint mock = getMockEndpoint("mock://result");
mock.expectedMessageCount(count);
for (int i = 0; i < count; i++) {
template.sendBody("direct:bar", "Hello " + i);
}
mock.assertIsSatisfied();
verifyElapsedTimerHistogramMetric("bar", Math.floorDiv(DELAY, 1000L), count);
}
private void verifyElapsedTimerHistogramMetric(String routeId, long delay, int msgCount) {
PointData pd = getPointDataForRouteId(DEFAULT_CAMEL_EXCHANGE_ELAPSED_TIMER, routeId);
assertInstanceOf(HistogramPointData.class, pd);
HistogramPointData hpd = (HistogramPointData) pd;
// histogram values are in seconds
assertTrue(hpd.getMax() == delay, "max value");
assertTrue(hpd.getMin() == delay, "min value");
assertTrue(hpd.getSum() >= msgCount * delay, "sum");
assertEquals(msgCount, hpd.getCount(), "count");
}
@Test
public void testMetricData() {
template.sendBody("direct:bar", "Hello");
List<MetricData> ls = getMetricData(DEFAULT_CAMEL_EXCHANGE_ELAPSED_TIMER);
assertEquals(1, ls.size());
MetricData md = ls.get(0);
assertEquals(MetricDataType.HISTOGRAM, md.getType());
assertEquals("camel.exchange.elapsed", md.getName());
assertEquals("Time taken to complete exchange", md.getDescription());
// time unit should be in seconds as configured
assertEquals("seconds", md.getUnit());
ls = getMetricData(DEFAULT_CAMEL_EXCHANGE_SENT_TIMER);
assertEquals(1, ls.size());
md = ls.get(0);
assertEquals(MetricDataType.HISTOGRAM, md.getType());
assertEquals("camel.exchange.sent", md.getName());
assertEquals("Time taken to send message to the endpoint", md.getDescription());
// time unit should be in seconds as configured
assertEquals("seconds", md.getUnit());
ls = getMetricData(DEFAULT_CAMEL_ROUTES_EXCHANGES_INFLIGHT);
assertEquals(1, ls.size());
md = ls.get(0);
assertEquals(MetricDataType.LONG_GAUGE, md.getType());
assertEquals("camel.exchanges.inflight", md.getName());
assertEquals("Route in flight messages", md.getDescription());
ls = getMetricData(DEFAULT_CAMEL_EXCHANGE_LAST_PROCESSED_TIME_INSTRUMENT);
assertEquals(1, ls.size());
md = ls.get(0);
assertEquals(MetricDataType.LONG_GAUGE, md.getType());
assertEquals("camel.exchanges.last.time", md.getName());
assertEquals("Last exchange processed time since the Unix epoch", md.getDescription());
// time unit should be in seconds as configured
assertEquals("seconds", md.getUnit());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct://bar").routeId("bar").delay(DELAY).to("mock://result");
}
};
}
}
| OpenTelemetryExchangeEventNotifierTimeUnitTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_250.java | {
"start": 848,
"end": 1217
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select concat(@@version, ' ', @@version_comment)";
SQLSelectStatement stmt = (SQLSelectStatement) SQLUtils.parseSingleStatement(sql, DbType.mysql);
assertEquals("SELECT concat(@@version, ' ', @@version_comment)", stmt.toString());
}
}
| MySqlSelectTest_250 |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/ListFactoryBean.java | {
"start": 1613,
"end": 3103
} | class ____ when defined in a Spring application context.
* <p>Default is a {@code java.util.ArrayList}.
* @see java.util.ArrayList
*/
@SuppressWarnings("rawtypes")
public void setTargetListClass(@Nullable Class<? extends List> targetListClass) {
if (targetListClass == null) {
throw new IllegalArgumentException("'targetListClass' must not be null");
}
if (!List.class.isAssignableFrom(targetListClass)) {
throw new IllegalArgumentException("'targetListClass' must implement [java.util.List]");
}
this.targetListClass = targetListClass;
}
@Override
@SuppressWarnings("rawtypes")
public Class<List> getObjectType() {
return List.class;
}
@Override
@SuppressWarnings("unchecked")
protected List<Object> createInstance() {
if (this.sourceList == null) {
throw new IllegalArgumentException("'sourceList' is required");
}
List<Object> result = null;
if (this.targetListClass != null) {
result = BeanUtils.instantiateClass(this.targetListClass);
}
else {
result = new ArrayList<>(this.sourceList.size());
}
Class<?> valueType = null;
if (this.targetListClass != null) {
valueType = ResolvableType.forClass(this.targetListClass).asCollection().resolveGeneric();
}
if (valueType != null) {
TypeConverter converter = getBeanTypeConverter();
for (Object elem : this.sourceList) {
result.add(converter.convertIfNecessary(elem, valueType));
}
}
else {
result.addAll(this.sourceList);
}
return result;
}
}
| name |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/builders/rolling/SizeBasedTriggeringPolicyBuilder.java | {
"start": 1611,
"end": 3176
} | class ____ extends AbstractBuilder<TriggeringPolicy>
implements TriggeringPolicyBuilder {
private static final String MAX_SIZE_PARAM = "MaxFileSize";
private static final long DEFAULT_MAX_SIZE = 10 * 1024 * 1024;
public SizeBasedTriggeringPolicyBuilder() {
super();
}
public SizeBasedTriggeringPolicyBuilder(final String prefix, final Properties props) {
super(prefix, props);
}
@Override
public SizeBasedTriggeringPolicy parse(final Element element, final XmlConfiguration configuration) {
final AtomicLong maxSize = new AtomicLong(DEFAULT_MAX_SIZE);
forEachElement(element.getChildNodes(), currentElement -> {
switch (currentElement.getTagName()) {
case PARAM_TAG:
switch (getNameAttributeKey(currentElement)) {
case MAX_SIZE_PARAM:
set(MAX_SIZE_PARAM, currentElement, maxSize);
break;
}
break;
}
});
return createTriggeringPolicy(maxSize.get());
}
@Override
public SizeBasedTriggeringPolicy parse(final PropertiesConfiguration configuration) {
final long maxSize = getLongProperty(MAX_SIZE_PARAM, DEFAULT_MAX_SIZE);
return createTriggeringPolicy(maxSize);
}
private SizeBasedTriggeringPolicy createTriggeringPolicy(final long maxSize) {
return SizeBasedTriggeringPolicy.createPolicy(Long.toString(maxSize));
}
}
| SizeBasedTriggeringPolicyBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/spi/PersistentList.java | {
"start": 12591,
"end": 12840
} | class ____ extends AbstractListValueDelayedOperation {
public Set(int index, E addedValue, E orphan) {
super( index, addedValue, orphan );
}
@Override
public void operate() {
list.set( getIndex(), getAddedInstance() );
}
}
final | Set |
java | quarkusio__quarkus | integration-tests/oidc-tenancy/src/test/java/io/quarkus/it/keycloak/BearerTokenStepUpAuthenticationTest.java | {
"start": 1724,
"end": 16533
} | class ____ {
@TestHTTPResource("/ws/tenant-annotation/bearer-step-up-auth")
URI websocketAuthCtxUri;
@Inject
Vertx vertx;
@Test
public void testMethodLevelAuthCtxNoRbac() {
// anonymous, no RBAC annotations on endpoint but acr required -> fail
RestAssured.given()
.when().get("/step-up-auth/method-level/no-rbac-annotation")
.then().statusCode(401);
// no acr -> fail
stepUpMethodLevelRequest(Set.of(), "no-rbac-annotation").statusCode(401);
// wrong single acr -> fail
stepUpMethodLevelRequest(Set.of("3"), "no-rbac-annotation").statusCode(401);
// wrong multiple acr -> fail
stepUpMethodLevelRequest(Set.of("3", "4"), "no-rbac-annotation").statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("1"));
// correct acr -> pass
stepUpMethodLevelRequest(Set.of("1"), "no-rbac-annotation").statusCode(200).body(is("no-rbac-annotation"));
}
@Test
public void testLocalTokenVerification() {
// anonymous, no RBAC annotations on endpoint but acr required -> fail
RestAssured.given()
.when().get("/tenant-ann-no-oidc-server-step-up-auth")
.then().statusCode(401);
Function<Set<String>, ValidatableResponse> request = acrValues -> RestAssured
.given()
.auth().oauth2(getAccessTokenVerifiedWithoutOidcServer(acrValues))
.when().get("/tenant-ann-no-oidc-server-step-up-auth")
.then();
// no acr -> fail
request.apply(Set.of()).statusCode(401);
// wrong single acr -> fail
request.apply(Set.of("3")).statusCode(401);
// wrong multiple acr -> fail
request.apply(Set.of("3", "4")).statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("1"));
// correct acr -> pass
request.apply(Set.of("1")).statusCode(200).body(is("/tenant-ann-no-oidc-server-step-up-auth"));
}
@Test
public void testClassLevelAuthCtxNoRbac() {
// anonymous, no RBAC annotations on endpoint but acr required -> fail
RestAssured.given()
.when().get("/step-up-auth/class-level/no-rbac-annotation")
.then().statusCode(401);
// no acr -> fail
stepUpClassLevelRequest(Set.of(), "no-rbac-annotation").statusCode(401);
// wrong single acr -> fail
stepUpClassLevelRequest(Set.of("3"), "no-rbac-annotation").statusCode(401);
// wrong multiple acr -> fail
stepUpClassLevelRequest(Set.of("3", "4"), "no-rbac-annotation").statusCode(401);
// correct acr -> pass
stepUpClassLevelRequest(Set.of("2"), "no-rbac-annotation").statusCode(200).body(is("no-rbac-annotation"));
}
@Test
public void testMethodLevelAuthCtxRolesAllowed() {
// no acr -> fail
stepUpMethodLevelRequest(Set.of(), "user-role").statusCode(401);
// wrong single acr -> fail
stepUpMethodLevelRequest(Set.of("1"), "user-role").statusCode(401);
// wrong multiple acr -> fail
stepUpMethodLevelRequest(Set.of("1", "4"), "user-role").statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("3"));
// correct acr & wrong role -> fail
stepUpMethodLevelRequest(Set.of("3"), "admin-role").statusCode(403);
// correct acr & correct role -> pass
stepUpMethodLevelRequest(Set.of("3"), "user-role").statusCode(200).body(is("user-role"));
}
@Test
public void testMethodLevelAuthTenantAnnotationSelection() {
// wrong acr & correct tenant -> fail
RestAssured.given()
.auth().oauth2(getAccessToken(Set.of("3")))
.when().get("/tenant-ann-step-up-auth/bearer-step-up-auth-1")
.then()
.statusCode(401);
// correct acr & tenant -> pass
RestAssured.given()
.auth().oauth2(getAccessToken(Set.of("6")))
.when().get("/tenant-ann-step-up-auth/bearer-step-up-auth-1")
.then()
.statusCode(200)
.body(is("step-up-auth-annotation-selection"));
// correct acr & second tenant -> pass
RestAssured.given()
.auth().oauth2(getAccessToken(Set.of("6")))
.when().get("/tenant-ann-step-up-auth/bearer-step-up-auth-2")
.then()
.statusCode(200)
.body(is("step-up-auth-annotation-selection-2"));
}
@Test
public void testClassLevelAuthCtxRolesAllowed() {
// no acr -> fail
stepUpClassLevelRequest(Set.of(), "user-role").statusCode(401);
// wrong single acr -> fail
stepUpClassLevelRequest(Set.of("1"), "user-role").statusCode(401);
// wrong multiple acr -> fail
stepUpClassLevelRequest(Set.of("1", "4"), "user-role").statusCode(401);
// correct acr & wrong role -> fail
stepUpClassLevelRequest(Set.of("2"), "admin-role").statusCode(403);
// correct acr & correct role -> pass
stepUpClassLevelRequest(Set.of("2"), "user-role").statusCode(200).body(is("user-role"));
}
@Test
public void testMethodLevelMultipleAcrsRequired() {
// no acr -> fail
stepUpMethodLevelRequest(Set.of(), "multiple-acr-required").statusCode(401);
// wrong single acr -> fail
stepUpMethodLevelRequest(Set.of("4"), "multiple-acr-required").statusCode(401);
// wrong multiple acr -> fail
stepUpMethodLevelRequest(Set.of("4", "5"), "multiple-acr-required").statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("1"))
.header("www-authenticate", containsString("2"))
.header("www-authenticate", containsString("3"));
// one wrong, one correct acr -> fail
stepUpMethodLevelRequest(Set.of("1", "4"), "multiple-acr-required").statusCode(401);
// one wrong, two correct acrs -> fail
stepUpMethodLevelRequest(Set.of("1", "2", "4"), "multiple-acr-required").statusCode(401);
// correct acrs -> pass
stepUpMethodLevelRequest(Set.of("1", "2", "3"), "multiple-acr-required").statusCode(200)
.body(is("multiple-acr-required"));
// correct acrs & an irrelevant extra acr -> pass
stepUpMethodLevelRequest(Set.of("1", "2", "3", "4"), "multiple-acr-required").statusCode(200)
.body(is("multiple-acr-required"));
}
@Test
public void testWebSocketsClassLevelAuthContextAnnotation()
throws ExecutionException, InterruptedException, TimeoutException {
// wrong acr -> fail
String wrongToken = getAccessToken(Set.of("3"));
Assertions.assertThrows(RuntimeException.class, () -> callWebSocketEndpoint(wrongToken, true));
// correct acr -> pass
String correctToken = getAccessToken(Set.of("7"));
callWebSocketEndpoint(correctToken, false);
}
@Test
public void testMaxAgeAndAcrRequired() {
// no auth_time claim && no acr -> fail
stepUpMethodLevelRequest(null, "max-age-and-acr-required").statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("max_age"))
.header("www-authenticate", containsString("120"))
.header("www-authenticate", containsString("acr_values"))
.header("www-authenticate", containsString("myACR"));
// no auth_time claim but iat is correct and acr is correct -> pass
stepUpMethodLevelRequest(Set.of("myACR"), "max-age-and-acr-required").statusCode(200)
.body(is("max-age-and-acr-required"));
// correct acr but (auth_time + max_age < now) -> fail
stepUpMethodLevelRequest(Set.of("myACR"), "max-age-and-acr-required", 123L).statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("max_age"))
.header("www-authenticate", containsString("120"))
.header("www-authenticate", containsString("acr_values"))
.header("www-authenticate", containsString("myACR"));
// correct expires at (auth_time + max_age > now) but wrong acr -> fail
final long nowSecs = System.currentTimeMillis() / 1000;
stepUpMethodLevelRequest(Set.of("wrongACR"), "max-age-and-acr-required", nowSecs).statusCode(401)
.header("www-authenticate", containsString("insufficient_user_authentication"))
.header("www-authenticate", containsString("max_age"))
.header("www-authenticate", containsString("120"))
.header("www-authenticate", containsString("acr_values"))
.header("www-authenticate", containsString("myACR"));
// correct acr but (auth_time + max_age > now) -> pass
stepUpMethodLevelRequest(Set.of("myACR"), "max-age-and-acr-required", nowSecs).statusCode(200)
.body(is("max-age-and-acr-required"));
}
@Test
public void testSingleAcrStringValueRequired() {
// wrong single acr -> fail
stepUpMethodLevelRequest(Set.of("3_string"), "no-rbac-annotation-string").statusCode(401);
// correct acr -> pass
stepUpMethodLevelRequest(Set.of("1_string"), "no-rbac-annotation-string").statusCode(200)
.body(is("no-rbac-annotation"));
}
private static ValidatableResponse stepUpMethodLevelRequest(Set<String> acrValues, String path) {
return stepUpMethodLevelRequest(acrValues, path, null);
}
private static ValidatableResponse stepUpMethodLevelRequest(Set<String> acrValues, String path,
Long authTime) {
return stepUpRequest(acrValues, "method", path, authTime);
}
private static ValidatableResponse stepUpClassLevelRequest(Set<String> acrValues, String path) {
return stepUpRequest(acrValues, "class", path, null);
}
private static ValidatableResponse stepUpRequest(Set<String> acrValues, String level, String path,
Long authTime) {
return RestAssured.given()
.auth().oauth2(getAccessTokenVerifiedWithOidcServer(acrValues, authTime))
.when().get("/step-up-auth/" + level + "-level/" + path)
.then();
}
private static String getAccessToken(Set<String> acrValues) {
return getAccessTokenVerifiedWithOidcServer(acrValues, null);
}
static String getAccessTokenWithAcr(Set<String> acrValues) {
return getAccessTokenVerifiedWithOidcServer(acrValues, null);
}
private static String getAccessTokenVerifiedWithOidcServer(Set<String> acrValues, Long authTime) {
// get access token from simple OIDC resource
String json = RestAssured
.given()
.queryParam("auth_time", authTime == null ? "" : Long.toString(authTime))
.queryParam("acr", acrValues == null ? "" : String.join(",", acrValues))
.when()
.post("/oidc/accesstoken-with-acr")
.body().asString();
JsonObject object = new JsonObject(json);
return object.getString("access_token");
}
private static String getAccessTokenVerifiedWithoutOidcServer(Set<String> acrValues) {
var jwtBuilder = Jwt.claim("scope", "read:data").preferredUserName("alice").issuer("acceptable-issuer");
if (acrValues != null) {
jwtBuilder.claim(Claims.acr, acrValues);
}
return jwtBuilder.sign();
}
private void callWebSocketEndpoint(String token, boolean expectFailure)
throws InterruptedException, ExecutionException, TimeoutException {
CountDownLatch connectedLatch = new CountDownLatch(1);
CountDownLatch messagesLatch = new CountDownLatch(2);
List<String> messages = new CopyOnWriteArrayList<>();
AtomicReference<io.vertx.core.http.WebSocket> ws1 = new AtomicReference<>();
WebSocketClient client = vertx.createWebSocketClient();
WebSocketConnectOptions options = new WebSocketConnectOptions();
options.setHost(websocketAuthCtxUri.getHost());
options.setPort(websocketAuthCtxUri.getPort());
options.setURI(websocketAuthCtxUri.getPath());
if (token != null) {
options.addHeader(HttpHeaders.AUTHORIZATION.toString(), "Bearer " + token);
}
AtomicReference<Throwable> throwable = new AtomicReference<>();
try {
client
.connect(options)
.onComplete(r -> {
if (r.succeeded()) {
io.vertx.core.http.WebSocket ws = r.result();
ws.textMessageHandler(msg -> {
messages.add(msg);
messagesLatch.countDown();
});
// We will use this socket to write a message later on
ws1.set(ws);
connectedLatch.countDown();
} else {
throwable.set(r.cause());
}
});
if (expectFailure) {
Awaitility.await().atMost(5, TimeUnit.SECONDS).until(() -> throwable.get() != null);
throw new RuntimeException(throwable.get());
} else {
Assertions.assertTrue(connectedLatch.await(5, TimeUnit.SECONDS));
ws1.get().writeTextMessage("hello");
Assertions.assertTrue(messagesLatch.await(5, TimeUnit.SECONDS), "Messages: " + messages);
Assertions.assertEquals(2, messages.size(), "Messages: " + messages);
Assertions.assertEquals("ready", messages.get(0));
Assertions.assertEquals("step-up-auth-annotation-selection echo: hello", messages.get(1));
}
} finally {
client.close().toCompletionStage().toCompletableFuture().get(5, TimeUnit.SECONDS);
}
}
@Tenant("tenant-public-key")
@Path("/tenant-ann-no-oidc-server-step-up-auth")
public static | BearerTokenStepUpAuthenticationTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/functions/Function8.java | {
"start": 1146,
"end": 1826
} | interface ____<@NonNull T1, @NonNull T2, @NonNull T3, @NonNull T4, @NonNull T5, @NonNull T6, @NonNull T7, @NonNull T8, @NonNull R> {
/**
* Calculate a value based on the input values.
* @param t1 the first value
* @param t2 the second value
* @param t3 the third value
* @param t4 the fourth value
* @param t5 the fifth value
* @param t6 the sixth value
* @param t7 the seventh value
* @param t8 the eighth value
* @return the result value
* @throws Throwable if the implementation wishes to throw any type of exception
*/
R apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) throws Throwable;
}
| Function8 |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java | {
"start": 5607,
"end": 32109
} | class ____
implements HttpUrlConnectorProvider.ConnectionFactory {
@Override
public HttpURLConnection getConnection(final URL url)
throws IOException {
try {
return (HttpURLConnection)url.openConnection();
} catch (UndeclaredThrowableException e) {
throw new IOException(e.getCause());
}
}
}
@Test
public void testAbout() throws Exception {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/");
Client client = createClient()
.register(TimelineAboutReader.class);
try {
Response resp = getResponse(client, uri);
TimelineAbout about = resp.readEntity(TimelineAbout.class);
assertNotNull(about);
assertEquals("Timeline Reader API", about.getAbout());
} finally {
client.close();
}
}
@Test
public void testGetEntityDefaultView() throws Exception {
Client client = createClient().register(TimelineEntityReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app/id_1");
Response resp = getResponse(client, uri);
TimelineEntity entity = resp.readEntity(TimelineEntity.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entity);
assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType());
assertEquals((Long) 1425016502000L, entity.getCreatedTime());
// Default view i.e. when no fields are specified, entity contains only
// entity id, entity type and created time.
assertEquals(0, entity.getConfigs().size());
assertEquals(0, entity.getMetrics().size());
} finally {
client.close();
}
}
@Test
void testGetEntityWithUserAndFlowInfo() throws Exception {
Client client = createClient().register(TimelineEntityReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
"userid=user1&flowname=flow1&flowrunid=1");
Response resp = getResponse(client, uri);
TimelineEntity entity = resp.readEntity(TimelineEntity.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entity);
assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType());
assertEquals((Long) 1425016502000L, entity.getCreatedTime());
} finally {
client.close();
}
}
@Test
void testGetEntityCustomFields() throws Exception {
Client client = createClient().register(TimelineEntityReader.class);
try {
// Fields are case insensitive.
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
"fields=CONFIGS,Metrics,info");
Response resp = getResponse(client, uri);
TimelineEntity entity = resp.readEntity(TimelineEntity.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entity);
assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType());
assertEquals(3, entity.getConfigs().size());
assertEquals(3, entity.getMetrics().size());
assertTrue(entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY),
"UID should be present");
// Includes UID.
assertEquals(3, entity.getInfo().size());
// No events will be returned as events are not part of fields.
assertEquals(0, entity.getEvents().size());
} finally {
client.close();
}
}
@Test
void testGetEntityAllFields() throws Exception {
Client client = createClient().register(TimelineEntityReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app/id_1?fields=ALL");
Response resp = getResponse(client, uri);
TimelineEntity entity = resp.readEntity(TimelineEntity.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entity);
assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType());
assertEquals(3, entity.getConfigs().size());
assertEquals(3, entity.getMetrics().size());
assertTrue(entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY),
"UID should be present");
// Includes UID.
assertEquals(3, entity.getInfo().size());
assertEquals(2, entity.getEvents().size());
} finally {
client.close();
}
}
@Test
void testGetEntityNotPresent() throws Exception {
Client client = createClient();
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app/id_10");
verifyHttpResponse(client, uri, Response.Status.NOT_FOUND);
} finally {
client.close();
}
}
@Test
void testQueryWithoutCluster() throws Exception {
Client client = createClient().
register(TimelineEntityReader.class).
register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entities/app/id_1");
Response resp = getResponse(client, uri);
TimelineEntity entity = resp.readEntity(TimelineEntity.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entity);
assertEquals("id_1", entity.getId());
assertEquals("app", entity.getType());
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/apps/app1/entities/app");
resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(4, entities.size());
} finally {
client.close();
}
}
@Test
void testGetEntities() throws Exception {
Client client = createClient().
register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities =
resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(4, entities.size());
assertTrue(entities.contains(newEntity("app", "id_1")) &&
entities.contains(newEntity("app", "id_2")) &&
entities.contains(newEntity("app", "id_3")) &&
entities.contains(newEntity("app", "id_4")),
"Entities id_1, id_2, id_3 and id_4 should have been" +
" present in response");
} finally {
client.close();
}
}
@Test
void testGetEntitiesWithLimit() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?limit=2");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities =
resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(2, entities.size());
// Entities returned are based on most recent created time.
assertTrue(entities.contains(newEntity("app", "id_1")) &&
entities.contains(newEntity("app", "id_4")),
"Entities with id_1 and id_4 should have been present " +
"in response based on entity created time.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?limit=3");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
// Even though 2 entities out of 4 have same created time, one entity
// is left out due to limit
assertEquals(3, entities.size());
} finally {
client.close();
}
}
@Test
void testGetEntitiesBasedOnCreatedTime() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"createdtimestart=1425016502030&createdtimeend=1425016502060");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities =
resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_4")),
"Entity with id_4 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?createdtimeend" +
"=1425016502010");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(3, entities.size());
assertFalse(entities.contains(newEntity("app", "id_4")),
"Entity with id_4 should not have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?createdtimestart=" +
"1425016502010");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_4")),
"Entity with id_4 should have been present in response.");
} finally {
client.close();
}
}
@Test
void testGetEntitiesByRelations() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?relatesto=" +
"flow:flow1");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities =
resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_1")),
"Entity with id_1 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
"type1:tid1_2,type2:tid2_1%60");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_1")),
"Entity with id_1 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
"type1:tid1_1:tid1_2,type2:tid2_1%60");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_1")),
"Entity with id_1 should have been present in response.");
} finally {
client.close();
}
}
@Test
void testGetEntitiesByConfigFilters() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"conffilters=config_1%20eq%20123%20AND%20config_3%20eq%20abc");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_3")),
"Entity with id_3 should have been present in response.");
} finally {
client.close();
}
}
@Test
void testGetEntitiesByInfoFilters() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"infofilters=info2%20eq%203.5");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_3")),
"Entity with id_3 should have been present in response.");
} finally {
client.close();
}
}
@Test
void testGetEntitiesByMetricFilters() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"metricfilters=metric3%20ge%200");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities =
resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(2, entities.size());
assertTrue(entities.contains(newEntity("app", "id_1")) &&
entities.contains(newEntity("app", "id_2")),
"Entities with id_1 and id_2 should have been present in response.");
} finally {
client.close();
}
}
@Test
void testGetEntitiesByEventFilters() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"eventfilters=event_2,event_4");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_3")),
"Entity with id_3 should have been present in response.");
} finally {
client.close();
}
}
@Test
void testGetEntitiesNoMatch() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"metricfilters=metric7%20ge%200&isrelatedto=type1:tid1_1:tid1_2," +
"type2:tid2_1%60&relatesto=flow:flow1&eventfilters=event_2,event_4" +
"&infofilters=info2%20eq%203.5&createdtimestart=1425016502030&" +
"createdtimeend=1425016502060");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>(){});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
assertEquals(0, entities.size());
} finally {
client.close();
}
}
@Test
void testInvalidValuesHandling() throws Exception {
Client client = createClient();
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?flowrunid=a23b");
verifyHttpResponse(client, uri, Response.Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app/id_1?flowrunid=2ab15");
verifyHttpResponse(client, uri, Response.Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
"clusters/cluster1/apps/app1/entities/app?limit=#$561av");
verifyHttpResponse(client, uri, Response.Status.BAD_REQUEST);
} finally {
client.close();
}
}
@Test
void testGetAppAttempts() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/"
+ "entities/YARN_APPLICATION_ATTEMPT");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>() {});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
int totalEntities = entities.size();
assertEquals(2, totalEntities);
assertTrue(entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), "app-attempt-1")),
"Entity with app-attempt-2 should have been present in response.");
assertTrue(entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), "app-attempt-2")),
"Entity with app-attempt-2 should have been present in response.");
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/appattempts");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>() {
});
assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType());
assertNotNull(entities);
int retrievedEntity = entities.size();
assertEquals(2, retrievedEntity);
assertTrue(entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), "app-attempt-1")),
"Entity with app-attempt-2 should have been present in response.");
assertTrue(entities.contains(
newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), "app-attempt-2")),
"Entity with app-attempt-2 should have been present in response.");
assertEquals(totalEntities, retrievedEntity);
} finally {
client.close();
}
}
@Test
void testGetAppAttempt() throws Exception {
Client client = createClient().register(TimelineEntityReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/entities/"
+ "YARN_APPLICATION_ATTEMPT/app-attempt-1");
Response resp = getResponse(client, uri);
TimelineEntity entities1 = resp.readEntity(new GenericType<TimelineEntity>() {});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities1);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/appattempts/app-attempt-1");
resp = getResponse(client, uri);
TimelineEntity entities2 = resp.readEntity(new GenericType<TimelineEntity>() {});
assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType());
assertNotNull(entities2);
assertEquals(entities1, entities2);
} finally {
client.close();
}
}
@Test
void testGetContainers() throws Exception {
Client client = createClient().register(TimelineEntitySetReader.class);
try {
// total 3 containers in a application.
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/entities/YARN_CONTAINER");
Response resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.readEntity(new GenericType<Set<TimelineEntity>>() {});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities);
int totalEntities = entities.size();
assertEquals(3, totalEntities);
assertTrue(
entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")),
"Entity with container_1_1 should have been present in response.");
assertTrue(
entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")),
"Entity with container_2_1 should have been present in response.");
assertTrue(
entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")),
"Entity with container_2_2 should have been present in response.");
// for app-attempt1 1 container has run
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/"
+ "appattempts/app-attempt-1/containers");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>() {});
assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType());
assertNotNull(entities);
int retrievedEntity = entities.size();
assertEquals(1, retrievedEntity);
assertTrue(
entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")),
"Entity with container_1_1 should have been present in response.");
// for app-attempt2 2 containers has run
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/"
+ "appattempts/app-attempt-2/containers");
resp = getResponse(client, uri);
entities = resp.readEntity(new GenericType<Set<TimelineEntity>>() {
});
assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType());
assertNotNull(entities);
retrievedEntity += entities.size();
assertEquals(2, entities.size());
assertTrue(
entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")),
"Entity with container_2_1 should have been present in response.");
assertTrue(
entities.contains(newEntity(
TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")),
"Entity with container_2_2 should have been present in response.");
assertEquals(totalEntities, retrievedEntity);
} finally {
client.close();
}
}
@Test
void testGetContainer() throws Exception {
Client client = createClient().register(TimelineEntityReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/"
+ "entities/YARN_CONTAINER/container_2_2");
Response resp = getResponse(client, uri);
TimelineEntity entities1 =
resp.readEntity(new GenericType<TimelineEntity>() {
});
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
resp.getMediaType().toString());
assertNotNull(entities1);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/clusters/cluster1/apps/app1/containers/container_2_2");
resp = getResponse(client, uri);
TimelineEntity entities2 =
resp.readEntity(new GenericType<TimelineEntity>() {
});
assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType());
assertNotNull(entities2);
assertEquals(entities1, entities2);
} finally {
client.close();
}
}
@Test
void testHealthCheck() throws Exception {
Client client = createClient().register(TimelineHealthReader.class);
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
+ "timeline/health");
Response resp = getResponse(client, uri);
TimelineHealth timelineHealth =
resp.readEntity(new GenericType<TimelineHealth>() {
});
assertEquals(200, resp.getStatus());
assertEquals(TimelineHealth.TimelineHealthStatus.RUNNING,
timelineHealth.getHealthStatus());
} finally {
client.close();
}
}
} | DummyURLConnectionFactory |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Issue995.java | {
"start": 159,
"end": 361
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
Person person = new Person();
JSONPath.set(person, "$.nose.name", "xxx");
}
public static | Issue995 |
java | google__guice | core/test/com/google/inject/ProvisionListenerTest.java | {
"start": 18845,
"end": 18897
} | class ____ implements JitFoo, LinkedFoo {}
static | Foo |
java | google__guava | android/guava/src/com/google/common/collect/MapMakerInternalMap.java | {
"start": 14251,
"end": 14845
} | interface ____<K, V, E extends InternalEntry<K, V, E>> extends InternalEntry<K, V, E> {
/** Gets the weak value reference held by entry. */
WeakValueReference<K, V, E> getValueReference();
}
@SuppressWarnings("unchecked") // impl never uses a parameter or returns any non-null value
static <K, V, E extends InternalEntry<K, V, E>>
WeakValueReference<K, V, E> unsetWeakValueReference() {
return (WeakValueReference<K, V, E>) UNSET_WEAK_VALUE_REFERENCE;
}
/** Concrete implementation of {@link InternalEntry} for strong keys and strong values. */
static | WeakValueEntry |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_xiedun.java | {
"start": 268,
"end": 675
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
// File file = new File("/Users/wenshao/Downloads/json.txt");
//
// BufferedReader reader = new BufferedReader(new FileReader(file));
// char[] buf = new char[20480];
// int readed = reader.read(buf);
// String content = new String(buf);
//
// JSON.parse(content);
}
}
| Bug_for_xiedun |
java | google__guice | core/test/com/google/inject/DuplicateBindingsTest.java | {
"start": 14061,
"end": 14686
} | class ____ extends AbstractModule {
protected final FooImpl foo;
protected final Provider<Foo> pFoo;
protected final Class<? extends Provider<? extends Foo>> pclFoo;
protected final Class<? extends Foo> clFoo;
protected final Constructor<FooImpl> cFoo;
FooModule(
FooImpl foo,
Provider<Foo> pFoo,
Class<? extends Provider<? extends Foo>> pclFoo,
Class<? extends Foo> clFoo,
Constructor<FooImpl> cFoo) {
this.foo = foo;
this.pFoo = pFoo;
this.pclFoo = pclFoo;
this.clFoo = clFoo;
this.cFoo = cFoo;
}
}
private static | FooModule |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/exception/NotAuditedException.java | {
"start": 199,
"end": 534
} | class ____ extends AuditException {
private static final long serialVersionUID = 4809674577449455510L;
private final String entityName;
public NotAuditedException(String entityName, String message) {
super( message );
this.entityName = entityName;
}
public String getEntityName() {
return entityName;
}
}
| NotAuditedException |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLConstraint.java | {
"start": 798,
"end": 1049
} | interface ____ extends SQLObject {
SQLName getName();
void setName(SQLName value);
SQLExpr getComment();
void setComment(SQLExpr x);
void simplify();
default void setHasConstraint(boolean hasConstraint) {
}
}
| SQLConstraint |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/ServiceConfig.java | {
"start": 19414,
"end": 19792
} | interface ____ mapping for service "
+ url.getServiceKey());
}
} catch (Exception e) {
logger.error(
CONFIG_SERVER_DISCONNECTED,
"configuration server disconnected",
"",
"[INSTANCE_REGISTER] [METADATA_REGISTER] Failed register | application |
java | apache__logging-log4j2 | log4j-layout-template-json-test/src/test/java/org/apache/logging/log4j/layout/template/json/JsonTemplateLayoutTest.java | {
"start": 54473,
"end": 67392
} | class ____ extends Thread implements AutoCloseable {
private final ServerSocket serverSocket;
private final BlockingQueue<JsonNode> receivedNodes;
private volatile int droppedNodeCount = 0;
private volatile boolean closed = false;
private JsonAcceptingTcpServer(final int port, final int capacity) throws IOException {
this.serverSocket = new ServerSocket(port);
this.receivedNodes = new ArrayBlockingQueue<>(capacity);
serverSocket.setReuseAddress(true);
serverSocket.setSoTimeout(5_000);
setDaemon(true);
start();
}
@Override
public void run() {
try {
try (final Socket socket = serverSocket.accept()) {
final InputStream inputStream = socket.getInputStream();
while (!closed) {
final MappingIterator<JsonNode> iterator = JacksonFixture.getObjectMapper()
.readerFor(JsonNode.class)
.readValues(inputStream);
while (iterator.hasNextValue()) {
final JsonNode value = iterator.nextValue();
synchronized (this) {
final boolean added = receivedNodes.offer(value);
if (!added) {
droppedNodeCount++;
}
}
}
}
}
} catch (final EOFException ignored) {
// Socket is closed.
} catch (final Exception error) {
if (!closed) {
throw new RuntimeException(error);
}
}
}
@Override
public synchronized void close() {
if (closed) {
throw new IllegalStateException("shutdown has already been invoked");
}
closed = true;
interrupt();
try {
join(3_000L);
} catch (InterruptedException ignored) {
// Due to JDK-7027157, we shouldn't throw an InterruptedException
// from an AutoCloseable#close() method. Hence we catch it and
// then restore the interrupted flag.
Thread.currentThread().interrupt();
}
}
}
private static String explainChars(final String input) {
return IntStream.range(0, input.length())
.mapToObj(i -> {
final char c = input.charAt(i);
return String.format("'%c' (%04X)", c, (int) c);
})
.collect(Collectors.joining(", "));
}
@Test
void test_PatternResolver() {
// Create the event template.
final String eventTemplate = writeJson(asMap(
"message",
asMap(
"$resolver", "pattern",
"pattern", "%p:%m")));
// Create the layout.
final JsonTemplateLayout layout = JsonTemplateLayout.newBuilder()
.setConfiguration(CONFIGURATION)
.setEventTemplate(eventTemplate)
.build();
// Create the log event.
final SimpleMessage message = new SimpleMessage("foo");
final Level level = Level.FATAL;
final LogEvent logEvent = Log4jLogEvent.newBuilder()
.setLoggerName(LOGGER_NAME)
.setMessage(message)
.setLevel(level)
.build();
// Check the serialized event.
usingSerializedLogEventAccessor(layout, logEvent, accessor -> {
final String expectedMessage = String.format("%s:%s", level, message.getFormattedMessage());
assertThat(accessor.getString("message")).isEqualTo(expectedMessage);
});
}
@Test
void test_MessageParameterResolver_with_ParameterizedMessageFactory() {
testMessageParameterResolver(ParameterizedMessageFactory.INSTANCE);
}
@Test
void test_MessageParameterResolver_noParameters_with_ParameterizedMessageFactory() {
testMessageParameterResolverNoParameters(ParameterizedMessageFactory.INSTANCE);
}
@Test
void test_MessageParameterResolver_with_ReusableMessageFactory() {
testMessageParameterResolver(ReusableMessageFactory.INSTANCE);
}
@Test
void test_MessageParameterResolver_noParameters_with_ReusableMessageFactory() {
testMessageParameterResolverNoParameters(ReusableMessageFactory.INSTANCE);
}
private static void testMessageParameterResolver(final MessageFactory messageFactory) {
// Create the event template.
final String eventTemplate = writeJson(asMap(
"po*", asMap("$resolver", "messageParameter"),
"ps*", asMap("$resolver", "messageParameter", "stringified", true),
"po2", asMap("$resolver", "messageParameter", "index", 2),
"ps2", asMap("$resolver", "messageParameter", "index", 2, "stringified", true),
"po3", asMap("$resolver", "messageParameter", "index", 3)));
// Create the layout.
final JsonTemplateLayout layout = JsonTemplateLayout.newBuilder()
.setConfiguration(CONFIGURATION)
.setEventTemplate(eventTemplate)
.build();
// Create the log event.
final Object[] parameters = {1L + (long) Integer.MAX_VALUE, "foo", 56};
final Message message = messageFactory.newMessage("foo", parameters);
final Level level = Level.FATAL;
final LogEvent logEvent = Log4jLogEvent.newBuilder()
.setLoggerName(LOGGER_NAME)
.setMessage(message)
.setLevel(level)
.build();
// Check the serialized event.
usingSerializedLogEventAccessor(layout, logEvent, accessor -> {
assertThat(accessor.getObject("po*")).isEqualTo(Arrays.asList(parameters));
final List<String> stringifiedParameters =
Arrays.stream(parameters).map(String::valueOf).collect(Collectors.toList());
assertThat(accessor.getObject("ps*")).isEqualTo(stringifiedParameters);
assertThat(accessor.getObject("po2")).isEqualTo(parameters[2]);
assertThat(accessor.getString("ps2")).isEqualTo(stringifiedParameters.get(2));
assertThat(accessor.getString("ps3")).isNull();
});
}
private static void testMessageParameterResolverNoParameters(final MessageFactory messageFactory) {
// Create the event template.
final String eventTemplate = writeJson(asMap(
"po*", asMap("$resolver", "messageParameter"),
"ps*", asMap("$resolver", "messageParameter", "stringified", true)));
// Create the layout.
final JsonTemplateLayout layout = JsonTemplateLayout.newBuilder()
.setConfiguration(CONFIGURATION)
.setEventTemplate(eventTemplate)
.build();
// Create the log event.
final Message message = messageFactory.newMessage("foo", new Object[0]);
final Level level = Level.FATAL;
final LogEvent logEvent = Log4jLogEvent.newBuilder()
.setLoggerName(LOGGER_NAME)
.setMessage(message)
.setLevel(level)
.build();
// Check the serialized event.
usingSerializedLogEventAccessor(layout, logEvent, accessor -> {
assertThat(accessor.getObject("po*")).isEqualTo(Collections.emptyList());
assertThat(accessor.getObject("ps*")).isEqualTo(Collections.emptyList());
});
}
@Test
void test_unresolvable_nested_fields_are_skipped() {
// Create the event template.
final String eventTemplate = writeJson(asMap(
"exception",
asMap(
"message",
asMap(
"$resolver", "exception",
"field", "message"),
"className",
asMap(
"$resolver", "exception",
"field", "className")),
"exceptionRootCause",
asMap(
"message",
asMap(
"$resolver", "exceptionRootCause",
"field", "message"),
"className",
asMap(
"$resolver", "exceptionRootCause",
"field", "className")),
"source",
asMap(
"lineNumber",
asMap(
"$resolver", "source",
"field", "lineNumber"),
"fileName",
asMap(
"$resolver", "source",
"field", "fileName")),
"emptyMap", Collections.emptyMap(),
"emptyList", Collections.emptyList(),
"null", null));
// Create the layout.
final JsonTemplateLayout layout = JsonTemplateLayout.newBuilder()
.setConfiguration(CONFIGURATION)
.setEventTemplate(eventTemplate)
.setStackTraceEnabled(false) // Disable "exception" and "exceptionRootCause" resolvers.
.setLocationInfoEnabled(false) // Disable the "source" resolver.
.build();
// Create the log event.
final SimpleMessage message = new SimpleMessage("foo");
final Exception thrown = new RuntimeException("bar");
final LogEvent logEvent = Log4jLogEvent.newBuilder()
.setLoggerName(LOGGER_NAME)
.setMessage(message)
.setThrown(thrown)
.build();
// Check the serialized event.
final String expectedSerializedLogEventJson = "{}" + JsonTemplateLayoutDefaults.getEventDelimiter();
final String actualSerializedLogEventJson = layout.toSerializable(logEvent);
assertThat(actualSerializedLogEventJson).isEqualTo(expectedSerializedLogEventJson);
}
@Test
void test_recursive_collections() {
// Create the event template.
final String eventTemplate = writeJson(asMap("message", asMap("$resolver", "message")));
// Create the layout.
final JsonTemplateLayout layout = JsonTemplateLayout.newBuilder()
.setConfiguration(CONFIGURATION)
.setEventTemplate(eventTemplate)
.build();
// Create a recursive collection.
final Object[] recursiveCollection = new Object[1];
recursiveCollection[0] = recursiveCollection;
// Create the log event.
final Message message = new ObjectMessage(recursiveCollection);
final LogEvent logEvent = Log4jLogEvent.newBuilder()
.setLoggerName(LOGGER_NAME)
.setMessage(message)
.build();
// Check the serialized event.
assertThatThrownBy(() -> layout.toSerializable(logEvent)).isInstanceOf(StackOverflowError.class);
}
@Test
void test_eventTemplateRootObjectKey() {
// Create the event template.
final String eventTemplate = writeJson(asMap("message", asMap("$resolver", "message")));
// Create the layout.
final JsonTemplateLayout layout = JsonTemplateLayout.newBuilder()
.setConfiguration(CONFIGURATION)
.setEventTemplate(eventTemplate)
.setEventTemplateRootObjectKey("event")
.build();
// Create the log event.
final Message message = new SimpleMessage("LOG4J2-2985");
final LogEvent logEvent = Log4jLogEvent.newBuilder()
.setLoggerName(LOGGER_NAME)
.setMessage(message)
.build();
// Check the serialized event.
usingSerializedLogEventAccessor(
layout, logEvent, accessor -> assertThat(accessor.getObject(new String[] {"event", "message"}))
.isEqualTo("LOG4J2-2985"));
}
}
| JsonAcceptingTcpServer |
java | micronaut-projects__micronaut-core | http-client/src/main/java/io/micronaut/http/client/netty/Http1ResponseHandler.java | {
"start": 8441,
"end": 12838
} | class ____ extends ReaderState<HttpContent> implements BufferConsumer.Upstream {
private final ResponseListener listener;
private final ChannelHandlerContext streamingContext;
private final StreamingNettyByteBody.SharedBuffer streaming;
private final boolean wasAutoRead;
private long demand;
UnbufferedContent(ResponseListener listener, ChannelHandlerContext ctx, HttpResponse response) {
this.listener = listener;
streaming = new NettyByteBodyFactory(ctx.channel()).createStreamingBuffer(listener.sizeLimits(), this);
if (!listener.isHeadResponse()) {
streaming.setExpectedLengthFrom(response.headers());
}
streamingContext = ctx;
wasAutoRead = ctx.channel().config().isAutoRead();
ctx.channel().config().setAutoRead(false);
}
@Override
void leave(ChannelHandlerContext ctx) {
ctx.channel().config().setAutoRead(wasAutoRead);
}
void add(ReadBuffer buf) {
int n = buf.readable();
if (n != 0) {
demand -= n;
streaming.add(buf);
} else {
buf.close();
}
}
@Override
void read(ChannelHandlerContext ctx, HttpContent msg) {
add(NettyReadBufferFactory.of(ctx.alloc()).adapt(msg.content()));
if (msg instanceof LastHttpContent) {
transitionToState(ctx, this, AfterContent.INSTANCE);
streaming.complete();
listener.finish(ctx);
}
}
@Override
void channelReadComplete(ChannelHandlerContext ctx) {
if (demand > 0) {
ctx.read();
}
}
@Override
void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
transitionToState(ctx, this, AfterContent.INSTANCE);
streaming.error(cause);
listener.finish(ctx);
}
@Override
public void start() {
if (streamingContext.executor().inEventLoop()) {
start0();
} else {
streamingContext.executor().execute(this::start0);
}
}
private void start0() {
onBytesConsumed0(1);
}
@Override
public void onBytesConsumed(long bytesConsumed) {
if (streamingContext.executor().inEventLoop()) {
onBytesConsumed0(bytesConsumed);
} else {
streamingContext.executor().execute(() -> onBytesConsumed0(bytesConsumed));
}
}
private void onBytesConsumed0(long bytesConsumed) {
if (state != this) {
return;
}
long oldDemand = demand;
long newDemand = oldDemand + bytesConsumed;
if (newDemand < oldDemand) {
// overflow
newDemand = Long.MAX_VALUE;
}
this.demand = newDemand;
if (oldDemand <= 0 && newDemand > 0) {
streamingContext.read();
}
}
@Override
public void allowDiscard() {
if (streamingContext.executor().inEventLoop()) {
allowDiscard0();
} else {
streamingContext.executor().execute(this::allowDiscard0);
}
}
private void allowDiscard0() {
if (state == this) {
transitionToState(streamingContext, this, new DiscardingContent(listener, streaming));
disregardBackpressure();
}
listener.allowDiscard();
}
@Override
public void disregardBackpressure() {
if (streamingContext.executor().inEventLoop()) {
disregardBackpressure0();
} else {
streamingContext.executor().execute(this::disregardBackpressure0);
}
}
private void disregardBackpressure0() {
long oldDemand = demand;
demand = Long.MAX_VALUE;
if (oldDemand <= 0 && state == this) {
streamingContext.read();
}
}
}
/**
* Short-circuiting handler that discards incoming content.
*/
private final | UnbufferedContent |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/type/ByteObjectArrayTypeHandlerTest.java | {
"start": 901,
"end": 2869
} | class ____ extends BaseTypeHandlerTest {
private static final TypeHandler<Byte[]> TYPE_HANDLER = new ByteObjectArrayTypeHandler();
@Override
@Test
public void shouldSetParameter() throws Exception {
TYPE_HANDLER.setParameter(ps, 1, new Byte[] { 1, 2, 3 }, null);
verify(ps).setBytes(1, new byte[] { 1, 2, 3 });
}
@Override
@Test
public void shouldGetResultFromResultSetByName() throws Exception {
byte[] byteArray = { 1, 2 };
when(rs.getBytes("column")).thenReturn(byteArray);
assertThat(TYPE_HANDLER.getResult(rs, "column")).isEqualTo(new Byte[] { 1, 2 });
verify(rs, never()).wasNull();
}
@Override
@Test
public void shouldGetResultNullFromResultSetByName() throws Exception {
when(rs.getBytes("column")).thenReturn(null);
assertThat(TYPE_HANDLER.getResult(rs, "column")).isNull();
verify(rs, never()).wasNull();
}
@Override
@Test
public void shouldGetResultFromResultSetByPosition() throws Exception {
byte[] byteArray = { 1, 2 };
when(rs.getBytes(1)).thenReturn(byteArray);
assertThat(TYPE_HANDLER.getResult(rs, 1)).isEqualTo(new Byte[] { 1, 2 });
verify(rs, never()).wasNull();
}
@Override
@Test
public void shouldGetResultNullFromResultSetByPosition() throws Exception {
when(rs.getBytes(1)).thenReturn(null);
assertThat(TYPE_HANDLER.getResult(rs, 1)).isNull();
verify(rs, never()).wasNull();
}
@Override
@Test
public void shouldGetResultFromCallableStatement() throws Exception {
byte[] byteArray = { 1, 2 };
when(cs.getBytes(1)).thenReturn(byteArray);
assertThat(TYPE_HANDLER.getResult(cs, 1)).isEqualTo(new Byte[] { 1, 2 });
verify(cs, never()).wasNull();
}
@Override
@Test
public void shouldGetResultNullFromCallableStatement() throws Exception {
when(cs.getBytes(1)).thenReturn(null);
assertThat(TYPE_HANDLER.getResult(cs, 1)).isNull();
verify(cs, never()).wasNull();
}
}
| ByteObjectArrayTypeHandlerTest |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/inheritance/single/notownedrelation/Contact.java | {
"start": 816,
"end": 1364
} | class ____ implements Serializable {
@Id
@GeneratedValue
private Long id;
private String email;
@OneToMany(mappedBy = "contact")
private Set<Address> addresses;
public Contact() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public Set<Address> getAddresses() {
return addresses;
}
public void setAddresses(Set<Address> addresses) {
this.addresses = addresses;
}
}
| Contact |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/metrics/MetricsContext.java | {
"start": 1685,
"end": 2017
} | interface ____ {
/* predefined fields */
String NAMESPACE = "_namespace"; // metrics namespace, formerly jmx prefix
/**
* Returns the labels for this metrics context.
*
* @return the map of label keys and values; never null but possibly empty
*/
Map<String, String> contextLabels();
}
| MetricsContext |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/extension/director/impl/TestAppService.java | {
"start": 1247,
"end": 2428
} | class ____ extends BaseTestService implements FooAppService {
private FooFrameworkService frameworkService;
private FooFrameworkProvider frameworkProvider;
private FooAppProvider appProvider;
private FooModuleProvider moduleProvider;
public FooFrameworkService getFrameworkService() {
return frameworkService;
}
public void setFrameworkService(FooFrameworkService frameworkService) {
this.frameworkService = frameworkService;
}
public FooAppProvider getAppProvider() {
return appProvider;
}
public void setAppProvider(FooAppProvider appProvider) {
this.appProvider = appProvider;
}
public FooModuleProvider getModuleProvider() {
return moduleProvider;
}
public void setModuleProvider(FooModuleProvider moduleProvider) {
this.moduleProvider = moduleProvider;
}
public FooFrameworkProvider getFrameworkProvider() {
return frameworkProvider;
}
public void setFrameworkProvider(FooFrameworkProvider frameworkProvider) {
this.frameworkProvider = frameworkProvider;
}
@Override
public void process(URL url) {}
}
| TestAppService |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/CurrencyTest3.java | {
"start": 262,
"end": 1073
} | class ____ {
public Currency currency;
public BigDecimal amount;
@Override
public String toString() {
return "Money{currency=" + currency + ", amount=" + amount + '}';
}
}
public void testJson() throws Exception {
Money money = new Money();
money.currency = Currency.getInstance("CNY");
money.amount = new BigDecimal("10.03");
String json = JSON.toJSONString(money);
System.out.println("json = " + json);
Money moneyBack = JSON.parseObject(json, Money.class);
System.out.println("money = " + moneyBack);
JSONObject jsonObject = JSON.parseObject(json);
Money moneyCast = JSON.toJavaObject(jsonObject, Money.class);
System.out.printf("money = " + moneyCast);
}
}
| Money |
java | micronaut-projects__micronaut-core | test-suite/src/test/groovy/io/micronaut/context/inject/EventManagerImpl.java | {
"start": 647,
"end": 888
} | class ____ implements EventManager {
private String serviceName;
public EventManagerImpl(String serviceName) {
this.serviceName = serviceName;
}
@Override
public void register(String id) {
}
}
| EventManagerImpl |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceShardsClosedListenersTests.java | {
"start": 7869,
"end": 9527
} | class ____ extends IndicesClusterStateService {
BiConsumer<IndicesClusterStateService, ClusterChangedEvent> doApplyClusterStateHook;
TestIndicesClusterStateService(
ThreadPool threadPool,
BiConsumer<IndicesClusterStateService, ClusterChangedEvent> doApplyClusterStateHook
) {
super(
Settings.EMPTY,
new MockIndicesService(),
new ClusterService(Settings.EMPTY, ClusterSettings.createBuiltInClusterSettings(), threadPool, null),
threadPool,
mock(PeerRecoveryTargetService.class),
mock(ShardStateAction.class),
mock(RepositoriesService.class),
mock(SearchService.class),
mock(PeerRecoverySourceService.class),
new SnapshotShardsService(
Settings.EMPTY,
new ClusterService(Settings.EMPTY, ClusterSettings.createBuiltInClusterSettings(), threadPool, null),
mock(RepositoriesService.class),
MockTransportService.createMockTransportService(new MockTransport(), threadPool),
mock(IndicesService.class)
),
mock(PrimaryReplicaSyncer.class),
RetentionLeaseSyncer.EMPTY,
mock(NodeClient.class)
);
this.doApplyClusterStateHook = doApplyClusterStateHook;
}
@Override
protected void doApplyClusterState(final ClusterChangedEvent event) {
doApplyClusterStateHook.accept(this, event);
}
}
}
| TestIndicesClusterStateService |
java | apache__logging-log4j2 | log4j-api-test/src/test/java/org/apache/logging/log4j/internal/map/UnmodifiableArrayBackedMapTest.java | {
"start": 1696,
"end": 16255
} | class ____ {
private static final int TEST_DATA_SIZE = 5;
private HashMap<String, String> getTestParameters() {
return getTestParameters(TEST_DATA_SIZE);
}
private HashMap<String, String> getTestParameters(int numParams) {
HashMap<String, String> params = new LinkedHashMap<>();
for (int i = 0; i < numParams; i++) {
params.put("" + i, "value" + i);
}
return params;
}
@Test
void testCopyAndPut() {
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP;
testMap = testMap.copyAndPut("6", "value6");
assertTrue(testMap.containsKey("6"));
assertEquals("value6", testMap.get("6"));
testMap = testMap.copyAndPut("6", "another value");
assertTrue(testMap.containsKey("6"));
assertEquals("another value", testMap.get("6"));
HashMap<String, String> newValues = getTestParameters();
testMap = testMap.copyAndPutAll(newValues);
assertEquals("value1", testMap.get("1"));
assertEquals("value4", testMap.get("4"));
assertEquals("another value", testMap.get("6"));
}
@Test
void testCopyAndRemove() {
// general removing from well-populated set
HashMap<String, String> params = getTestParameters();
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
testMap = testMap.copyAndRemove("2");
testMap = testMap.copyAndRemove("not_present");
assertEquals(4, testMap.size());
assertFalse(testMap.containsKey("2"));
assertTrue(testMap.containsKey("1"));
assertFalse(testMap.containsValue("value2"));
// test removing from empty set
testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPut("test", "test");
testMap = testMap.copyAndRemove("test");
assertTrue(testMap.isEmpty());
// test removing first of two elements
testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPut("test1", "test1");
testMap = testMap.copyAndPut("test2", "test2");
testMap = testMap.copyAndRemove("test1");
assertFalse(testMap.containsKey("test1"));
assertTrue(testMap.containsKey("test2"));
// test removing second of two elements
testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPut("test1", "test1");
testMap = testMap.copyAndPut("test2", "test2");
testMap = testMap.copyAndRemove("test2");
assertTrue(testMap.containsKey("test1"));
assertFalse(testMap.containsKey("test2"));
}
@Test
void testCopyAndRemoveAll() {
HashMap<String, String> initialMapContents = getTestParameters(15);
initialMapContents.put("extra_key", "extra_value");
HashSet<String> keysToRemove = new LinkedHashSet<>();
keysToRemove.add("3");
keysToRemove.add("11");
keysToRemove.add("definitely_not_found");
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(initialMapContents);
testMap = testMap.copyAndRemoveAll(keysToRemove);
assertEquals(14, testMap.size());
assertFalse(testMap.containsKey("3"));
assertFalse(testMap.containsValue("value3"));
assertFalse(testMap.containsKey("11"));
assertFalse(testMap.containsValue("value11"));
assertTrue(testMap.containsKey("extra_key"));
assertTrue(testMap.containsValue("extra_value"));
assertTrue(testMap.containsKey("1"));
assertTrue(testMap.containsValue("value1"));
assertTrue(testMap.containsKey("0"));
assertTrue(testMap.containsValue("value0"));
assertTrue(testMap.containsKey("14"));
assertTrue(testMap.containsValue("value14"));
testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(initialMapContents);
UnmodifiableArrayBackedMap testMapWithArrayListRemoval =
testMap.copyAndRemoveAll(new ArrayList<>(keysToRemove));
UnmodifiableArrayBackedMap testMapWithSetRemoval = testMap.copyAndRemoveAll(keysToRemove);
assertEquals(testMapWithSetRemoval, testMapWithArrayListRemoval);
testMap = UnmodifiableArrayBackedMap.EMPTY_MAP;
assertEquals(0, testMap.copyAndRemoveAll(initialMapContents.keySet()).size());
testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPut("test", "test");
assertEquals(1, testMap.copyAndRemoveAll(initialMapContents.keySet()).size());
testMap = testMap.copyAndRemoveAll(Collections.singleton("not found"));
assertEquals(0, testMap.copyAndRemoveAll(testMap.keySet()).size());
testMap = testMap.copyAndRemoveAll(Collections.singleton("test"));
assertEquals(0, testMap.copyAndRemoveAll(testMap.keySet()).size());
}
@Test
void testEmptyMap() {
assertNull(UnmodifiableArrayBackedMap.EMPTY_MAP.get("test"));
}
@Test
void testEntrySetIteratorAndSize() {
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
Set<Map.Entry<String, String>> entrySet = testMap.entrySet();
int numEntriesFound = 0;
for (@SuppressWarnings("unused") Map.Entry<String, String> entry : entrySet) {
numEntriesFound++;
}
assertEquals(testMap.size(), numEntriesFound);
assertEquals(testMap.size(), entrySet.size());
}
@Test
void testEntrySetMutatorsBlocked() {
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
Set<Map.Entry<String, String>> entrySet = testMap.entrySet();
for (Map.Entry<String, String> entry : entrySet) {
try {
entry.setValue("test");
fail("Entry.setValue() wasn't blocked");
} catch (UnsupportedOperationException e) {
}
}
for (@SuppressWarnings("unused") Map.Entry<String, String> entry : entrySet) {
try {
entrySet.add(null);
fail("EntrySet.add() wasn't blocked");
} catch (UnsupportedOperationException e) {
}
}
for (@SuppressWarnings("unused") Map.Entry<String, String> entry : entrySet) {
try {
entrySet.addAll(new HashSet<>());
fail("EntrySet.addAll() wasn't blocked");
} catch (UnsupportedOperationException e) {
}
}
}
/**
* Tests various situations with .equals(). Test tries comparisons in both
* directions, to make sure that HashMap.equals(UnmodifiableArrayBackedMap) work
* as well as UnmodifiableArrayBackedMap.equals(HashMap).
*/
@Test
void testEqualsHashCodeWithIdenticalContent() {
HashMap<String, String> params = getTestParameters();
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
assertEquals(params, testMap);
assertEquals(testMap, params);
assertEquals(params.hashCode(), testMap.hashCode());
}
@Test
void testEqualsHashCodeWithOneEmptyMap() {
HashMap<String, String> params = getTestParameters();
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
// verify empty maps are not equal to non-empty maps
assertNotEquals(UnmodifiableArrayBackedMap.EMPTY_MAP, params);
assertNotEquals(new HashMap<>(), testMap);
assertNotEquals(UnmodifiableArrayBackedMap.EMPTY_MAP, params);
assertNotEquals(new HashMap<>(), testMap);
}
@Test
void testEqualsHashCodeWithOneKeyRemoved() {
HashMap<String, String> params = getTestParameters();
UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
params.remove("1");
assertNotEquals(params, testMap);
assertNotEquals(testMap, params);
testMap = testMap.copyAndRemove("1").copyAndRemove("2");
assertNotEquals(params, testMap);
assertNotEquals(testMap, params);
}
/**
 * Verifies that a single differing value breaks equality in both comparison
 * directions.
 */
@Test
void testEqualsWhenOneValueDiffers() {
    final HashMap<String, String> reference = getTestParameters();
    final UnmodifiableArrayBackedMap subject = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(reference);
    final UnmodifiableArrayBackedMap changed = subject.copyAndPut("1", "different value");
    assertNotEquals(reference, changed);
    assertNotEquals(changed, reference);
}
/**
 * Verifies that the java.util.Map#forEach(BiConsumer) overload visits every key.
 */
@Test
void testForEachBiConsumer_JavaUtil() {
    final Map<String, String> map = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
    final Collection<String> visitedKeys = new HashSet<>();
    map.forEach((k, v) -> visitedKeys.add(k));
    assertEquals(map.keySet(), visitedKeys);
}
/**
 * Verifies that the Log4j ReadOnlyStringMap#forEach(BiConsumer) overload visits
 * every key.
 */
@Test
void testForEachBiConsumer_Log4jUtil() {
    final ReadOnlyStringMap map = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
    final Collection<String> visitedKeys = new HashSet<>();
    map.forEach((k, v) -> visitedKeys.add(k));
    assertEquals(map.toMap().keySet(), visitedKeys);
}
/**
 * Verifies that forEach(TriConsumer, state) passes every entry plus the caller's
 * state object to the consumer.
 */
@Test
void testForEachTriConsumer() {
    final UnmodifiableArrayBackedMap map = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
    final HashMap<String, String> iterationResultMap = new HashMap<>();
    // lambda instead of the verbose anonymous class — TriConsumer is a
    // single-abstract-method interface, and this file already uses lambdas
    final TriConsumer<String, String, Map<String, String>> triConsumer = (k, v, state) -> state.put(k, v);
    map.forEach(triConsumer, iterationResultMap);
    // collecting all entries into the state map must reproduce the source map
    assertEquals(map, iterationResultMap);
}
/**
 * Verifies that copy-and-mutate operations never alter the instance they were
 * invoked on.
 */
@Test
void testImmutability() {
    final HashMap<String, String> params = getTestParameters();
    final UnmodifiableArrayBackedMap original = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
    // putting more entries must leave the original untouched
    UnmodifiableArrayBackedMap derived = original.copyAndPutAll(getTestParameters());
    assertEquals(original, params);
    // and so must emptying the derived copy
    derived = derived.copyAndRemoveAll(derived.keySet());
    assertTrue(derived.isEmpty());
    assertEquals(original, params);
}
/**
 * Verifies that the copy constructor produces a map equal to its source.
 */
@Test
void testInstanceCopy() {
    final HashMap<String, String> params = getTestParameters();
    final UnmodifiableArrayBackedMap source = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
    final UnmodifiableArrayBackedMap copy = new UnmodifiableArrayBackedMap(source);
    assertEquals(source, copy);
}
/**
 * Verifies that every java.util.Map mutator throws UnsupportedOperationException.
 * Uses AssertJ's assertThatThrownBy (the Assertions class already used elsewhere
 * in this file) instead of the legacy try/fail/catch idiom.
 */
@Test
void testMutatorsBlocked() {
    final UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
    Assertions.assertThatThrownBy(() -> testMap.put("a", "a"))
            .isInstanceOf(UnsupportedOperationException.class);
    Assertions.assertThatThrownBy(() -> testMap.putAll(new HashMap<>()))
            .isInstanceOf(UnsupportedOperationException.class);
    Assertions.assertThatThrownBy(() -> testMap.remove("1"))
            .isInstanceOf(UnsupportedOperationException.class);
    Assertions.assertThatThrownBy(testMap::clear)
            .isInstanceOf(UnsupportedOperationException.class);
}
/**
 * Verifies that a null value can be stored and is observable through every
 * read path (containsKey, containsValue, size, get).
 */
@Test
void testNullValue() {
    final UnmodifiableArrayBackedMap map = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPut("key", null);
    assertTrue(map.containsKey("key"));
    assertTrue(map.containsValue(null));
    assertEquals(1, map.size());
    assertNull(map.get("key"));
}
/**
 * Verifies the read-only accessors: every entry copied in is visible, and
 * absent keys/values stay absent.
 */
@Test
void testReads() {
    // reading from the empty map yields null
    assertNull(UnmodifiableArrayBackedMap.EMPTY_MAP.get("test"));
    final HashMap<String, String> params = getTestParameters();
    final UnmodifiableArrayBackedMap testMap = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(params);
    // every source entry must be visible through the copy
    params.forEach((key, value) -> {
        assertTrue(testMap.containsKey(key));
        assertTrue(testMap.containsValue(value));
        assertEquals(testMap.get(key), params.get(key));
    });
    // and keys/values never inserted must not be reported
    assertFalse(testMap.containsKey("not_present"));
    assertFalse(testMap.containsValue("not_present"));
    assertNull(testMap.get("not_present"));
}
/**
 * Verifies that round-tripping a map through getBackingArray()/getMap()
 * reproduces an equal map, for empty, populated, and populated-then-shrunk
 * states.
 */
@Test
void testState() {
    UnmodifiableArrayBackedMap original = UnmodifiableArrayBackedMap.EMPTY_MAP;
    assertEquals(original, UnmodifiableArrayBackedMap.getMap(original.getBackingArray()));

    original = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters());
    assertEquals(original, UnmodifiableArrayBackedMap.getMap(original.getBackingArray()));

    original = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPutAll(getTestParameters()).copyAndRemove("1");
    assertEquals(original, UnmodifiableArrayBackedMap.getMap(original.getBackingArray()));
}
/**
 * Verifies that toMap() returns this very instance rather than an equal copy.
 */
@Test
void testToMap() {
    final UnmodifiableArrayBackedMap map = UnmodifiableArrayBackedMap.EMPTY_MAP.copyAndPut("test", "test");
    assertSame(map, map.toMap());
}
/**
 * Verifies that copyAndRemoveAll() drops exactly the requested keys.
 */
@Test
void copyAndRemoveAll_should_work() {
    // a map with three entries
    final UnmodifiableArrayBackedMap populated = UnmodifiableArrayBackedMap.EMPTY_MAP
            .copyAndPut("outer", "two")
            .copyAndPut("inner", "one")
            .copyAndPut("not-in-closeable", "true");
    // expectation: the same map minus "inner"
    final UnmodifiableArrayBackedMap expected = UnmodifiableArrayBackedMap.EMPTY_MAP
            .copyAndPut("outer", "two")
            .copyAndPut("not-in-closeable", "true");
    final UnmodifiableArrayBackedMap actual = populated.copyAndRemoveAll(Collections.singleton("inner"));
    Assertions.assertThat(actual).isEqualTo(expected);
}
}
| UnmodifiableArrayBackedMapTest |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/config/AbstractRepositoryConfigTests.java | {
"start": 1248,
"end": 1387
} | class ____ integration test for namespace configuration.
*
* @author Oliver Gierke
*/
@ExtendWith(SpringExtension.class)
public abstract | for |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.