language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/mysql_block_2.java
|
{
"start": 951,
"end": 8791
}
|
class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "BEGIN;\n" +
" DELETE t0 FROM ktv_tmp_sqlarea t0 WHERE t0.dbid=?;\n" +
" INSERT INTO ktv_tmp_sqlarea(`dbid`,`sql_id`,`parsing_schema_name`,`sql_fulltext`,`cpu_time`,`buffer_gets`,`executions`,`command_name`,`sharable_mem`,`persiste\n" +
"nt_mem`,`users_opening`,`fetches`,`loads`,`disk_reads`,`direct_writes`,`command_type`,`plan_hash_value`,`action`,`remote`,`is_obsolete`,`physical_read_requests`,`\n" +
"physical_write_requests`,`elapsed_time`,`user_io_wait_time`,`collection_time`)\n" +
" SELECT `dbid`,`sql_id`,`parsing_schema_name`,`sql_fulltext`,sum(`cpu_time`),sum(`buffer_gets`),sum(`executions`),max(`command_name`),sum(`sharable_mem`),sum(`\n" +
"persistent_mem`),sum(`users_opening`),sum(`fetches`),sum(`loads`),sum(`disk_reads`),sum(`direct_writes`),max(`command_type`),max(`plan_hash_value`),max(`action`),\n" +
"max(`remote`),max(`is_obsolete`),sum(`physical_read_requests`),sum(`physical_write_requests`),sum(`elapsed_time`),sum(`user_io_wait_time`),max(`collection_time`)\n" +
" FROM ktv_sqlarea WHERE dbid=? GROUP BY sql_fulltext;\n" +
" DELETE FROM ktv_sqlarea WHERE dbid=?;\n" +
" INSERT INTO ktv_sqlarea(`dbid`,`sql_id`,`parsing_schema_name`,`sql_fulltext`,`cpu_time`,`buffer_gets`,`executions`,`command_name`,`sharable_mem`,`persistent_m\n" +
"em`,`users_opening`,`fetches`,`loads`,`disk_reads`,`direct_writes`,`command_type`,`plan_hash_value`,`action`,`remote`,`is_obsolete`,`physical_read_requests`,`phys\n" +
"ical_write_requests`,`elapsed_time`,`user_io_wait_time`,`collection_time`)\n" +
" SELECT `dbid`,`sql_id`,`parsing_schema_name`,`sql_fulltext`,`cpu_time`,`buffer_gets`,`executions`,`command_name`,`sharable_mem`,`persistent_mem`,`users_openin\n" +
"g`,`fetches`,`loads`,`disk_reads`,`direct_writes`,`command_type`,`plan_hash_value`,`action`,`remote`,`is_obsolete`,`physical_read_requests`,`physical_write_reques\n" +
"ts`,`elapsed_time`,`user_io_wait_time`,`collection_time`\n" +
" FROM ktv_tmp_sqlarea WHERE dbid=? and sql_fulltext is not null;\n" +
" ROLLBACK;\n" +
" DELETE FROM ktv_tmp_sqlarea WHERE dbid=?;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
assertEquals(7, statementList.size());
SQLStatement stmt = statementList.get(0);
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
for (SQLStatement statement : statementList) {
statement.accept(visitor);
}
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("relationships : " + visitor.getRelationships());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(2, visitor.getTables().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("employees")));
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("emp_name")));
// assertEquals(7, visitor.getColumns().size());
// assertEquals(3, visitor.getConditions().size());
// assertEquals(1, visitor.getRelationships().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "salary")));
{
String output = SQLUtils.toSQLString(statementList, DbType.mysql);
assertEquals("BEGIN ;\n" +
"\n" +
"DELETE t0\n" +
"FROM ktv_tmp_sqlarea t0\n" +
"WHERE t0.dbid = ?;\n" +
"\n" +
"INSERT INTO ktv_tmp_sqlarea (`dbid`, `sql_id`, `parsing_schema_name`, `sql_fulltext`, `cpu_time`\n" +
"\t, `buffer_gets`, `executions`, `command_name`, `sharable_mem`, `persiste\n" +
"nt_mem`\n" +
"\t, `users_opening`, `fetches`, `loads`, `disk_reads`, `direct_writes`\n" +
"\t, `command_type`, `plan_hash_value`, `action`, `remote`, `is_obsolete`\n" +
"\t, `physical_read_requests`, `\n" +
"physical_write_requests`, `elapsed_time`, `user_io_wait_time`, `collection_time`)\n" +
"SELECT `dbid`, `sql_id`, `parsing_schema_name`, `sql_fulltext`\n" +
"\t, sum(`cpu_time`), sum(`buffer_gets`)\n" +
"\t, sum(`executions`), max(`command_name`)\n" +
"\t, sum(`sharable_mem`), sum(`\n" +
"persistent_mem`)\n" +
"\t, sum(`users_opening`), sum(`fetches`)\n" +
"\t, sum(`loads`), sum(`disk_reads`)\n" +
"\t, sum(`direct_writes`), max(`command_type`)\n" +
"\t, max(`plan_hash_value`), max(`action`)\n" +
"\t, max(`remote`), max(`is_obsolete`)\n" +
"\t, sum(`physical_read_requests`), sum(`physical_write_requests`)\n" +
"\t, sum(`elapsed_time`), sum(`user_io_wait_time`)\n" +
"\t, max(`collection_time`)\n" +
"FROM ktv_sqlarea\n" +
"WHERE dbid = ?\n" +
"GROUP BY sql_fulltext;\n" +
"\n" +
"DELETE FROM ktv_sqlarea\n" +
"WHERE dbid = ?;\n" +
"\n" +
"INSERT INTO ktv_sqlarea (`dbid`, `sql_id`, `parsing_schema_name`, `sql_fulltext`, `cpu_time`\n" +
"\t, `buffer_gets`, `executions`, `command_name`, `sharable_mem`, `persistent_m\n" +
"em`\n" +
"\t, `users_opening`, `fetches`, `loads`, `disk_reads`, `direct_writes`\n" +
"\t, `command_type`, `plan_hash_value`, `action`, `remote`, `is_obsolete`\n" +
"\t, `physical_read_requests`, `phys\n" +
"ical_write_requests`, `elapsed_time`, `user_io_wait_time`, `collection_time`)\n" +
"SELECT `dbid`, `sql_id`, `parsing_schema_name`, `sql_fulltext`, `cpu_time`\n" +
"\t, `buffer_gets`, `executions`, `command_name`, `sharable_mem`, `persistent_mem`\n" +
"\t, `users_openin\n" +
"g`, `fetches`, `loads`, `disk_reads`, `direct_writes`\n" +
"\t, `command_type`, `plan_hash_value`, `action`, `remote`, `is_obsolete`\n" +
"\t, `physical_read_requests`, `physical_write_reques\n" +
"ts`, `elapsed_time`, `user_io_wait_time`, `collection_time`\n" +
"FROM ktv_tmp_sqlarea\n" +
"WHERE dbid = ?\n" +
"\tAND sql_fulltext IS NOT NULL;\n" +
"\n" +
"ROLLBACK;\n" +
"\n" +
"DELETE FROM ktv_tmp_sqlarea\n" +
"WHERE dbid = ?;", //
output);
}
}
}
|
mysql_block_2
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java
|
{
"start": 532,
"end": 986
}
|
class ____ extends AbstractNumericMetricTestCase<ExtendedStatsAggregationBuilder> {
@Override
protected ExtendedStatsAggregationBuilder doCreateTestAggregatorFactory() {
ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
if (randomBoolean()) {
factory.sigma(randomDoubleBetween(0.0, 10.0, true));
}
return factory;
}
}
|
ExtendedStatsTests
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/MultiPropertyMapper.java
|
{
"start": 1044,
"end": 8647
}
|
class ____ extends AbstractPropertyMapper implements ExtendedPropertyMapper {
protected final Map<PropertyData, PropertyMapper> properties;
private final Map<String, PropertyData> propertyDatas;
public MultiPropertyMapper() {
properties = Tools.newLinkedHashMap();
propertyDatas = Tools.newLinkedHashMap();
}
@Override
public void add(PropertyData propertyData) {
final SinglePropertyMapper single = new SinglePropertyMapper();
single.add( propertyData );
properties.put( propertyData, single );
propertyDatas.put( propertyData.getName(), propertyData );
}
@Override
public CompositeMapperBuilder addComponent(
PropertyData propertyData,
Class componentClass, EmbeddableInstantiator instantiator) {
if ( properties.get( propertyData ) != null ) {
// This is needed for second pass to work properly in the components mapper
return (CompositeMapperBuilder) properties.get( propertyData );
}
final ComponentPropertyMapper componentMapperBuilder = new ComponentPropertyMapper(
propertyData,
componentClass,
instantiator
);
addComposite( propertyData, componentMapperBuilder );
return componentMapperBuilder;
}
@Override
public void addComposite(PropertyData propertyData, PropertyMapper propertyMapper) {
properties.put( propertyData, propertyMapper );
propertyDatas.put( propertyData.getName(), propertyData );
}
protected Object getAtIndexOrNull(Object[] array, int index) {
return array == null ? null : array[index];
}
@Override
public boolean map(
SharedSessionContractImplementor session,
Map<String, Object> data,
String[] propertyNames,
Object[] newState,
Object[] oldState) {
boolean ret = false;
for ( int i = 0; i < propertyNames.length; i++ ) {
final String propertyName = propertyNames[i];
if ( propertyDatas.containsKey( propertyName ) ) {
final PropertyMapper propertyMapper = properties.get( propertyDatas.get( propertyName ) );
final Object newObj = getAtIndexOrNull( newState, i );
final Object oldObj = getAtIndexOrNull( oldState, i );
ret |= propertyMapper.mapToMapFromEntity( session, data, newObj, oldObj );
propertyMapper.mapModifiedFlagsToMapFromEntity( session, data, newObj, oldObj );
}
}
return ret;
}
@Override
public boolean mapToMapFromEntity(
final SharedSessionContractImplementor session,
final Map<String, Object> data,
final Object newObj,
final Object oldObj) {
boolean ret = false;
for ( Map.Entry<PropertyData, PropertyMapper> entry : properties.entrySet() ) {
final PropertyData propertyData = entry.getKey();
final PropertyMapper propertyMapper = entry.getValue();
// synthetic properties are not part of the entity model; therefore they should be ignored.
if ( propertyData.isSynthetic() ) {
continue;
}
Getter getter;
if ( newObj != null ) {
getter = ReflectionTools.getGetter(
newObj.getClass(),
propertyData,
session.getFactory().getServiceRegistry()
);
}
else if ( oldObj != null ) {
getter = ReflectionTools.getGetter(
oldObj.getClass(),
propertyData,
session.getFactory().getServiceRegistry()
);
}
else {
return false;
}
ret |= propertyMapper.mapToMapFromEntity(
session, data,
newObj == null ? null : getter.get( newObj ),
oldObj == null ? null : getter.get( oldObj )
);
}
return ret;
}
@Override
public void mapModifiedFlagsToMapFromEntity(
final SharedSessionContractImplementor session,
final Map<String, Object> data,
final Object newObj,
final Object oldObj) {
for ( Map.Entry<PropertyData, PropertyMapper> entry : properties.entrySet() ) {
final PropertyData propertyData = entry.getKey();
final PropertyMapper propertyMapper = entry.getValue();
// synthetic properties are not part of the entity model; therefore they should be ignored.
if ( propertyData.isSynthetic() ) {
continue;
}
Getter getter;
if ( newObj != null ) {
getter = ReflectionTools.getGetter(
newObj.getClass(),
propertyData,
session.getFactory().getServiceRegistry()
);
}
else if ( oldObj != null ) {
getter = ReflectionTools.getGetter(
oldObj.getClass(),
propertyData,
session.getFactory().getServiceRegistry()
);
}
else {
break;
}
propertyMapper.mapModifiedFlagsToMapFromEntity(
session, data,
newObj == null ? null : getter.get( newObj ),
oldObj == null ? null : getter.get( oldObj )
);
}
}
@Override
public void mapToEntityFromMap(
EnversService enversService,
Object obj,
Map data,
Object primaryKey,
AuditReaderImplementor versionsReader,
Number revision) {
for ( PropertyMapper mapper : properties.values() ) {
mapper.mapToEntityFromMap( enversService, obj, data, primaryKey, versionsReader, revision );
}
}
@Override
public Object mapToEntityFromMap(
EnversService enversService,
Map data,
Object primaryKey,
AuditReaderImplementor versionsReader,
Number revision) {
return null;
}
private Pair<PropertyMapper, String> getMapperAndDelegatePropName(String referencingPropertyName) {
// Name of the property, to which we will delegate the mapping.
String delegatePropertyName;
// Checking if the property name doesn't reference a collection in a component - then the name will containa a .
final int dotIndex = referencingPropertyName.indexOf( '.' );
if ( dotIndex != -1 ) {
// Computing the name of the component
final String componentName = referencingPropertyName.substring( 0, dotIndex );
// And the name of the property in the component
final String propertyInComponentName = MappingTools.createComponentPrefix( componentName )
+ referencingPropertyName.substring( dotIndex + 1 );
// We need to get the mapper for the component.
referencingPropertyName = componentName;
// As this is a component, we delegate to the property in the component.
delegatePropertyName = propertyInComponentName;
}
else {
// If this is not a component, we delegate to the same property.
delegatePropertyName = referencingPropertyName;
}
return Pair.make( properties.get( propertyDatas.get( referencingPropertyName ) ), delegatePropertyName );
}
@Override
public void mapModifiedFlagsToMapForCollectionChange(String collectionPropertyName, Map<String, Object> data) {
final Pair<PropertyMapper, String> pair = getMapperAndDelegatePropName( collectionPropertyName );
final PropertyMapper mapper = pair.getFirst();
if ( mapper != null ) {
mapper.mapModifiedFlagsToMapForCollectionChange( pair.getSecond(), data );
}
}
@Override
public List<PersistentCollectionChangeData> mapCollectionChanges(
SharedSessionContractImplementor session,
String referencingPropertyName,
PersistentCollection newColl,
Serializable oldColl, Object id) {
final Pair<PropertyMapper, String> pair = getMapperAndDelegatePropName( referencingPropertyName );
final PropertyMapper mapper = pair.getFirst();
if ( mapper != null ) {
return mapper.mapCollectionChanges( session, pair.getSecond(), newColl, oldColl, id );
}
else {
return null;
}
}
@Override
public Map<PropertyData, PropertyMapper> getProperties() {
return properties;
}
public Map<String, PropertyData> getPropertyDatas() {
return propertyDatas;
}
@Override
public boolean hasPropertiesWithModifiedFlag() {
for ( PropertyData property : getProperties().keySet() ) {
if ( property.isUsingModifiedFlag() ) {
return true;
}
}
return false;
}
}
|
MultiPropertyMapper
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/filter/CurrentValueDeser4184Test.java
|
{
"start": 933,
"end": 2789
}
|
class ____ extends ValueDeserializer<UserType> {
@Override
public UserType deserialize(JsonParser p, DeserializationContext ctxt) {
Object currentValue = p.streamReadContext().getParent().currentValue();
if (null == currentValue) {
ctxt.reportInputMismatch(UserType.class, "No currentValue() available");
}
if (!(currentValue instanceof User)) {
ctxt.reportInputMismatch(UserType.class, "currentValue() of wrong type, not User but: "
+currentValue.getClass().getName());
}
JsonNode node = ctxt.readTree(p);
int value = node.path("value").asInt(-1);
switch (value) {
case 1:
return UserType.ADMIN;
case 2:
return UserType.USER;
}
throw new IllegalArgumentException("Bad value: "+value);
}
}
/*
/**********************************************************************
/* Test methods
/**********************************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#4184]
@Test
public void testCurrentValue4184FullPojo() throws Exception
{
String json = "{\"role\": {\"name\": \"Manager\"}, \"type\": {\"value\":1}}";
User user = MAPPER.readValue(json, User.class);
assertNotNull(user);
assertEquals(UserType.ADMIN, user.type);
}
// [databind#4184]
@Test
public void testCurrentValue4184EmptyPojo() throws Exception
{
String json = "{\"role\": {}, \"type\": {\"value\":1}}";
User user = MAPPER.readValue(json, User.class);
assertNotNull(user);
assertEquals(UserType.ADMIN, user.type);
}
}
|
UserTypeDeserializer
|
java
|
spring-projects__spring-security
|
test/src/test/java/org/springframework/security/test/web/support/WebTestUtilsTests.java
|
{
"start": 7683,
"end": 8162
}
|
class ____ {
static CsrfTokenRepository CSRF_REPO;
static SecurityContextRepository CONTEXT_REPO;
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.csrf((csrf) -> csrf
.csrfTokenRepository(CSRF_REPO))
.securityContext((context) -> context
.securityContextRepository(CONTEXT_REPO));
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static
|
CustomSecurityConfig
|
java
|
spring-projects__spring-boot
|
module/spring-boot-quartz/src/test/java/org/springframework/boot/quartz/actuate/endpoint/QuartzEndpointWebIntegrationTests.java
|
{
"start": 2237,
"end": 8861
}
|
class ____ {
private static final JobDetail jobOne = JobBuilder.newJob(Job.class)
.withIdentity("jobOne", "samples")
.usingJobData(new JobDataMap(Collections.singletonMap("name", "test")))
.withDescription("A sample job")
.build();
private static final JobDetail jobTwo = JobBuilder.newJob(DelegatingJob.class)
.withIdentity("jobTwo", "samples")
.build();
private static final JobDetail jobThree = JobBuilder.newJob(Job.class).withIdentity("jobThree").build();
private static final CronTrigger triggerOne = TriggerBuilder.newTrigger()
.withDescription("Once a day 3AM")
.withIdentity("triggerOne")
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.build();
private static final SimpleTrigger triggerTwo = TriggerBuilder.newTrigger()
.withDescription("Once a day")
.withIdentity("triggerTwo", "tests")
.withSchedule(SimpleScheduleBuilder.repeatHourlyForever(24))
.build();
private static final CalendarIntervalTrigger triggerThree = TriggerBuilder.newTrigger()
.withDescription("Once a week")
.withIdentity("triggerThree", "tests")
.withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule().withIntervalInWeeks(1))
.build();
@WebEndpointTest
void quartzReport(WebTestClient client) {
client.get()
.uri("/actuator/quartz")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("jobs.groups")
.isEqualTo(new JSONArray().appendElement("samples").appendElement("DEFAULT"))
.jsonPath("triggers.groups")
.isEqualTo(new JSONArray().appendElement("DEFAULT").appendElement("tests"));
}
@WebEndpointTest
void quartzJobNames(WebTestClient client) {
client.get()
.uri("/actuator/quartz/jobs")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("groups.samples.jobs")
.isEqualTo(new JSONArray().appendElement("jobOne").appendElement("jobTwo"))
.jsonPath("groups.DEFAULT.jobs")
.isEqualTo(new JSONArray().appendElement("jobThree"));
}
@WebEndpointTest
void quartzTriggerNames(WebTestClient client) {
client.get()
.uri("/actuator/quartz/triggers")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("groups.DEFAULT.paused")
.isEqualTo(false)
.jsonPath("groups.DEFAULT.triggers")
.isEqualTo(new JSONArray().appendElement("triggerOne"))
.jsonPath("groups.tests.paused")
.isEqualTo(false)
.jsonPath("groups.tests.triggers")
.isEqualTo(new JSONArray().appendElement("triggerTwo").appendElement("triggerThree"));
}
@WebEndpointTest
void quartzTriggersOrJobsAreAllowed(WebTestClient client) {
client.get().uri("/actuator/quartz/something-else").exchange().expectStatus().isBadRequest();
}
@WebEndpointTest
void quartzJobGroupSummary(WebTestClient client) {
client.get()
.uri("/actuator/quartz/jobs/samples")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("group")
.isEqualTo("samples")
.jsonPath("jobs.jobOne.className")
.isEqualTo(Job.class.getName())
.jsonPath("jobs.jobTwo.className")
.isEqualTo(DelegatingJob.class.getName());
}
@WebEndpointTest
void quartzJobGroupSummaryWithUnknownGroup(WebTestClient client) {
client.get().uri("/actuator/quartz/jobs/does-not-exist").exchange().expectStatus().isNotFound();
}
@WebEndpointTest
void quartzTriggerGroupSummary(WebTestClient client) {
client.get()
.uri("/actuator/quartz/triggers/tests")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("group")
.isEqualTo("tests")
.jsonPath("paused")
.isEqualTo("false")
.jsonPath("triggers.cron")
.isEmpty()
.jsonPath("triggers.simple.triggerTwo.interval")
.isEqualTo(86400000)
.jsonPath("triggers.dailyTimeInterval")
.isEmpty()
.jsonPath("triggers.calendarInterval.triggerThree.interval")
.isEqualTo(604800000)
.jsonPath("triggers.custom")
.isEmpty();
}
@WebEndpointTest
void quartzTriggerGroupSummaryWithUnknownGroup(WebTestClient client) {
client.get().uri("/actuator/quartz/triggers/does-not-exist").exchange().expectStatus().isNotFound();
}
@WebEndpointTest
void quartzJobDetail(WebTestClient client) {
client.get()
.uri("/actuator/quartz/jobs/samples/jobOne")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("group")
.isEqualTo("samples")
.jsonPath("name")
.isEqualTo("jobOne")
.jsonPath("data.name")
.isEqualTo("test");
}
@WebEndpointTest
void quartzJobDetailWithUnknownKey(WebTestClient client) {
client.get().uri("/actuator/quartz/jobs/samples/does-not-exist").exchange().expectStatus().isNotFound();
}
@WebEndpointTest
void quartzTriggerDetail(WebTestClient client) {
client.get()
.uri("/actuator/quartz/triggers/DEFAULT/triggerOne")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("group")
.isEqualTo("DEFAULT")
.jsonPath("name")
.isEqualTo("triggerOne")
.jsonPath("description")
.isEqualTo("Once a day 3AM")
.jsonPath("state")
.isEqualTo("NORMAL")
.jsonPath("type")
.isEqualTo("cron")
.jsonPath("simple")
.doesNotExist()
.jsonPath("calendarInterval")
.doesNotExist()
.jsonPath("dailyInterval")
.doesNotExist()
.jsonPath("custom")
.doesNotExist()
.jsonPath("cron.expression")
.isEqualTo("0 0 3 ? * *");
}
@WebEndpointTest
void quartzTriggerDetailWithUnknownKey(WebTestClient client) {
client.get().uri("/actuator/quartz/triggers/tests/does-not-exist").exchange().expectStatus().isNotFound();
}
@WebEndpointTest
void quartzTriggerJob(WebTestClient client) {
client.post()
.uri("/actuator/quartz/jobs/samples/jobOne")
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(Map.of("state", "running"))
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("group")
.isEqualTo("samples")
.jsonPath("name")
.isEqualTo("jobOne")
.jsonPath("className")
.isEqualTo("org.quartz.Job")
.jsonPath("triggerTime")
.isNotEmpty();
}
@WebEndpointTest
void quartzTriggerJobWithUnknownJobKey(WebTestClient client) {
client.post()
.uri("/actuator/quartz/jobs/samples/does-not-exist")
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(Map.of("state", "running"))
.exchange()
.expectStatus()
.isNotFound();
}
@WebEndpointTest
void quartzTriggerJobWithUnknownState(WebTestClient client) {
client.post()
.uri("/actuator/quartz/jobs/samples/jobOne")
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(Map.of("state", "unknown"))
.exchange()
.expectStatus()
.isBadRequest();
}
@Configuration(proxyBeanMethods = false)
static
|
QuartzEndpointWebIntegrationTests
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/function/server/NestedRouteIntegrationTests.java
|
{
"start": 1766,
"end": 4878
}
|
class ____ extends AbstractRouterFunctionIntegrationTests {
private final RestTemplate restTemplate = new RestTemplate();
@Override
protected RouterFunction<?> routerFunction() {
NestedHandler nestedHandler = new NestedHandler();
return nest(path("/foo/"),
route(GET("/bar"), nestedHandler::pattern)
.andRoute(GET("/baz"), nestedHandler::pattern))
.andNest(GET("{foo}"),
route(GET("/bar"), nestedHandler::variables).and(
nest(GET("/{bar}"),
route(GET("/{baz}"), nestedHandler::variables))))
.andRoute(path("/{qux}/quux").and(method(HttpMethod.GET)), nestedHandler::variables)
.andRoute(all(), nestedHandler::variables);
}
@ParameterizedHttpServerTest
void bar(HttpServer httpServer) throws Exception {
startServer(httpServer);
ResponseEntity<String> result =
restTemplate.getForEntity("http://localhost:" + port + "/foo/bar", String.class);
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(result.getBody()).isEqualTo("/foo/bar");
}
@ParameterizedHttpServerTest
void baz(HttpServer httpServer) throws Exception {
startServer(httpServer);
ResponseEntity<String> result =
restTemplate.getForEntity("http://localhost:" + port + "/foo/baz", String.class);
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(result.getBody()).isEqualTo("/foo/baz");
}
@ParameterizedHttpServerTest
void variables(HttpServer httpServer) throws Exception {
startServer(httpServer);
ResponseEntity<String> result =
restTemplate.getForEntity("http://localhost:" + port + "/1/2/3", String.class);
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
String body = result.getBody();
assertThat(body).startsWith("/{foo}/{bar}/{baz}");
assertThat(body).contains("foo=1");
assertThat(body).contains("bar=2");
assertThat(body).contains("baz=3");
}
// SPR-16868
@ParameterizedHttpServerTest
void parentVariables(HttpServer httpServer) throws Exception {
startServer(httpServer);
ResponseEntity<String> result =
restTemplate.getForEntity("http://localhost:" + port + "/1/bar", String.class);
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(result.getBody()).isEqualTo("/{foo}/bar\n{foo=1}");
}
// SPR 16692
@ParameterizedHttpServerTest
void removeFailedNestedPathVariables(HttpServer httpServer) throws Exception {
startServer(httpServer);
ResponseEntity<String> result =
restTemplate.getForEntity("http://localhost:" + port + "/qux/quux", String.class);
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(result.getBody()).isEqualTo("/{qux}/quux\n{qux=qux}");
}
// SPR 17210
@ParameterizedHttpServerTest
void removeFailedPathVariablesAnd(HttpServer httpServer) throws Exception {
startServer(httpServer);
ResponseEntity<String> result =
restTemplate.postForEntity("http://localhost:" + port + "/qux/quux", "", String.class);
assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(result.getBody()).isEqualTo("{}");
}
private static
|
NestedRouteIntegrationTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/blob/BlobClientTest.java
|
{
"start": 2305,
"end": 21816
}
|
class ____ {
/** The buffer size used during the tests in bytes. */
private static final int TEST_BUFFER_SIZE = 17 * 1000;
/** The instance of the (non-ssl) BLOB server used during the tests. */
static BlobServer blobServer;
/** The blob service (non-ssl) client configuration. */
static Configuration clientConfig;
@TempDir static java.nio.file.Path tempDir;
/** Starts the BLOB server. */
@BeforeAll
static void startServer() throws IOException {
blobServer = TestingBlobUtils.createServer(tempDir);
blobServer.start();
clientConfig = new Configuration();
}
/** Shuts the BLOB server down. */
@AfterAll
static void stopServer() throws IOException {
if (blobServer != null) {
blobServer.close();
}
}
/**
* Creates a test buffer and fills it with a specific byte pattern.
*
* @return a test buffer filled with a specific byte pattern
*/
private static byte[] createTestBuffer() {
final byte[] buf = new byte[TEST_BUFFER_SIZE];
for (int i = 0; i < buf.length; ++i) {
buf[i] = (byte) (i % 128);
}
return buf;
}
/**
* Prepares a test file for the unit tests, i.e. the methods fills the file with a particular
* byte patterns and computes the file's BLOB key.
*
* @param file the file to prepare for the unit tests
* @return the BLOB key of the prepared file
* @throws IOException thrown if an I/O error occurs while writing to the test file
*/
private static byte[] prepareTestFile(File file) throws IOException {
MessageDigest md = BlobUtils.createMessageDigest();
final byte[] buf = new byte[TEST_BUFFER_SIZE];
for (int i = 0; i < buf.length; ++i) {
buf[i] = (byte) (i % 128);
}
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
for (int i = 0; i < 20; ++i) {
fos.write(buf);
md.update(buf);
}
} finally {
if (fos != null) {
fos.close();
}
}
return md.digest();
}
/**
* Validates the result of a GET operation by comparing the data from the retrieved input stream
* to the content of the specified buffer.
*
* @param actualInputStream the input stream returned from the GET operation (will be closed by
* this method)
* @param expectedBuf the buffer to compare the input stream's data to
* @throws IOException thrown if an I/O error occurs while reading the input stream
*/
static void validateGetAndClose(final InputStream actualInputStream, final byte[] expectedBuf)
throws IOException {
try {
byte[] receivedBuffer = new byte[expectedBuf.length];
int bytesReceived = 0;
while (true) {
final int read =
actualInputStream.read(
receivedBuffer,
bytesReceived,
receivedBuffer.length - bytesReceived);
if (read < 0) {
throw new EOFException();
}
bytesReceived += read;
if (bytesReceived == receivedBuffer.length) {
assertThat(actualInputStream.read()).isEqualTo(-1);
assertThat(receivedBuffer).isEqualTo(expectedBuf);
return;
}
}
} finally {
actualInputStream.close();
}
}
/**
* Validates the result of a GET operation by comparing the data from the retrieved input stream
* to the content of the expected input stream.
*
* @param actualInputStream the input stream returned from the GET operation (will be closed by
* this method)
* @param expectedInputStream the input stream to compare the input stream's data to
* @throws IOException thrown if an I/O error occurs while reading any input stream
*/
static void validateGetAndClose(InputStream actualInputStream, InputStream expectedInputStream)
throws IOException {
try {
while (true) {
final int r1 = actualInputStream.read();
final int r2 = expectedInputStream.read();
assertThat(r1).isEqualTo(r2);
if (r1 < 0) {
break;
}
}
} finally {
actualInputStream.close();
expectedInputStream.close();
}
}
/**
* Validates the result of a GET operation by comparing the data from the retrieved input stream
* to the content of the specified file.
*
* @param actualInputStream the input stream returned from the GET operation
* @param expectedFile the file to compare the input stream's data to
* @throws IOException thrown if an I/O error occurs while reading the input stream or the file
*/
@SuppressWarnings("WeakerAccess")
static void validateGetAndClose(final InputStream actualInputStream, final File expectedFile)
throws IOException {
validateGetAndClose(actualInputStream, Files.newInputStream(expectedFile.toPath()));
}
protected boolean isSSLEnabled() {
return false;
}
@Test
void testContentAddressableBufferTransientBlob() throws IOException, InterruptedException {
testContentAddressableBuffer(TRANSIENT_BLOB);
}
@Test
void testContentAddressableBufferPermantBlob() throws IOException, InterruptedException {
testContentAddressableBuffer(PERMANENT_BLOB);
}
    /**
     * Tests the PUT/GET operations for content-addressable buffers: stores a random buffer
     * (job-unrelated for transient BLOBs, always job-related), verifies the returned key's hash
     * matches the buffer's digest, retrieves and validates the content, and checks that invalid
     * keys are rejected with an {@link IOException}.
     *
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testContentAddressableBuffer(BlobKey.BlobType blobType)
            throws IOException, InterruptedException {
        // The client is closed manually (not try-with-resources) because it is re-created
        // mid-test after a deliberately failing GET closes the connection.
        BlobClient client = null;
        try {
            byte[] testBuffer = createTestBuffer();
            MessageDigest md = BlobUtils.createMessageDigest();
            md.update(testBuffer);
            byte[] digest = md.digest();
            InetSocketAddress serverAddress =
                    new InetSocketAddress(
                            getBlobServer().getAddress().getHostName(), getBlobServer().getPort());
            client = new BlobClient(serverAddress, getBlobClientConfig());
            JobID jobId = new JobID();
            // Store the data (job-unrelated)
            BlobKey receivedKey1 = null;
            if (blobType == TRANSIENT_BLOB) {
                // job-unrelated PUT is only supported for transient BLOBs
                receivedKey1 = client.putBuffer(null, testBuffer, 0, testBuffer.length, blobType);
                assertThat(receivedKey1.getHash()).isEqualTo(digest);
            }
            // try again with a job-related BLOB:
            BlobKey receivedKey2 =
                    client.putBuffer(jobId, testBuffer, 0, testBuffer.length, blobType);
            assertThat(receivedKey2.getHash()).isEqualTo(digest);
            if (blobType == TRANSIENT_BLOB) {
                // same content hash, but the keys themselves must differ
                verifyKeyDifferentHashEquals(receivedKey1, receivedKey2);
            }
            // Retrieve the data (job-unrelated)
            if (blobType == TRANSIENT_BLOB) {
                validateGetAndClose(client.getInternal(null, receivedKey1), testBuffer);
                // transient BLOBs should be deleted from the server, eventually
                verifyDeletedEventually(getBlobServer(), null, receivedKey1);
            }
            // job-related
            validateGetAndClose(client.getInternal(jobId, receivedKey2), testBuffer);
            if (blobType == TRANSIENT_BLOB) {
                // transient BLOBs should be deleted from the server, eventually
                verifyDeletedEventually(getBlobServer(), jobId, receivedKey2);
            }
            // Check reaction to invalid keys for job-unrelated blobs
            final BlobClient finalClient1 = client;
            assertThatThrownBy(() -> finalClient1.getInternal(null, BlobKey.createKey(blobType)))
                    .isInstanceOf(IOException.class);
            // Check reaction to invalid keys for job-related blobs
            // new client needed (closed from failure above)
            client = new BlobClient(serverAddress, getBlobClientConfig());
            final BlobClient finalClient2 = client;
            assertThatThrownBy(() -> finalClient2.getInternal(jobId, BlobKey.createKey(blobType)))
                    .isInstanceOf(IOException.class);
        } finally {
            if (client != null) {
                try {
                    client.close();
                } catch (Throwable ignored) {
                    // best-effort cleanup; close failures must not mask test failures
                }
            }
        }
    }
    /** Returns the client {@link Configuration} used when creating {@code BlobClient}s. */
    protected Configuration getBlobClientConfig() {
        return clientConfig;
    }
    /** Returns the {@link BlobServer} instance the tests run against. */
    protected BlobServer getBlobServer() {
        return blobServer;
    }
    /** Content-addressable stream round-trip for transient BLOBs. */
    @Test
    void testContentAddressableStreamTransientBlob() throws IOException, InterruptedException {
        testContentAddressableStream(TRANSIENT_BLOB);
    }
    /** Content-addressable stream round-trip for permanent BLOBs. */
    @Test
    void testContentAddressableStreamPermanentBlob() throws IOException, InterruptedException {
        testContentAddressableStream(PERMANENT_BLOB);
    }
/**
* Tests the PUT/GET operations for content-addressable streams.
*
* @param blobType whether the BLOB should become permanent or transient
*/
private void testContentAddressableStream(BlobKey.BlobType blobType)
throws IOException, InterruptedException {
File testFile = tempDir.resolve("test_file").toFile();
byte[] digest = prepareTestFile(testFile);
InputStream is = null;
try (BlobClient client =
new BlobClient(
new InetSocketAddress(
getBlobServer().getAddress().getHostName(),
getBlobServer().getPort()),
getBlobClientConfig())) {
JobID jobId = new JobID();
BlobKey receivedKey1 = null;
// Store the data (job-unrelated)
if (blobType == TRANSIENT_BLOB) {
is = Files.newInputStream(testFile.toPath());
receivedKey1 = client.putInputStream(null, is, blobType);
assertThat(receivedKey1.getHash()).isEqualTo(digest);
}
// try again with a job-related BLOB:
is = Files.newInputStream(testFile.toPath());
BlobKey receivedKey2 = client.putInputStream(jobId, is, blobType);
is.close();
is = null;
// Retrieve the data (job-unrelated)
if (blobType == TRANSIENT_BLOB) {
verifyKeyDifferentHashEquals(receivedKey1, receivedKey2);
validateGetAndClose(client.getInternal(null, receivedKey1), testFile);
// transient BLOBs should be deleted from the server, eventually
verifyDeletedEventually(getBlobServer(), null, receivedKey1);
}
// job-related
validateGetAndClose(client.getInternal(jobId, receivedKey2), testFile);
if (blobType == TRANSIENT_BLOB) {
// transient BLOBs should be deleted from the server, eventually
verifyDeletedEventually(getBlobServer(), jobId, receivedKey2);
}
} finally {
if (is != null) {
try {
is.close();
} catch (Throwable ignored) {
}
}
}
}
    /** GET failure mid-download for a job-unrelated transient BLOB. */
    @Test
    void testGetFailsDuringStreamingNoJobTransientBlob() throws IOException {
        testGetFailsDuringStreaming(null, TRANSIENT_BLOB);
    }
    /** GET failure mid-download for a job-related transient BLOB. */
    @Test
    void testGetFailsDuringStreamingForJobTransientBlob() throws IOException {
        testGetFailsDuringStreaming(new JobID(), TRANSIENT_BLOB);
    }
    /** GET failure mid-download for a job-related permanent BLOB. */
    @Test
    void testGetFailsDuringStreamingForJobPermanentBlob() throws IOException {
        testGetFailsDuringStreaming(new JobID(), PERMANENT_BLOB);
    }
/**
* Checks the correct result if a GET operation fails during the file download.
*
* @param jobId job ID or <tt>null</tt> if job-unrelated
* @param blobType whether the BLOB should become permanent or transient
*/
private void testGetFailsDuringStreaming(@Nullable final JobID jobId, BlobKey.BlobType blobType)
throws IOException {
assumeThat(isSSLEnabled())
.as("This test can deadlock when using SSL. See FLINK-19369.")
.isFalse();
try (BlobClient client =
new BlobClient(
new InetSocketAddress(
getBlobServer().getAddress().getHostName(),
getBlobServer().getPort()),
getBlobClientConfig())) {
byte[] data = new byte[5000000];
Random rnd = new Random();
rnd.nextBytes(data);
// put content addressable (like libraries)
BlobKey key = client.putBuffer(jobId, data, 0, data.length, blobType);
assertThat(key).isNotNull();
// issue a GET request that succeeds
InputStream is = client.getInternal(jobId, key);
byte[] receiveBuffer = new byte[data.length];
int firstChunkLen = 50000;
BlobUtils.readFully(is, receiveBuffer, 0, firstChunkLen, null);
BlobUtils.readFully(is, receiveBuffer, firstChunkLen, firstChunkLen, null);
// shut down the server
for (BlobServerConnection conn : getBlobServer().getCurrentActiveConnections()) {
conn.close();
}
try {
BlobUtils.readFully(
is,
receiveBuffer,
2 * firstChunkLen,
data.length - 2 * firstChunkLen,
null);
// we tolerate that this succeeds, as the receiver socket may have buffered
// everything already, but in this case, also verify the contents
assertThat(receiveBuffer).isEqualTo(data);
} catch (IOException e) {
// expected
}
}
}
    /**
     * Tests the static {@link BlobClient#uploadFiles(InetSocketAddress, Configuration, JobID,
     * List)} helper.
     */
    @Test
    void testUploadJarFilesHelper() throws Exception {
        uploadJarFile(getBlobServer(), getBlobClientConfig());
    }
    /**
     * Tests the static {@link BlobClient#uploadFiles(InetSocketAddress, Configuration, JobID,
     * List)} helper.
     */
    static void uploadJarFile(BlobServer blobServer, Configuration blobClientConfig)
            throws Exception {
        // The temp file lives outside the JUnit temp dir; deleteOnExit keeps cleanup best-effort.
        final File testFile = File.createTempFile("testfile", ".dat");
        testFile.deleteOnExit();
        prepareTestFile(testFile);
        InetSocketAddress serverAddress =
                new InetSocketAddress(blobServer.getAddress().getHostName(), blobServer.getPort());
        // Upload twice to verify that repeated uploads of the same content also succeed.
        uploadJarFile(serverAddress, blobClientConfig, testFile);
        uploadJarFile(serverAddress, blobClientConfig, testFile);
    }
    /**
     * Uploads {@code testFile} for a fresh job via {@link BlobClient#uploadFiles} and validates
     * that the single returned key retrieves the identical file content.
     */
    private static void uploadJarFile(
            final InetSocketAddress serverAddress,
            final Configuration blobClientConfig,
            final File testFile)
            throws IOException {
        JobID jobId = new JobID();
        List<PermanentBlobKey> blobKeys =
                BlobClient.uploadFiles(
                        serverAddress,
                        blobClientConfig,
                        jobId,
                        Collections.singletonList(new Path(testFile.toURI())));
        // one input file must yield exactly one key
        assertThat(blobKeys).hasSize(1);
        try (BlobClient blobClient = new BlobClient(serverAddress, blobClientConfig)) {
            validateGetAndClose(blobClient.getInternal(jobId, blobKeys.get(0)), testFile);
        }
    }
/** Tests the socket operation timeout. */
@Test
void testSocketTimeout() throws IOException {
Configuration clientConfig = getBlobClientConfig();
int oldSoTimeout = clientConfig.get(BlobServerOptions.SO_TIMEOUT);
clientConfig.set(BlobServerOptions.SO_TIMEOUT, 50);
try (final TestBlobServer testBlobServer =
new TestBlobServer(
clientConfig,
tempDir.resolve("test_server").toFile(),
new VoidBlobStore(),
10_000L)) {
testBlobServer.start();
InetSocketAddress serverAddress =
new InetSocketAddress(
getBlobServer().getAddress().getHostName(), testBlobServer.getPort());
try (BlobClient client = new BlobClient(serverAddress, clientConfig)) {
client.getInternal(new JobID(), BlobKey.createKey(TRANSIENT_BLOB));
fail("Should throw an exception.");
} catch (Throwable t) {
assertThat(ExceptionUtils.findThrowable(t, java.net.SocketTimeoutException.class))
.isPresent();
}
} finally {
clientConfig.set(BlobServerOptions.SO_TIMEOUT, oldSoTimeout);
}
}
    /** A {@link BlobClient} must be able to connect via an unresolved socket address. */
    @Test
    void testUnresolvedInetSocketAddress() throws Exception {
        try (BlobClient client =
                new BlobClient(
                        InetSocketAddress.createUnresolved(
                                getBlobServer().getAddress().getHostName(),
                                getBlobServer().getPort()),
                        getBlobClientConfig())) {
            assertThat(client.isConnected()).isTrue();
        }
    }
/** BlobServer should return routable address when bound to wildcard. */
@Test
void testWildcardBindingAddress() throws Exception {
Configuration config = new Configuration();
config.set(JobManagerOptions.BIND_HOST, "0.0.0.0");
File tempServerDir = tempDir.resolve("wildcard_test").toFile();
tempServerDir.mkdirs();
try (BlobServer testServer = new BlobServer(config, tempServerDir, new VoidBlobStore())) {
testServer.start();
InetAddress address = testServer.getAddress();
assertThat(address.getHostAddress())
.as("Should not return wildcard address")
.isNotEqualTo("0.0.0.0");
}
}
/** BlobServer should return the configured bind address. */
@Test
void testReturnsConfiguredBindAddress() throws Exception {
String loopbackAddress = InetAddress.getLoopbackAddress().getHostAddress();
Configuration config = new Configuration();
config.set(JobManagerOptions.BIND_HOST, loopbackAddress);
File tempServerDir = tempDir.resolve("bind_address_test").toFile();
tempServerDir.mkdirs();
try (BlobServer testServer = new BlobServer(config, tempServerDir, new VoidBlobStore())) {
testServer.start();
InetAddress address = testServer.getAddress();
assertThat(address.getHostAddress())
.as("Should return the bound address")
.isEqualTo(loopbackAddress);
}
}
static
|
BlobClientTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/preprocessor/TestTagAddProcessor.java
|
{
"start": 1267,
"end": 1332
}
|
class ____ test the functionality of TagAddProcessor.
*/
public
|
will
|
java
|
micronaut-projects__micronaut-core
|
http-netty/src/main/java/io/micronaut/http/netty/channel/IoUringEventLoopGroupFactory.java
|
{
"start": 1660,
"end": 3091
}
|
class ____ implements EventLoopGroupFactory {
public static final String NAME = "io_uring";
@Override
public IoHandlerFactory createIoHandlerFactory() {
return IoUringIoHandler.newFactory();
}
@Override
public boolean isNative() {
return true;
}
@Override
public Class<? extends Channel> channelClass(NettyChannelType type) throws UnsupportedOperationException {
return switch (type) {
case SERVER_SOCKET -> IoUringServerSocketChannel.class;
case CLIENT_SOCKET -> IoUringSocketChannel.class;
case DATAGRAM_SOCKET -> IoUringDatagramChannel.class;
default -> throw new UnsupportedOperationException("Channel type not supported");
};
}
@Override
public Class<? extends Channel> channelClass(NettyChannelType type, @Nullable EventLoopGroupConfiguration configuration) {
return channelClass(type);
}
@Override
public Channel channelInstance(NettyChannelType type, @Nullable EventLoopGroupConfiguration configuration) {
return switch (type) {
case SERVER_SOCKET -> new IoUringServerSocketChannel();
case CLIENT_SOCKET -> new IoUringSocketChannel();
case DATAGRAM_SOCKET -> new IoUringDatagramChannel();
default -> throw new UnsupportedOperationException("Channel type not supported");
};
}
}
|
IoUringEventLoopGroupFactory
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/generate/GeneratedClass.java
|
{
"start": 3995,
"end": 4166
}
|
class ____ be generated.
* @param name the name of the nested class
* @param type a {@link Consumer} used to build the type
* @return an existing or newly generated
|
will
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/support/hierarchical/ForkJoinPoolHierarchicalTestExecutorService.java
|
{
"start": 2036,
"end": 8169
}
|
class ____ implements HierarchicalTestExecutorService {
// package-private for testing
final ForkJoinPool forkJoinPool;
private final TaskEventListener taskEventListener;
private final int parallelism;
private final ThreadLocal<ThreadLock> threadLocks = ThreadLocal.withInitial(ThreadLock::new);
/**
* Create a new {@code ForkJoinPoolHierarchicalTestExecutorService} based on
* the supplied {@link ConfigurationParameters}.
*
* @see DefaultParallelExecutionConfigurationStrategy
* @deprecated Please use
* {@link ParallelHierarchicalTestExecutorServiceFactory#create(ConfigurationParameters)}
* with configuration parameter
* {@value ParallelHierarchicalTestExecutorServiceFactory#EXECUTOR_SERVICE_PROPERTY_NAME}
* set to
* {@link ParallelExecutorServiceType#FORK_JOIN_POOL FORK_JOIN_POOL}
* instead.
*/
@API(status = DEPRECATED, since = "6.1")
@Deprecated(since = "6.1")
public ForkJoinPoolHierarchicalTestExecutorService(ConfigurationParameters configurationParameters) {
this(DefaultParallelExecutionConfigurationStrategy.toConfiguration(configurationParameters));
}
/**
* Create a new {@code ForkJoinPoolHierarchicalTestExecutorService} based on
* the supplied {@link ParallelExecutionConfiguration}.
*
* @since 1.7
* @deprecated Please use
* {@link ParallelHierarchicalTestExecutorServiceFactory#create(ParallelExecutorServiceType, ParallelExecutionConfiguration)}
* with
* {@link ParallelExecutorServiceType#FORK_JOIN_POOL ParallelExecutorServiceType.FORK_JOIN_POOL}
* instead.
*/
@API(status = DEPRECATED, since = "6.1")
@Deprecated(since = "6.1")
public ForkJoinPoolHierarchicalTestExecutorService(ParallelExecutionConfiguration configuration) {
this(configuration, TaskEventListener.NOOP);
}
ForkJoinPoolHierarchicalTestExecutorService(ParallelExecutionConfiguration configuration,
TaskEventListener taskEventListener) {
forkJoinPool = createForkJoinPool(configuration);
this.taskEventListener = taskEventListener;
parallelism = forkJoinPool.getParallelism();
LoggerFactory.getLogger(getClass()).config(() -> "Using ForkJoinPool with parallelism of " + parallelism);
}
private ForkJoinPool createForkJoinPool(ParallelExecutionConfiguration configuration) {
try {
return new ForkJoinPool(configuration.getParallelism(), new WorkerThreadFactory(), null, false,
configuration.getCorePoolSize(), configuration.getMaxPoolSize(), configuration.getMinimumRunnable(),
configuration.getSaturatePredicate(), configuration.getKeepAliveSeconds(), TimeUnit.SECONDS);
}
catch (Exception cause) {
throw new JUnitException("Failed to create ForkJoinPool", cause);
}
}
@Override
public Future<@Nullable Void> submit(TestTask testTask) {
ExclusiveTask exclusiveTask = new ExclusiveTask(testTask);
if (!isAlreadyRunningInForkJoinPool()) {
// ensure we're running inside the ForkJoinPool so we
// can use ForkJoinTask API in invokeAll etc.
return forkJoinPool.submit(exclusiveTask);
}
// Limit the amount of queued work so we don't consume dynamic tests too eagerly
// by forking only if the current worker thread's queue length is below the
// desired parallelism. This optimistically assumes that the already queued tasks
// can be stolen by other workers and the new task requires about the same
// execution time as the already queued tasks. If the other workers are busy,
// the parallelism is already at its desired level. If all already queued tasks
// can be stolen by otherwise idle workers and the new task takes significantly
// longer, parallelism will drop. However, that only happens if the enclosing test
// task is the only one remaining which should rarely be the case.
if (testTask.getExecutionMode() == CONCURRENT && ForkJoinTask.getSurplusQueuedTaskCount() < parallelism) {
return exclusiveTask.fork();
}
exclusiveTask.execSync();
return completedFuture(null);
}
private boolean isAlreadyRunningInForkJoinPool() {
return ForkJoinTask.getPool() == forkJoinPool;
}
@Override
public void invokeAll(List<? extends TestTask> tasks) {
if (tasks.size() == 1) {
new ExclusiveTask(tasks.get(0)).execSync();
return;
}
Deque<ExclusiveTask> isolatedTasks = new ArrayDeque<>();
Deque<ExclusiveTask> sameThreadTasks = new ArrayDeque<>();
Deque<ExclusiveTask> concurrentTasksInReverseOrder = new ArrayDeque<>();
forkConcurrentTasks(tasks, isolatedTasks, sameThreadTasks, concurrentTasksInReverseOrder);
executeSync(sameThreadTasks);
joinConcurrentTasksInReverseOrderToEnableWorkStealing(concurrentTasksInReverseOrder);
executeSync(isolatedTasks);
}
private void forkConcurrentTasks(List<? extends TestTask> tasks, Deque<ExclusiveTask> isolatedTasks,
Deque<ExclusiveTask> sameThreadTasks, Deque<ExclusiveTask> concurrentTasksInReverseOrder) {
for (TestTask testTask : tasks) {
ExclusiveTask exclusiveTask = new ExclusiveTask(testTask);
if (requiresGlobalReadWriteLock(testTask)) {
isolatedTasks.add(exclusiveTask);
}
else if (testTask.getExecutionMode() == SAME_THREAD) {
sameThreadTasks.add(exclusiveTask);
}
else {
exclusiveTask.fork();
concurrentTasksInReverseOrder.addFirst(exclusiveTask);
}
}
}
private static boolean requiresGlobalReadWriteLock(TestTask testTask) {
return testTask.getResourceLock().getResources().contains(GLOBAL_READ_WRITE);
}
private void executeSync(Deque<ExclusiveTask> tasks) {
for (ExclusiveTask task : tasks) {
task.execSync();
}
}
private void joinConcurrentTasksInReverseOrderToEnableWorkStealing(
Deque<ExclusiveTask> concurrentTasksInReverseOrder) {
for (ExclusiveTask forkedTask : concurrentTasksInReverseOrder) {
forkedTask.join();
resubmitDeferredTasks();
}
}
private void resubmitDeferredTasks() {
List<ExclusiveTask> deferredTasks = threadLocks.get().deferredTasks;
for (ExclusiveTask deferredTask : deferredTasks) {
if (!deferredTask.isDone()) {
deferredTask.fork();
}
}
deferredTasks.clear();
}
@Override
public void close() {
forkJoinPool.shutdownNow();
}
// this
|
ForkJoinPoolHierarchicalTestExecutorService
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/ModelConstants.java
|
{
"start": 846,
"end": 1138
}
|
interface ____ {
String DEPLOYER = "deployer";
/**
* Keep Dubbo running when spring is stopped
*/
String KEEP_RUNNING_ON_SPRING_CLOSED = "keepRunningOnSpringClosed";
String KEEP_RUNNING_ON_SPRING_CLOSED_KEY = "dubbo.module.keepRunningOnSpringClosed";
}
|
ModelConstants
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/CompletedCheckpointStoreTest.java
|
{
"start": 12018,
"end": 12218
}
|
class ____ is used
* when discarding. Spying on a regular {@link CompletedCheckpoint} instance with Mockito
* doesn't work, because it breaks serializability.
*/
protected static
|
loader
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-api/src/test/java/org/apache/dubbo/rpc/stub/StubProxyFactoryTest.java
|
{
"start": 1877,
"end": 2376
}
|
class ____ implements ServerService<MockInterface>, MockInterface {
@Override
public Invoker<MockInterface> getInvoker(URL url) {
return invoker2;
}
@Override
public ServiceDescriptor getServiceDescriptor() {
return null;
}
}
@Test
void getInvoker() {
URL url = Mockito.mock(URL.class);
Assertions.assertEquals(invoker2, factory.getInvoker(new MockStub(), MockInterface.class, url));
}
}
|
MockStub
|
java
|
apache__camel
|
components/camel-iso8583/src/test/java/org/apache/camel/dataformat/iso8583/Iso8583DataFormatSimpleTest.java
|
{
"start": 1128,
"end": 3309
}
|
class ____ extends CamelTestSupport {
@Test
public void testUnmarshal() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").message(0).body().isInstanceOf(Map.class);
getMockEndpoint("mock:result").message(0).body().simple("${body[op]}").isEqualTo("650000");
getMockEndpoint("mock:result").message(0).body().simple("${body[amount]}").isEqualTo("30.00");
getMockEndpoint("mock:result").message(0).body().simple("${body[ref]}").isEqualTo("001234425791");
getMockEndpoint("mock:result").message(0).body().simple("${body[response]}").isEqualTo("00");
getMockEndpoint("mock:result").message(0).body().simple("${body[terminal]}").isEqualTo("614209027600TéST");
getMockEndpoint("mock:result").message(0).body().simple("${body[currency]}").isEqualTo("484");
template.sendBody("direct:unmarshal", new File("src/test/resources/parse1.txt"));
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:unmarshal").unmarshal().iso8583("0210")
.transform().simple(
"""
{
"op": "${body.getAt(3).value}",
"amount": ${body.getAt(4).value.toPlainString},
"ref": "${body.getAt(37).value}",
"response": "${body.getAt(39).value}",
"terminal": "${body.getAt(41).value}",
"currency": "${body.getAt(49).value}"
}
""")
.log("${body}")
.unmarshal().json()
.to("mock:result");
}
};
}
}
|
Iso8583DataFormatSimpleTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
|
{
"start": 72708,
"end": 73113
}
|
class ____
implements SingleArcTransition<JobImpl, JobEvent> {
@Override
public void transition(JobImpl job, JobEvent event) {
job.addDiagnostic("Job received Kill in INITED state.");
job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
job.jobContext,
org.apache.hadoop.mapreduce.JobStatus.State.KILLED));
}
}
private static
|
KillInitedJobTransition
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/retry/RetryPolicyTests.java
|
{
"start": 1584,
"end": 2724
}
|
class ____ {
@Test
void withDefaults() {
var policy = RetryPolicy.withDefaults();
assertThat(policy.shouldRetry(new AssertionError())).isTrue();
assertThat(policy.shouldRetry(new IOException())).isTrue();
assertThat(policy.getBackOff())
.asInstanceOf(type(FixedBackOff.class))
.satisfies(backOff -> {
assertThat(backOff.getMaxAttempts()).isEqualTo(3);
assertThat(backOff.getInterval()).isEqualTo(1000);
});
}
@Test
void withMaxRetriesPreconditions() {
assertThatIllegalArgumentException()
.isThrownBy(() -> RetryPolicy.withMaxRetries(-1))
.withMessageStartingWith("Invalid maxRetries (-1)");
}
@Test
void withMaxRetries() {
var policy = RetryPolicy.withMaxRetries(5);
assertThat(policy.shouldRetry(new AssertionError())).isTrue();
assertThat(policy.shouldRetry(new IOException())).isTrue();
assertThat(policy.getBackOff())
.asInstanceOf(type(FixedBackOff.class))
.satisfies(backOff -> {
assertThat(backOff.getMaxAttempts()).isEqualTo(5);
assertThat(backOff.getInterval()).isEqualTo(1000);
});
}
}
@Nested
|
FactoryMethodTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/model/MutationType.java
|
{
"start": 196,
"end": 452
}
|
enum ____ {
INSERT( true ),
UPDATE( true ),
DELETE( false );
private final boolean canSkipTables;
MutationType(boolean canSkipTables) {
this.canSkipTables = canSkipTables;
}
public boolean canSkipTables() {
return canSkipTables;
}
}
|
MutationType
|
java
|
apache__kafka
|
coordinator-common/src/main/java/org/apache/kafka/coordinator/common/runtime/CoordinatorRecordSerde.java
|
{
"start": 1880,
"end": 4392
}
|
class ____ implements Serializer<CoordinatorRecord>, Deserializer<CoordinatorRecord> {
@Override
public byte[] serializeKey(CoordinatorRecord record) {
// Record does not accept a null key.
return MessageUtil.toCoordinatorTypePrefixedBytes(
record.key()
);
}
@Override
public byte[] serializeValue(CoordinatorRecord record) {
// Tombstone is represented with a null value.
if (record.value() == null) {
return null;
} else {
return MessageUtil.toVersionPrefixedBytes(
record.value().version(),
record.value().message()
);
}
}
@Override
public CoordinatorRecord deserialize(
ByteBuffer keyBuffer,
ByteBuffer valueBuffer
) throws RuntimeException {
final short recordType = readVersion(keyBuffer, "key");
final ApiMessage keyMessage = apiMessageKeyFor(recordType);
readMessage(keyMessage, keyBuffer, recordType, "key");
if (valueBuffer == null) {
return CoordinatorRecord.tombstone(keyMessage);
}
final ApiMessage valueMessage = apiMessageValueFor(recordType);
final short valueVersion = readVersion(valueBuffer, "value");
if (valueVersion < valueMessage.lowestSupportedVersion() || valueVersion > valueMessage.highestSupportedVersion()) {
throw new UnknownRecordVersionException(recordType, valueVersion);
}
readMessage(valueMessage, valueBuffer, valueVersion, "value");
return CoordinatorRecord.record(
keyMessage,
new ApiMessageAndVersion(valueMessage, valueVersion)
);
}
private short readVersion(ByteBuffer buffer, String name) throws RuntimeException {
try {
return buffer.getShort();
} catch (BufferUnderflowException ex) {
throw new RuntimeException(String.format("Could not read version from %s's buffer.", name));
}
}
private void readMessage(ApiMessage message, ByteBuffer buffer, short version, String name) throws RuntimeException {
try {
message.read(new ByteBufferAccessor(buffer), version);
} catch (RuntimeException ex) {
throw new RuntimeException(String.format("Could not read record with version %d from %s's buffer due to: %s.",
version, name, ex.getMessage()), ex);
}
}
/**
* Concrete child
|
CoordinatorRecordSerde
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/hash/AbstractByteHasherTest.java
|
{
"start": 1015,
"end": 3370
}
|
class ____ extends TestCase {
public void testBytes() {
TestHasher hasher = new TestHasher(); // byte order insignificant here
byte[] expected = {1, 2, 3, 4, 5, 6, 7, 8};
hasher.putByte((byte) 1);
hasher.putBytes(new byte[] {2, 3, 4, 5, 6});
hasher.putByte((byte) 7);
hasher.putBytes(new byte[] {});
hasher.putBytes(new byte[] {8});
hasher.assertBytes(expected);
}
public void testShort() {
TestHasher hasher = new TestHasher();
hasher.putShort((short) 0x0201);
hasher.assertBytes(new byte[] {1, 2});
}
public void testInt() {
TestHasher hasher = new TestHasher();
hasher.putInt(0x04030201);
hasher.assertBytes(new byte[] {1, 2, 3, 4});
}
public void testLong() {
TestHasher hasher = new TestHasher();
hasher.putLong(0x0807060504030201L);
hasher.assertBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
}
public void testChar() {
TestHasher hasher = new TestHasher();
hasher.putChar((char) 0x0201);
hasher.assertBytes(new byte[] {1, 2});
}
public void testString() {
Random random = new Random();
for (int i = 0; i < 100; i++) {
byte[] bytes = new byte[64];
random.nextBytes(bytes);
String s = new String(bytes, UTF_16LE); // so all random strings are valid
assertEquals(
new TestHasher().putUnencodedChars(s).hash(),
new TestHasher().putBytes(s.getBytes(UTF_16LE)).hash());
assertEquals(
new TestHasher().putUnencodedChars(s).hash(),
new TestHasher().putString(s, UTF_16LE).hash());
}
}
public void testFloat() {
TestHasher hasher = new TestHasher();
hasher.putFloat(Float.intBitsToFloat(0x04030201));
hasher.assertBytes(new byte[] {1, 2, 3, 4});
}
public void testDouble() {
TestHasher hasher = new TestHasher();
hasher.putDouble(Double.longBitsToDouble(0x0807060504030201L));
hasher.assertBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
}
public void testCorrectExceptions() {
TestHasher hasher = new TestHasher();
assertThrows(IndexOutOfBoundsException.class, () -> hasher.putBytes(new byte[8], -1, 4));
assertThrows(IndexOutOfBoundsException.class, () -> hasher.putBytes(new byte[8], 0, 16));
assertThrows(IndexOutOfBoundsException.class, () -> hasher.putBytes(new byte[8], 0, -1));
}
private
|
AbstractByteHasherTest
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/reflection/ReflectorTest.java
|
{
"start": 12379,
"end": 13439
}
|
class ____ {
public Integer isBool() {
return Integer.valueOf(1);
}
public Integer getBool() {
return Integer.valueOf(2);
}
public void setBool(boolean bool) {
}
public void setBool(Integer bool) {
}
}
ReflectorFactory reflectorFactory = new DefaultReflectorFactory();
Reflector reflector = reflectorFactory.findForClass(Bean.class);
Class<?> paramType = reflector.getSetterType("bool");
Object[] param = boolean.class.equals(paramType) ? new Boolean[] { true } : new Integer[] { 1 };
Invoker ambiguousInvoker = reflector.getSetInvoker("bool");
when(() -> ambiguousInvoker.invoke(new Bean(), param));
then(caughtException()).isInstanceOf(ReflectionException.class).hasMessageMatching(
"Ambiguous setters defined for property 'bool' in class '" + Bean.class.getName().replace("$", "\\$")
+ "' with types '(java.lang.Integer|boolean)' and '(java.lang.Integer|boolean)'\\.");
}
@Test
void shouldGetGenericGetter() throws Exception {
|
Bean
|
java
|
apache__dubbo
|
dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/Constants.java
|
{
"start": 853,
"end": 1640
}
|
interface ____ {
byte HESSIAN2_SERIALIZATION_ID = 2;
byte JAVA_SERIALIZATION_ID = 3;
byte COMPACTED_JAVA_SERIALIZATION_ID = 4;
byte FASTJSON_SERIALIZATION_ID = 6;
byte NATIVE_JAVA_SERIALIZATION_ID = 7;
byte KRYO_SERIALIZATION_ID = 8;
byte FST_SERIALIZATION_ID = 9;
byte NATIVE_HESSIAN_SERIALIZATION_ID = 10;
byte PROTOSTUFF_SERIALIZATION_ID = 12;
byte AVRO_SERIALIZATION_ID = 11;
byte GSON_SERIALIZATION_ID = 16;
byte JACKSON_SERIALIZATION_ID = 18;
byte PROTOBUF_JSON_SERIALIZATION_ID = 21;
byte PROTOBUF_SERIALIZATION_ID = 22;
byte FASTJSON2_SERIALIZATION_ID = 23;
byte KRYO_SERIALIZATION2_ID = 25;
byte MSGPACK_SERIALIZATION_ID = 27;
byte FURY_SERIALIZATION_ID = 28;
byte CUSTOM_MESSAGE_PACK_ID = 31;
}
|
Constants
|
java
|
elastic__elasticsearch
|
libs/dissect/src/test/java/org/elasticsearch/dissect/DissectKeyTests.java
|
{
"start": 761,
"end": 7945
}
|
class ____ extends ESTestCase {
public void testNoModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey(keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testAppendModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey("+" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.APPEND));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testAppendWithOrderModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
int length = randomIntBetween(1, 100);
DissectKey dissectKey = new DissectKey("+" + keyName + "/" + length);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.APPEND_WITH_ORDER));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(length));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testAppendWithOrderModifierNoName() {
int length = randomIntBetween(1, 100);
DissectException e = expectThrows(DissectException.class, () -> new DissectKey("+/" + length));
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
}
public void testOrderModifierWithoutAppend() {
String keyName = randomAlphaOfLengthBetween(1, 10);
int length = randomIntBetween(1, 100);
DissectException e = expectThrows(DissectException.class, () -> new DissectKey(keyName + "/" + length));
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
}
public void testFieldNameModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey("*" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.FIELD_NAME));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testFieldValueModifiers() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey("&" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.FIELD_VALUE));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testRightPaddingModifiers() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey(keyName + "->");
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(true));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
dissectKey = new DissectKey("*" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("&" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("+" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("?" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("+" + keyName + "/2->");
assertThat(dissectKey.skipRightPadding(), is(true));
}
public void testMultipleLeftModifiers() {
String keyName = randomAlphaOfLengthBetween(1, 10);
List<String> validModifiers = EnumSet.allOf(DissectKey.Modifier.class)
.stream()
.filter(m -> m.equals(DissectKey.Modifier.NONE) == false)
.map(DissectKey.Modifier::toString)
.collect(Collectors.toList());
String modifier1 = randomFrom(validModifiers);
String modifier2 = randomFrom(validModifiers);
DissectException e = expectThrows(DissectException.class, () -> new DissectKey(modifier1 + modifier2 + keyName));
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
}
public void testSkipKey() {
String keyName = "";
DissectKey dissectKey = new DissectKey(keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testNamedSkipKey() {
String keyName = "myname";
DissectKey dissectKey = new DissectKey("?" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NAMED_SKIP));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testSkipKeyWithPadding() {
String keyName = "";
DissectKey dissectKey = new DissectKey(keyName + "->");
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(true));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testNamedEmptySkipKeyWithPadding() {
String keyName = "";
DissectKey dissectKey = new DissectKey("?" + keyName + "->");
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NAMED_SKIP));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(true));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testInvalidModifiers() {
// should never happen due to regex
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DissectKey.Modifier.fromString("x"));
assertThat(e.getMessage(), CoreMatchers.containsString("invalid modifier"));
}
}
|
DissectKeyTests
|
java
|
grpc__grpc-java
|
xds/src/test/java/io/grpc/xds/XdsClientMetricReporterImplTest.java
|
{
"start": 17397,
"end": 17727
}
|
class ____ extends Handler {
List<LogRecord> logs = new ArrayList<>();
@Override
public void publish(LogRecord record) {
logs.add(record);
}
@Override
public void close() {}
@Override
public void flush() {}
public List<LogRecord> getLogs() {
return logs;
}
}
}
|
TestlogHandler
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/support/JspAwareRequestContext.java
|
{
"start": 3397,
"end": 3534
}
|
class ____ isolates the JSTL dependency.
* Just called to resolve the fallback locale if the JSTL API is present.
*/
private static
|
that
|
java
|
quarkusio__quarkus
|
integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/complex/Parent2.java
|
{
"start": 130,
"end": 518
}
|
class ____ extends ParentBase {
@Id
private Long id;
public Parent2(String name, String detail, int age, float test, TestEnum testEnum, Long id) {
super(name, detail, age, test, testEnum);
this.id = id;
}
public Parent2() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
|
Parent2
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/buffer/Buffer.java
|
{
"start": 1395,
"end": 27566
}
|
interface ____ extends ClusterSerializable, Shareable {
/**
* Create a buffer from the base 64 URL encoded {@code value}
* @param value the base64 encoded value
* @return the buffer
*/
static Buffer fromJson(String value) {
byte[] bytes = JsonUtil.BASE64_DECODER.decode(value);
return buffer(bytes);
}
/**
* Create a new, empty buffer.
*
* @return the buffer
*/
static Buffer buffer() {
return BufferInternal.buffer();
}
/**
* Create a new buffer given the initial size hint.
* <p>
* If you know the buffer will require a certain size, providing the hint can prevent unnecessary re-allocations
* as the buffer is written to and resized.
*
* @param initialSizeHint the hint, in bytes
* @return the buffer
*/
static Buffer buffer(int initialSizeHint) {
return BufferInternal.buffer(initialSizeHint);
}
/**
* Create a new buffer from a string. The string will be UTF-8 encoded into the buffer.
*
* @param string the string
* @return the buffer
*/
static Buffer buffer(String string) {
return BufferInternal.buffer(string);
}
/**
* Create a new buffer from a string and using the specified encoding.
* The string will be encoded into the buffer using the specified encoding.
*
* @param string the string
* @return the buffer
*/
static Buffer buffer(String string, String enc) {
return BufferInternal.buffer(string, enc);
}
/**
* Create a new buffer from a byte[]. The byte[] will be copied to form the buffer.
*
* @param bytes the byte array
* @return the buffer
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
static Buffer buffer(byte[] bytes) {
return BufferInternal.buffer(bytes);
}
/**
* Returns a {@code String} representation of the Buffer with the {@code UTF-8 }encoding
*/
String toString();
/**
* Returns a {@code String} representation of the Buffer with the encoding specified by {@code enc}
*/
String toString(String enc);
/**
* Returns a {@code String} representation of the Buffer with the encoding specified by {@code enc}
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
String toString(Charset enc);
/**
* Returns a {@link JsonObject} representation of this buffer's content.
*/
JsonObject toJsonObject();
/**
* Returns a {@link JsonArray} representation of this buffer's content.
*/
JsonArray toJsonArray();
/**
* Returns a Json value representation of this buffer's content.
*
* @return a Json value which can be a {@link JsonArray}, {@link JsonObject}, {@link String}, ... if the buffer contains an array, object, string, ...etc
*/
default Object toJsonValue() {
return Json.CODEC.fromBuffer(this, Object.class);
}
/**
* Encode the buffer bytes to their base 64 URL encoded representation.
*/
default String toJson() {
return JsonUtil.BASE64_ENCODER.encodeToString(getBytes());
}
/**
* Returns the {@code byte} at position {@code pos} in the Buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 1} is greater than the length of the Buffer.
*/
byte getByte(int pos);
/**
* Returns the unsigned {@code byte} at position {@code pos} in the Buffer, as a {@code short}.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 1} is greater than the length of the Buffer.
*/
short getUnsignedByte(int pos);
/**
* Returns the {@code int} at position {@code pos} in the Buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 4} is greater than the length of the Buffer.
*/
int getInt(int pos);
/**
* Gets a 32-bit integer at the specified absolute {@code index} in this buffer with Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 4} is greater than {@code this.capacity}
*/
int getIntLE(int pos);
/**
* Returns the unsigned {@code int} at position {@code pos} in the Buffer, as a {@code long}.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 4} is greater than the length of the Buffer.
*/
long getUnsignedInt(int pos);
/**
* Returns the unsigned {@code int} at position {@code pos} in the Buffer, as a {@code long} in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 4} is greater than the length of the Buffer.
*/
long getUnsignedIntLE(int pos);
/**
* Returns the {@code long} at position {@code pos} in the Buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 8} is greater than the length of the Buffer.
*/
long getLong(int pos);
/**
* Gets a 64-bit long integer at the specified absolute {@code index} in this buffer in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 8} is greater than the length of the Buffer.
*/
long getLongLE(int pos);
/**
* Returns the {@code double} at position {@code pos} in the Buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 8} is greater than the length of the Buffer.
*/
double getDouble(int pos);
/**
* Gets a double at the specified absolute {@code index} in this buffer in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 8} is greater than the length of the Buffer.
*/
double getDoubleLE(int pos);
/**
* Returns the {@code float} at position {@code pos} in the Buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 4} is greater than the length of the Buffer.
*/
float getFloat(int pos);
/**
* Gets a float at the specified absolute {@code index} in this buffer in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 4} is greater than the length of the Buffer.
*/
float getFloatLE(int pos);
/**
* Returns the {@code short} at position {@code pos} in the Buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 2} is greater than the length of the Buffer.
*/
short getShort(int pos);
/**
* Gets a 16-bit short integer at the specified absolute {@code index} in this buffer in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 2} is greater than the length of the Buffer.
*/
short getShortLE(int pos);
/**
* Returns the unsigned {@code short} at position {@code pos} in the Buffer, as an {@code int}.
*
* @throws IndexOutOfBoundsException if the specified {@code pos} is less than {@code 0} or {@code pos + 2} is greater than the length of the Buffer.
*/
int getUnsignedShort(int pos);
/**
* Gets an unsigned 16-bit short integer at the specified absolute {@code index} in this buffer in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 2} is greater than the length of the Buffer.
*/
int getUnsignedShortLE(int pos);
/**
* Gets a 24-bit medium integer at the specified absolute {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 3} is greater than the length of the Buffer.
*/
int getMedium(int pos);
/**
* Gets a 24-bit medium integer at the specified absolute {@code index} in this buffer in the Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 3} is greater than the length of the Buffer.
*/
int getMediumLE(int pos);
/**
* Gets an unsigned 24-bit medium integer at the specified absolute {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 3} is greater than the length of the Buffer.
*/
int getUnsignedMedium(int pos);
/**
* Gets an unsigned 24-bit medium integer at the specified absolute {@code index} in this buffer in Little Endian Byte Order.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or {@code index + 3} is greater than the length of the Buffer.
*/
int getUnsignedMediumLE(int pos);
/**
* Returns a copy of the entire Buffer as a {@code byte[]}
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
byte[] getBytes();
/**
* Returns a copy of a sub-sequence the Buffer as a {@code byte[]} starting at position {@code start}
* and ending at position {@code end - 1}
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
byte[] getBytes(int start, int end);
/**
* Transfers the content of the Buffer into a {@code byte[]}.
*
* @param dst the destination byte array
* @throws IndexOutOfBoundsException if the content of the Buffer cannot fit into the destination byte array
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer getBytes(byte[] dst);
/**
* Transfers the content of the Buffer into a {@code byte[]} at the specific destination.
*
* @param dst the destination byte array
* @throws IndexOutOfBoundsException if the content of the Buffer cannot fit into the destination byte array
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer getBytes(byte[] dst, int dstIndex);
/**
* Transfers the content of the Buffer starting at position {@code start} and ending at position {@code end - 1}
* into a {@code byte[]}.
*
* @param dst the destination byte array
* @throws IndexOutOfBoundsException if the content of the Buffer cannot fit into the destination byte array
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer getBytes(int start, int end, byte[] dst);
/**
* Transfers the content of the Buffer starting at position {@code start} and ending at position {@code end - 1}
* into a {@code byte[]} at the specific destination.
*
* @param dst the destination byte array
* @throws IndexOutOfBoundsException if the content of the Buffer cannot fit into the destination byte array
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer getBytes(int start, int end, byte[] dst, int dstIndex);
/**
* Returns a copy of a sub-sequence the Buffer as a {@link io.vertx.core.buffer.Buffer} starting at position {@code start}
* and ending at position {@code end - 1}
*/
Buffer getBuffer(int start, int end);
/**
* Returns a copy of a sub-sequence the Buffer as a {@code String} starting at position {@code start}
* and ending at position {@code end - 1} interpreted as a String in the specified encoding
*/
String getString(int start, int end, String enc);
/**
* Returns a copy of a sub-sequence the Buffer as a {@code String} starting at position {@code start}
* and ending at position {@code end - 1} interpreted as a String in UTF-8 encoding
*/
String getString(int start, int end);
/**
* Appends the specified {@code Buffer} to the end of this Buffer. The buffer will expand as necessary to accommodate
* any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendBuffer(Buffer buff);
/**
* Appends the specified {@code Buffer} starting at the {@code offset} using {@code len} to the end of this Buffer. The buffer will expand as necessary to accommodate
* any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendBuffer(Buffer buff, int offset, int len);
/**
* Appends the specified {@code byte[]} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer appendBytes(byte[] bytes);
/**
* Appends the specified number of bytes from {@code byte[]} to the end of the Buffer, starting at the given offset.
* The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer appendBytes(byte[] bytes, int offset, int len);
/**
* Appends the specified {@code byte} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendByte(byte b);
/**
* Appends the specified unsigned {@code byte} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendUnsignedByte(short b);
/**
* Appends the specified {@code int} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendInt(int i);
/**
* Appends the specified {@code int} to the end of the Buffer in the Little Endian Byte Order. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendIntLE(int i);
/**
* Appends the specified unsigned {@code int} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendUnsignedInt(long i);
/**
* Appends the specified unsigned {@code int} to the end of the Buffer in the Little Endian Byte Order. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendUnsignedIntLE(long i);
/**
* Appends the specified 24bit {@code int} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendMedium(int i);
/**
* Appends the specified 24bit {@code int} to the end of the Buffer in the Little Endian Byte Order. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendMediumLE(int i);
/**
* Appends the specified {@code long} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendLong(long l);
/**
* Appends the specified {@code long} to the end of the Buffer in the Little Endian Byte Order. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendLongLE(long l);
/**
* Appends the specified {@code short} to the end of the Buffer.The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendShort(short s);
/**
* Appends the specified {@code short} to the end of the Buffer in the Little Endian Byte Order.The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendShortLE(short s);
/**
* Appends the specified unsigned {@code short} to the end of the Buffer.The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendUnsignedShort(int s);
/**
* Appends the specified unsigned {@code short} to the end of the Buffer in the Little Endian Byte Order.The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendUnsignedShortLE(int s);
/**
* Appends the specified {@code float} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendFloat(float f);
/**
* Appends the specified unsigned {@code float} to the end of the Buffer in the Little Endian Byte Order.The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendFloatLE(float f);
/**
* Appends the specified {@code double} to the end of the Buffer. The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendDouble(double d);
/**
* Appends the specified unsigned {@code double} to the end of the Buffer in the Little Endian Byte Order.The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.
*/
@Fluent
Buffer appendDoubleLE(double d);
/**
* Appends the specified {@code String} to the end of the Buffer with the encoding as specified by {@code enc}.<p>
* The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together.<p>
*/
@Fluent
Buffer appendString(String str, String enc);
/**
* Appends the specified {@code String str} to the end of the Buffer with UTF-8 encoding.<p>
* The buffer will expand as necessary to accommodate any bytes written.<p>
* Returns a reference to {@code this} so multiple operations can be appended together<p>
*/
@Fluent
Buffer appendString(String str);
/**
* Sets the {@code byte} at position {@code pos} in the Buffer to the value {@code b}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setByte(int pos, byte b);
/**
* Sets the unsigned {@code byte} at position {@code pos} in the Buffer to the value {@code b}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setUnsignedByte(int pos, short b);
/**
* Sets the {@code int} at position {@code pos} in the Buffer to the value {@code i}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setInt(int pos, int i);
/**
* Sets the {@code int} at position {@code pos} in the Buffer to the value {@code i} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setIntLE(int pos, int i);
/**
* Sets the unsigned {@code int} at position {@code pos} in the Buffer to the value {@code i}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setUnsignedInt(int pos, long i);
/**
* Sets the unsigned {@code int} at position {@code pos} in the Buffer to the value {@code i} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setUnsignedIntLE(int pos, long i);
/**
* Sets the 24bit {@code int} at position {@code pos} in the Buffer to the value {@code i}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setMedium(int pos, int i);
/**
* Sets the 24bit {@code int} at position {@code pos} in the Buffer to the value {@code i}. in the Little Endian Byte Order<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setMediumLE(int pos, int i);
/**
* Sets the {@code long} at position {@code pos} in the Buffer to the value {@code l}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setLong(int pos, long l);
/**
* Sets the {@code long} at position {@code pos} in the Buffer to the value {@code l} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setLongLE(int pos, long l);
/**
* Sets the {@code double} at position {@code pos} in the Buffer to the value {@code d}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setDouble(int pos, double d);
/**
* Sets the {@code double} at position {@code pos} in the Buffer to the value {@code d} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setDoubleLE(int pos, double d);
/**
* Sets the {@code float} at position {@code pos} in the Buffer to the value {@code f}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setFloat(int pos, float f);
/**
* Sets the {@code float} at position {@code pos} in the Buffer to the value {@code f} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setFloatLE(int pos, float f);
/**
* Sets the {@code short} at position {@code pos} in the Buffer to the value {@code s}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setShort(int pos, short s);
/**
* Sets the {@code short} at position {@code pos} in the Buffer to the value {@code s} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setShortLE(int pos, short s);
/**
* Sets the unsigned {@code short} at position {@code pos} in the Buffer to the value {@code s}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setUnsignedShort(int pos, int s);
/**
* Sets the unsigned {@code short} at position {@code pos} in the Buffer to the value {@code s} in the Little Endian Byte Order.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setUnsignedShortLE(int pos, int s);
/**
* Sets the bytes at position {@code pos} in the Buffer to the bytes represented by the {@code Buffer b}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setBuffer(int pos, Buffer b);
/**
* Sets the bytes at position {@code pos} in the Buffer to the bytes represented by the {@code Buffer b} on the given {@code offset} and {@code len}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setBuffer(int pos, Buffer b, int offset, int len);
/**
* Sets the bytes at position {@code pos} in the Buffer to the bytes represented by the {@code ByteBuffer b}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer setBytes(int pos, ByteBuffer b);
/**
* Sets the bytes at position {@code pos} in the Buffer to the bytes represented by the {@code byte[] b}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer setBytes(int pos, byte[] b);
/**
* Sets the given number of bytes at position {@code pos} in the Buffer to the bytes represented by the {@code byte[] b}.<p></p>
* The buffer will expand as necessary to accommodate any value written.
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
@Fluent
Buffer setBytes(int pos, byte[] b, int offset, int len);
/**
* Sets the bytes at position {@code pos} in the Buffer to the value of {@code str} encoded in UTF-8.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setString(int pos, String str);
/**
* Sets the bytes at position {@code pos} in the Buffer to the value of {@code str} encoded in encoding {@code enc}.<p>
* The buffer will expand as necessary to accommodate any value written.
*/
@Fluent
Buffer setString(int pos, String str, String enc);
/**
* Returns the length of the buffer, measured in bytes.
* All positions are indexed from zero.
*/
int length();
/**
* Returns a copy of the entire Buffer.
*/
Buffer copy();
/**
* Returns a slice of this buffer. Modifying the content
* of the returned buffer or this buffer affects each other's content
* while they maintain separate indexes and marks.
*/
Buffer slice();
/**
* Returns a slice of this buffer. Modifying the content
* of the returned buffer or this buffer affects each other's content
* while they maintain separate indexes and marks.
*/
Buffer slice(int start, int end);
}
|
Buffer
|
java
|
netty__netty
|
codec-socks/src/test/java/io/netty/handler/codec/socksx/v5/Socks5PasswordAuthResponseDecoderTest.java
|
{
"start": 1006,
"end": 1897
}
|
class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(
Socks5PasswordAuthResponseDecoderTest.class);
private static void test(Socks5PasswordAuthStatus status) {
logger.debug("Testing Socks5PasswordAuthResponseDecoder with status: " + status);
Socks5PasswordAuthResponse msg = new DefaultSocks5PasswordAuthResponse(status);
EmbeddedChannel embedder = new EmbeddedChannel(new Socks5PasswordAuthResponseDecoder());
Socks5CommonTestUtils.writeFromServerToClient(embedder, msg);
msg = embedder.readInbound();
assertSame(msg.status(), status);
assertNull(embedder.readInbound());
}
@Test
public void testSocksCmdResponseDecoder() {
test(Socks5PasswordAuthStatus.SUCCESS);
test(Socks5PasswordAuthStatus.FAILURE);
}
}
|
Socks5PasswordAuthResponseDecoderTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/TimestampaddFunction.java
|
{
"start": 2131,
"end": 5295
}
|
class ____
extends AbstractSqmSelfRenderingFunctionDescriptor {
private final Dialect dialect;
private final SqlAstNodeRenderingMode[] renderingModes;
public TimestampaddFunction(Dialect dialect, TypeConfiguration typeConfiguration) {
this( dialect, typeConfiguration, SqlAstNodeRenderingMode.DEFAULT );
}
public TimestampaddFunction(Dialect dialect, TypeConfiguration typeConfiguration, SqlAstNodeRenderingMode... renderingModes) {
super(
"timestampadd",
new ArgumentTypesValidator(
StandardArgumentsValidators.exactly( 3 ),
TEMPORAL_UNIT, INTEGER, TEMPORAL
),
StandardFunctionReturnTypeResolvers.useArgType( 3 ),
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, TEMPORAL_UNIT, INTEGER, TEMPORAL )
);
this.dialect = dialect;
this.renderingModes = renderingModes;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
final DurationUnit field = (DurationUnit) arguments.get( 0 );
final Expression magnitude = (Expression) arguments.get(1);
final Expression to = (Expression) arguments.get( 2 );
patternRenderer( field.getUnit(), magnitude, to ).render( sqlAppender, arguments, walker );
}
PatternRenderer patternRenderer(TemporalUnit unit, Expression interval, Expression to) {
TemporalType temporalType = getSqlTemporalType( to.getExpressionType() );
IntervalType intervalType = getSqlIntervalType( interval.getExpressionType().getSingleJdbcMapping() );
return new PatternRenderer( dialect.timestampaddPattern( unit, temporalType, intervalType ), renderingModes );
}
// @Override
// protected <T> SelfRenderingSqlFunctionExpression<T> generateSqmFunctionExpression(
// List<SqmTypedNode<?>> arguments,
// ReturnableType<T> impliedResultType,
// QueryEngine queryEngine,
// TypeConfiguration typeConfiguration) {
// SqmExtractUnit<?> field = (SqmExtractUnit<?>) arguments.get(0);
// SqmExpression<?> to = (SqmExpression<?>) arguments.get(2);
// return queryEngine.getSqmFunctionRegistry()
// .patternDescriptorBuilder(
// "timestampadd",
// dialect.timestampaddPattern(
// field.getUnit(),
// typeConfiguration.isSqlTimestampType( to.getNodeType() )
// )
// )
// .setExactArgumentCount( 3 )
// .setReturnTypeResolver( useArgType( 3 ) )
// .descriptor()
// .generateSqmExpression(
// arguments,
// impliedResultType,
// queryEngine,
// typeConfiguration
// );
// }
public SelfRenderingFunctionSqlAstExpression expression(
ReturnableType<?> impliedResultType,
SqlAstNode... sqlAstArguments) {
Expression to = (Expression) sqlAstArguments[2];
return new SelfRenderingFunctionSqlAstExpression(
getName(),
this,
asList( sqlAstArguments ),
impliedResultType != null
? impliedResultType
: (ReturnableType<?>) to.getExpressionType().getSingleJdbcMapping(),
to.getExpressionType()
);
}
@Override
public String getArgumentListSignature() {
return "(TEMPORAL_UNIT field, INTEGER magnitude, TEMPORAL datetime)";
}
}
|
TimestampaddFunction
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/view/InternalResourceViewResolver.java
|
{
"start": 953,
"end": 1846
}
|
class ____ all views generated by this resolver can be specified
* via {@link #setViewClass}. See {@link UrlBasedViewResolver}'s javadoc for details.
* The default is {@link InternalResourceView}, or {@link JstlView} if the
* JSTL API is present.
*
* <p>BTW, it's good practice to put JSP files that just serve as views under
* WEB-INF, to hide them from direct access (for example, via a manually entered URL).
* Only controllers will be able to access them then.
*
* <p><b>Note:</b> When chaining ViewResolvers, an InternalResourceViewResolver
* always needs to be last, as it will attempt to resolve any view name,
* no matter whether the underlying resource actually exists.
*
* @author Juergen Hoeller
* @since 17.02.2003
* @see #setViewClass
* @see #setPrefix
* @see #setSuffix
* @see #setRequestContextAttribute
* @see InternalResourceView
* @see JstlView
*/
public
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowActionTests.java
|
{
"start": 810,
"end": 5366
}
|
class ____ extends AbstractActionTestCase<UnfollowAction> {
@Override
protected UnfollowAction doParseInstance(XContentParser parser) throws IOException {
return UnfollowAction.parse(parser);
}
@Override
protected UnfollowAction createTestInstance() {
return UnfollowAction.INSTANCE;
}
@Override
protected UnfollowAction mutateInstance(UnfollowAction instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<UnfollowAction> instanceReader() {
return in -> UnfollowAction.INSTANCE;
}
public void testToSteps() {
UnfollowAction action = createTestInstance();
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertThat(steps, notNullValue());
assertThat(steps.size(), equalTo(9));
StepKey expectedFirstStepKey = new StepKey(phase, UnfollowAction.NAME, UnfollowAction.CONDITIONAL_UNFOLLOW_STEP);
StepKey expectedSecondStepKey = new StepKey(phase, UnfollowAction.NAME, WaitForIndexingCompleteStep.NAME);
StepKey expectedThirdStepKey = new StepKey(phase, UnfollowAction.NAME, WaitUntilTimeSeriesEndTimePassesStep.NAME);
StepKey expectedFourthStepKey = new StepKey(phase, UnfollowAction.NAME, WaitForFollowShardTasksStep.NAME);
StepKey expectedFifthStepKey = new StepKey(phase, UnfollowAction.NAME, PauseFollowerIndexStep.NAME);
StepKey expectedSixthStepKey = new StepKey(phase, UnfollowAction.NAME, CloseFollowerIndexStep.NAME);
StepKey expectedSeventhStepKey = new StepKey(phase, UnfollowAction.NAME, UnfollowFollowerIndexStep.NAME);
StepKey expectedEighthStepKey = new StepKey(phase, UnfollowAction.NAME, OPEN_FOLLOWER_INDEX_STEP_NAME);
StepKey expectedNinthStepKey = new StepKey(phase, UnfollowAction.NAME, WaitForIndexColorStep.NAME);
BranchingStep firstStep = (BranchingStep) steps.get(0);
assertThat(firstStep.getKey(), equalTo(expectedFirstStepKey));
WaitForIndexingCompleteStep secondStep = (WaitForIndexingCompleteStep) steps.get(1);
assertThat(secondStep.getKey(), equalTo(expectedSecondStepKey));
assertThat(secondStep.getNextStepKey(), equalTo(expectedThirdStepKey));
WaitUntilTimeSeriesEndTimePassesStep thirdStep = (WaitUntilTimeSeriesEndTimePassesStep) steps.get(2);
assertThat(thirdStep.getKey(), equalTo(expectedThirdStepKey));
assertThat(thirdStep.getNextStepKey(), equalTo(expectedFourthStepKey));
WaitForFollowShardTasksStep fourthStep = (WaitForFollowShardTasksStep) steps.get(3);
assertThat(fourthStep.getKey(), equalTo(expectedFourthStepKey));
assertThat(fourthStep.getNextStepKey(), equalTo(expectedFifthStepKey));
PauseFollowerIndexStep fifthStep = (PauseFollowerIndexStep) steps.get(4);
assertThat(fifthStep.getKey(), equalTo(expectedFifthStepKey));
assertThat(fifthStep.getNextStepKey(), equalTo(expectedSixthStepKey));
CloseFollowerIndexStep sixthStep = (CloseFollowerIndexStep) steps.get(5);
assertThat(sixthStep.getKey(), equalTo(expectedSixthStepKey));
assertThat(sixthStep.getNextStepKey(), equalTo(expectedSeventhStepKey));
UnfollowFollowerIndexStep seventhStep = (UnfollowFollowerIndexStep) steps.get(6);
assertThat(seventhStep.getKey(), equalTo(expectedSeventhStepKey));
assertThat(seventhStep.getNextStepKey(), equalTo(expectedEighthStepKey));
OpenIndexStep eighthStep = (OpenIndexStep) steps.get(7);
assertThat(eighthStep.getKey(), equalTo(expectedEighthStepKey));
assertThat(eighthStep.getNextStepKey(), equalTo(expectedNinthStepKey));
WaitForIndexColorStep ninth = (WaitForIndexColorStep) steps.get(8);
assertThat(ninth.getColor(), is(ClusterHealthStatus.YELLOW));
assertThat(ninth.getKey(), equalTo(expectedNinthStepKey));
assertThat(ninth.getNextStepKey(), equalTo(nextStepKey));
}
@Override
protected void assertEqualInstances(UnfollowAction expectedInstance, UnfollowAction newInstance) {
assertThat(newInstance, equalTo(expectedInstance));
assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode()));
}
}
|
UnfollowActionTests
|
java
|
spring-projects__spring-framework
|
spring-context-indexer/src/test/java/org/springframework/context/index/test/TestCompiler.java
|
{
"start": 3150,
"end": 3526
}
|
class ____ {
private final JavaCompiler.CompilationTask task;
public TestCompilationTask(JavaCompiler.CompilationTask task) {
this.task = task;
}
public void call(Processor... processors) {
this.task.setProcessors(Arrays.asList(processors));
if (!this.task.call()) {
throw new IllegalStateException("Compilation failed");
}
}
}
}
|
TestCompilationTask
|
java
|
apache__camel
|
components/camel-tracing/src/test/java/org/apache/camel/tracing/decorators/VertxHttpSpanDecoratorTest.java
|
{
"start": 1124,
"end": 2797
}
|
class ____ {
private VertxHttpSpanDecorator decorator;
@BeforeEach
public void before() {
this.decorator = new VertxHttpSpanDecorator();
}
@Test
public void testMethodInHttpMethodParam() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Exchange exchange = Mockito.mock(Exchange.class);
Message message = Mockito.mock(Message.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("vertx-http://localhost:8080/endpoint?httpMethod=POST");
Mockito.when(exchange.getIn()).thenReturn(message);
Mockito.when(message.getHeader(Exchange.HTTP_URI, String.class))
.thenReturn("http://localhost:8080/endpoint?httpMethod=POST");
assertEquals(AbstractHttpSpanDecorator.POST_METHOD,
decorator.getHttpMethod(exchange, endpoint));
}
@Test
public void testMethodInHttpMethodParamUsingHeader() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Exchange exchange = Mockito.mock(Exchange.class);
Message message = Mockito.mock(Message.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("vertx-http://localhost:8080/endpoint?httpMethod=POST");
Mockito.when(exchange.getIn()).thenReturn(message);
Mockito.when(message.getHeader(Exchange.HTTP_METHOD)).thenReturn(HttpMethods.GET);
Mockito.when(message.getHeader(Exchange.HTTP_URI, String.class))
.thenReturn("vertx-http://localhost:8080/endpoint?httpMethod=POST");
assertEquals(AbstractHttpSpanDecorator.POST_METHOD,
decorator.getHttpMethod(exchange, endpoint));
}
}
|
VertxHttpSpanDecoratorTest
|
java
|
google__dagger
|
javatests/dagger/grpc/functional/server/VerifyInterceptor.java
|
{
"start": 1002,
"end": 1074
}
|
class ____ implements TestRule {
@Retention(RUNTIME)
@
|
VerifyInterceptor
|
java
|
apache__flink
|
flink-connectors/flink-hadoop-compatibility/src/main/java/org/apache/flink/api/java/typeutils/WritableTypeInfo.java
|
{
"start": 1969,
"end": 5127
}
|
class ____<T extends Writable> extends TypeInformation<T>
implements AtomicType<T> {
private static final long serialVersionUID = 1L;
private final Class<T> typeClass;
@PublicEvolving
public WritableTypeInfo(Class<T> typeClass) {
this.typeClass = checkNotNull(typeClass);
checkArgument(
Writable.class.isAssignableFrom(typeClass) && !typeClass.equals(Writable.class),
"WritableTypeInfo can only be used for subclasses of %s",
Writable.class.getName());
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
@PublicEvolving
public TypeComparator<T> createComparator(
boolean sortOrderAscending, ExecutionConfig executionConfig) {
if (Comparable.class.isAssignableFrom(typeClass)) {
return new WritableComparator(sortOrderAscending, typeClass);
} else {
throw new UnsupportedOperationException(
"Cannot create Comparator for "
+ typeClass.getCanonicalName()
+ ". "
+ "Class does not implement Comparable interface.");
}
}
@Override
@PublicEvolving
public boolean isBasicType() {
return false;
}
@Override
@PublicEvolving
public boolean isTupleType() {
return false;
}
@Override
@PublicEvolving
public int getArity() {
return 1;
}
@Override
@PublicEvolving
public int getTotalFields() {
return 1;
}
@Override
@PublicEvolving
public Class<T> getTypeClass() {
return this.typeClass;
}
@Override
@PublicEvolving
public boolean isKeyType() {
return Comparable.class.isAssignableFrom(typeClass);
}
@Override
@PublicEvolving
public TypeSerializer<T> createSerializer(SerializerConfig serializerConfig) {
return new WritableSerializer<>(typeClass);
}
@Override
public String toString() {
return "WritableType<" + typeClass.getName() + ">";
}
@Override
public int hashCode() {
return typeClass.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof WritableTypeInfo) {
@SuppressWarnings("unchecked")
WritableTypeInfo<T> writableTypeInfo = (WritableTypeInfo<T>) obj;
return writableTypeInfo.canEqual(this) && typeClass == writableTypeInfo.typeClass;
} else {
return false;
}
}
@Override
public boolean canEqual(Object obj) {
return obj instanceof WritableTypeInfo;
}
// --------------------------------------------------------------------------------------------
@PublicEvolving
static <T extends Writable> TypeInformation<T> getWritableTypeInfo(Class<T> typeClass) {
if (Writable.class.isAssignableFrom(typeClass) && !typeClass.equals(Writable.class)) {
return new WritableTypeInfo<T>(typeClass);
} else {
throw new InvalidTypesException(
"The given
|
WritableTypeInfo
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/ParseContext.java
|
{
"start": 1843,
"end": 12114
}
|
class ____ {
private static final Map<String, Schema.Type> PRIMITIVES = new HashMap<>();
static {
PRIMITIVES.put("string", Schema.Type.STRING);
PRIMITIVES.put("bytes", Schema.Type.BYTES);
PRIMITIVES.put("int", Schema.Type.INT);
PRIMITIVES.put("long", Schema.Type.LONG);
PRIMITIVES.put("float", Schema.Type.FLOAT);
PRIMITIVES.put("double", Schema.Type.DOUBLE);
PRIMITIVES.put("boolean", Schema.Type.BOOLEAN);
PRIMITIVES.put("null", Schema.Type.NULL);
}
private static final Set<Schema.Type> NAMED_SCHEMA_TYPES = EnumSet.of(Schema.Type.RECORD, Schema.Type.ENUM,
Schema.Type.FIXED);
/**
* Collection of old schemata. Can contain unresolved references if !isResolved.
*/
private final Map<String, Schema> oldSchemas;
/**
* Collection of new schemata. Can contain unresolved references.
*/
private final Map<String, Schema> newSchemas;
/**
* The name validator to use.
*/
// Visible for use in JsonSchemaParser
final NameValidator nameValidator;
/**
* Visitor that was used to resolve schemata with. If not available, some
* schemata in {@code oldSchemas} may not be fully resolved. If available, all
* schemata in {@code oldSchemas} are resolved, and {@code newSchemas} is empty.
* After visiting a schema, it can return the corresponding resolved schema for
* a schema that possibly contains unresolved references.
*/
private SchemaResolver.ResolvingVisitor resolvingVisitor;
/**
* Create a {@code ParseContext} for the default/{@code null} namespace, using
* default name validation for new schemata.
*/
public ParseContext() {
this(NameValidator.UTF_VALIDATOR);
}
/**
* Create a {@code ParseContext} using the specified name validation for new
* schemata.
*/
public ParseContext(NameValidator nameValidator) {
this(requireNonNull(nameValidator), new LinkedHashMap<>(), new LinkedHashMap<>());
}
private ParseContext(NameValidator nameValidator, Map<String, Schema> oldSchemas, Map<String, Schema> newSchemas) {
this.nameValidator = nameValidator;
this.oldSchemas = oldSchemas;
this.newSchemas = newSchemas;
resolvingVisitor = null;
}
/**
* Tell whether this context contains a schema with the given name.
*
* @param name a schema name
* @return {@code true} if the context contains a schema with this name,
* {@code false} otherwise
*/
public boolean contains(String name) {
return PRIMITIVES.containsKey(name) || oldSchemas.containsKey(name) || newSchemas.containsKey(name);
}
/**
* <p>
* Find a schema by name and namespace.
* </p>
*
* <p>
* That is:
* </p>
*
* <ol>
* <li>If {@code name} is a primitive name, return a (new) schema for it</li>
* <li>Otherwise, determine the full schema name (using the given
* {@code namespace} if necessary), and find it</li>
* <li>If no schema was found and {@code name} is a simple name, find the schema
* in the default (null) namespace</li>
* <li>If still no schema was found, return an unresolved reference for the full
* schema name (see step 2)</li>
* </ol>
*
* <p>
* Note: as an unresolved reference might be returned, the schema is not
* directly usable. Please {@link #put(Schema)} the schema using it in the
* context. The {@link SchemaParser} and protocol parsers will ensure you'll
* only get a resolved schema that is usable.
* </p>
*
* @param name the schema name to find
* @param namespace the namespace to find the schema against
* @return the schema, or an unresolved reference
*/
public Schema find(String name, String namespace) {
Schema.Type type = PRIMITIVES.get(name);
if (type != null) {
return Schema.create(type);
}
String fullName = fullName(name, namespace);
Schema schema = getNamedSchema(fullName);
if (schema == null) {
schema = getNamedSchema(name);
}
return schema != null ? schema : SchemaResolver.unresolvedSchema(fullName);
}
private String fullName(String name, String namespace) {
if (namespace != null && name.lastIndexOf('.') < 0) {
return namespace + "." + name;
}
return name;
}
/**
* Get a schema by name. Note that the schema might not (yet) be resolved/usable
* until {@link #resolveAllSchemas()} has been called.
*
* @param fullName a full schema name
* @return the schema, if known
*/
public Schema getNamedSchema(String fullName) {
Schema schema = oldSchemas.get(fullName);
if (schema == null) {
schema = newSchemas.get(fullName);
}
return schema;
}
/**
* Put the schema into this context. This is an idempotent operation: it only
* fails if this context already has a different schema with the same name.
*
* <p>
* Note that although this method works for all types except for arrays, maps
* and unions, all primitive types have already been defined upon construction.
* This means you cannot redefine a 'long' with a logical timestamp type.
* </p>
*
* @param schema the schema to put into the context
*/
public void put(Schema schema) {
if (!(NAMED_SCHEMA_TYPES.contains(schema.getType()))) {
throw new AvroTypeException("You can only put a named schema into the context");
}
String fullName = requireValidFullName(schema.getFullName());
Schema alreadyKnownSchema = oldSchemas.get(fullName);
if (alreadyKnownSchema != null) {
if (!schema.equals(alreadyKnownSchema)) {
throw new SchemaParseException("Can't redefine: " + fullName);
}
} else {
resolvingVisitor = null;
Schema previouslyAddedSchema = newSchemas.putIfAbsent(fullName, schema);
if (previouslyAddedSchema != null && !previouslyAddedSchema.equals(schema)) {
throw new SchemaParseException("Can't redefine: " + fullName);
}
}
}
private String requireValidFullName(String fullName) {
String[] names = fullName.split("\\.");
for (int i = 0; i < names.length - 1; i++) {
validateName(names[i], "Namespace part");
}
validateName(names[names.length - 1], "Name");
return fullName;
}
private void validateName(String name, String typeOfName) {
NameValidator.Result result = nameValidator.validate(name);
if (!result.isOK()) {
throw new SchemaParseException(typeOfName + " \"" + name + "\" is invalid: " + result.getErrors());
}
}
public boolean hasNewSchemas() {
return !newSchemas.isEmpty();
}
public void commit() {
oldSchemas.putAll(newSchemas);
newSchemas.clear();
}
public SchemaParser.ParseResult commit(Schema mainSchema) {
List<Schema> parsedNamedSchemas = new ArrayList<>(newSchemas.values());
SchemaParser.ParseResult parseResult = new SchemaParser.ParseResult() {
@Override
public Schema mainSchema() {
return mainSchema == null ? null : resolve(mainSchema);
}
@Override
public List<Schema> parsedNamedSchemas() {
return parsedNamedSchemas.stream().map(ParseContext.this::resolve).collect(Collectors.toList());
}
};
commit();
return parseResult;
}
public void rollback() {
newSchemas.clear();
}
/**
* Resolve all (named) schemas that were parsed. This resolves all forward
* references, even if parsed from different files. Note: the context must be
* committed for this method to work.
*
* @return all parsed schemas
* @throws AvroTypeException if a schema reference cannot be resolved
*/
public List<Schema> resolveAllSchemas() {
ensureSchemasAreResolved();
return new ArrayList<>(oldSchemas.values());
}
private void ensureSchemasAreResolved() {
if (hasNewSchemas()) {
throw new IllegalStateException("Schemas cannot be resolved unless the ParseContext is committed.");
}
if (resolvingVisitor == null) {
NameValidator saved = Schema.getNameValidator();
try {
// Ensure we use the same validation when copying schemas as when they were
// defined.
Schema.setNameValidator(nameValidator);
SchemaResolver.ResolvingVisitor visitor = new SchemaResolver.ResolvingVisitor(oldSchemas::get);
oldSchemas.values().forEach(schema -> Schemas.visit(schema, visitor));
// Before this point is where we can get exceptions due to resolving failures.
for (Map.Entry<String, Schema> entry : oldSchemas.entrySet()) {
entry.setValue(visitor.getResolved(entry.getValue()));
}
resolvingVisitor = visitor;
} finally {
Schema.setNameValidator(saved);
}
}
}
/**
* Resolve unresolved references in a schema <em>that was parsed for this
* context</em> using the types known to this context. Note: this method will
* ensure all known schemas are resolved, or throw, and thus requires the
* context to be committed.
*
* @param schema the schema resolve
* @return the fully resolved schema
* @throws AvroTypeException if a schema reference cannot be resolved
*/
public Schema resolve(Schema schema) {
ensureSchemasAreResolved();
// As all (named) schemas are resolved now, we know:
// — All named types are either in oldSchemas or unknown.
// — All unnamed types can be visited&resolved without validation.
if (NAMED_SCHEMA_TYPES.contains(schema.getType()) && schema.getFullName() != null) {
return requireNonNull(oldSchemas.get(schema.getFullName()), () -> "Unknown schema: " + schema.getFullName());
} else {
// Unnamed or anonymous schema
// (protocol message request parameters are anonymous records)
Schemas.visit(schema, resolvingVisitor); // This field is set, as ensureSchemasAreResolved(); was called.
return resolvingVisitor.getResolved(schema);
}
}
/**
* Return all known types by their fullname. Warning: this returns all types,
* even uncommitted ones, including unresolved references!
*
* @return a map of all types by their name
*/
public Map<String, Schema> typesByName() {
LinkedHashMap<String, Schema> result = new LinkedHashMap<>();
result.putAll(oldSchemas);
result.putAll(newSchemas);
return result;
}
}
|
ParseContext
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/watermark/AlignedWatermarkCombiner.java
|
{
"start": 1461,
"end": 2779
}
|
class ____ implements WatermarkCombiner {
/** The number of upstream input channels. */
private int numberOfInputChannels;
/** A bitset to record whether the watermark has been received from each channel. */
private final BitSet hasReceiveWatermarks;
/** The input gate resume callback. */
private Runnable gateResumer;
public AlignedWatermarkCombiner(int numberOfInputChannels, Runnable gateResumer) {
this.numberOfInputChannels = numberOfInputChannels;
this.hasReceiveWatermarks = new BitSet(numberOfInputChannels);
this.gateResumer = gateResumer;
}
@Override
public void combineWatermark(
Watermark watermark, int channelIndex, Consumer<Watermark> watermarkEmitter)
throws Exception {
// mark the channel has received the watermark
hasReceiveWatermarks.set(channelIndex);
// once receive all watermarks, perform combine process
if (hasReceiveWatermarks.cardinality() == numberOfInputChannels) {
// send the combined watermark to downstream
watermarkEmitter.accept(watermark);
// clear the received watermarks
hasReceiveWatermarks.clear();
// resume input gate
gateResumer.run();
}
}
}
|
AlignedWatermarkCombiner
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/PropertySourceAnnotationTests.java
|
{
"start": 20538,
"end": 20813
}
|
class ____ {
}
@Configuration
@PropertySources({
@PropertySource("classpath:org/springframework/context/annotation/p2.properties"),
@PropertySource("classpath:org/springframework/context/annotation/p1.properties")
})
static
|
ConfigWithSameSourceImportedInDifferentOrder
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/annotations/RegisterForProxy.java
|
{
"start": 1203,
"end": 1382
}
|
interface ____ {
/**
* The {@link RegisterForProxy} instances.
*
* @return the instances
*/
RegisterForProxy[] value();
}
}
|
List
|
java
|
apache__flink
|
flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/utils/FSDataInputStreamWrapper.java
|
{
"start": 1180,
"end": 1964
}
|
class ____ implements Closeable, SeekableInput {
private final FSDataInputStream stream;
private final long len;
public FSDataInputStreamWrapper(FSDataInputStream stream, long len) {
this.stream = stream;
this.len = len;
}
@Override
public long length() throws IOException {
return this.len;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
return stream.read(b, off, len);
}
@Override
public void seek(long p) throws IOException {
stream.seek(p);
}
@Override
public long tell() throws IOException {
return stream.getPos();
}
@Override
public void close() throws IOException {
stream.close();
}
}
|
FSDataInputStreamWrapper
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/parallel/ParallelFilterTry.java
|
{
"start": 6253,
"end": 8917
}
|
class ____<T> extends BaseFilterSubscriber<T> {
final ConditionalSubscriber<? super T> downstream;
ParallelFilterConditionalSubscriber(ConditionalSubscriber<? super T> actual,
Predicate<? super T> predicate,
BiFunction<? super Long, ? super Throwable, ParallelFailureHandling> errorHandler) {
super(predicate, errorHandler);
this.downstream = actual;
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
}
}
@Override
public boolean tryOnNext(T t) {
if (!done) {
long retries = 0L;
for (;;) {
boolean b;
try {
b = predicate.test(t);
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
ParallelFailureHandling h;
try {
h = Objects.requireNonNull(errorHandler.apply(++retries, ex), "The errorHandler returned a null ParallelFailureHandling");
} catch (Throwable exc) {
Exceptions.throwIfFatal(exc);
cancel();
onError(new CompositeException(ex, exc));
return false;
}
switch (h) {
case RETRY:
continue;
case SKIP:
return false;
case STOP:
cancel();
onComplete();
return false;
default:
cancel();
onError(ex);
return false;
}
}
return b && downstream.tryOnNext(t);
}
}
return false;
}
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
done = true;
downstream.onError(t);
}
@Override
public void onComplete() {
if (!done) {
done = true;
downstream.onComplete();
}
}
}}
|
ParallelFilterConditionalSubscriber
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/ConfigurableRocksDBOptionsFactory.java
|
{
"start": 1070,
"end": 1722
}
|
interface ____ extends RocksDBOptionsFactory {
/**
* Creates a variant of the options factory that applies additional configuration parameters.
*
* <p>If no configuration is applied, or if the method directly applies configuration values to
* the (mutable) options factory object, this method may return the original options factory
* object. Otherwise it typically returns a modified copy.
*
* @param configuration The configuration to pick the values from.
* @return A reconfigured options factory.
*/
RocksDBOptionsFactory configure(ReadableConfig configuration);
}
|
ConfigurableRocksDBOptionsFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/RandomizedTimeSeriesIT.java
|
{
"start": 2351,
"end": 6001
}
|
class ____ extends AbstractEsqlIntegTestCase {
private static final Long NUM_DOCS = 2000L;
private static final Long TIME_RANGE_SECONDS = 3600L;
private static final String DATASTREAM_NAME = "tsit_ds";
private static final Integer SECONDS_IN_WINDOW = 60;
private static final List<Tuple<String, Integer>> WINDOW_OPTIONS = List.of(
Tuple.tuple("10 seconds", 10),
Tuple.tuple("30 seconds", 30),
Tuple.tuple("1 minute", 60),
Tuple.tuple("2 minutes", 120),
Tuple.tuple("3 minutes", 180),
Tuple.tuple("5 minutes", 300),
Tuple.tuple("10 minutes", 600),
Tuple.tuple("30 minutes", 1800),
Tuple.tuple("1 hour", 3600)
);
private static final List<Tuple<String, DeltaAgg>> DELTA_AGG_OPTIONS = List.of(
Tuple.tuple("rate", DeltaAgg.RATE),
Tuple.tuple("irate", DeltaAgg.IRATE),
Tuple.tuple("increase", DeltaAgg.INCREASE),
Tuple.tuple("idelta", DeltaAgg.IDELTA),
Tuple.tuple("delta", DeltaAgg.DELTA)
);
private static final Map<DeltaAgg, String> DELTA_AGG_METRIC_MAP = Map.of(
DeltaAgg.RATE,
"counterl_hdd.bytes.read",
DeltaAgg.IRATE,
"counterl_hdd.bytes.read",
DeltaAgg.IDELTA,
"gaugel_hdd.bytes.used",
DeltaAgg.INCREASE,
"counterl_hdd.bytes.read",
DeltaAgg.DELTA,
"gaugel_hdd.bytes.used"
);
private List<XContentBuilder> documents;
private TSDataGenerationHelper dataGenerationHelper;
List<List<Object>> consumeRows(EsqlQueryResponse resp) {
List<List<Object>> rows = new ArrayList<>();
resp.rows().forEach(rowIter -> {
List<Object> row = new ArrayList<>();
rowIter.forEach(row::add);
rows.add(row);
});
return rows;
}
Map<List<String>, List<Map<String, Object>>> groupedRows(
List<XContentBuilder> docs,
List<String> groupingAttributes,
int secondsInWindow
) {
Map<List<String>, List<Map<String, Object>>> groupedMap = new HashMap<>();
for (XContentBuilder doc : docs) {
Map<String, Object> docMap = XContentHelper.convertToMap(BytesReference.bytes(doc), false, XContentType.JSON).v2();
@SuppressWarnings("unchecked")
List<String> groupingPairs = groupingAttributes.stream()
.map(
attr -> Tuple.tuple(
attr,
((Map<String, Object>) docMap.getOrDefault("attributes", Map.of())).getOrDefault(attr, "").toString()
)
)
.filter(val -> val.v2().isEmpty() == false) // Filter out empty values
.map(tup -> tup.v1() + ":" + tup.v2())
.toList();
long timeBucketStart = windowStart(docMap.get("@timestamp"), secondsInWindow);
var keyList = new ArrayList<>(groupingPairs);
keyList.add(Long.toString(timeBucketStart));
groupedMap.computeIfAbsent(keyList, k -> new ArrayList<>()).add(docMap);
}
return groupedMap;
}
static Long windowStart(Object timestampCell, int secondsInWindow) {
// This calculation looks a little weird, but it simply performs an integer division that
// throws away the remainder of the division by secondsInWindow. It rounds down
// the timestamp to the nearest multiple of secondsInWindow.
var timestampSeconds = Instant.parse((String) timestampCell).toEpochMilli() / 1000;
return (timestampSeconds / secondsInWindow) * secondsInWindow;
}
|
RandomizedTimeSeriesIT
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/ast/WildcardElement.java
|
{
"start": 1934,
"end": 2501
}
|
class ____ type
* @return the most upper type
*/
@NonNull
static <T extends ClassElement> T findUpperType(@NonNull List<T> bounds1, @NonNull List<T> bounds2) {
T upper = null;
for (T lowerBound : bounds2) {
if (upper == null || lowerBound.isAssignable(upper)) {
upper = lowerBound;
}
}
for (T upperBound : bounds1) {
if (upper == null || upperBound.isAssignable(upper)) {
upper = upperBound;
}
}
return upper;
}
}
|
element
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/processors/FlowableProcessor.java
|
{
"start": 1147,
"end": 3120
}
|
class ____<@NonNull T> extends Flowable<T> implements Processor<T, T>, FlowableSubscriber<T> {
/**
* Returns true if the FlowableProcessor has subscribers.
* <p>The method is thread-safe.
* @return true if the FlowableProcessor has subscribers
*/
@CheckReturnValue
public abstract boolean hasSubscribers();
/**
* Returns true if the FlowableProcessor has reached a terminal state through an error event.
* <p>The method is thread-safe.
* @return true if the FlowableProcessor has reached a terminal state through an error event
* @see #getThrowable()
* @see #hasComplete()
*/
@CheckReturnValue
public abstract boolean hasThrowable();
/**
* Returns true if the FlowableProcessor has reached a terminal state through a complete event.
* <p>The method is thread-safe.
* @return true if the FlowableProcessor has reached a terminal state through a complete event
* @see #hasThrowable()
*/
@CheckReturnValue
public abstract boolean hasComplete();
/**
* Returns the error that caused the FlowableProcessor to terminate or null if the FlowableProcessor
* hasn't terminated yet.
* <p>The method is thread-safe.
* @return the error that caused the FlowableProcessor to terminate or null if the FlowableProcessor
* hasn't terminated yet
*/
@Nullable
@CheckReturnValue
public abstract Throwable getThrowable();
/**
* Wraps this FlowableProcessor and serializes the calls to the onSubscribe, onNext, onError and
* onComplete methods, making them thread-safe.
* <p>The method is thread-safe.
* @return the wrapped and serialized FlowableProcessor
*/
@NonNull
@CheckReturnValue
public final FlowableProcessor<T> toSerialized() {
if (this instanceof SerializedProcessor) {
return this;
}
return new SerializedProcessor<>(this);
}
}
|
FlowableProcessor
|
java
|
elastic__elasticsearch
|
build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java
|
{
"start": 2136,
"end": 10929
}
|
/**
 * Base Gradle plugin for building Elasticsearch plugins. Applies the
 * prerequisite plugins (Java, test clusters, jar-hell checks), registers the
 * {@code esplugin} extension, and wires the tasks that assemble the plugin
 * bundle as both a zip and an exploded directory.
 */
class ____ implements Plugin<Project> {
    public static final String PLUGIN_EXTENSION_NAME = "esplugin";
    public static final String BUNDLE_PLUGIN_TASK_NAME = "bundlePlugin";
    public static final String EXPLODED_BUNDLE_PLUGIN_TASK_NAME = "explodedBundlePlugin";
    public static final String EXPLODED_BUNDLE_CONFIG = "explodedBundleZip";
    // Marks the exploded-bundle directory artifact so consumers can select it by attribute.
    public static final Attribute<Boolean> EXPLODED_PLUGIN_BUNDLE_ATTRIBUTE = Attribute.of("exploded-plugin-bundle", Boolean.class);
    protected final ProviderFactory providerFactory;
    @Inject
    public BasePluginBuildPlugin(ProviderFactory providerFactory) {
        this.providerFactory = providerFactory;
    }
    @Override
    public void apply(final Project project) {
        // Prerequisite plugins must be applied before our tasks reference their conventions.
        project.getPluginManager().apply(JavaPlugin.class);
        project.getPluginManager().apply(TestClustersPlugin.class);
        project.getPluginManager().apply(CompileOnlyResolvePlugin.class);
        project.getPluginManager().apply(JarHellPlugin.class);
        project.getPluginManager().apply(GradleTestPolicySetupPlugin.class);
        var extension = project.getExtensions()
            .create(BasePluginBuildPlugin.PLUGIN_EXTENSION_NAME, PluginPropertiesExtension.class, project);
        final var bundleTask = createBundleTasks(project, extension);
        // Make the default configuration carry the runtime classpath so project dependencies resolve to it.
        project.getConfigurations().getByName("default").extendsFrom(project.getConfigurations().getByName("runtimeClasspath"));
        // allow running ES with this plugin in the foreground of a build
        var testClusters = testClusters(project, TestClustersPlugin.EXTENSION_NAME);
        var runCluster = testClusters.register("runTask", c -> {
            // TODO: use explodedPlugin here for modules
            if (GradleUtils.isModuleProject(project.getPath())) {
                c.module(bundleTask.flatMap((Transformer<Provider<RegularFile>, Zip>) zip -> zip.getArchiveFile()));
            } else {
                c.plugin(bundleTask.flatMap((Transformer<Provider<RegularFile>, Zip>) zip -> zip.getArchiveFile()));
            }
        });
        project.getTasks().register("run", RunTask.class, r -> {
            r.useCluster(runCluster);
            r.dependsOn(project.getTasks().named(BUNDLE_PLUGIN_TASK_NAME));
        });
    }
    @SuppressWarnings("unchecked")
    private static NamedDomainObjectContainer<ElasticsearchCluster> testClusters(Project project, String extensionName) {
        return (NamedDomainObjectContainer<ElasticsearchCluster>) project.getExtensions().getByName(extensionName);
    }
    /**
     * Adds bundle tasks which builds the dir and zip containing the plugin jars,
     * metadata, properties, and packaging files
     *
     * @return the provider for the {@code bundlePlugin} zip task
     */
    private TaskProvider<Zip> createBundleTasks(final Project project, PluginPropertiesExtension extension) {
        final var pluginMetadata = project.file("src/main/plugin-metadata");
        // Generates the plugin-descriptor properties file from the esplugin extension values.
        final var buildProperties = project.getTasks().register("pluginProperties", GeneratePluginPropertiesTask.class, task -> {
            task.getPluginName().set(providerFactory.provider(extension::getName));
            task.getPluginDescription().set(providerFactory.provider(extension::getDescription));
            task.getPluginVersion().set(providerFactory.provider(extension::getVersion));
            task.getElasticsearchVersion().set(Version.fromString(VersionProperties.getElasticsearch()).toString());
            var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class);
            task.getJavaVersion().set(providerFactory.provider(() -> javaExtension.getTargetCompatibility().toString()));
            task.getExtendedPlugins().set(providerFactory.provider(extension::getExtendedPlugins));
            task.getHasNativeController().set(providerFactory.provider(extension::isHasNativeController));
            task.getRequiresKeystore().set(providerFactory.provider(extension::isRequiresKeystore));
            task.getIsLicensed().set(providerFactory.provider(extension::isLicensed));
            var mainSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName(SourceSet.MAIN_SOURCE_SET_NAME);
            // Only module-info.class is needed to detect whether the plugin is modularized.
            FileCollection moduleInfoFile = mainSourceSet.getOutput()
                .getClassesDirs()
                .getAsFileTree()
                .matching(p -> p.include("module-info.class"));
            task.getModuleInfoFile().setFrom(moduleInfoFile);
        });
        // add the plugin properties and metadata to test resources, so unit tests can
        // know about the plugin (used by test security code to statically initialize the plugin in unit tests)
        var testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test");
        Map<String, Object> map = Map.of("builtBy", buildProperties);
        File generatedResources = new File(project.getBuildDir(), "generated-resources");
        testSourceSet.getOutput().dir(map, generatedResources);
        testSourceSet.getResources().srcDir(pluginMetadata);
        // expose the plugin properties and metadata for other plugins to use in their tests.
        // See TestWithDependenciesPlugin for how this is used.
        project.getConfigurations().create("pluginMetadata", conf -> {
            conf.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true);
            conf.getAttributes()
                .attribute(
                    LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE,
                    project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES)
                );
        });
        project.getArtifacts().add("pluginMetadata", new File(project.getBuildDir(), "generated-descriptor"), artifact -> {
            artifact.builtBy(buildProperties);
        });
        project.getArtifacts().add("pluginMetadata", pluginMetadata);
        // getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, "plugin-metadata");
        var bundleSpec = createBundleSpec(project, pluginMetadata, buildProperties);
        extension.setBundleSpec(bundleSpec);
        // create the actual bundle task, which zips up all the files for the plugin
        final var bundle = project.getTasks().register("bundlePlugin", Zip.class, zip -> zip.with(bundleSpec));
        project.getTasks().named(BasePlugin.ASSEMBLE_TASK_NAME).configure(task -> task.dependsOn(bundle));
        // also make the zip available as a configuration (used when depending on this project)
        var configuration = project.getConfigurations().create("zip");
        configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE);
        project.getArtifacts().add("zip", bundle);
        // The exploded variant copies the same bundle spec into a directory instead of a zip.
        var explodedBundle = project.getTasks().register(EXPLODED_BUNDLE_PLUGIN_TASK_NAME, Sync.class, sync -> {
            sync.with(bundleSpec);
            sync.into(new File(project.getBuildDir(), "explodedBundle/" + extension.getName()));
        });
        // also make the exploded bundle available as a configuration (used when depending on this project)
        var explodedBundleZip = project.getConfigurations().create(EXPLODED_BUNDLE_CONFIG);
        explodedBundleZip.setCanBeResolved(false);
        explodedBundleZip.setCanBeConsumed(true);
        explodedBundleZip.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE);
        explodedBundleZip.getAttributes().attribute(EXPLODED_PLUGIN_BUNDLE_ATTRIBUTE, true);
        project.getArtifacts().add(EXPLODED_BUNDLE_CONFIG, explodedBundle);
        return bundle;
    }
    /**
     * Builds the CopySpec describing everything that goes into the plugin
     * bundle: descriptor, metadata, the (shadowed) jar, runtime dependencies,
     * and packaging/config/bin files.
     */
    private static CopySpec createBundleSpec(
        Project project,
        File pluginMetadata,
        TaskProvider<GeneratePluginPropertiesTask> buildProperties
    ) {
        var bundleSpec = project.copySpec();
        bundleSpec.from(buildProperties);
        bundleSpec.from(pluginMetadata);
        // Prefer the shadow jar when the shadow plugin is applied, otherwise the plain jar.
        bundleSpec.from(
            (Callable<TaskProvider<Task>>) () -> project.getPluginManager().hasPlugin("com.gradleup.shadow")
                ? project.getTasks().named("shadowJar")
                : project.getTasks().named("jar")
        );
        // Runtime dependencies minus compile-only ones (those are provided by the server).
        bundleSpec.from(
            project.getConfigurations()
                .getByName("runtimeClasspath")
                .minus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME))
        );
        // extra files for the plugin to go into the zip
        bundleSpec.from("src/main/packaging");// TODO: move all config/bin/_size/etc into packaging
        bundleSpec.from("src/main", copySpec -> {
            copySpec.include("config/**");
            copySpec.include("bin/**");
        });
        return bundleSpec;
    }
}
|
BasePluginBuildPlugin
|
java
|
elastic__elasticsearch
|
modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java
|
{
"start": 4060,
"end": 4444
}
|
class ____ extends FieldMapper {
public static final String CONTENT_TYPE = "scaled_float";
// use the same default as numbers
private static final Setting<Boolean> COERCE_SETTING = NumberFieldMapper.COERCE_SETTING;
private static ScaledFloatFieldMapper toType(FieldMapper in) {
return (ScaledFloatFieldMapper) in;
}
public static
|
ScaledFloatFieldMapper
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/internals/DefaultStreamPartitioner.java
|
{
"start": 1131,
"end": 2107
}
|
class ____<K, V> implements StreamPartitioner<K, V> {
private final Serializer<K> keySerializer;
public DefaultStreamPartitioner(final Serializer<K> keySerializer) {
this.keySerializer = keySerializer;
}
@Override
public Optional<Set<Integer>> partitions(final String topic, final K key, final V value, final int numPartitions) {
final byte[] keyBytes = keySerializer.serialize(topic, key);
// if the key bytes are not available, we just return empty optional to let the producer decide
// which partition to send internally; otherwise stick with the same built-in partitioner
// util functions that producer used to make sure its behavior is consistent with the producer
if (keyBytes == null) {
return Optional.empty();
} else {
return Optional.of(Collections.singleton(BuiltInPartitioner.partitionForKey(keyBytes, numPartitions)));
}
}
}
|
DefaultStreamPartitioner
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/multisource/MultiSourceMapper.java
|
{
"start": 705,
"end": 1156
}
|
/**
 * Mapping target bean holding a numeric value and a collection of element names.
 */
class Target {
    private int value;
    private Collection<String> elements;

    /** Returns the element collection, or {@code null} if never set. */
    public Collection<String> getElements() {
        return elements;
    }

    public void setElements(Collection<String> newElements) {
        this.elements = newElements;
    }

    /** Returns the numeric value (defaults to {@code 0}). */
    public int getValue() {
        return value;
    }

    public void setValue(int newValue) {
        this.value = newValue;
    }
}
}
|
Target
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/rpc/FencedRpcEndpointTest.java
|
{
"start": 1693,
"end": 5937
}
|
class ____ {
private static final Duration timeout = Duration.ofSeconds(10L);
private static RpcService rpcService;
@BeforeAll
static void setup() {
rpcService = new TestingRpcService();
}
@AfterAll
static void teardown() throws ExecutionException, InterruptedException, TimeoutException {
if (rpcService != null) {
RpcUtils.terminateRpcService(rpcService);
}
}
/** Tests that messages with the wrong fencing token are filtered out. */
@Test
void testFencing() throws Exception {
final UUID fencingToken = UUID.randomUUID();
final UUID wrongFencingToken = UUID.randomUUID();
final String value = "barfoo";
FencedTestingEndpoint fencedTestingEndpoint =
new FencedTestingEndpoint(rpcService, value, fencingToken);
try {
fencedTestingEndpoint.start();
final FencedTestingGateway properFencedGateway =
rpcService
.connect(
fencedTestingEndpoint.getAddress(),
fencingToken,
FencedTestingGateway.class)
.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
final FencedTestingGateway wronglyFencedGateway =
rpcService
.connect(
fencedTestingEndpoint.getAddress(),
wrongFencingToken,
FencedTestingGateway.class)
.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
assertThat(
properFencedGateway
.foobar(timeout)
.get(timeout.toMillis(), TimeUnit.MILLISECONDS))
.isEqualTo(value);
try {
wronglyFencedGateway.foobar(timeout).get(timeout.toMillis(), TimeUnit.MILLISECONDS);
fail("This should fail since we have the wrong fencing token.");
} catch (ExecutionException e) {
assertThat(ExceptionUtils.stripExecutionException(e))
.isInstanceOf(FencingTokenException.class);
}
} finally {
RpcUtils.terminateRpcEndpoint(fencedTestingEndpoint);
fencedTestingEndpoint.validateResourceClosed();
}
}
/**
* Tests that all calls from an unfenced remote gateway are ignored and that one cannot obtain
* the fencing token from such a gateway.
*/
@Test
void testUnfencedRemoteGateway() throws Exception {
final UUID initialFencingToken = UUID.randomUUID();
final String value = "foobar";
final FencedTestingEndpoint fencedTestingEndpoint =
new FencedTestingEndpoint(rpcService, value, initialFencingToken);
try {
fencedTestingEndpoint.start();
FencedTestingGateway unfencedGateway =
rpcService
.connect(fencedTestingEndpoint.getAddress(), FencedTestingGateway.class)
.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
try {
unfencedGateway.foobar(timeout).get(timeout.toMillis(), TimeUnit.MILLISECONDS);
fail("This should have failed because we have an unfenced gateway.");
} catch (ExecutionException e) {
assertThat(ExceptionUtils.stripExecutionException(e))
.isInstanceOf(RpcRuntimeException.class);
}
// we should not be able to call getFencingToken on an unfenced gateway
assertThatThrownBy(unfencedGateway::getFencingToken)
.withFailMessage(
"We should not be able to call getFencingToken on an unfenced gateway.")
.isInstanceOf(UnsupportedOperationException.class);
} finally {
RpcUtils.terminateRpcEndpoint(fencedTestingEndpoint);
fencedTestingEndpoint.validateResourceClosed();
}
}
public
|
FencedRpcEndpointTest
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/apigenerator/CreateSyncApi.java
|
{
"start": 1431,
"end": 3385
}
|
/**
 * API generator test that produces the synchronous Redis command interfaces
 * from the shared command templates.
 */
class ____ {
    // Flow-control methods that make no sense on the synchronous API and are filtered out.
    private static final Set<String> FILTER_METHODS = LettuceSets.unmodifiableSet("setAutoFlushCommands", "flushCommands");
    /**
     * Mutate type comment.
     *
     * @return function that replaces the ${intent} placeholder with the synchronous wording
     *         and appends a @generated marker
     */
    Function<String, String> commentMutator() {
        return s -> s.replaceAll("\\$\\{intent\\}", "Synchronous executed commands") + "* @generated by " + getClass().getName()
                + "\r\n ";
    }
    /**
     * Method filter
     *
     * @return predicate accepting only methods not listed in {@link #FILTER_METHODS}
     */
    Predicate<MethodDeclaration> methodFilter() {
        return method -> !CompilationUnitFactory.contains(FILTER_METHODS, method);
    }
    /**
     * Keep the declared method type unchanged (the sync API returns values directly).
     *
     * @return function yielding the method's declared type
     */
    Function<MethodDeclaration, Type> methodTypeMutator() {
        return MethodDeclaration::getType;
    }
    /**
     * Supply additional imports.
     *
     * @return supplier of extra import statements (none for the sync API)
     */
    Supplier<List<String>> importSupplier() {
        return Collections::emptyList;
    }
    @ParameterizedTest
    @MethodSource("arguments")
    @Tag(API_GENERATOR)
    void createInterface(String argument) throws Exception {
        createFactory(argument).createInterface();
    }
    static List<String> arguments() {
        return Arrays.asList(Constants.TEMPLATE_NAMES);
    }
    // Builds the factory that turns one template into a generated sync interface.
    private CompilationUnitFactory createFactory(String templateName) {
        String targetName = templateName;
        File templateFile = new File(Constants.TEMPLATES, "io/lettuce/core/api/" + templateName + ".java");
        String targetPackage;
        // Sentinel templates live in their own package.
        if (templateName.contains("RedisSentinel")) {
            targetPackage = "io.lettuce.core.sentinel.api.sync";
        } else {
            targetPackage = "io.lettuce.core.api.sync";
        }
        return new CompilationUnitFactory(templateFile, Constants.SOURCES, targetPackage, targetName, commentMutator(),
                methodTypeMutator(), methodFilter(), importSupplier(), null, Function.identity());
    }
}
|
CreateSyncApi
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/jackson2/AbstractMixinTests.java
|
{
"start": 935,
"end": 1201
}
|
/**
 * Base class for Jackson mix-in tests; provides an {@link ObjectMapper}
 * pre-configured with the Spring Security Jackson modules before each test.
 */
class AbstractMixinTests {
    protected ObjectMapper mapper;

    @BeforeEach
    public void setup() {
        ObjectMapper configured = new ObjectMapper();
        // Register the security modules discovered via this class's class loader.
        configured.registerModules(SecurityJackson2Modules.getModules(getClass().getClassLoader()));
        this.mapper = configured;
    }
}
|
AbstractMixinTests
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/tools/MutableInteger.java
|
{
"start": 203,
"end": 609
}
|
/**
 * Mutable holder for a primitive {@code int} counter.
 * <p>Not synchronized; confine each instance to a single thread.</p>
 */
class MutableInteger {
    private int value;

    /** Creates a counter starting at zero. */
    public MutableInteger() {
    }

    /**
     * Creates a counter starting at the given value.
     *
     * @param value the initial value
     */
    public MutableInteger(int value) {
        this.value = value;
    }

    /** Returns an independent copy holding the same value. */
    public MutableInteger deepCopy() {
        return new MutableInteger( value );
    }

    /** Returns the current value, then increments it (post-increment semantics). */
    public int getAndIncrease() {
        final int previous = value;
        value = previous + 1;
        return previous;
    }

    /** Returns the current value. */
    public int get() {
        return value;
    }

    /** Replaces the current value. */
    public void set(int value) {
        this.value = value;
    }

    /** Increments the current value by one. */
    public void increase() {
        value = value + 1;
    }
}
|
MutableInteger
|
java
|
apache__camel
|
components/camel-metrics/src/test/java/org/apache/camel/component/metrics/messagehistory/MetricsMessageHistoryTest.java
|
{
"start": 1381,
"end": 3439
}
|
/**
 * Verifies that the metrics-backed message-history factory records one metric
 * per route-node id and that the history service can dump them as JSON.
 */
class ____ extends CamelTestSupport {
    private final Logger log = LoggerFactory.getLogger(getClass());
    private MetricRegistry registry = new MetricRegistry();
    @Override
    protected CamelContext createCamelContext() throws Exception {
        CamelContext context = super.createCamelContext();
        // Install the metrics message-history factory, backed by our local
        // registry so the test can inspect the recorded metric names.
        MetricsMessageHistoryFactory factory = new MetricsMessageHistoryFactory();
        factory.setUseJmx(false);
        factory.setMetricsRegistry(registry);
        context.setMessageHistoryFactory(factory);
        return context;
    }
    @Test
    public void testMetricsHistory() throws Exception {
        getMockEndpoint("mock:foo").expectedMessageCount(5);
        getMockEndpoint("mock:bar").expectedMessageCount(5);
        getMockEndpoint("mock:baz").expectedMessageCount(5);
        // Alternate 10 messages between the two routes: evens -> foo, odds -> bar
        // (the bar route also forwards to baz).
        for (int i = 0; i < 10; i++) {
            if (i % 2 == 0) {
                template.sendBody("seda:foo", "Hello " + i);
            } else {
                template.sendBody("seda:bar", "Hello " + i);
            }
        }
        MockEndpoint.assertIsSatisfied(context);
        // there should be 3 names, one per node id: foo, bar and baz
        assertEquals(3, registry.getNames().size());
        // get the message history service
        MetricsMessageHistoryService service = context.hasService(MetricsMessageHistoryService.class);
        assertNotNull(service);
        String json = service.dumpStatisticsAsJson();
        assertNotNull(json);
        log.info(json);
        // each node id should appear as a ".history" metric in the JSON dump
        assertTrue(json.contains("foo.history"));
        assertTrue(json.contains("bar.history"));
        assertTrue(json.contains("baz.history"));
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                from("seda:foo")
                        .to("mock:foo").id("foo");
                from("seda:bar")
                        .to("mock:bar").id("bar")
                        .to("mock:baz").id("baz");
            }
        };
    }
}
|
MetricsMessageHistoryTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/aot/generate/PublicClass.java
|
{
"start": 704,
"end": 952
}
|
// Test fixture that deliberately mixes member visibility levels so generated
// code can be checked for correct access handling.
// NOTE(review): fields named "protectedField"/"protectedClassField" are
// actually package-private/public, and getProtectedMethod() is package-private
// — presumably intentional for the test; confirm against the callers.
class ____ {
    private String privateField;
    String protectedField;
    public PackagePrivateClass[] packagePrivateClasses;
    public PackagePrivateClass protectedClassField;
    String getProtectedMethod() {
        return this.protectedField;
    }
}
|
PublicClass
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FutureTransformAsyncTest.java
|
{
"start": 28007,
"end": 28350
}
|
class ____ {
private Executor executor;
ListenableFuture<String> test() {
AsyncCallable<String> asyncCallable = () -> Futures.immediateFuture("Done");
return Futures.submitAsync(asyncCallable, runnable -> {});
}
}
""")
.doTest();
}
}
|
Test
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-ec2/src/main/java/org/apache/camel/component/aws2/ec2/AWS2EC2Configuration.java
|
{
"start": 1166,
"end": 9050
}
|
/**
 * Endpoint configuration bean for the AWS2 EC2 component. Holds the client,
 * credential, proxy and operation settings declared via {@code @UriParam};
 * cloned per endpoint via {@link #copy()}.
 */
class ____ implements Cloneable {
    @UriPath(description = "Logical name")
    @Metadata(required = true)
    private String label;
    @UriParam(label = "producer")
    @Metadata(label = "advanced", autowired = true)
    private Ec2Client amazonEc2Client;
    @UriParam(label = "producer,security", secret = true)
    private String accessKey;
    @UriParam(label = "producer,security", secret = true)
    private String secretKey;
    @UriParam(label = "producer,security", secret = true)
    private String sessionToken;
    @UriParam(label = "producer")
    @Metadata(required = true)
    private AWS2EC2Operations operation;
    @UriParam(label = "producer,proxy", enums = "HTTP,HTTPS", defaultValue = "HTTPS")
    private Protocol proxyProtocol = Protocol.HTTPS;
    @UriParam(label = "producer,proxy")
    private String proxyHost;
    @UriParam(label = "producer,proxy")
    private Integer proxyPort;
    @UriParam(enums = "ap-south-2,ap-south-1,eu-south-1,eu-south-2,us-gov-east-1,me-central-1,il-central-1,ca-central-1,eu-central-1,us-iso-west-1,eu-central-2,eu-isoe-west-1,us-west-1,us-west-2,af-south-1,eu-north-1,eu-west-3,eu-west-2,eu-west-1,ap-northeast-3,ap-northeast-2,ap-northeast-1,me-south-1,sa-east-1,ap-east-1,cn-north-1,ca-west-1,us-gov-west-1,ap-southeast-1,ap-southeast-2,us-iso-east-1,ap-southeast-3,ap-southeast-4,us-east-1,us-east-2,cn-northwest-1,us-isob-east-1,aws-global,aws-cn-global,aws-us-gov-global,aws-iso-global,aws-iso-b-global")
    private String region;
    @UriParam
    private boolean pojoRequest;
    @UriParam(label = "security")
    private boolean trustAllCertificates;
    @UriParam
    private boolean overrideEndpoint;
    @UriParam
    private String uriEndpointOverride;
    @UriParam(label = "security")
    private boolean useDefaultCredentialsProvider;
    @UriParam(label = "security")
    private boolean useProfileCredentialsProvider;
    @UriParam(label = "security")
    private boolean useSessionCredentials;
    @UriParam(label = "security")
    private String profileCredentialsName;
    public Ec2Client getAmazonEc2Client() {
        return amazonEc2Client;
    }
    /**
     * To use an existing configured AmazonEC2Client client
     */
    public void setAmazonEc2Client(Ec2Client amazonEc2Client) {
        this.amazonEc2Client = amazonEc2Client;
    }
    public String getAccessKey() {
        return accessKey;
    }
    /**
     * Amazon AWS Access Key
     */
    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }
    public String getSecretKey() {
        return secretKey;
    }
    /**
     * Amazon AWS Secret Key
     */
    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }
    public String getSessionToken() {
        return sessionToken;
    }
    /**
     * Amazon AWS Session Token used when the user needs to assume an IAM role
     */
    public void setSessionToken(String sessionToken) {
        this.sessionToken = sessionToken;
    }
    public AWS2EC2Operations getOperation() {
        return operation;
    }
    /**
     * The operation to perform. It can be createAndRunInstances, startInstances, stopInstances, terminateInstances,
     * describeInstances, describeInstancesStatus, rebootInstances, monitorInstances, unmonitorInstances, createTags or
     * deleteTags
     */
    public void setOperation(AWS2EC2Operations operation) {
        this.operation = operation;
    }
    public Protocol getProxyProtocol() {
        return proxyProtocol;
    }
    /**
     * To define a proxy protocol when instantiating the EC2 client
     */
    public void setProxyProtocol(Protocol proxyProtocol) {
        this.proxyProtocol = proxyProtocol;
    }
    public String getProxyHost() {
        return proxyHost;
    }
    /**
     * To define a proxy host when instantiating the EC2 client
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
    }
    public Integer getProxyPort() {
        return proxyPort;
    }
    /**
     * To define a proxy port when instantiating the EC2 client
     */
    public void setProxyPort(Integer proxyPort) {
        this.proxyPort = proxyPort;
    }
    public String getRegion() {
        return region;
    }
    /**
     * The region in which EC2 client needs to work. When using this parameter, the configuration will expect the
     * lowercase name of the region (for example, ap-east-1) You'll need to use the name Region.EU_WEST_1.id()
     */
    public void setRegion(String region) {
        this.region = region;
    }
    public boolean isPojoRequest() {
        return pojoRequest;
    }
    /**
     * If we want to use a POJO request as body or not
     */
    public void setPojoRequest(boolean pojoRequest) {
        this.pojoRequest = pojoRequest;
    }
    public boolean isTrustAllCertificates() {
        return trustAllCertificates;
    }
    /**
     * If we want to trust all certificates in case of overriding the endpoint
     */
    public void setTrustAllCertificates(boolean trustAllCertificates) {
        this.trustAllCertificates = trustAllCertificates;
    }
    public boolean isOverrideEndpoint() {
        return overrideEndpoint;
    }
    /**
     * Set the need for overriding the endpoint. This option needs to be used in combination with the
     * uriEndpointOverride option
     */
    public void setOverrideEndpoint(boolean overrideEndpoint) {
        this.overrideEndpoint = overrideEndpoint;
    }
    public String getUriEndpointOverride() {
        return uriEndpointOverride;
    }
    /**
     * Set the overriding uri endpoint. This option needs to be used in combination with overrideEndpoint option
     */
    public void setUriEndpointOverride(String uriEndpointOverride) {
        this.uriEndpointOverride = uriEndpointOverride;
    }
    /**
     * Set whether the EC2 client should expect to load credentials through a default credentials provider or to expect
     * static credentials to be passed in.
     */
    // NOTE(review): this accessor pair uses boxed Boolean while the backing field and
    // all sibling flags use primitive boolean; passing null here would NPE on unboxing.
    // Presumably kept for generated-configurer compatibility — confirm before changing.
    public void setUseDefaultCredentialsProvider(Boolean useDefaultCredentialsProvider) {
        this.useDefaultCredentialsProvider = useDefaultCredentialsProvider;
    }
    public Boolean isUseDefaultCredentialsProvider() {
        return useDefaultCredentialsProvider;
    }
    public boolean isUseProfileCredentialsProvider() {
        return useProfileCredentialsProvider;
    }
    /**
     * Set whether the EC2 client should expect to load credentials through a profile credentials provider.
     */
    public void setUseProfileCredentialsProvider(boolean useProfileCredentialsProvider) {
        this.useProfileCredentialsProvider = useProfileCredentialsProvider;
    }
    public boolean isUseSessionCredentials() {
        return useSessionCredentials;
    }
    /**
     * Set whether the EC2 client should expect to use Session Credentials. This is useful in a situation in which the
     * user needs to assume an IAM role for doing operations in EC2.
     */
    public void setUseSessionCredentials(boolean useSessionCredentials) {
        this.useSessionCredentials = useSessionCredentials;
    }
    public String getProfileCredentialsName() {
        return profileCredentialsName;
    }
    /**
     * If using a profile credentials provider, this parameter will set the profile name
     */
    public void setProfileCredentialsName(String profileCredentialsName) {
        this.profileCredentialsName = profileCredentialsName;
    }
    // *************************************************
    //
    // *************************************************
    /**
     * Returns a shallow clone of this configuration, so each endpoint can
     * mutate its own copy safely.
     */
    public AWS2EC2Configuration copy() {
        try {
            return (AWS2EC2Configuration) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
}
|
AWS2EC2Configuration
|
java
|
alibaba__nacos
|
api/src/test/java/com/alibaba/nacos/api/config/remote/request/ConfigFuzzyWatchSyncRequestTest.java
|
{
"start": 1071,
"end": 4398
}
|
/**
 * JSON round-trip and builder tests for ConfigFuzzyWatchSyncRequest.
 */
class ____ extends BasedConfigRequestTest {
    private static final String GROUP_KEY_PATTERN = "test.*";
    private static final String SYNC_TYPE = Constants.FUZZY_WATCH_INIT_NOTIFY;
    private static final String GROUP_KEY = "test-group-key";
    private static final String CHANGED_TYPE = "ADD";
    @Override
    @Test
    public void testSerialize() throws JsonProcessingException {
        // Build a request with a single context entry and the shared headers.
        Set<Context> contexts = new HashSet<>();
        Context context = Context.build(GROUP_KEY, CHANGED_TYPE);
        contexts.add(context);
        ConfigFuzzyWatchSyncRequest configFuzzyWatchSyncRequest = ConfigFuzzyWatchSyncRequest.buildSyncRequest(
                SYNC_TYPE, contexts, GROUP_KEY_PATTERN, 2, 1);
        configFuzzyWatchSyncRequest.putAllHeader(HEADERS);
        final String requestId = injectRequestUuId(configFuzzyWatchSyncRequest);
        String json = mapper.writeValueAsString(configFuzzyWatchSyncRequest);
        // Every field set above must survive serialization.
        assertTrue(json.contains("\"module\":\"" + Constants.Config.CONFIG_MODULE));
        assertTrue(json.contains("\"groupKeyPattern\":\"" + GROUP_KEY_PATTERN));
        assertTrue(json.contains("\"syncType\":\"" + SYNC_TYPE));
        assertTrue(json.contains("\"totalBatch\":" + 2));
        assertTrue(json.contains("\"currentBatch\":" + 1));
        assertTrue(json.contains("\"contexts\":["));
        assertTrue(json.contains("\"groupKey\":\"" + GROUP_KEY));
        assertTrue(json.contains("\"changedType\":\"" + CHANGED_TYPE));
        assertTrue(json.contains("\"requestId\":\"" + requestId));
    }
    @Override
    @Test
    public void testDeserialize() throws JsonProcessingException {
        // Hand-written JSON mirroring what testSerialize produces.
        String json = "{\"headers\":{\"header1\":\"test_header1\"},\"groupKeyPattern\":\"test.*\","
                + "\"syncType\":\"" + Constants.FUZZY_WATCH_INIT_NOTIFY + "\",\"totalBatch\":2,\"currentBatch\":1,"
                + "\"contexts\":[{\"groupKey\":\"test-group-key\",\"changedType\":\"ADD\"}],\"module\":\"config\"}";
        ConfigFuzzyWatchSyncRequest actual = mapper.readValue(json, ConfigFuzzyWatchSyncRequest.class);
        assertEquals(GROUP_KEY_PATTERN, actual.getGroupKeyPattern());
        assertEquals(SYNC_TYPE, actual.getSyncType());
        assertEquals(2, actual.getTotalBatch());
        assertEquals(1, actual.getCurrentBatch());
        assertEquals(Constants.Config.CONFIG_MODULE, actual.getModule());
        assertEquals(HEADER_VALUE, actual.getHeader(HEADER_KEY));
        assertEquals(1, actual.getContexts().size());
        Context context = actual.getContexts().iterator().next();
        assertEquals(GROUP_KEY, context.getGroupKey());
        assertEquals(CHANGED_TYPE, context.getChangedType());
    }
    @Test
    void testBuildInitFinishRequest() {
        // The init-finish builder should mark the request with the finish sync type.
        ConfigFuzzyWatchSyncRequest request = ConfigFuzzyWatchSyncRequest.buildInitFinishRequest(GROUP_KEY_PATTERN);
        assertEquals(GROUP_KEY_PATTERN, request.getGroupKeyPattern());
        assertEquals(Constants.FINISH_FUZZY_WATCH_INIT_NOTIFY, request.getSyncType());
    }
    @Test
    void testContextBuild() {
        Context context = Context.build(GROUP_KEY, CHANGED_TYPE);
        assertEquals(GROUP_KEY, context.getGroupKey());
        assertEquals(CHANGED_TYPE, context.getChangedType());
    }
}
|
ConfigFuzzyWatchSyncRequestTest
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/util/SchemaResolver.java
|
{
"start": 3785,
"end": 5062
}
|
class ____ implements SchemaVisitor<Boolean> {
boolean hasUnresolvedParts;
IsResolvedSchemaVisitor() {
hasUnresolvedParts = false;
}
@Override
public SchemaVisitorAction visitTerminal(Schema terminal) {
hasUnresolvedParts = isUnresolvedSchema(terminal);
return hasUnresolvedParts ? SchemaVisitorAction.TERMINATE : SchemaVisitorAction.CONTINUE;
}
@Override
public SchemaVisitorAction visitNonTerminal(Schema nonTerminal) {
hasUnresolvedParts = isUnresolvedSchema(nonTerminal);
if (hasUnresolvedParts) {
return SchemaVisitorAction.TERMINATE;
}
if (nonTerminal.getType() == Schema.Type.RECORD && !nonTerminal.hasFields()) {
// We're still initializing the type...
return SchemaVisitorAction.SKIP_SUBTREE;
}
return SchemaVisitorAction.CONTINUE;
}
@Override
public SchemaVisitorAction afterVisitNonTerminal(Schema nonTerminal) {
return SchemaVisitorAction.CONTINUE;
}
@Override
public Boolean get() {
return !hasUnresolvedParts;
}
}
/**
* This visitor creates clone of the visited Schemata, minus the specified
* schema properties, and resolves all unresolved schemas.
*/
public static final
|
IsResolvedSchemaVisitor
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-ses/src/main/java/org/apache/camel/component/aws2/ses/Ses2Constants.java
|
{
"start": 948,
"end": 2740
}
|
/**
 * Message-header constants used by the AWS2 SES component, with their
 * descriptions exposed via {@code @Metadata} for documentation generation.
 */
interface ____ {
    @Metadata(description = "The sender's email address.", javaType = "String")
    String FROM = "CamelAwsSesFrom";
    @Metadata(description = "The Amazon SES message ID.", javaType = "String")
    String MESSAGE_ID = "CamelAwsSesMessageId";
    @Metadata(description = "The reply-to email address(es) for the message. Use comma to separate multiple values.",
              javaType = "String")
    String REPLY_TO_ADDRESSES = "CamelAwsSesReplyToAddresses";
    @Metadata(description = "The email address to which bounce notifications are to be forwarded.", javaType = "String")
    String RETURN_PATH = "CamelAwsSesReturnPath";
    @Metadata(description = "The subject of the message.", javaType = "String")
    String SUBJECT = "CamelAwsSesSubject";
    @Metadata(description = "List of comma separated destination email address.", javaType = "String")
    String TO = "CamelAwsSesTo";
    @Metadata(description = "List of comma separated destination carbon copy (cc) email address.", javaType = "String")
    String CC = "CamelAwsSesCc";
    @Metadata(description = "List of comma separated destination blind carbon copy (bcc) email address.", javaType = "String")
    String BCC = "CamelAwsSesBcc";
    @Metadata(description = "A collection of name/value pairs to apply when sending the email. Tags are user-defined properties that are included in published email sending events.",
              javaType = "Map<String, String>")
    String TAGS = "CamelAwsSesTags";
    @Metadata(description = "The flag to show if email content is HTML.", javaType = "Boolean")
    String HTML_EMAIL = "CamelAwsSesHtmlEmail";
    // Fixed typo in the generated-doc description ("TThe" -> "The").
    @Metadata(description = "The configuration set to send.", javaType = "String")
    String CONFIGURATION_SET = "CamelAwsSesConfigurationSet";
}
|
Ses2Constants
|
java
|
apache__camel
|
components/camel-fop/src/test/java/org/apache/camel/component/fop/FopComponentTest.java
|
{
"start": 1514,
"end": 3160
}
|
/**
 * End-to-end test for the FOP component: XML data is transformed through an
 * XSLT template, rendered to PDF via fop:pdf, written to disk, and the
 * resulting PDF text plus a custom header are asserted.
 */
class ____ extends CamelTestSupport {
    @EndpointInject("mock:result")
    protected MockEndpoint resultEndpoint;
    @Produce("direct:start")
    protected ProducerTemplate template;
    @Override
    public void doPreSetup() {
        // Start each run with a clean output directory.
        deleteDirectory("target/data");
    }
    @Test
    public void createPdfUsingXmlDataAndXsltTransformation() throws Exception {
        resultEndpoint.expectedMessageCount(1);
        FileInputStream inputStream = new FileInputStream("src/test/data/xml/data.xml");
        template.sendBody(inputStream);
        resultEndpoint.assertIsSatisfied();
        // Load the generated PDF and check its extracted text contains content
        // from both the XSL template and the XML data.
        PDDocument document = Loader.loadPDF(new File("target/data/result.pdf"));
        String pdfText = FopHelper.extractTextFrom(document);
        assertTrue(pdfText.contains("Project")); //from xsl template
        assertTrue(pdfText.contains("John Doe")); //from data xml
        // assert on the header "foo" being populated
        Exchange exchange = resultEndpoint.getReceivedExchanges().get(0);
        assertEquals("bar", exchange.getIn().getHeader("foo"), "Header value is lost!");
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // xslt -> header -> fop:pdf -> file -> mock, in one pipeline
                from("direct:start")
                        .to("xslt:xslt/template.xsl")
                        .setHeader("foo", constant("bar"))
                        .to("fop:pdf")
                        .setHeader(Exchange.FILE_NAME, constant("result.pdf"))
                        .to("file:target/data")
                        .to("mock:result");
            }
        };
    }
}
|
FopComponentTest
|
java
|
grpc__grpc-java
|
binder/src/androidTest/java/io/grpc/binder/HostServices.java
|
{
"start": 1671,
"end": 1926
}
|
class ____ {
private static final Logger logger = Logger.getLogger(HostServices.class.getName());
private static final Class<?>[] hostServiceClasses =
new Class<?>[] {
HostService1.class, HostService2.class,
};
public
|
HostServices
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/SymlinkBaseTest.java
|
{
"start": 12409,
"end": 55882
}
|
class ____ IOException, since File.exists
// returns false for a link to link.
} catch (IOException x) {
assertEquals("Possible cyclic loop while following symbolic link "+
link1.toString(), x.getMessage());
}
}
  /* Assert that the given link to a file behaves as expected: it must
   * stat as a regular file, lstat as a symlink, expose the qualified
   * target via getFileLinkStatus, and be readable through every path
   * form (relative, absolute, qualified). */
  private void checkLink(Path linkAbs, Path expectedTarget, Path targetQual)
      throws IOException {
    Path dir = new Path(testBaseDir1());
    // isFile/Directory
    assertTrue(wrapper.isFile(linkAbs));
    assertFalse(wrapper.isDir(linkAbs));
    // Check getFileStatus (follows the link, so reports the target file)
    assertFalse(wrapper.getFileStatus(linkAbs).isSymlink());
    assertFalse(wrapper.getFileStatus(linkAbs).isDirectory());
    assertEquals(fileSize, wrapper.getFileStatus(linkAbs).getLen());
    // Check getFileLinkStatus (does not follow, so reports the link)
    assertTrue(wrapper.isSymlink(linkAbs));
    assertFalse(wrapper.getFileLinkStatus(linkAbs).isDirectory());
    // Check getSymlink always returns a qualified target, except
    // when partially qualified paths are used (see tests below).
    assertEquals(targetQual.toString(),
        wrapper.getFileLinkStatus(linkAbs).getSymlink().toString());
    assertEquals(targetQual, wrapper.getFileLinkStatus(linkAbs).getSymlink());
    // Check that the target is qualified using the file system of the
    // path used to access the link (if the link target was not specified
    // fully qualified, in that case we use the link target verbatim).
    if (!"file".equals(getScheme())) {
      FileContext localFc = FileContext.getLocalFSFileContext();
      Path linkQual = new Path(testURI().toString(), linkAbs);
      assertEquals(targetQual,
                   localFc.getFileLinkStatus(linkQual).getSymlink());
    }
    // Check getLinkTarget
    assertEquals(expectedTarget, wrapper.getLinkTarget(linkAbs));
    // Now read using all path types..
    wrapper.setWorkingDirectory(dir);
    readFile(new Path("linkToFile"));
    readFile(linkAbs);
    // And fully qualified.. (NB: for local fs this is partially qualified)
    readFile(new Path(testURI().toString(), linkAbs));
    // And partially qualified..
    boolean failureExpected = true;
    // local files are special cased, no authority
    if ("file".equals(getScheme())) {
      failureExpected = false;
    }
    // FileSystem automatically adds missing authority if scheme matches default
    else if (wrapper instanceof FileSystemTestWrapper) {
      failureExpected = false;
    }
    try {
      readFile(new Path(getScheme()+":///"+testBaseDir1()+"/linkToFile"));
      assertFalse(failureExpected);
    } catch (Exception e) {
      if (!failureExpected) {
        throw new IOException(e);
      }
      //assertTrue(failureExpected);
    }
    // Now read using a different file context (for HDFS at least)
    if (wrapper instanceof FileContextTestWrapper
        && !"file".equals(getScheme())) {
      FSTestWrapper localWrapper = wrapper.getLocalFSWrapper();
      localWrapper.readFile(new Path(testURI().toString(), linkAbs), fileSize);
    }
  }
@Test
@Timeout(value = 10)
/** Test creating a symlink using relative paths */
public void testCreateLinkUsingRelPaths() throws IOException {
Path fileAbs = new Path(testBaseDir1(), "file");
Path linkAbs = new Path(testBaseDir1(), "linkToFile");
Path schemeAuth = new Path(testURI().toString());
Path fileQual = new Path(schemeAuth, testBaseDir1()+"/file");
createAndWriteFile(fileAbs);
wrapper.setWorkingDirectory(new Path(testBaseDir1()));
wrapper.createSymlink(new Path("file"), new Path("linkToFile"), false);
checkLink(linkAbs, new Path("file"), fileQual);
// Now rename the link's parent. Because the target was specified
// with a relative path the link should still resolve.
Path dir1 = new Path(testBaseDir1());
Path dir2 = new Path(testBaseDir2());
Path linkViaDir2 = new Path(testBaseDir2(), "linkToFile");
Path fileViaDir2 = new Path(schemeAuth, testBaseDir2()+"/file");
wrapper.rename(dir1, dir2, Rename.OVERWRITE);
FileStatus[] stats = wrapper.listStatus(dir2);
assertEquals(fileViaDir2,
wrapper.getFileLinkStatus(linkViaDir2).getSymlink());
readFile(linkViaDir2);
}
  @Test
  @Timeout(value = 10)
  /** Test creating a symlink using absolute paths. An absolute target
   *  does NOT move with the link's parent, so the link dangles after
   *  the parent directory is renamed. */
  public void testCreateLinkUsingAbsPaths() throws IOException {
    Path fileAbs = new Path(testBaseDir1()+"/file");
    Path linkAbs = new Path(testBaseDir1()+"/linkToFile");
    Path schemeAuth = new Path(testURI().toString());
    Path fileQual = new Path(schemeAuth, testBaseDir1()+"/file");
    createAndWriteFile(fileAbs);
    wrapper.createSymlink(fileAbs, linkAbs, false);
    checkLink(linkAbs, fileAbs, fileQual);
    // Now rename the link's parent. The target doesn't change and
    // now no longer exists so accessing the link should fail.
    Path dir1 = new Path(testBaseDir1());
    Path dir2 = new Path(testBaseDir2());
    Path linkViaDir2 = new Path(testBaseDir2(), "linkToFile");
    wrapper.rename(dir1, dir2, Rename.OVERWRITE);
    assertEquals(fileQual, wrapper.getFileLinkStatus(linkViaDir2).getSymlink());
    try {
      readFile(linkViaDir2);
      fail("The target should not exist");
    } catch (FileNotFoundException x) {
      // Expected
    }
  }
  @Test
  @Timeout(value = 10)
  /**
   * Test creating a symlink using fully and partially qualified paths.
   * NB: For local fs this actually tests partially qualified paths,
   * as they don't support fully qualified paths.
   * Like the absolute-path case, the link dangles after its parent is renamed.
   */
  public void testCreateLinkUsingFullyQualPaths() throws IOException {
    Path fileAbs = new Path(testBaseDir1(), "file");
    Path linkAbs = new Path(testBaseDir1(), "linkToFile");
    Path fileQual = new Path(testURI().toString(), fileAbs);
    Path linkQual = new Path(testURI().toString(), linkAbs);
    createAndWriteFile(fileAbs);
    wrapper.createSymlink(fileQual, linkQual, false);
    checkLink(linkAbs,
              "file".equals(getScheme()) ? fileAbs : fileQual,
              fileQual);
    // Now rename the link's parent. The target doesn't change and
    // now no longer exists so accessing the link should fail.
    Path dir1 = new Path(testBaseDir1());
    Path dir2 = new Path(testBaseDir2());
    Path linkViaDir2 = new Path(testBaseDir2(), "linkToFile");
    wrapper.rename(dir1, dir2, Rename.OVERWRITE);
    assertEquals(fileQual, wrapper.getFileLinkStatus(linkViaDir2).getSymlink());
    try {
      readFile(linkViaDir2);
      fail("The target should not exist");
    } catch (FileNotFoundException x) {
      // Expected
    }
  }
  @Test
  @Timeout(value = 10)
  /**
   * Test creating a symlink using partially qualified paths, ie a scheme
   * but no authority and vice versa. We just test link targets here since
   * creating using a partially qualified path is file system specific.
   * The partially qualified target is stored verbatim (no authority added).
   */
  public void testCreateLinkUsingPartQualPath1() throws IOException {
    // Partially qualified paths are covered for local file systems
    // in the previous test.
    assumeTrue(!"file".equals(getScheme()));
    Path schemeAuth = new Path(testURI().toString());
    Path fileWoHost = new Path(getScheme()+"://"+testBaseDir1()+"/file");
    Path link = new Path(testBaseDir1()+"/linkToFile");
    Path linkQual = new Path(schemeAuth, testBaseDir1()+"/linkToFile");
    FSTestWrapper localWrapper = wrapper.getLocalFSWrapper();
    wrapper.createSymlink(fileWoHost, link, false);
    // Partially qualified path is stored
    assertEquals(fileWoHost, wrapper.getLinkTarget(linkQual));
    // NB: We do not add an authority
    assertEquals(fileWoHost.toString(),
        wrapper.getFileLinkStatus(link).getSymlink().toString());
    assertEquals(fileWoHost.toString(),
        wrapper.getFileLinkStatus(linkQual).getSymlink().toString());
    // Ditto even from another file system
    if (wrapper instanceof FileContextTestWrapper) {
      assertEquals(fileWoHost.toString(),
          localWrapper.getFileLinkStatus(linkQual).getSymlink().toString());
    }
    // Same as if we accessed a partially qualified path directly
    // (the two wrapper types surface the failure differently)
    try {
      readFile(link);
      fail("DFS requires URIs with schemes have an authority");
    } catch (java.lang.RuntimeException e) {
      assertTrue(wrapper instanceof FileContextTestWrapper);
      // Expected
    } catch (FileNotFoundException e) {
      assertTrue(wrapper instanceof FileSystemTestWrapper);
      GenericTestUtils.assertExceptionContains(
          "File does not exist: /test1/file", e);
    }
  }
  @Test
  @Timeout(value = 10)
  /** Same as above but vice versa (authority but no scheme) */
  public void testCreateLinkUsingPartQualPath2() throws IOException {
    Path link = new Path(testBaseDir1(), "linkToFile");
    Path fileWoScheme = new Path("//"+testURI().getAuthority()+
                                 testBaseDir1()+"/file");
    if ("file".equals(getScheme())) {
      return;
    }
    wrapper.createSymlink(fileWoScheme, link, false);
    // The scheme-less target is stored verbatim
    assertEquals(fileWoScheme, wrapper.getLinkTarget(link));
    assertEquals(fileWoScheme.toString(),
        wrapper.getFileLinkStatus(link).getSymlink().toString());
    try {
      readFile(link);
      fail("Accessed a file with w/o scheme");
    } catch (IOException e) {
      // Expected; error message depends on the wrapper type
      if (wrapper instanceof FileContextTestWrapper) {
        GenericTestUtils.assertExceptionContains(
            AbstractFileSystem.NO_ABSTRACT_FS_ERROR, e);
      } else if (wrapper instanceof FileSystemTestWrapper) {
        assertEquals("No FileSystem for scheme " + "\"" + "null" + "\"",
            e.getMessage());
      }
    }
  }
  @Test
  @Timeout(value = 10)
  /** Lstat and readlink on a normal file and directory: lstat equals stat,
   *  and getLinkTarget rejects non-links with an IOException. */
  public void testLinkStatusAndTargetWithNonLink() throws IOException {
    Path schemeAuth = new Path(testURI().toString());
    Path dir = new Path(testBaseDir1());
    Path dirQual = new Path(schemeAuth, dir.toString());
    Path file = new Path(testBaseDir1(), "file");
    Path fileQual = new Path(schemeAuth, file.toString());
    createAndWriteFile(file);
    assertEquals(wrapper.getFileStatus(file), wrapper.getFileLinkStatus(file));
    assertEquals(wrapper.getFileStatus(dir), wrapper.getFileLinkStatus(dir));
    try {
      wrapper.getLinkTarget(file);
      fail("Get link target on non-link should throw an IOException");
    } catch (IOException x) {
      assertEquals("Path "+fileQual+" is not a symbolic link", x.getMessage());
    }
    try {
      wrapper.getLinkTarget(dir);
      fail("Get link target on non-link should throw an IOException");
    } catch (IOException x) {
      assertEquals("Path "+dirQual+" is not a symbolic link", x.getMessage());
    }
  }
  @Test
  @Timeout(value = 10)
  /** Test create symlink to a directory: stat follows to the directory,
   *  lstat still reports a symlink. */
  public void testCreateLinkToDirectory() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    createAndWriteFile(file);
    wrapper.createSymlink(dir1, linkToDir, false);
    assertFalse(wrapper.isFile(linkToDir));
    assertTrue(wrapper.isDir(linkToDir));
    assertTrue(wrapper.getFileStatus(linkToDir).isDirectory());
    assertTrue(wrapper.getFileLinkStatus(linkToDir).isSymlink());
  }
  @Test
  @Timeout(value = 10)
  /** Test create and remove a file through a symlink to its parent dir. */
  public void testCreateFileViaSymlink() throws IOException {
    Path dir = new Path(testBaseDir1());
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path fileViaLink = new Path(linkToDir, "file");
    wrapper.createSymlink(dir, linkToDir, false);
    createAndWriteFile(fileViaLink);
    // The file reached through the link is an ordinary file, not a link
    assertTrue(wrapper.isFile(fileViaLink));
    assertFalse(wrapper.isDir(fileViaLink));
    assertFalse(wrapper.getFileLinkStatus(fileViaLink).isSymlink());
    assertFalse(wrapper.getFileStatus(fileViaLink).isDirectory());
    readFile(fileViaLink);
    wrapper.delete(fileViaLink, true);
    assertFalse(wrapper.exists(fileViaLink));
  }
  @Test
  @Timeout(value = 10)
  /** Test make and delete directory through a symlink; the delete via the
   *  link must remove the real sub-directory too. */
  public void testCreateDirViaSymlink() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path subDir = new Path(testBaseDir1(), "subDir");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path subDirViaLink = new Path(linkToDir, "subDir");
    wrapper.createSymlink(dir1, linkToDir, false);
    wrapper.mkdir(subDirViaLink, FileContext.DEFAULT_PERM, true);
    assertTrue(wrapper.isDir(subDirViaLink));
    wrapper.delete(subDirViaLink, false);
    assertFalse(wrapper.exists(subDirViaLink));
    assertFalse(wrapper.exists(subDir));
  }
  @Test
  @Timeout(value = 10)
  /** Create symlink through a symlink */
  public void testCreateLinkViaLink() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path fileViaLink = new Path(linkToDir, "file");
    Path linkToFile = new Path(linkToDir, "linkToFile");
    /*
     * /b2/linkToDir -> /b1
     * /b2/linkToDir/linkToFile -> /b2/linkToDir/file
     */
    createAndWriteFile(file);
    wrapper.createSymlink(dir1, linkToDir, false);
    wrapper.createSymlink(fileViaLink, linkToFile, false);
    // Reading through both levels of indirection reaches the real file
    assertTrue(wrapper.isFile(linkToFile));
    assertTrue(wrapper.getFileLinkStatus(linkToFile).isSymlink());
    readFile(linkToFile);
    assertEquals(fileSize, wrapper.getFileStatus(linkToFile).getLen());
    assertEquals(fileViaLink, wrapper.getLinkTarget(linkToFile));
  }
  @Test
  @Timeout(value = 10)
  /** Test listStatus/listStatusIterator through a symlink to a directory. */
  public void testListStatusUsingLink() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "link");
    createAndWriteFile(file);
    wrapper.createSymlink(new Path(testBaseDir1()), link, false);
    // The size of the result is file system dependent, Hdfs is 2 (file
    // and link) and LocalFs is 3 (file, link, file crc).
    FileStatus[] stats = wrapper.listStatus(link);
    assertTrue(stats.length == 2 || stats.length == 3);
    // The iterator variant must agree with the array variant
    RemoteIterator<FileStatus> statsItor = wrapper.listStatusIterator(link);
    int dirLen = 0;
    while(statsItor.hasNext()) {
      statsItor.next();
      dirLen++;
    }
    assertTrue(dirLen == 2 || dirLen == 3);
  }
  @Test
  @Timeout(value = 10)
  /** Test create symlink using the same path: second create (no overwrite)
   *  must fail. */
  public void testCreateLinkTwice() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link, false);
    try {
      wrapper.createSymlink(file, link, false);
      fail("link already exists");
    } catch (IOException x) {
      // Expected
    }
  }
  @Test
  @Timeout(value = 10)
  /** Test access via a symlink to a symlink */
  public void testCreateLinkToLink() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path linkToLink = new Path(testBaseDir2(), "linkToLink");
    Path fileViaLink = new Path(testBaseDir2(), "linkToLink/file");
    createAndWriteFile(file);
    wrapper.createSymlink(dir1, linkToDir, false);
    wrapper.createSymlink(linkToDir, linkToLink, false);
    // The file reached through link->link resolves like a plain file
    assertTrue(wrapper.isFile(fileViaLink));
    assertFalse(wrapper.isDir(fileViaLink));
    assertFalse(wrapper.getFileLinkStatus(fileViaLink).isSymlink());
    assertFalse(wrapper.getFileStatus(fileViaLink).isDirectory());
    readFile(fileViaLink);
  }
  @Test
  @Timeout(value = 10)
  /** Can not create a file or directory with a path that refers to an
   *  existing symlink. */
  public void testCreateFileDirExistingLink() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link, false);
    try {
      createAndWriteFile(link);
      fail("link already exists");
    } catch (IOException x) {
      // Expected
    }
    try {
      wrapper.mkdir(link, FsPermission.getDefault(), false);
      fail("link already exists");
    } catch (IOException x) {
      // Expected
    }
  }
  @Test
  @Timeout(value = 10)
  /** Test deleting and recreating a symlink.
   *  (NOTE: method name keeps historical typo "Afer" for compatibility.) */
  public void testUseLinkAferDeleteLink() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link, false);
    wrapper.delete(link, false);
    try {
      readFile(link);
      fail("link was deleted");
    } catch (IOException x) {
      // Expected
    }
    // Deleting the link must not have touched the target
    readFile(file);
    wrapper.createSymlink(file, link, false);
    readFile(link);
  }
  @Test
  @Timeout(value = 10)
  /** Test create symlink to . */
  public void testCreateLinkToDot() throws IOException {
    Path dir = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "linkToDot");
    createAndWriteFile(file);
    wrapper.setWorkingDirectory(dir);
    try {
      wrapper.createSymlink(new Path("."), link, false);
      fail("Created symlink to dot");
    } catch (IOException x) {
      // Expected. Path(".") resolves to "" because URI normalizes
      // the dot away and AbstractFileSystem considers "" invalid.
    }
  }
  @Test
  @Timeout(value = 10)
  /** Test create symlink to .. */
  public void testCreateLinkToDotDot() throws IOException {
    Path file = new Path(testBaseDir1(), "test/file");
    Path dotDot = new Path(testBaseDir1(), "test/..");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path fileViaLink = new Path(linkToDir, "test/file");
    // Symlink to .. is not a problem since the .. is squashed early
    assertEquals(new Path(testBaseDir1()), dotDot);
    createAndWriteFile(file);
    wrapper.createSymlink(dotDot, linkToDir, false);
    readFile(fileViaLink);
    assertEquals(fileSize, wrapper.getFileStatus(fileViaLink).getLen());
  }
  @Test
  @Timeout(value = 10)
  /** Test create symlink to ../file (a relative target with a .. prefix
   *  is stored verbatim). */
  public void testCreateLinkToDotDotPrefix() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path dir = new Path(testBaseDir1(), "test");
    Path link = new Path(testBaseDir1(), "test/link");
    createAndWriteFile(file);
    wrapper.mkdir(dir, FsPermission.getDefault(), false);
    wrapper.setWorkingDirectory(dir);
    wrapper.createSymlink(new Path("../file"), link, false);
    readFile(link);
    assertEquals(new Path("../file"), wrapper.getLinkTarget(link));
  }
  @Test
  @Timeout(value = 10)
  /** Test rename file using a path that contains a symlink. The rename should
   * work as if the path did not contain a symlink */
  public void testRenameFileViaSymlink() throws IOException {
    Path dir = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path fileViaLink = new Path(linkToDir, "file");
    Path fileNewViaLink = new Path(linkToDir, "fileNew");
    createAndWriteFile(file);
    wrapper.createSymlink(dir, linkToDir, false);
    wrapper.rename(fileViaLink, fileNewViaLink);
    // The real file moved, not just the view through the link
    assertFalse(wrapper.exists(fileViaLink));
    assertFalse(wrapper.exists(file));
    assertTrue(wrapper.exists(fileNewViaLink));
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a file through a symlink but this time only the
   * destination path has an intermediate symlink. The rename should work
   * as if the path did not contain a symlink */
  public void testRenameFileToDestViaSymlink() throws IOException {
    Path dir = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path subDir = new Path(linkToDir, "subDir");
    createAndWriteFile(file);
    wrapper.createSymlink(dir, linkToDir, false);
    wrapper.mkdir(subDir, FileContext.DEFAULT_PERM, false);
    try {
      wrapper.rename(file, subDir);
      fail("Renamed file to a directory");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
    // The failed rename must leave the source untouched
    assertTrue(wrapper.exists(file));
  }
  @Test
  @Timeout(value = 10)
  /** Similar tests as the previous ones but rename a directory */
  public void testRenameDirViaSymlink() throws IOException {
    Path baseDir = new Path(testBaseDir1());
    Path dir = new Path(baseDir, "dir");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path dirViaLink = new Path(linkToDir, "dir");
    Path dirNewViaLink = new Path(linkToDir, "dirNew");
    wrapper.mkdir(dir, FileContext.DEFAULT_PERM, false);
    wrapper.createSymlink(baseDir, linkToDir, false);
    assertTrue(wrapper.exists(dirViaLink));
    wrapper.rename(dirViaLink, dirNewViaLink);
    // The real directory moved, not just the view through the link
    assertFalse(wrapper.exists(dirViaLink));
    assertFalse(wrapper.exists(dir));
    assertTrue(wrapper.exists(dirNewViaLink));
  }
  @Test
  @Timeout(value = 10)
  /** Similar tests as the previous ones but rename a symlink */
  public void testRenameSymlinkViaSymlink() throws IOException {
    Path baseDir = new Path(testBaseDir1());
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "link");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    Path linkViaLink = new Path(linkToDir, "link");
    Path linkNewViaLink = new Path(linkToDir, "linkNew");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link, false);
    wrapper.createSymlink(baseDir, linkToDir, false);
    wrapper.rename(linkViaLink, linkNewViaLink);
    assertFalse(wrapper.exists(linkViaLink));
    // Check that we didn't rename the link target
    assertTrue(wrapper.exists(file));
    assertTrue(wrapper.getFileLinkStatus(linkNewViaLink).isSymlink());
    readFile(linkNewViaLink);
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a directory to a symlink to a directory: must fail even
   *  with OVERWRITE, and leave both paths intact. */
  public void testRenameDirToSymlinkToDir() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path subDir = new Path(testBaseDir2(), "subDir");
    Path linkToDir = new Path(testBaseDir2(), "linkToDir");
    wrapper.mkdir(subDir, FileContext.DEFAULT_PERM, false);
    wrapper.createSymlink(subDir, linkToDir, false);
    try {
      wrapper.rename(dir1, linkToDir, Rename.OVERWRITE);
      fail("Renamed directory to a symlink");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
    assertTrue(wrapper.exists(dir1));
    assertTrue(wrapper.exists(linkToDir));
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a directory to a symlink to a file: must fail even
   *  with OVERWRITE, and leave both paths intact. */
  public void testRenameDirToSymlinkToFile() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path file = new Path(testBaseDir2(), "file");
    Path linkToFile = new Path(testBaseDir2(), "linkToFile");
    createAndWriteFile(file);
    wrapper.createSymlink(file, linkToFile, false);
    try {
      wrapper.rename(dir1, linkToFile, Rename.OVERWRITE);
      fail("Renamed directory to a symlink");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
    assertTrue(wrapper.exists(dir1));
    assertTrue(wrapper.exists(linkToFile));
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a directory to a dangling symlink: must fail even with
   *  OVERWRITE, leaving the directory and the dangling link in place. */
  public void testRenameDirToDanglingSymlink() throws IOException {
    Path dir = new Path(testBaseDir1());
    Path link = new Path(testBaseDir2(), "linkToFile");
    wrapper.createSymlink(new Path("/doesNotExist"), link, false);
    try {
      wrapper.rename(dir, link, Rename.OVERWRITE);
      fail("Renamed directory to a symlink");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
    assertTrue(wrapper.exists(dir));
    assertTrue(wrapper.getFileLinkStatus(link) != null);
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a file to a symlink to a directory: fails without
   *  OVERWRITE; with OVERWRITE the link is replaced by the file. */
  public void testRenameFileToSymlinkToDir() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path subDir = new Path(testBaseDir1(), "subDir");
    Path link = new Path(testBaseDir1(), "link");
    wrapper.mkdir(subDir, FileContext.DEFAULT_PERM, false);
    wrapper.createSymlink(subDir, link, false);
    createAndWriteFile(file);
    try {
      wrapper.rename(file, link);
      fail("Renamed file to symlink w/o overwrite");
    } catch (IOException e) {
      // Expected
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    wrapper.rename(file, link, Rename.OVERWRITE);
    assertFalse(wrapper.exists(file));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isFile(link));
    assertFalse(wrapper.getFileLinkStatus(link).isSymlink());
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a file to a symlink to a file: fails without OVERWRITE;
   *  with OVERWRITE the link is replaced by the file. */
  public void testRenameFileToSymlinkToFile() throws IOException {
    Path file1 = new Path(testBaseDir1(), "file1");
    Path file2 = new Path(testBaseDir1(), "file2");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file1);
    createAndWriteFile(file2);
    wrapper.createSymlink(file2, link, false);
    try {
      wrapper.rename(file1, link);
      fail("Renamed file to symlink w/o overwrite");
    } catch (IOException e) {
      // Expected
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    wrapper.rename(file1, link, Rename.OVERWRITE);
    assertFalse(wrapper.exists(file1));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isFile(link));
    assertFalse(wrapper.getFileLinkStatus(link).isSymlink());
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a file to a dangling symlink */
  public void testRenameFileToDanglingSymlink() throws IOException {
    /* NB: Local file system doesn't handle dangling links correctly
     * since File.exists(danglinLink) returns false. */
    if ("file".equals(getScheme())) {
      return;
    }
    Path file1 = new Path(testBaseDir1(), "file1");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file1);
    wrapper.createSymlink(new Path("/doesNotExist"), link, false);
    try {
      wrapper.rename(file1, link);
    } catch (IOException e) {
      // Expected
      // NOTE(review): no fail() here, so a successful non-overwrite rename
      // is tolerated — presumably intentional as behavior is fs-specific.
    }
    wrapper.rename(file1, link, Rename.OVERWRITE);
    assertFalse(wrapper.exists(file1));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isFile(link));
    assertFalse(wrapper.getFileLinkStatus(link).isSymlink());
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink to a new non-existant name */
  public void testRenameSymlinkNonExistantDest() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path link1 = new Path(testBaseDir1(), "linkToFile1");
    Path link2 = new Path(testBaseDir1(), "linkToFile2");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link1, false);
    wrapper.rename(link1, link2);
    // The moved entry is still a symlink and still resolves to the file
    assertTrue(wrapper.getFileLinkStatus(link2).isSymlink());
    readFile(link2);
    readFile(file);
    assertFalse(wrapper.exists(link1));
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink to a file that exists: fails without OVERWRITE;
   *  with OVERWRITE the file is replaced by the link. */
  public void testRenameSymlinkToExistingFile() throws IOException {
    Path file1 = new Path(testBaseDir1(), "file");
    Path file2 = new Path(testBaseDir1(), "someFile");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file1);
    createAndWriteFile(file2);
    wrapper.createSymlink(file2, link, false);
    try {
      wrapper.rename(link, file1);
      fail("Renamed w/o passing overwrite");
    } catch (IOException e) {
      // Expected
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    wrapper.rename(link, file1, Rename.OVERWRITE);
    assertFalse(wrapper.exists(link));
    assertTrue(wrapper.getFileLinkStatus(file1).isSymlink());
    assertEquals(file2, wrapper.getLinkTarget(file1));
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink to a directory that exists: fails with and without
   *  OVERWRITE, whether the destination is empty or not. */
  public void testRenameSymlinkToExistingDir() throws IOException {
    Path dir1 = new Path(testBaseDir1());
    Path dir2 = new Path(testBaseDir2());
    Path subDir = new Path(testBaseDir2(), "subDir");
    Path link = new Path(testBaseDir1(), "linkToDir");
    wrapper.createSymlink(dir1, link, false);
    try {
      wrapper.rename(link, dir2);
      fail("Renamed link to a directory");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
    try {
      wrapper.rename(link, dir2, Rename.OVERWRITE);
      fail("Renamed link to a directory");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
    // Also fails when dir2 has a sub-directory
    wrapper.mkdir(subDir, FsPermission.getDefault(), false);
    try {
      wrapper.rename(link, dir2, Rename.OVERWRITE);
      fail("Renamed link to a directory");
    } catch (IOException e) {
      // Expected. Both must be directories.
      assertTrue(unwrapException(e) instanceof IOException);
    }
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink to itself: fails with FileAlreadyExistsException,
   *  with or without OVERWRITE. */
  public void testRenameSymlinkToItself() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    createAndWriteFile(file);
    Path link = new Path(testBaseDir1(), "linkToFile1");
    wrapper.createSymlink(file, link, false);
    try {
      wrapper.rename(link, link);
      fail("Failed to get expected IOException");
    } catch (IOException e) {
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    // Fails with overwrite as well
    try {
      wrapper.rename(link, link, Rename.OVERWRITE);
      fail("Failed to get expected IOException");
    } catch (IOException e) {
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink */
  public void testRenameSymlink() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path link1 = new Path(testBaseDir1(), "linkToFile1");
    Path link2 = new Path(testBaseDir1(), "linkToFile2");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link1, false);
    wrapper.rename(link1, link2);
    assertTrue(wrapper.getFileLinkStatus(link2).isSymlink());
    assertFalse(wrapper.getFileStatus(link2).isDirectory());
    readFile(link2);
    readFile(file);
    // Creating a file at the renamed link's path must fail — the link
    // (now at link2) still occupies it via its target
    try {
      createAndWriteFile(link2);
      fail("link was not renamed");
    } catch (IOException x) {
      // Expected
    }
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink to the file it links to */
  public void testRenameSymlinkToFileItLinksTo() throws IOException {
    /* NB: The rename is not atomic, so file is deleted before renaming
     * linkToFile. In this interval linkToFile is dangling and local file
     * system does not handle dangling links because File.exists returns
     * false for dangling links. */
    if ("file".equals(getScheme())) {
      return;
    }
    Path file = new Path(testBaseDir1(), "file");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link, false);
    try {
      wrapper.rename(link, file);
      fail("Renamed symlink to its target");
    } catch (IOException e) {
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    // Check the rename didn't happen
    assertTrue(wrapper.isFile(file));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isSymlink(link));
    assertEquals(file, wrapper.getLinkTarget(link));
    // Same result with OVERWRITE
    try {
      wrapper.rename(link, file, Rename.OVERWRITE);
      fail("Renamed symlink to its target");
    } catch (IOException e) {
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    // Check the rename didn't happen
    assertTrue(wrapper.isFile(file));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isSymlink(link));
    assertEquals(file, wrapper.getLinkTarget(link));
  }
  @Test
  @Timeout(value = 10)
  /** Rename a symlink to the directory it links to */
  public void testRenameSymlinkToDirItLinksTo() throws IOException {
    /* NB: The rename is not atomic, so dir is deleted before renaming
     * linkToFile. In this interval linkToFile is dangling and local file
     * system does not handle dangling links because File.exists returns
     * false for dangling links. */
    if ("file".equals(getScheme())) {
      return;
    }
    Path dir = new Path(testBaseDir1(), "dir");
    Path link = new Path(testBaseDir1(), "linkToDir");
    wrapper.mkdir(dir, FileContext.DEFAULT_PERM, false);
    wrapper.createSymlink(dir, link, false);
    try {
      wrapper.rename(link, dir);
      fail("Renamed symlink to its target");
    } catch (IOException e) {
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    // Check the rename didn't happen
    assertTrue(wrapper.isDir(dir));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isSymlink(link));
    assertEquals(dir, wrapper.getLinkTarget(link));
    // Same result with OVERWRITE
    try {
      wrapper.rename(link, dir, Rename.OVERWRITE);
      fail("Renamed symlink to its target");
    } catch (IOException e) {
      assertTrue(unwrapException(e) instanceof FileAlreadyExistsException);
    }
    // Check the rename didn't happen
    assertTrue(wrapper.isDir(dir));
    assertTrue(wrapper.exists(link));
    assertTrue(wrapper.isSymlink(link));
    assertEquals(dir, wrapper.getLinkTarget(link));
  }
  @Test
  @Timeout(value = 10)
  /** Test rename the symlink's target: link dangles while the target is
   *  away and resolves again once the target is renamed back. */
  public void testRenameLinkTarget() throws IOException {
    Path file = new Path(testBaseDir1(), "file");
    Path fileNew = new Path(testBaseDir1(), "fileNew");
    Path link = new Path(testBaseDir1(), "linkToFile");
    createAndWriteFile(file);
    wrapper.createSymlink(file, link, false);
    wrapper.rename(file, fileNew, Rename.OVERWRITE);
    try {
      readFile(link);
      fail("Link should be dangling");
    } catch (IOException x) {
      // Expected
    }
    wrapper.rename(fileNew, file, Rename.OVERWRITE);
    readFile(link);
  }
  @Test
  @Timeout(value = 10)
  /** Test rename a file to path with destination that has symlink parent */
  public void testRenameFileWithDestParentSymlink() throws IOException {
    Path link = new Path(testBaseDir1(), "link");
    Path file1 = new Path(testBaseDir1(), "file1");
    Path file2 = new Path(testBaseDir1(), "file2");
    Path file3 = new Path(link, "file3");
    Path dir2 = new Path(testBaseDir2());
    // Renaming /dir1/file1 to non-existant file /dir1/link/file3 is OK
    // if link points to a directory...
    wrapper.createSymlink(dir2, link, false);
    createAndWriteFile(file1);
    wrapper.rename(file1, file3);
    assertFalse(wrapper.exists(file1));
    assertTrue(wrapper.exists(file3));
    wrapper.rename(file3, file1);
    // But fails if link is dangling...
    wrapper.delete(link, false);
    wrapper.createSymlink(file2, link, false);
    try {
      wrapper.rename(file1, file3);
      // NOTE(review): no fail() — a non-throwing rename is tolerated here
    } catch (IOException e) {
      // Expected
      assertTrue(unwrapException(e) instanceof FileNotFoundException);
    }
    // And if link points to a file...
    createAndWriteFile(file2);
    try {
      wrapper.rename(file1, file3);
      // NOTE(review): no fail() — a non-throwing rename is tolerated here
    } catch (IOException e) {
      // Expected
      assertTrue(unwrapException(e) instanceof ParentNotDirectoryException);
    }
  }
@Test
@Timeout(value = 10)
/**
* Create, write, read, append, rename, get the block locations,
* checksums, and delete a file using a path with a symlink as an
* intermediate path component where the link target was specified
* using an absolute path. Rename is covered in more depth below.
*/
public void testAccessFileViaInterSymlinkAbsTarget() throws IOException {
Path baseDir = new Path(testBaseDir1());
Path file = new Path(testBaseDir1(), "file");
Path fileNew = new Path(baseDir, "fileNew");
Path linkToDir = new Path(testBaseDir2(), "linkToDir");
Path fileViaLink = new Path(linkToDir, "file");
Path fileNewViaLink = new Path(linkToDir, "fileNew");
wrapper.createSymlink(baseDir, linkToDir, false);
createAndWriteFile(fileViaLink);
assertTrue(wrapper.exists(fileViaLink));
assertTrue(wrapper.isFile(fileViaLink));
assertFalse(wrapper.isDir(fileViaLink));
assertFalse(wrapper.getFileLinkStatus(fileViaLink).isSymlink());
assertFalse(wrapper.isDir(fileViaLink));
assertEquals(wrapper.getFileStatus(file),
wrapper.getFileLinkStatus(file));
assertEquals(wrapper.getFileStatus(fileViaLink),
wrapper.getFileLinkStatus(fileViaLink));
readFile(fileViaLink);
appendToFile(fileViaLink);
wrapper.rename(fileViaLink, fileNewViaLink);
assertFalse(wrapper.exists(fileViaLink));
assertTrue(wrapper.exists(fileNewViaLink));
readFile(fileNewViaLink);
assertEquals(wrapper.getFileBlockLocations(fileNew, 0, 1).length,
wrapper.getFileBlockLocations(fileNewViaLink, 0, 1).length);
assertEquals(wrapper.getFileChecksum(fileNew),
wrapper.getFileChecksum(fileNewViaLink));
wrapper.delete(fileNewViaLink, true);
assertFalse(wrapper.exists(fileNewViaLink));
}
@Test
@Timeout(value = 10)
/**
* Operate on a file using a path with an intermediate symlink where
* the link target was specified as a fully qualified path.
*/
public void testAccessFileViaInterSymlinkQualTarget() throws IOException {
Path baseDir = new Path(testBaseDir1());
Path file = new Path(testBaseDir1(), "file");
Path linkToDir = new Path(testBaseDir2(), "linkToDir");
Path fileViaLink = new Path(linkToDir, "file");
wrapper.createSymlink(wrapper.makeQualified(baseDir), linkToDir, false);
createAndWriteFile(fileViaLink);
assertEquals(wrapper.getFileStatus(file),
wrapper.getFileLinkStatus(file));
assertEquals(wrapper.getFileStatus(fileViaLink),
wrapper.getFileLinkStatus(fileViaLink));
readFile(fileViaLink);
}
@Test
@Timeout(value = 10)
/**
* Operate on a file using a path with an intermediate symlink where
* the link target was specified as a relative path.
*/
public void testAccessFileViaInterSymlinkRelTarget() throws IOException {
assumeTrue(!"file".equals(getScheme()));
Path dir = new Path(testBaseDir1(), "dir");
Path file = new Path(dir, "file");
Path linkToDir = new Path(testBaseDir1(), "linkToDir");
Path fileViaLink = new Path(linkToDir, "file");
wrapper.mkdir(dir, FileContext.DEFAULT_PERM, false);
wrapper.createSymlink(new Path("dir"), linkToDir, false);
createAndWriteFile(fileViaLink);
// Note that getFileStatus returns fully qualified paths even
// when called on an absolute path.
assertEquals(wrapper.makeQualified(file),
wrapper.getFileStatus(file).getPath());
// In each case getFileLinkStatus returns the same FileStatus
// as getFileStatus since we're not calling it on a link and
// FileStatus objects are compared by Path.
assertEquals(wrapper.getFileStatus(file),
wrapper.getFileLinkStatus(file));
assertEquals(wrapper.getFileStatus(fileViaLink),
wrapper.getFileLinkStatus(fileViaLink));
assertEquals(wrapper.getFileStatus(fileViaLink),
wrapper.getFileLinkStatus(file));
}
@Test
@Timeout(value = 10)
/** Test create, list, and delete a directory through a symlink */
public void testAccessDirViaSymlink() throws IOException {
Path baseDir = new Path(testBaseDir1());
Path dir = new Path(testBaseDir1(), "dir");
Path linkToDir = new Path(testBaseDir2(), "linkToDir");
Path dirViaLink = new Path(linkToDir, "dir");
wrapper.createSymlink(baseDir, linkToDir, false);
wrapper.mkdir(dirViaLink, FileContext.DEFAULT_PERM, true);
assertTrue(wrapper.getFileStatus(dirViaLink).isDirectory());
FileStatus[] stats = wrapper.listStatus(dirViaLink);
assertEquals(0, stats.length);
RemoteIterator<FileStatus> statsItor = wrapper.listStatusIterator(dirViaLink);
assertFalse(statsItor.hasNext());
wrapper.delete(dirViaLink, false);
assertFalse(wrapper.exists(dirViaLink));
assertFalse(wrapper.exists(dir));
}
@Test
@Timeout(value = 10)
/** setTimes affects the target file not the link */
public void testSetTimesSymlinkToFile() throws IOException {
Path file = new Path(testBaseDir1(), "file");
Path link = new Path(testBaseDir1(), "linkToFile");
createAndWriteFile(file);
wrapper.createSymlink(file, link, false);
long at = wrapper.getFileLinkStatus(link).getAccessTime();
// the local file system may not support millisecond timestamps
wrapper.setTimes(link, 2000L, 3000L);
assertTrue(wrapper.getFileLinkStatus(link).getAccessTime() >= at,
"The atime of symlink should not be lesser after setTimes()");
assertEquals(2000, wrapper.getFileStatus(file).getModificationTime());
assertEquals(3000, wrapper.getFileStatus(file).getAccessTime());
}
@Test
@Timeout(value = 10)
/** setTimes affects the target directory not the link */
public void testSetTimesSymlinkToDir() throws IOException {
Path dir = new Path(testBaseDir1(), "dir");
Path link = new Path(testBaseDir1(), "linkToDir");
wrapper.mkdir(dir, FileContext.DEFAULT_PERM, false);
wrapper.createSymlink(dir, link, false);
long at = wrapper.getFileLinkStatus(link).getAccessTime();
// the local file system may not support millisecond timestamps
wrapper.setTimes(link, 2000L, 3000L);
assertTrue(wrapper.getFileLinkStatus(link).getAccessTime() >= at,
"The atime of symlink should not be lesser after setTimes()");
assertEquals(2000, wrapper.getFileStatus(dir).getModificationTime());
assertEquals(3000, wrapper.getFileStatus(dir).getAccessTime());
}
@Test
@Timeout(value = 10)
/** setTimes does not affect the link even though target does not exist */
public void testSetTimesDanglingLink() throws IOException {
Path file = new Path("/noSuchFile");
Path link = new Path(testBaseDir1()+"/link");
wrapper.createSymlink(file, link, false);
long at = wrapper.getFileLinkStatus(link).getAccessTime();
try {
wrapper.setTimes(link, 2000L, 3000L);
fail("set times to non-existant file");
} catch (IOException e) {
// Expected
}
assertTrue(wrapper.getFileLinkStatus(link).getAccessTime() >= at,
"The atime of symlink should not be lesser after setTimes()");
}
}
|
of
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/processing/ConfigurationReaderBeanElementCreator.java
|
{
"start": 1911,
"end": 8455
}
|
class ____ extends DeclaredBeanElementCreator {
ConfigurationReaderBeanElementCreator(ClassElement classElement, VisitorContext visitorContext) {
super(classElement, visitorContext, false);
}
@Override
protected void applyConfigurationInjectionIfNecessary(BeanDefinitionVisitor visitor,
MethodElement constructor) {
if (!classElement.isRecord() && !constructor.hasAnnotation(ConfigurationInject.class)) {
return;
}
if (constructor.hasStereotype(ANN_REQUIRES_VALIDATION)) {
visitor.setValidated(true);
}
}
public static boolean isConfigurationProperties(ClassElement classElement) {
return classElement.hasStereotype(ConfigurationReader.class);
}
@Override
protected void makeInterceptedForValidationIfNeeded(MethodElement element) {
// Configuration beans are validated by the introspection
}
@Override
protected boolean processAsProperties() {
return true;
}
@Override
protected boolean visitProperty(BeanDefinitionVisitor visitor, PropertyElement propertyElement) {
Optional<MethodElement> readMethod = propertyElement.getReadMethod();
Optional<FieldElement> field = propertyElement.getField();
if (propertyElement.hasStereotype(ConfigurationBuilder.class)) {
// Exclude / ignore shouldn't affect builders
if (readMethod.isPresent()) {
visitor.visitConfigBuilder(
ConfigurationBuilderDefinition.of(classElement, propertyElement, visitorContext)
);
return true;
}
if (field.isPresent()) {
FieldElement fieldElement = field.get();
if (fieldElement.isAccessible(classElement)) {
visitor.visitConfigBuilder(
ConfigurationBuilderDefinition.of(classElement, propertyElement, visitorContext)
);
return true;
}
throw new ProcessingException(fieldElement, "ConfigurationBuilder applied to a non accessible (private or package-private/protected in a different package) field must have a corresponding non-private getter method.");
}
} else if (!propertyElement.isExcluded()) {
boolean claimed = false;
Optional<MethodElement> writeMethod = propertyElement.getWriteMethod();
if (propertyElement.getWriteAccessKind() == PropertyElement.AccessKind.METHOD && writeMethod.isPresent()) {
visitor.setValidated(visitor.isValidated() || propertyElement.hasAnnotation(ANN_REQUIRES_VALIDATION));
MethodElement methodElement = writeMethod.get();
ParameterElement parameter = methodElement.getParameters()[0];
AnnotationMetadata annotationMetadata = new AnnotationMetadataHierarchy(
propertyElement,
parameter
).merge();
annotationMetadata = calculatePath(propertyElement, methodElement, annotationMetadata);
AnnotationMetadata finalAnnotationMetadata = annotationMetadata;
methodElement = methodElement
.withAnnotationMetadata(annotationMetadata)
.withParameters(
Arrays.stream(methodElement.getParameters())
.map(p -> p == parameter ? parameter.withAnnotationMetadata(finalAnnotationMetadata) : p)
.toArray(ParameterElement[]::new)
);
visitor.visitSetterValue(methodElement.getDeclaringType(), methodElement, annotationMetadata, methodElement.isReflectionRequired(classElement), true);
claimed = true;
} else if (propertyElement.getWriteAccessKind() == PropertyElement.AccessKind.FIELD && field.isPresent()) {
visitor.setValidated(visitor.isValidated() || propertyElement.hasAnnotation(ANN_REQUIRES_VALIDATION));
FieldElement fieldElement = field.get();
AnnotationMetadata annotationMetadata = MutableAnnotationMetadata.of(propertyElement.getAnnotationMetadata());
annotationMetadata = calculatePath(propertyElement, fieldElement, annotationMetadata);
visitor.visitFieldValue(fieldElement.getDeclaringType(), fieldElement.withAnnotationMetadata(annotationMetadata), fieldElement.isReflectionRequired(classElement), true);
claimed = true;
}
if (readMethod.isPresent()) {
MethodElement methodElement = readMethod.get();
if (methodElement.hasStereotype(Executable.class)) {
claimed |= visitExecutableMethod(visitor, methodElement);
}
}
return claimed;
}
return false;
}
@Override
protected boolean visitField(BeanDefinitionVisitor visitor, FieldElement fieldElement) {
if (fieldElement.hasStereotype(ConfigurationBuilder.class)) {
if (fieldElement.isAccessible(classElement)) {
visitor.visitConfigBuilder(
ConfigurationBuilderDefinition.of(classElement, fieldElement, visitorContext)
);
return true;
}
throw new ProcessingException(fieldElement, "ConfigurationBuilder applied to a non accessible (private or package-private/protected in a different package) field must have a corresponding non-private getter method.");
}
return super.visitField(visitor, fieldElement);
}
private AnnotationMetadata calculatePath(PropertyElement propertyElement, MemberElement writeMember, AnnotationMetadata annotationMetadata) {
String path = ConfigurationMetadataBuilder.calculatePath(
writeMember.getOwningType(),
writeMember.getDeclaringType(),
propertyElement.getGenericType(),
propertyElement.getName()
);
return visitorContext.getAnnotationMetadataBuilder().annotate(annotationMetadata, AnnotationValue.builder(Property.class).member("name", path).build());
}
@Override
protected boolean isInjectPointMethod(MemberElement memberElement) {
return super.isInjectPointMethod(memberElement) || memberElement.hasDeclaredStereotype(ConfigurationInject.class);
}
}
|
ConfigurationReaderBeanElementCreator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java
|
{
"start": 11218,
"end": 13554
}
|
class ____ {
private Integer numAllocations;
private int numThreads;
private String modelId;
private AdaptiveAllocationsSettings adaptiveAllocationsSettings;
private String deploymentId;
public ElasticsearchInternalServiceSettings build() {
return new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId);
}
public Builder setNumAllocations(Integer numAllocations) {
this.numAllocations = numAllocations;
return this;
}
public Builder setNumThreads(int numThreads) {
this.numThreads = numThreads;
return this;
}
public Builder setModelId(String modelId) {
this.modelId = modelId;
return this;
}
public Builder setDeploymentId(String deploymentId) {
this.deploymentId = deploymentId;
return this;
}
public Builder setAdaptiveAllocationsSettings(AdaptiveAllocationsSettings adaptiveAllocationsSettings) {
this.adaptiveAllocationsSettings = adaptiveAllocationsSettings;
return this;
}
public String getModelId() {
return modelId;
}
public Integer getNumAllocations() {
return numAllocations;
}
public int getNumThreads() {
return numThreads;
}
public AdaptiveAllocationsSettings getAdaptiveAllocationsSettings() {
return adaptiveAllocationsSettings;
}
}
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ElasticsearchInternalServiceSettings that = (ElasticsearchInternalServiceSettings) o;
return Objects.equals(numAllocations, that.numAllocations)
&& numThreads == that.numThreads
&& Objects.equals(modelId, that.modelId)
&& Objects.equals(adaptiveAllocationsSettings, that.adaptiveAllocationsSettings)
&& Objects.equals(deploymentId, that.deploymentId);
}
@Override
public int hashCode() {
return Objects.hash(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId);
}
}
|
Builder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
|
{
"start": 2349,
"end": 3551
}
|
class ____ for all YARN traffic */
private static final int YARN_ROOT_CLASS_ID = 3;
/** Classes 0-3 are used already. We need to ensure that container classes
* do not collide with these classids.
*/
private static final int MIN_CONTAINER_CLASS_ID = 4;
/** This is the number of distinct (container) traffic shaping classes
* that are supported */
private static final int MAX_CONTAINER_CLASSES = 1024;
private static final String MBIT_SUFFIX = "mbit";
private static final String TMP_FILE_PREFIX = "tc.";
private static final String TMP_FILE_SUFFIX = ".cmds";
/** Root queuing discipline attached to the root of the interface */
private static final String FORMAT_QDISC_ADD_TO_ROOT_WITH_DEFAULT =
"qdisc add dev %s root handle %d: htb default %s";
/** Specifies a cgroup/classid based filter - based on the classid associated
* with the outbound packet, the corresponding traffic shaping rule is used
* . Please see tc documentation for additional details.
*/
private static final String FORMAT_FILTER_CGROUP_ADD_TO_PARENT =
"filter add dev %s parent %d: protocol ip prio 10 handle 1: cgroup";
/** Standard format for adding a traffic shaping
|
used
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/AbstractPropertyAccessorTests.java
|
{
"start": 71383,
"end": 71599
}
|
class ____ {
private Enum<?> enumValue;
public Enum<?> getEnumValue() {
return enumValue;
}
public void setEnumValue(Enum<?> enumValue) {
this.enumValue = enumValue;
}
}
public
|
WildcardEnumConsumer
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/internals/StoreFactory.java
|
{
"start": 1864,
"end": 2256
}
|
interface ____ can then be configured <i>after</i> the
* creation of the Topology but before the stores themselves are created.
* This allows Kafka Streams to respect configurations such as
* {@link StreamsConfig#DEFAULT_DSL_STORE_CONFIG} even if it isn't passed
* to {@link org.apache.kafka.streams.StreamsBuilder#StreamsBuilder(TopologyConfig)}</li>
* </ul>
*/
public
|
that
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2133/Issue2133Mapper.java
|
{
"start": 1082,
"end": 1283
}
|
class ____ {
private String s1;
public String getS1() {
return s1;
}
public void setS1(String s1) {
this.s1 = s1;
}
}
|
EmbeddedDto
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java
|
{
"start": 6763,
"end": 7133
}
|
class ____ {
private final int actual = 1;
@Test
void should_run_test_when_assumption_passes() {
thenCode(() -> given(actual).isOne()).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_fails() {
expectAssumptionNotMetException(() -> given(actual).isZero());
}
}
@Nested
|
BDDAssumptions_given_int_Test
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2541/Issue2541Mapper.java
|
{
"start": 555,
"end": 801
}
|
class ____ {
private Optional<String> value;
public Optional<String> getValue() {
return value;
}
public void setValue(Optional<String> value) {
this.value = value;
}
}
|
Target
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ProductionComponentProcessorTest.java
|
{
"start": 14048,
"end": 14374
}
|
interface ____ {",
" Child child();",
"}");
Source child =
CompilerTests.javaSource(
"test.Child",
"package test;",
"",
"import dagger.producers.ProductionSubcomponent;",
"",
"@ProductionSubcomponent",
"
|
Parent
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/metrics/dump/QueryScopeInfo.java
|
{
"start": 3893,
"end": 4596
}
|
class ____ extends QueryScopeInfo {
public final String jobID;
public JobQueryScopeInfo(String jobID) {
this(jobID, "");
}
public JobQueryScopeInfo(String jobID, String scope) {
super(scope);
this.jobID = jobID;
}
@Override
public JobQueryScopeInfo copy(String additionalScope) {
return new JobQueryScopeInfo(this.jobID, concatScopes(additionalScope));
}
@Override
public byte getCategory() {
return INFO_CATEGORY_JOB;
}
}
/** Container for the task scope. Stores the ID of the job/vertex and subtask index. */
public static
|
JobQueryScopeInfo
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/SpringMDCWithBreadcrumbTest.java
|
{
"start": 1815,
"end": 2105
}
|
class ____ implements Processor {
@Override
public void process(Exchange exchange) throws Exception {
assertEquals("route-b", MDC.get("camel.routeId"));
assertEquals(exchange.getExchangeId(), MDC.get("camel.breadcrumbId"));
}
}
}
|
ProcessorB
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/core/src/main/java/io/quarkus/bootstrap/classloading/ClassLoaderLimiter.java
|
{
"start": 7128,
"end": 7531
}
|
class ____ as one that you don't expect to be loaded at runtime.
* If there is an attempt of loading the matched class, a runtime exception will be thrown instead:
* useful for running integration tests to verify your assumptions.
* <p>
* DO NOT list the name by doing using <code>literal.class.getName()</code> as this will implicitly get you
* to load the
|
name
|
java
|
apache__avro
|
lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
|
{
"start": 2156,
"end": 5460
}
|
class ____ {
public Map<String, String> map;
}
@Test
void getMapSchema() throws Exception {
SpecificData.get().getSchema(X.class.getField("map").getGenericType());
}
/** Test nesting of specific data within generic. */
@Test
void specificWithinGeneric() throws Exception {
// define a record with a field that's a generated TestRecord
Schema schema = Schema.createRecord("Foo", "", "x.y.z", false);
List<Schema.Field> fields = new ArrayList<>();
fields.add(new Schema.Field("f", TestRecord.SCHEMA$, "", null));
schema.setFields(fields);
// create a generic instance of this record
TestRecord nested = new TestRecord();
nested.setName("foo");
nested.setKind(Kind.BAR);
nested.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
GenericData.Record record = new GenericData.Record(schema);
record.put("f", nested);
// test that this instance can be written & re-read
TestSchema.checkBinary(schema, record, new SpecificDatumWriter<>(), new SpecificDatumReader<>());
TestSchema.checkDirectBinary(schema, record, new SpecificDatumWriter<>(), new SpecificDatumReader<>());
TestSchema.checkBlockingBinary(schema, record, new SpecificDatumWriter<>(), new SpecificDatumReader<>());
}
@Test
void convertGenericToSpecific() {
GenericRecord generic = new GenericData.Record(TestRecord.SCHEMA$);
generic.put("name", "foo");
generic.put("kind", new GenericData.EnumSymbol(Kind.SCHEMA$, "BAR"));
generic.put("hash",
new GenericData.Fixed(MD5.SCHEMA$, new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
TestRecord specific = (TestRecord) SpecificData.get().deepCopy(TestRecord.SCHEMA$, generic);
}
@Test
void getClassSchema() throws Exception {
assertEquals(TestRecord.getClassSchema(), TestRecord.SCHEMA$);
assertEquals(MD5.getClassSchema(), MD5.SCHEMA$);
assertEquals(Kind.getClassSchema(), Kind.SCHEMA$);
}
@Test
void specificRecordToString() throws IOException {
FooBarSpecificRecord foo = FooBarSpecificRecord.newBuilder().setId(123).setName("foo")
.setNicknames(Collections.singletonList("bar")).setRelatedids(Arrays.asList(1, 2, 3)).setTypeEnum(TypeEnum.c)
.build();
String json = foo.toString();
JsonFactory factory = new JsonFactory();
JsonParser parser = factory.createParser(json);
ObjectMapper mapper = new ObjectMapper();
// will throw exception if string is not parsable json
mapper.readTree(parser);
}
@Test
void externalizeable() throws Exception {
TestRecord before = new TestRecord();
before.setName("foo");
before.setKind(Kind.BAR);
before.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
ObjectOutputStream out = new ObjectOutputStream(bytes);
out.writeObject(before);
out.close();
ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
TestRecord after = (TestRecord) in.readObject();
assertEquals(before, after);
}
@Test
void reservedEnumSymbol() throws Exception {
assertEquals(Reserved.default$, SpecificData.get().createEnum("default", Reserved.SCHEMA$));
}
}
|
X
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/support/AnnotationSupport.java
|
{
"start": 1751,
"end": 5154
}
|
class ____ {
private AnnotationSupport() {
/* no-op */
}
/**
* Determine if an annotation of {@code annotationType} is either
* <em>present</em> or <em>meta-present</em> on the supplied optional
* {@code element}.
*
* <p><strong>Note:</strong> This method does not find repeatable annotations.
* To check for repeatable annotations, use {@link #findRepeatableAnnotations(Optional, Class)}
* and verify that the returned list is not empty.
*
* @param element an {@link Optional} containing the element on which to
* search for the annotation; may be {@code null} or <em>empty</em>
* @param annotationType the annotation type to search for; never {@code null}
* @return {@code true} if the annotation is present or meta-present
* @since 1.3
* @see #isAnnotated(AnnotatedElement, Class)
* @see #findAnnotation(Optional, Class)
* @see #findRepeatableAnnotations(Optional, Class)
*/
@API(status = MAINTAINED, since = "1.3")
@Contract("null, _ -> false")
@SuppressWarnings("NullableOptional")
public static boolean isAnnotated(@Nullable Optional<? extends AnnotatedElement> element,
Class<? extends Annotation> annotationType) {
return AnnotationUtils.isAnnotated(element, annotationType);
}
/**
* Determine if an annotation of {@code annotationType} is either
* <em>present</em> or <em>meta-present</em> on the supplied
* {@code element}.
*
* <p><strong>Note:</strong> This method does not find repeatable annotations.
* To check for repeatable annotations, use {@link #findRepeatableAnnotations(AnnotatedElement, Class)}
* and verify that the returned list is not empty.
*
* @param element the element on which to search for the annotation; may be
* {@code null}
* @param annotationType the annotation type to search for; never {@code null}
* @return {@code true} if the annotation is present or meta-present
* @see #isAnnotated(Optional, Class)
* @see #findAnnotation(AnnotatedElement, Class)
* @see #findRepeatableAnnotations(AnnotatedElement, Class)
*/
@Contract("null, _ -> false")
public static boolean isAnnotated(@Nullable AnnotatedElement element, Class<? extends Annotation> annotationType) {
return AnnotationUtils.isAnnotated(element, annotationType);
}
/**
* Find the first annotation of {@code annotationType} that is either
* <em>present</em> or <em>meta-present</em> on the supplied optional
* {@code element}.
*
* @param <A> the annotation type
* @param element an {@link Optional} containing the element on which to
* search for the annotation; may be {@code null} or <em>empty</em>
* @param annotationType the annotation type to search for; never {@code null}
* @return an {@code Optional} containing the annotation; never {@code null} but
* potentially empty
* @since 1.1
* @see #findAnnotation(AnnotatedElement, Class)
*/
@API(status = MAINTAINED, since = "1.1")
@SuppressWarnings("NullableOptional")
public static <A extends Annotation> Optional<A> findAnnotation(
@Nullable Optional<? extends AnnotatedElement> element, Class<A> annotationType) {
return AnnotationUtils.findAnnotation(element, annotationType);
}
/**
* Find the first annotation of {@code annotationType} that is either
* <em>directly present</em>, <em>meta-present</em>, or <em>indirectly
* present</em> on the supplied {@code element}.
*
* <p>If the element is a
|
AnnotationSupport
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/SupplierRegistry.java
|
{
"start": 1431,
"end": 5338
}
|
class ____ extends SimpleRegistry {
@Override
public <T> T lookupByNameAndType(String name, Class<T> type) {
Map<Class<?>, Object> map = this.get(name);
if (map == null) {
return null;
}
Object answer = map.get(type);
if (answer instanceof Supplier<?> supplier) {
// okay then eval the supplier to get the actual value
answer = supplier.get();
}
if (answer == null) {
// no direct type match then check if assignable
for (Map.Entry<Class<?>, Object> entry : map.entrySet()) {
if (type.isAssignableFrom(entry.getKey())) {
Object value = entry.getValue();
if (value instanceof Supplier<?> supplier) {
// okay then eval the supplier to get the actual value
value = supplier.get();
}
answer = value;
break;
}
}
}
if (answer == null) {
// okay fallback to check all entries that are unassigned type (java.lang.Object)
for (Map.Entry<Class<?>, Object> entry : map.entrySet()) {
if (Object.class == entry.getKey()) {
Object value = entry.getValue();
if (value instanceof Supplier<?> supplier) {
// okay then eval the supplier to get the actual value
value = supplier.get();
}
if (type.isInstance(value)) {
answer = value;
break;
}
}
}
}
if (answer == null) {
return null;
}
try {
answer = unwrap(answer);
return type.cast(answer);
} catch (Exception e) {
String msg = "Found bean: " + name + " in SupplierRegistry: " + this
+ " of type: " + answer.getClass().getName() + " expected type was: " + type;
throw new NoSuchBeanException(name, msg, e);
}
}
@Override
public <T> Set<T> findByType(Class<T> type) {
Set<T> result = new LinkedHashSet<>();
for (Map.Entry<String, Map<Class<?>, Object>> entry : entrySet()) {
for (Map.Entry<Class<?>, Object> subEntry : entry.getValue().entrySet()) {
if (type.isAssignableFrom(subEntry.getKey())) {
Object value = subEntry.getValue();
if (value instanceof Supplier<?> supplier) {
// okay then eval the supplier to get the actual value
value = supplier.get();
}
result.add(type.cast(value));
}
}
}
return result;
}
@Override
public <T> Map<String, T> findByTypeWithName(Class<T> type) {
Map<String, T> result = new LinkedHashMap<>();
for (Map.Entry<String, Map<Class<?>, Object>> entry : entrySet()) {
for (Map.Entry<Class<?>, Object> subEntry : entry.getValue().entrySet()) {
if (type.isAssignableFrom(subEntry.getKey())) {
Object value = subEntry.getValue();
if (value instanceof Supplier<?> supplier) {
// okay then eval the supplier to get the actual value
value = supplier.get();
}
result.put(entry.getKey(), type.cast(value));
}
}
}
return result;
}
@Override
public void bind(String id, Class<?> type, Supplier<Object> bean) {
if (bean != null) {
computeIfAbsent(id, k -> new LinkedHashMap<>()).put(type, wrap(bean));
}
}
}
|
SupplierRegistry
|
java
|
apache__camel
|
components/camel-univocity-parsers/src/test/java/org/apache/camel/dataformat/univocity/UniVocityFixedDataFormatMarshalTest.java
|
{
"start": 1582,
"end": 5655
}
|
class ____ extends CamelTestSupport {
@EndpointInject("mock:result")
MockEndpoint result;
/**
* Tests that we can marshal fixed-width with the default configuration.
*/
@Test
public void shouldMarshalWithDefaultConfiguration() throws Exception {
template.sendBody("direct:default", Arrays.asList(
asMap("A", "1", "B", "2", "C", "3"),
asMap("A", "one", "B", "two", "C", "three")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1 2 3 ", "onetwothree"), body);
}
/**
* Tests that we can marshal a single line with fixed-width.
*/
@Test
public void shouldMarshalSingleLine() throws Exception {
template.sendBody("direct:default", asMap("A", "1", "B", "2", "C", "3"));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1 2 3 "), body);
}
/**
* Tests that the marshalling adds new columns on the fly and keep its order
*/
@Test
public void shouldMarshalAndAddNewColumns() throws Exception {
template.sendBody("direct:default", Arrays.asList(
asMap("A", "1", "B", "2"),
asMap("C", "three", "A", "one", "B", "two")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1 2 ", "onetwothree"), body);
}
/**
* Tests that we can marshal fixed-width with specific headers
*/
@Test
public void shouldMarshalWithSpecificHeaders() throws Exception {
template.sendBody("direct:header", Arrays.asList(
asMap("A", "1", "B", "2", "C", "3"),
asMap("A", "one", "B", "two", "C", "three")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1 3 ", "onethree"), body);
}
/**
* Tests that we can marshal fixed-width using and advanced configuration
*/
@Test
public void shouldMarshalUsingAdvancedConfiguration() throws Exception {
template.sendBody("direct:advanced", Arrays.asList(
asMap("A", null, "B", ""),
asMap("A", "one", "B", "two")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("N/A__empty", "one__two__"), body);
}
@Override
protected RouteBuilder createRouteBuilder() {
final Map<String, DataFormat> tests = new HashMap<>();
// Default writing of fixed-width
var df = new UniVocityFixedDataFormat();
df.setFieldLengths("3,3,5");
tests.put("default", df);
// Write a fixed-width with specific headers
df = new UniVocityFixedDataFormat();
df.setFieldLengths("3,5");
df.setHeaders("A,C");
tests.put("header", df);
// Write a fixed-width with an advanced configuration
df = new UniVocityFixedDataFormat();
df.setFieldLengths("5,5");
df.setNullValue("N/A");
df.setEmptyValue("empty");
df.setPadding('_');
tests.put("advanced", df);
return new RouteBuilder() {
@Override
public void configure() {
for (Map.Entry<String, DataFormat> test : tests.entrySet()) {
from("direct:" + test.getKey()).marshal(test.getValue()).convertBodyTo(String.class).to("mock:result");
}
}
};
}
}
|
UniVocityFixedDataFormatMarshalTest
|
java
|
quarkusio__quarkus
|
independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/config/RegistryConfigImpl.java
|
{
"start": 6660,
"end": 12590
}
|
class ____ implements RegistryConfig.Mutable {
protected String id;
protected boolean enabled = true;
protected String updatePolicy;
protected RegistryDescriptorConfig descriptor;
protected RegistryPlatformsConfig platforms;
protected RegistryNonPlatformExtensionsConfig nonPlatformExtensions;
protected RegistryMavenConfig mavenConfig;
protected RegistryQuarkusVersionsConfig versionsConfig;
protected Map<String, Object> extra;
public Builder() {
}
Builder(String id) {
this.id = id;
}
@JsonIgnore
Builder(RegistryConfig config) {
this.id = config.getId();
this.updatePolicy = config.getUpdatePolicy();
this.descriptor = config.getDescriptor();
this.platforms = config.getPlatforms();
this.nonPlatformExtensions = config.getNonPlatformExtensions();
this.mavenConfig = config.getMaven();
this.versionsConfig = config.getQuarkusVersions();
this.extra = config.getExtra() == null
? null
: new HashMap<>(config.getExtra());
}
@Override
public String getId() {
return this.id;
}
public Builder setId(String id) {
this.id = id;
return this;
}
@Override
public boolean isEnabled() {
return this.enabled;
}
@JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = JsonBooleanTrueFilter.class)
public Builder setEnabled(boolean enabled) {
this.enabled = enabled;
return this;
}
@Override
public String getUpdatePolicy() {
return this.updatePolicy;
}
public Builder setUpdatePolicy(String updatePolicy) {
this.updatePolicy = updatePolicy;
return this;
}
@Override
public RegistryDescriptorConfig getDescriptor() {
return this.descriptor;
}
@JsonDeserialize(as = RegistryDescriptorConfigImpl.Builder.class)
public Builder setDescriptor(RegistryDescriptorConfig descriptor) {
this.descriptor = descriptor;
return this;
}
@Override
public RegistryPlatformsConfig getPlatforms() {
return this.platforms;
}
@JsonDeserialize(as = RegistryPlatformsConfigImpl.Builder.class)
public Builder setPlatforms(RegistryPlatformsConfig platforms) {
this.platforms = platforms;
return this;
}
@Override
public RegistryNonPlatformExtensionsConfig getNonPlatformExtensions() {
return this.nonPlatformExtensions;
}
@JsonDeserialize(as = RegistryNonPlatformExtensionsConfigImpl.Builder.class)
public Builder setNonPlatformExtensions(RegistryNonPlatformExtensionsConfig nonPlatformExtensions) {
this.nonPlatformExtensions = nonPlatformExtensions;
return this;
}
@Override
public RegistryMavenConfig getMaven() {
return this.mavenConfig;
}
@JsonDeserialize(as = RegistryMavenConfigImpl.Builder.class)
public Builder setMaven(RegistryMavenConfig mavenConfig) {
this.mavenConfig = mavenConfig;
return this;
}
@Override
public RegistryQuarkusVersionsConfig getQuarkusVersions() {
return this.versionsConfig;
}
@JsonDeserialize(as = RegistryQuarkusVersionsConfigImpl.Builder.class)
public Builder setQuarkusVersions(RegistryQuarkusVersionsConfig versionsConfig) {
this.versionsConfig = versionsConfig;
return this;
}
@Override
public Map<String, Object> getExtra() {
return extra == null ? Collections.emptyMap() : extra;
}
@Override
public Mutable setExtra(Map<String, Object> newValues) {
if (newValues != Collections.EMPTY_MAP) {
this.extra = newValues;
}
return this;
}
@JsonAnySetter
public Builder setExtra(String name, Object value) {
mutableExtra().put(name, value);
return this;
}
@SuppressWarnings("unchecked")
public <V> V computeExtraIfAbsent(String name, Function<String, ? extends V> func) {
return (V) mutableExtra().computeIfAbsent(name, func);
}
private Map<String, Object> mutableExtra() {
return extra == null ? extra = new HashMap<>(4) : extra;
}
@Override
public RegistryConfigImpl build() {
if (Constants.DEFAULT_REGISTRY_ID.equals(id)) {
fillInFromDefaultRegistry();
}
return new RegistryConfigImpl(this);
}
private void fillInFromDefaultRegistry() {
// The default registry itself is also built (and so will hit this path).
// Keep references to the global default tucked behind tests for missing
// pieces, as the default registry will be complete.
if (descriptor == null) {
descriptor = getDefaultRegistry().getDescriptor();
}
if (platforms == null) {
platforms = getDefaultRegistry().getPlatforms();
}
if (nonPlatformExtensions == null) {
nonPlatformExtensions = getDefaultRegistry().getNonPlatformExtensions();
}
if (mavenConfig == null) {
mavenConfig = getDefaultRegistry().getMaven();
}
}
}
/**
* Serializer for RegistryConfig objects. Deals set entries that could
* be a single string, or a string key for an object.
*/
static
|
Builder
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/provider/PersistenceProviderUnitTests.java
|
{
"start": 6267,
"end": 7586
}
|
class ____ extends ClassLoader {
private CustomClassLoader(ClassLoader parent) {
super(parent);
}
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
if (name.equals(interfaceName)) {
byte[] byteCode = generateByteCodeForInterface(interfaceName, interfaces);
return defineClass(name, byteCode, 0, byteCode.length);
}
return super.findClass(name);
}
}
return new CustomClassLoader(parentClassLoader).loadClass(interfaceName);
}
private static byte[] generateByteCodeForInterface(final String interfaceName, Class<?>... interfacesToImplement) {
String interfaceResourcePath = ClassUtils.convertClassNameToResourcePath(interfaceName);
ClassWriter cw = new ClassWriter(0);
cw.visit(V1_6, ACC_PUBLIC + ACC_ABSTRACT + ACC_INTERFACE, interfaceResourcePath, null, "java/lang/Object",
toResourcePaths(interfacesToImplement));
cw.visitSource(interfaceResourcePath + ".java", null);
cw.visitEnd();
return cw.toByteArray();
}
private static String[] toResourcePaths(Class<?>... interfacesToImplement) {
return Arrays.stream(interfacesToImplement) //
.map(Class::getName) //
.map(ClassUtils::convertClassNameToResourcePath) //
.toArray(String[]::new);
}
}
static
|
CustomClassLoader
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java
|
{
"start": 3988,
"end": 4401
}
|
class ____ {
private final boolean[] actual = { true, true };
@Test
void should_run_test_when_assumption_passes() {
thenCode(() -> given(actual).contains(true)).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_fails() {
expectAssumptionNotMetException(() -> given(actual).contains(false));
}
}
@Nested
|
BDDAssumptions_given_boolean_array_Test
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java
|
{
"start": 10394,
"end": 11071
}
|
enum ____ {
TRUE,
FALSE,
AUTO
}
default boolean isRemoveUnusedBeansFieldValid() {
return ALLOWED_REMOVE_UNUSED_BEANS_VALUES.contains(removeUnusedBeans().toLowerCase());
}
default boolean shouldEnableBeanRemoval() {
final String lowerCase = removeUnusedBeans().toLowerCase();
return "all".equals(lowerCase) || "true".equals(lowerCase) || "fwk".equals(lowerCase) || "framework".equals(lowerCase);
}
default boolean shouldOnlyKeepAppBeans() {
final String lowerCase = removeUnusedBeans().toLowerCase();
return "fwk".equals(lowerCase) || "framework".equals(lowerCase);
}
}
|
OptimizeContexts
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/beanvalidation/CustomNotNullAndNotBlank.java
|
{
"start": 1271,
"end": 1497
}
|
interface ____ {
String message() default "{org.hibernate.validator.constraints.CustomNotNullAndNotBlank.message}";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
|
CustomNotNullAndNotBlank
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/objects/data/AlwaysDifferentAddress.java
|
{
"start": 673,
"end": 792
}
|
class ____ extends Address {
@Override
public boolean equals(Object o) {
return false;
}
}
|
AlwaysDifferentAddress
|
java
|
micronaut-projects__micronaut-core
|
core-reactive/src/main/java/io/micronaut/core/async/publisher/PublishersOptimizations.java
|
{
"start": 917,
"end": 1599
}
|
class ____ {
private final List<Class<?>> reactiveTypes;
private final List<Class<?>> singleTypes;
private final List<Class<?>> completableTypes;
public PublishersOptimizations(List<Class<?>> reactiveTypes, List<Class<?>> singleTypes, List<Class<?>> completableTypes) {
this.reactiveTypes = reactiveTypes;
this.singleTypes = singleTypes;
this.completableTypes = completableTypes;
}
List<Class<?>> getReactiveTypes() {
return reactiveTypes;
}
List<Class<?>> getSingleTypes() {
return singleTypes;
}
List<Class<?>> getCompletableTypes() {
return completableTypes;
}
}
|
PublishersOptimizations
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/DoNotCallSuggester.java
|
{
"start": 2399,
"end": 3327
}
|
class ____ extends BugChecker implements MethodTreeMatcher {
// TODO(kak): Consider adding "newInstance" to this list (some frameworks use that method name)
private static final ImmutableSet<String> METHOD_PREFIXES_TO_IGNORE =
ImmutableSet.of(
// likely providing Dagger bindings
"produce", "provide");
private static final ImmutableSet<String> METHOD_SUBSTRINGS_TO_IGNORE =
ImmutableSet.of(
// common substrings in the names of exception factory methods
"throw", "fail", "exception", "propagate");
private static final ImmutableSet<String> ANNOTATIONS_TO_IGNORE =
ImmutableSet.of(
// ignore methods that are already annotated w/ @DoNotCall
DO_NOT_CALL,
// We exclude methods that are overrides; at call sites, rarely is the variable reference
// statically typed as the subclass (often it's typed as the
|
DoNotCallSuggester
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectivesWithViewDFS.java
|
{
"start": 1253,
"end": 2294
}
|
class ____ extends TestCacheDirectives {
@Override
public DistributedFileSystem getDFS() throws IOException {
Configuration conf = getConf();
conf.set("fs.hdfs.impl", ViewDistributedFileSystem.class.getName());
URI defaultFSURI =
URI.create(conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY));
ConfigUtil.addLinkFallback(conf, defaultFSURI.getHost(),
new Path(defaultFSURI.toString()).toUri());
ConfigUtil.addLink(conf, defaultFSURI.getHost(), "/tmp",
new Path(defaultFSURI.toString()).toUri());
return super.getDFS();
}
@Override
public DistributedFileSystem getDFS(MiniDFSCluster cluster, int nnIdx)
throws IOException {
Configuration conf = cluster.getConfiguration(nnIdx);
conf.set("fs.hdfs.impl", ViewDistributedFileSystem.class.getName());
URI uri = cluster.getURI(0);
ConfigUtil.addLinkFallback(conf, uri.getHost(), uri);
ConfigUtil.addLink(conf, uri.getHost(), "/tmp", uri);
return cluster.getFileSystem(0);
}
}
|
TestCacheDirectivesWithViewDFS
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/module/TestAbstractTypes.java
|
{
"start": 906,
"end": 983
}
|
interface ____ {
public int getValue();
}
public static
|
Abstract
|
java
|
spring-projects__spring-boot
|
test-support/spring-boot-test-support/src/main/java/org/springframework/boot/testsupport/classpath/resources/WithPackageResources.java
|
{
"start": 1228,
"end": 1371
}
|
interface ____ {
/**
* The resources to make available from the root.
* @return the resources
*/
String[] value();
}
|
WithPackageResources
|
java
|
quarkusio__quarkus
|
integration-tests/security-webauthn/src/main/java/io/quarkus/it/security/webauthn/User.java
|
{
"start": 319,
"end": 683
}
|
class ____ extends PanacheEntity {
@Column(unique = true)
public String username;
// non-owning side, so we can add more credentials later
@OneToOne(mappedBy = "user")
public WebAuthnCredential webAuthnCredential;
public static Uni<User> findByUsername(String username) {
return find("username", username).firstResult();
}
}
|
User
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/servlet/client/ExchangeResult.java
|
{
"start": 6791,
"end": 8777
}
|
class ____
* category, and after that re-throw the error.
*/
public void assertWithDiagnostics(Runnable assertion) {
try {
assertion.run();
}
catch (AssertionError ex) {
if (!this.diagnosticsLogged && logger.isErrorEnabled()) {
this.diagnosticsLogged = true;
logger.error("Request details for assertion failure:\n" + this);
}
throw ex;
}
}
@Override
public String toString() {
return "\n" +
"> " + getMethod() + " " + getUrl() + "\n" +
"> " + formatHeaders(getRequestHeaders(), "\n> ") + "\n" +
"\n" +
formatBody(getRequestHeaders().getContentType(), this.requestBody) + "\n" +
"\n" +
"< " + formatStatus(getStatus()) + "\n" +
"< " + formatHeaders(getResponseHeaders(), "\n< ") + "\n" +
"\n" +
formatBody(getResponseHeaders().getContentType(), getResponseBodyContent()) +"\n";
}
private void bufferResponseBody() {
try {
StreamUtils.drain(this.clientResponse.getBody());
}
catch (IOException ex) {
throw new IllegalStateException("Failed to get response content: " + ex);
}
}
private String formatStatus(HttpStatusCode statusCode) {
String result = statusCode.toString();
if (statusCode instanceof HttpStatus status) {
result += " " + status.getReasonPhrase();
}
return result;
}
private String formatHeaders(HttpHeaders headers, String delimiter) {
return headers.headerSet().stream()
.map(entry -> entry.getKey() + ": " + entry.getValue())
.collect(Collectors.joining(delimiter));
}
private String formatBody(@Nullable MediaType contentType, byte[] bytes) {
if (contentType == null) {
return bytes.length + " bytes of content (unknown content-type).";
}
Charset charset = contentType.getCharset();
if (charset != null) {
return new String(bytes, charset);
}
if (PRINTABLE_MEDIA_TYPES.stream().anyMatch(contentType::isCompatibleWith)) {
return new String(bytes, StandardCharsets.UTF_8);
}
return bytes.length + " bytes of content.";
}
}
|
log
|
java
|
quarkusio__quarkus
|
extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/utils/JaxbType.java
|
{
"start": 194,
"end": 2112
}
|
class ____ {
private static final String DEFAULT_JAXB_ANNOTATION_VALUE = "##default";
private final String modelName;
private final Class<?> clazz;
public JaxbType(Class<?> clazz) {
this.modelName = findModelNameFromType(clazz);
this.clazz = clazz;
}
public String getModelName() {
return modelName;
}
public Class<?> getType() {
return clazz;
}
private String findModelNameFromType(Class<?> clazz) {
String nameFromAnnotation = DEFAULT_JAXB_ANNOTATION_VALUE;
String namespaceFromAnnotation = DEFAULT_JAXB_ANNOTATION_VALUE;
XmlType xmlType = clazz.getAnnotation(XmlType.class);
if (xmlType != null) {
nameFromAnnotation = xmlType.name();
namespaceFromAnnotation = xmlType.namespace();
} else {
XmlRootElement rootElement = clazz.getAnnotation(XmlRootElement.class);
if (rootElement != null) {
nameFromAnnotation = rootElement.name();
namespaceFromAnnotation = rootElement.namespace();
}
}
String modelName = nameFromAnnotation;
if (DEFAULT_JAXB_ANNOTATION_VALUE.equals(nameFromAnnotation)) {
modelName = clazz.getSimpleName().toLowerCase(Locale.ROOT);
}
if (!DEFAULT_JAXB_ANNOTATION_VALUE.equals(namespaceFromAnnotation)) {
modelName += "." + namespaceFromAnnotation;
}
return modelName;
}
public static boolean isValidType(Class<?> clazz) {
return clazz != null && !clazz.isPrimitive() && !clazz.isArray();
}
public static JaxbType findExistingType(Set<JaxbType> dictionary, JaxbType jaxbType) {
for (JaxbType existing : dictionary) {
if (existing.modelName.equals(jaxbType.modelName)) {
return existing;
}
}
return null;
}
}
|
JaxbType
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/SchemaFormatter.java
|
{
"start": 2922,
"end": 5076
}
|
interface ____ {
/**
* Get the schema formatter for the specified format name with optional variant.
*
* @param name a format with optional variant, for example "json/pretty",
* "canonical" or "idl"
* @return the schema formatter for the specified format
* @throws AvroRuntimeException if the schema format is not supported
*/
static SchemaFormatter getInstance(String name) {
int slashPos = name.indexOf("/");
// SchemaFormatterFactory.getFormatterForVariant(String) receives the name of
// the variant in lowercase (as stated in its javadoc). We're doing a
// case-insensitive comparison on the format name instead, so we don't have to
// convert the format name provided by the factory to lower case.
// This ensures the least amount of assumptions about implementations.
String formatName = slashPos < 0 ? name : name.substring(0, slashPos);
String variantName = slashPos < 0 ? null : name.substring(slashPos + 1).toLowerCase(Locale.ROOT);
for (SchemaFormatterFactory formatterFactory : SchemaFormatterCache.LOADER) {
if (formatName.equalsIgnoreCase(formatterFactory.formatName())) {
if (variantName == null) {
return formatterFactory.getDefaultFormatter();
} else {
return formatterFactory.getFormatterForVariant(variantName);
}
}
}
throw new AvroRuntimeException("Unsupported schema format: " + name + "; see the javadoc for valid examples");
}
/**
* Format a schema with the specified format. Shorthand for
* {@code getInstance(name).format(schema)}.
*
* @param name the name of the schema format
* @param schema the schema to format
* @return the formatted schema
* @throws AvroRuntimeException if the schema format is not supported
* @see #getInstance(String)
* @see #format(Schema)
*/
static String format(String name, Schema schema) {
return getInstance(name).format(schema);
}
/**
* Write the specified schema as a String.
*
* @param schema the schema to write
* @return the formatted schema
*/
String format(Schema schema);
}
|
SchemaFormatter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/snapshots/SnapshotException.java
|
{
"start": 782,
"end": 2749
}
|
class ____ extends ElasticsearchException {
@Nullable
private final String repositoryName;
@Nullable
private final String snapshotName;
public SnapshotException(final Snapshot snapshot, final String msg) {
this(snapshot, msg, null);
}
public SnapshotException(final Snapshot snapshot, final String msg, final Throwable cause) {
super("[" + (snapshot == null ? "_na" : snapshot) + "] " + msg, cause);
if (snapshot != null) {
this.repositoryName = snapshot.getRepository();
this.snapshotName = snapshot.getSnapshotId().getName();
} else {
this.repositoryName = null;
this.snapshotName = null;
}
}
public SnapshotException(final String repositoryName, final SnapshotId snapshotId, final String msg, final Throwable cause) {
super("[" + repositoryName + ":" + snapshotId + "] " + msg, cause);
this.repositoryName = repositoryName;
this.snapshotName = snapshotId.getName();
}
public SnapshotException(final String repositoryName, final String snapshotName, final String msg) {
super("[" + repositoryName + ":" + snapshotName + "] " + msg);
this.repositoryName = repositoryName;
this.snapshotName = snapshotName;
}
public SnapshotException(final StreamInput in) throws IOException {
super(in);
repositoryName = in.readOptionalString();
snapshotName = in.readOptionalString();
}
@Override
protected void writeTo(StreamOutput out, Writer<Throwable> nestedExceptionsWriter) throws IOException {
super.writeTo(out, nestedExceptionsWriter);
out.writeOptionalString(repositoryName);
out.writeOptionalString(snapshotName);
}
@Nullable
public String getRepositoryName() {
return repositoryName;
}
@Nullable
public String getSnapshotName() {
return snapshotName;
}
}
|
SnapshotException
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/datasource/UserCredentialsDataSourceAdapter.java
|
{
"start": 7288,
"end": 7662
}
|
class ____ {
public final String username;
public final String password;
public JdbcUserCredentials(String username, String password) {
this.username = username;
this.password = password;
}
@Override
public String toString() {
return "JdbcUserCredentials[username='" + this.username + "',password='" + this.password + "']";
}
}
}
|
JdbcUserCredentials
|
java
|
junit-team__junit5
|
junit-jupiter-migrationsupport/src/main/java/org/junit/jupiter/migrationsupport/rules/adapter/AbstractTestRuleAdapter.java
|
{
"start": 1044,
"end": 2028
}
|
class ____ implements GenericBeforeAndAfterAdvice {
private final TestRule target;
public AbstractTestRuleAdapter(TestRuleAnnotatedMember annotatedMember, Class<? extends TestRule> adapteeClass) {
this.target = annotatedMember.getTestRule();
Preconditions.condition(adapteeClass.isAssignableFrom(this.target.getClass()),
() -> adapteeClass + " is not assignable from " + this.target.getClass());
}
protected @Nullable Object executeMethod(String name) {
return executeMethod(name, new Class<?>[0]);
}
protected @Nullable Object executeMethod(String methodName, Class<?>[] parameterTypes, Object... arguments) {
Method method = findMethod(this.target.getClass(), methodName, parameterTypes).orElseThrow(
() -> new JUnitException("Failed to find method %s(%s) in class %s".formatted(methodName,
ClassUtils.nullSafeToString(parameterTypes), this.target.getClass().getName())));
return invokeMethod(method, this.target, arguments);
}
}
|
AbstractTestRuleAdapter
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2825/Issue2825Test.java
|
{
"start": 560,
"end": 969
}
|
class ____ {
@ProcessorTest
public void mappingMethodShouldNotBeReusedForSubclassMappings() {
Dog dog = new Dog();
dog.setName( "Lucky" );
dog.setRace( "Shepherd" );
TargetAnimal target = Issue2825Mapper.INSTANCE.map( dog );
assertThat( target.getName() ).isEqualTo( "Lucky" );
assertThat( target.getRace() ).isEqualTo( "Shepherd" );
}
}
|
Issue2825Test
|
java
|
apache__kafka
|
trogdor/src/test/java/org/apache/kafka/trogdor/common/JsonUtilTest.java
|
{
"start": 1325,
"end": 2561
}
|
class ____ {
@Test
public void testOpenBraceComesFirst() {
assertTrue(JsonUtil.openBraceComesFirst("{}"));
assertTrue(JsonUtil.openBraceComesFirst(" \t{\"foo\":\"bar\"}"));
assertTrue(JsonUtil.openBraceComesFirst(" { \"foo\": \"bar\" }"));
assertFalse(JsonUtil.openBraceComesFirst("/my/file/path"));
assertFalse(JsonUtil.openBraceComesFirst("mypath"));
assertFalse(JsonUtil.openBraceComesFirst(" blah{}"));
}
record Foo(@JsonProperty int bar) {
@JsonCreator
Foo(@JsonProperty("bar") int bar) {
this.bar = bar;
}
}
@Test
public void testObjectFromCommandLineArgument() throws Exception {
assertEquals(123, JsonUtil.objectFromCommandLineArgument("{\"bar\":123}", Foo.class).bar);
assertEquals(1, JsonUtil.objectFromCommandLineArgument(" {\"bar\": 1} ", Foo.class).bar);
File tempFile = TestUtils.tempFile();
try {
Files.writeString(tempFile.toPath(), "{\"bar\": 456}");
assertEquals(456, JsonUtil.objectFromCommandLineArgument(tempFile.getAbsolutePath(), Foo.class).bar);
} finally {
Files.delete(tempFile.toPath());
}
}
}
|
JsonUtilTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.