language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2109/Issue2109Test.java | {
"start": 528,
"end": 1676
} | class ____ {
@ProcessorTest
public void shouldCorrectlyMapArrayInConstructorMapping() {
Target target = Issue2109Mapper.INSTANCE.map( new Source( 100L, new byte[] { 100, 120, 40, 40 } ) );
assertThat( target ).isNotNull();
assertThat( target.getId() ).isEqualTo( 100L );
assertThat( target.getData() ).containsExactly( 100, 120, 40, 40 );
target = Issue2109Mapper.INSTANCE.map( new Source( 50L, null ) );
assertThat( target ).isNotNull();
assertThat( target.getId() ).isEqualTo( 50L );
assertThat( target.getData() ).isNull();
target = Issue2109Mapper.INSTANCE.mapWithEmptyData( new Source( 100L, new byte[] { 100, 120, 40, 40 } ) );
assertThat( target ).isNotNull();
assertThat( target.getId() ).isEqualTo( 100L );
assertThat( target.getData() ).containsExactly( 100, 120, 40, 40 );
target = Issue2109Mapper.INSTANCE.mapWithEmptyData( new Source( 50L, null ) );
assertThat( target ).isNotNull();
assertThat( target.getId() ).isEqualTo( 50L );
assertThat( target.getData() ).isEmpty();
}
}
| Issue2109Test |
java | netty__netty | codec-haproxy/src/main/java/io/netty/handler/codec/haproxy/HAProxyProxiedProtocol.java | {
"start": 6303,
"end": 8199
} | enum ____ {
/**
* The UNSPEC transport protocol represents a connection which was forwarded for an unknown protocol.
*/
UNSPEC(TRANSPORT_UNSPEC_BYTE),
/**
* The STREAM transport protocol represents a connection which was forwarded for a TCP connection.
*/
STREAM(TRANSPORT_STREAM_BYTE),
/**
* The DGRAM transport protocol represents a connection which was forwarded for a UDP connection.
*/
DGRAM(TRANSPORT_DGRAM_BYTE);
/**
* The transport protocol is specified in the lowest 4 bits of the transport protocol and address family byte
*/
private static final byte TRANSPORT_MASK = 0x0f;
private final byte transportByte;
/**
* Creates a new instance.
*/
TransportProtocol(byte transportByte) {
this.transportByte = transportByte;
}
/**
* Returns the {@link TransportProtocol} represented by the lowest 4 bits of the specified byte.
*
* @param tpafByte transport protocol and address family byte
*/
public static TransportProtocol valueOf(byte tpafByte) {
int transportProtocol = tpafByte & TRANSPORT_MASK;
switch ((byte) transportProtocol) {
case TRANSPORT_STREAM_BYTE:
return STREAM;
case TRANSPORT_UNSPEC_BYTE:
return UNSPEC;
case TRANSPORT_DGRAM_BYTE:
return DGRAM;
default:
throw new IllegalArgumentException("unknown transport protocol: " + transportProtocol);
}
}
/**
* Returns the byte value of this transport protocol.
*/
public byte byteValue() {
return transportByte;
}
}
}
| TransportProtocol |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/diagnostics/analyzer/NoSuchBeanDefinitionFailureAnalyzerTests.java | {
"start": 12796,
"end": 13001
} | class ____ {
@ConditionalOnBean(Integer.class)
@Bean(name = "test-string")
String string() {
return "Test";
}
}
@Configuration(proxyBeanMethods = false)
static | TestMissingBeanAutoConfiguration |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java | {
"start": 3612,
"end": 4406
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory left;
private final Component2D right;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory left,
Component2D right) {
this.source = source;
this.left = left;
this.right = right;
}
@Override
public SpatialDisjointCartesianSourceAndConstantEvaluator get(DriverContext context) {
return new SpatialDisjointCartesianSourceAndConstantEvaluator(source, left.get(context), right, context);
}
@Override
public String toString() {
return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "left=" + left + ", right=" + right + "]";
}
}
}
| Factory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingWithSecondaryTable.java | {
"start": 3385,
"end": 3618
} | class ____ extends ShapeEntity {
}
@Entity(name = "ShapeCircleEntity")
@DiscriminatorValue("CIRCLE")
@SecondaryTable(name = "SHAPE_CIRCLE", pkJoinColumns = @PrimaryKeyJoinColumn(name = "SHAPE_ID"))
public static | ShapePolygonEntity |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/SourceModeLicenseChangeTestCase.java | {
"start": 643,
"end": 2673
} | interface ____ {
String dataStreamName();
void prepareDataStream() throws IOException;
String indexMode();
SourceFieldMapper.Mode initialMode();
SourceFieldMapper.Mode finalMode();
void rollover() throws IOException;
}
protected abstract void licenseChange() throws IOException;
protected abstract void applyInitialLicense() throws IOException;
protected abstract List<TestCase> cases();
public void testLicenseChange() throws IOException {
applyInitialLicense();
for (var testCase : cases()) {
testCase.prepareDataStream();
var indexMode = (String) getSetting(client(), getDataStreamBackingIndex(client(), testCase.dataStreamName(), 0), "index.mode");
assertEquals(testCase.indexMode(), indexMode);
var sourceMode = (String) getSetting(
client(),
getDataStreamBackingIndex(client(), testCase.dataStreamName(), 0),
"index.mapping.source.mode"
);
assertEquals(testCase.initialMode().toString(), sourceMode);
}
licenseChange();
for (var testCase : cases()) {
testCase.rollover();
var indexMode = (String) getSetting(client(), getDataStreamBackingIndex(client(), testCase.dataStreamName(), 1), "index.mode");
assertEquals(testCase.indexMode(), indexMode);
var sourceMode = (String) getSetting(
client(),
getDataStreamBackingIndex(client(), testCase.dataStreamName(), 1),
"index.mapping.source.mode"
);
assertEquals(testCase.finalMode().toString(), sourceMode);
}
}
protected static Response removeComponentTemplate(final RestClient client, final String componentTemplate) throws IOException {
final Request request = new Request("DELETE", "/_component_template/" + componentTemplate);
return client.performRequest(request);
}
}
| TestCase |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/localfs/TestLocalFSContractContentSummary.java | {
"start": 1051,
"end": 1261
} | class ____ extends AbstractContractContentSummaryTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new LocalFSContract(conf);
}
}
| TestLocalFSContractContentSummary |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/SingleOnSubscribe.java | {
"start": 943,
"end": 1242
} | interface ____<@NonNull T> {
/**
* Called for each {@link SingleObserver} that subscribes.
* @param emitter the safe emitter instance, never {@code null}
* @throws Throwable on error
*/
void subscribe(@NonNull SingleEmitter<T> emitter) throws Throwable;
}
| SingleOnSubscribe |
java | alibaba__nacos | plugin/datasource/src/test/java/com/alibaba/nacos/plugin/datasource/impl/mysql/ConfigInfoMapperByMySqlTest.java | {
"start": 1429,
"end": 22588
} | class ____ {
private final Object[] emptyObjs = new Object[] {};
int startRow = 0;
int pageSize = 5;
String appName = "appName";
String groupId = "groupId";
String tenantId = "tenantId";
String id = "123";
long lastMaxId = 1234;
List<Long> ids = Lists.newArrayList(1L, 2L, 3L, 5L, 144L);
Timestamp startTime = new Timestamp(System.currentTimeMillis());
Timestamp endTime = new Timestamp(System.currentTimeMillis());
MapperContext context;
private ConfigInfoMapperByMySql configInfoMapperByMySql;
@BeforeEach
void setUp() throws Exception {
configInfoMapperByMySql = new ConfigInfoMapperByMySql();
context = new MapperContext(startRow, pageSize);
context.putWhereParameter(FieldConstant.APP_NAME, appName);
context.putWhereParameter(FieldConstant.TENANT_ID, tenantId);
context.putWhereParameter(FieldConstant.ID, id);
context.putWhereParameter(FieldConstant.START_TIME, startTime);
context.putWhereParameter(FieldConstant.END_TIME, endTime);
context.putWhereParameter(FieldConstant.IDS, ids);
context.putWhereParameter(FieldConstant.PAGE_SIZE, pageSize);
context.putWhereParameter(FieldConstant.LAST_MAX_ID, lastMaxId);
}
@Test
void testFindConfigMaxId() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigMaxId(null);
assertEquals("SELECT MAX(id) FROM config_info", mapperResult.getSql());
}
@Test
void testFindAllDataIdAndGroup() {
MapperResult mapperResult = configInfoMapperByMySql.findAllDataIdAndGroup(null);
assertEquals("SELECT DISTINCT data_id, group_id FROM config_info", mapperResult.getSql());
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testFindConfigInfoByAppCountRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoByAppCountRows(context);
assertEquals("SELECT count(*) FROM config_info WHERE tenant_id LIKE ? AND app_name = ?", mapperResult.getSql());
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfoByAppFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoByAppFetchRows(context);
assertEquals(mapperResult.getSql(),
"SELECT id,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE tenant_id LIKE ? AND app_name= ? LIMIT "
+ startRow + "," + pageSize);
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testConfigInfoLikeTenantCount() {
MapperResult mapperResult = configInfoMapperByMySql.configInfoLikeTenantCount(context);
assertEquals("SELECT count(*) FROM config_info WHERE tenant_id LIKE ?", mapperResult.getSql());
assertArrayEquals(new Object[] {tenantId}, mapperResult.getParamList().toArray());
}
@Test
void testGetTenantIdList() {
MapperResult mapperResult = configInfoMapperByMySql.getTenantIdList(context);
assertEquals(mapperResult.getSql(), "SELECT tenant_id FROM config_info WHERE tenant_id != '" + NamespaceUtil.getNamespaceDefaultId()
+ "' GROUP BY tenant_id LIMIT " + startRow + "," + pageSize);
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testGetGroupIdList() {
MapperResult mapperResult = configInfoMapperByMySql.getGroupIdList(context);
assertEquals(mapperResult.getSql(),
"SELECT group_id FROM config_info WHERE tenant_id ='public' GROUP BY group_id LIMIT " + startRow + "," + pageSize);
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testFindAllConfigKey() {
MapperResult mapperResult = configInfoMapperByMySql.findAllConfigKey(context);
assertEquals(mapperResult.getSql(),
" SELECT data_id,group_id,app_name FROM ( " + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT "
+ context.getStartRow() + "," + context.getPageSize() + " )" + " g, config_info t WHERE g.id = t.id ");
assertArrayEquals(new Object[] {tenantId}, mapperResult.getParamList().toArray());
}
@Test
void testFindAllConfigInfoBaseFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.findAllConfigInfoBaseFetchRows(context);
assertEquals(mapperResult.getSql(),
"SELECT t.id,data_id,group_id,content,md5 FROM ( SELECT id FROM config_info ORDER BY id LIMIT " + context.getStartRow()
+ "," + context.getPageSize() + " ) g, config_info t WHERE g.id = t.id ");
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testFindAllConfigInfoFragment() {
//with content
context.putContextParameter(ContextConstant.NEED_CONTENT, "true");
MapperResult mapperResult = configInfoMapperByMySql.findAllConfigInfoFragment(context);
assertEquals("SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type,encrypted_data_key "
+ "FROM config_info WHERE id > ? ORDER BY id ASC LIMIT " + startRow + "," + pageSize, mapperResult.getSql());
assertArrayEquals(new Object[] {id}, mapperResult.getParamList().toArray());
context.putContextParameter(ContextConstant.NEED_CONTENT, "false");
MapperResult mapperResult2 = configInfoMapperByMySql.findAllConfigInfoFragment(context);
assertEquals("SELECT id,data_id,group_id,tenant_id,app_name,md5,gmt_modified,type,encrypted_data_key "
+ "FROM config_info WHERE id > ? ORDER BY id ASC LIMIT " + startRow + "," + pageSize, mapperResult2.getSql());
assertArrayEquals(new Object[] {id}, mapperResult2.getParamList().toArray());
}
@Test
void testFindChangeConfig() {
MapperResult mapperResult = configInfoMapperByMySql.findChangeConfig(context);
assertEquals(mapperResult.getSql(),
"SELECT id, data_id, group_id, tenant_id, app_name,md5, gmt_modified, encrypted_data_key FROM config_info"
+ " WHERE gmt_modified >= ? and id > ? order by id limit ? ");
assertArrayEquals(new Object[] {startTime, lastMaxId, pageSize}, mapperResult.getParamList().toArray());
}
@Test
void testFindChangeConfigCountRows() {
MapperResult mapperResult = configInfoMapperByMySql.findChangeConfigCountRows(context);
assertEquals("SELECT count(*) FROM config_info WHERE 1=1 AND app_name = ? AND gmt_modified >=? AND gmt_modified <=? ",
mapperResult.getSql());
assertArrayEquals(new Object[] {appName, startTime, endTime}, mapperResult.getParamList().toArray());
}
@Test
void testFindChangeConfigFetchRows() {
Object lastMaxId = 100;
context.putWhereParameter(FieldConstant.LAST_MAX_ID, lastMaxId);
MapperResult mapperResult = configInfoMapperByMySql.findChangeConfigFetchRows(context);
assertEquals(mapperResult.getSql(), "SELECT id,data_id,group_id,tenant_id,app_name,type,md5,gmt_modified FROM config_info "
+ "WHERE 1=1 AND tenant_id = ? AND app_name = ? AND gmt_modified >=? AND gmt_modified <=? AND id > " + lastMaxId
+ " ORDER BY id ASC LIMIT " + startRow + "," + pageSize);
assertArrayEquals(new Object[] {tenantId, appName, startTime, endTime}, mapperResult.getParamList().toArray());
}
@Test
void testListGroupKeyMd5ByPageFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.listGroupKeyMd5ByPageFetchRows(context);
assertEquals(mapperResult.getSql(), "SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified,encrypted_data_key FROM "
+ "( SELECT id FROM config_info ORDER BY id LIMIT 0,5 ) g, config_info t WHERE g.id = t.id");
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testFindAllConfigInfo4Export() {
MapperResult mapperResult = configInfoMapperByMySql.findAllConfigInfo4Export(context);
assertEquals(mapperResult.getSql(),
"SELECT id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,"
+ "src_ip,c_desc,c_use,effect,c_schema,encrypted_data_key FROM config_info WHERE id IN (?, ?, ?, ?, ?) ");
assertArrayEquals(mapperResult.getParamList().toArray(), ids.toArray());
context.putWhereParameter(FieldConstant.IDS, null);
mapperResult = configInfoMapperByMySql.findAllConfigInfo4Export(context);
assertEquals(mapperResult.getSql(),
"SELECT id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,"
+ "src_ip,c_desc,c_use,effect,c_schema,encrypted_data_key FROM config_info WHERE tenant_id = ? AND app_name= ? ");
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfoBaseLikeCountRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoBaseLikeCountRows(context);
assertEquals("SELECT count(*) FROM config_info WHERE 1=1 AND tenant_id='public' ", mapperResult.getSql());
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testFindConfigInfoBaseLikeFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoBaseLikeFetchRows(context);
assertEquals(mapperResult.getSql(),
"SELECT id,data_id,group_id,tenant_id,content FROM config_info WHERE 1=1 AND tenant_id='public' LIMIT " + startRow + ","
+ pageSize);
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
}
@Test
void testFindConfigInfo4PageCountRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfo4PageCountRows(context);
assertEquals("SELECT count(*) FROM config_info WHERE tenant_id=? AND app_name=? ", mapperResult.getSql());
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfo4PageFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfo4PageFetchRows(context);
// 验证新的优化后的 SQL 结构:先 LIMIT 再 JOIN
String expectedInnerSql = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,type,encrypted_data_key,c_desc FROM config_info "
+ "WHERE tenant_id=? AND app_name=? LIMIT " + startRow + "," + pageSize;
String expectedSql = "SELECT a.id,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content,a.md5,a.type,a.encrypted_data_key,a.c_desc,"
+ "GROUP_CONCAT(b.tag_name SEPARATOR ',') as config_tags "
+ "FROM (" + expectedInnerSql + ") a "
+ "LEFT JOIN config_tags_relation b ON a.id=b.id "
+ "GROUP BY a.id,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content,a.md5,a.type,a.encrypted_data_key,a.c_desc";
assertEquals(expectedSql, mapperResult.getSql());
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfoBaseByGroupFetchRows() {
context.putWhereParameter(FieldConstant.GROUP_ID, groupId);
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoBaseByGroupFetchRows(context);
assertEquals(mapperResult.getSql(),
"SELECT id,data_id,group_id,content FROM config_info WHERE group_id=? AND tenant_id=? LIMIT " + startRow + "," + pageSize);
assertArrayEquals(new Object[] {groupId, tenantId}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfoLike4PageCountRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoLike4PageCountRows(context);
assertEquals("SELECT count(*) FROM config_info WHERE tenant_id LIKE ? AND app_name = ? ", mapperResult.getSql());
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfoLike4PageFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoLike4PageFetchRows(context);
// 验证新的优化后的 SQL 结构:先 LIMIT 再 JOIN
String expectedInnerSql = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,encrypted_data_key,type,c_desc"
+ " FROM config_info WHERE tenant_id LIKE ? AND app_name = ? LIMIT " + startRow + "," + pageSize;
String expectedSql = "SELECT a.id,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content,a.md5,a.encrypted_data_key,a.type,a.c_desc,"
+ "GROUP_CONCAT(b.tag_name SEPARATOR ',') as config_tags "
+ "FROM (" + expectedInnerSql + ") a "
+ "LEFT JOIN config_tags_relation b ON a.id=b.id "
+ "GROUP BY a.id,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content,a.md5,a.encrypted_data_key,a.type,a.c_desc";
assertEquals(expectedSql, mapperResult.getSql());
assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
@Test
void testFindAllConfigInfoFetchRows() {
MapperResult mapperResult = configInfoMapperByMySql.findAllConfigInfoFetchRows(context);
assertEquals(mapperResult.getSql(),
"SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 FROM ( SELECT id FROM config_info "
+ "WHERE tenant_id LIKE ? ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id ");
assertArrayEquals(new Object[] {tenantId, startRow, pageSize}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfosByIds() {
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfosByIds(context);
assertEquals("SELECT id,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE id IN (?, ?, ?, ?, ?) ",
mapperResult.getSql());
assertArrayEquals(mapperResult.getParamList().toArray(), ids.toArray());
}
@Test
void testRemoveConfigInfoByIdsAtomic() {
MapperResult mapperResult = configInfoMapperByMySql.removeConfigInfoByIdsAtomic(context);
assertEquals("DELETE FROM config_info WHERE id IN (?, ?, ?, ?, ?) ", mapperResult.getSql());
assertArrayEquals(mapperResult.getParamList().toArray(), ids.toArray());
}
@Test
void testGetTableName() {
String sql = configInfoMapperByMySql.getTableName();
assertEquals(TableConstant.CONFIG_INFO, sql);
}
@Test
void testGetDataSource() {
String sql = configInfoMapperByMySql.getDataSource();
assertEquals(DataSourceConstant.MYSQL, sql);
}
@Test
void testUpdateConfigInfoAtomicCas() {
String newContent = "new Content";
String newMD5 = "newMD5";
String srcIp = "1.1.1.1";
Object srcUser = "nacos";
Object appNameTmp = "newAppName";
Object desc = "description";
Object use = "use";
Object effect = "effect";
Object type = "type";
Object schema = "schema";
String encryptedDataKey = "ey456789";
context.putUpdateParameter(FieldConstant.CONTENT, newContent);
context.putUpdateParameter(FieldConstant.MD5, newMD5);
context.putUpdateParameter(FieldConstant.SRC_IP, srcIp);
context.putUpdateParameter(FieldConstant.SRC_USER, srcUser);
context.putUpdateParameter(FieldConstant.APP_NAME, appNameTmp);
context.putUpdateParameter(FieldConstant.C_DESC, desc);
context.putUpdateParameter(FieldConstant.C_USE, use);
context.putUpdateParameter(FieldConstant.EFFECT, effect);
context.putUpdateParameter(FieldConstant.TYPE, type);
context.putUpdateParameter(FieldConstant.C_SCHEMA, schema);
context.putUpdateParameter(FieldConstant.ENCRYPTED_DATA_KEY, encryptedDataKey);
Object dataId = "dataId";
Object group = "group";
Object md5 = "md5";
context.putWhereParameter(FieldConstant.DATA_ID, dataId);
context.putWhereParameter(FieldConstant.GROUP_ID, group);
context.putWhereParameter(FieldConstant.TENANT_ID, tenantId);
context.putWhereParameter(FieldConstant.MD5, md5);
MapperResult mapperResult = configInfoMapperByMySql.updateConfigInfoAtomicCas(context);
assertEquals(mapperResult.getSql(),
"UPDATE config_info SET " + "content=?, md5=?, src_ip=?, src_user=?, gmt_modified=NOW(3),"
+ " app_name=?, c_desc=?, c_use=?, effect=?, type=?, c_schema=?, encrypted_data_key=? "
+ "WHERE data_id=? AND group_id=? AND tenant_id=? AND (md5=? OR md5 IS NULL OR md5='')");
assertArrayEquals(
new Object[]{newContent, newMD5, srcIp, srcUser, appNameTmp, desc, use, effect, type, schema,
encryptedDataKey, dataId, group, tenantId, md5}, mapperResult.getParamList().toArray());
}
@Test
void testFindConfigInfo4PageFetchRowsWithDescAndTags() {
ConfigInfoMapperByMySql configInfoMapperByMySql = new ConfigInfoMapperByMySql();
MapperContext context = new MapperContext(startRow, pageSize);
context.putWhereParameter(FieldConstant.TENANT_ID, tenantId);
context.putWhereParameter(FieldConstant.DATA_ID, "test.properties");
context.putWhereParameter(FieldConstant.GROUP_ID, groupId);
context.putWhereParameter(FieldConstant.APP_NAME, appName);
context.putWhereParameter(FieldConstant.CONTENT, "key=value");
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfo4PageFetchRows(context);
String sql = mapperResult.getSql();
List<Object> paramList = mapperResult.getParamList();
// 验证优化后的 SQL 结构:包含新字段和优化结构
assertEquals(true, sql.contains("c_desc"));
assertEquals(true, sql.contains("GROUP_CONCAT(b.tag_name SEPARATOR ',') as config_tags"));
assertEquals(true, sql.contains("LEFT JOIN config_tags_relation b ON a.id=b.id"));
assertEquals(true, sql.contains("GROUP BY"));
assertEquals(true, sql.contains("FROM (SELECT"));
assertEquals(true, sql.contains("LIMIT"));
// 验证参数
assertEquals(5, paramList.size());
assertEquals(tenantId, paramList.get(0));
assertEquals("test.properties", paramList.get(1));
assertEquals(groupId, paramList.get(2));
assertEquals(appName, paramList.get(3));
assertEquals("key=value", paramList.get(4));
}
@Test
void testFindConfigInfoLike4PageFetchRowsWithDescAndTags() {
ConfigInfoMapperByMySql configInfoMapperByMySql = new ConfigInfoMapperByMySql();
MapperContext context = new MapperContext(startRow, pageSize);
context.putWhereParameter(FieldConstant.TENANT_ID, tenantId);
context.putWhereParameter(FieldConstant.DATA_ID, "test");
context.putWhereParameter(FieldConstant.GROUP_ID, "DEFAULT");
context.putWhereParameter(FieldConstant.APP_NAME, appName);
context.putWhereParameter(FieldConstant.CONTENT, "key");
context.putWhereParameter(FieldConstant.TYPE, new String[]{"properties", "yaml"});
MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoLike4PageFetchRows(context);
String sql = mapperResult.getSql();
List<Object> paramList = mapperResult.getParamList();
// 验证优化后的 SQL 结构:包含新字段和优化结构
assertEquals(true, sql.contains("c_desc"));
assertEquals(true, sql.contains("GROUP_CONCAT(b.tag_name SEPARATOR ',') as config_tags"));
assertEquals(true, sql.contains("LEFT JOIN config_tags_relation b ON a.id=b.id"));
assertEquals(true, sql.contains("GROUP BY"));
assertEquals(true, sql.contains("LIKE"));
assertEquals(true, sql.contains("IN"));
assertEquals(true, sql.contains("FROM (SELECT"));
assertEquals(true, sql.contains("LIMIT"));
// 验证参数数量(tenant + dataId + group + appName + content + 2个type)
assertEquals(7, paramList.size());
assertEquals(tenantId, paramList.get(0));
assertEquals("test", paramList.get(1));
assertEquals("DEFAULT", paramList.get(2));
assertEquals(appName, paramList.get(3));
assertEquals("key", paramList.get(4));
assertEquals("properties", paramList.get(5));
assertEquals("yaml", paramList.get(6));
}
}
| ConfigInfoMapperByMySqlTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/JDKScalarsDeserTest.java | {
"start": 3320,
"end": 32572
} | class ____ {
public Void value;
}
// [databind#4858] Changes defaults for 3.0 so ensure configs work for 2.x and 3.x
private final ObjectMapper MAPPER = jsonMapperBuilder()
.disable(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES)
.build();
/*
/**********************************************************
/* Scalar tests for boolean
/**********************************************************
*/
@Test
public void testBooleanPrimitive() throws Exception
{
// first, simple case:
BooleanBean result = MAPPER.readValue("{\"v\":true}", BooleanBean.class);
assertTrue(result._v);
result = MAPPER.readValue("{\"v\":null}", BooleanBean.class);
assertNotNull(result);
assertFalse(result._v);
result = MAPPER.readValue("{\"v\":1}", BooleanBean.class);
assertNotNull(result);
assertTrue(result._v);
// should work with arrays too..
boolean[] array = MAPPER.readValue("[ null, false ]", boolean[].class);
assertNotNull(array);
assertEquals(2, array.length);
assertFalse(array[0]);
assertFalse(array[1]);
}
/**
* Simple unit test to verify that we can map boolean values to
* java.lang.Boolean.
*/
@Test
public void testBooleanWrapper() throws Exception
{
Boolean result = MAPPER.readValue("true", Boolean.class);
assertEquals(Boolean.TRUE, result);
result = MAPPER.readValue("false", Boolean.class);
assertEquals(Boolean.FALSE, result);
}
/*
/**********************************************************
/* Scalar tests for integral types
/**********************************************************
*/
@Test
public void testByteWrapper() throws Exception
{
Byte result = MAPPER.readValue(" -42\t", Byte.class);
assertEquals(Byte.valueOf((byte)-42), result);
// Also: should be able to coerce floats, strings:
result = MAPPER.readValue(" \"-12\"", Byte.class);
assertEquals(Byte.valueOf((byte)-12), result);
result = MAPPER.readValue(" 39.07", Byte.class);
assertEquals(Byte.valueOf((byte)39), result);
}
@Test
public void testShortWrapper() throws Exception
{
Short result = MAPPER.readValue("37", Short.class);
assertEquals(Short.valueOf((short)37), result);
// Also: should be able to coerce floats, strings:
result = MAPPER.readValue(" \"-1009\"", Short.class);
assertEquals(Short.valueOf((short)-1009), result);
result = MAPPER.readValue("-12.9", Short.class);
assertEquals(Short.valueOf((short)-12), result);
}
@Test
public void testCharacterWrapper() throws Exception
{
// First: canonical value is 1-char string
assertEquals(Character.valueOf('a'), MAPPER.readValue(q("a"), Character.class));
// But can also pass in ascii code
Character result = MAPPER.readValue(" "+((int) 'X'), Character.class);
assertEquals(Character.valueOf('X'), result);
// 22-Jun-2020, tatu: one special case turns out to be white space;
// need to avoid considering it "blank" value
assertEquals(Character.valueOf(' '), MAPPER.readValue(q(" "), Character.class));
final CharacterWrapperBean wrapper = MAPPER.readValue("{\"v\":null}", CharacterWrapperBean.class);
assertNotNull(wrapper);
assertNull(wrapper.getV());
try {
MAPPER.readerFor(CharacterBean.class)
.with(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES)
.readValue("{\"v\":null}");
fail("Attempting to deserialize a 'null' JSON reference into a 'char' property did not throw an exception");
} catch (MismatchedInputException e) {
verifyException(e, "cannot map `null`");
}
final CharacterBean charBean = MAPPER.readerFor(CharacterBean.class)
.without(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES)
.readValue("{\"v\":null}");
assertNotNull(wrapper);
assertEquals('\u0000', charBean.getV());
}
@Test
public void testIntWrapper() throws Exception
{
Integer result = MAPPER.readValue(" -42\t", Integer.class);
assertEquals(Integer.valueOf(-42), result);
// Also: should be able to coerce floats, strings:
result = MAPPER.readValue(" \"-1200\"", Integer.class);
assertEquals(Integer.valueOf(-1200), result);
result = MAPPER.readValue(" 39.07", Integer.class);
assertEquals(Integer.valueOf(39), result);
}
@Test
public void testIntPrimitive() throws Exception
{
// first, simple case:
IntBean result = MAPPER.readValue("{\"v\":3}", IntBean.class);
assertEquals(3, result._v);
result = MAPPER.readValue("{\"v\":null}", IntBean.class);
assertNotNull(result);
assertEquals(0, result._v);
// should work with arrays too..
int[] array = MAPPER.readValue("[ null ]", int[].class);
assertNotNull(array);
assertEquals(1, array.length);
assertEquals(0, array[0]);
// [databind#381]
try {
MAPPER.readerFor(IntBean.class)
.without(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
.readValue("{\"v\":[3]}");
fail("Did not throw exception when reading a value from a single value array with the UNWRAP_SINGLE_VALUE_ARRAYS feature disabled");
} catch (MismatchedInputException exp) {
//Correctly threw exception
}
ObjectReader unwrappingR = MAPPER.readerFor(IntBean.class)
.with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS);
result = unwrappingR.readValue("{\"v\":[3]}");
assertEquals(3, result._v);
result = unwrappingR.readValue("[{\"v\":[3]}]");
assertEquals(3, result._v);
try {
unwrappingR.readValue("[{\"v\":[3,3]}]");
fail("Did not throw exception while reading a value from a multi value array with UNWRAP_SINGLE_VALUE_ARRAY feature enabled");
} catch (MismatchedInputException exp) {
//threw exception as required
}
result = unwrappingR.readValue("{\"v\":[null]}");
assertNotNull(result);
assertEquals(0, result._v);
array = MAPPER.readerFor(int[].class)
.with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
.readValue("[ [ null ] ]");
assertNotNull(array);
assertEquals(1, array.length);
assertEquals(0, array[0]);
}
@Test
public void testLongWrapper() throws Exception
{
Long result = MAPPER.readValue("12345678901", Long.class);
assertEquals(Long.valueOf(12345678901L), result);
// Also: should be able to coerce floats, strings:
result = MAPPER.readValue(" \"-9876\"", Long.class);
assertEquals(Long.valueOf(-9876), result);
result = MAPPER.readValue("1918.3", Long.class);
assertEquals(Long.valueOf(1918), result);
}
    // Primitive `long` handling: JSON null coerces to 0, both as a bean
    // property and as an array element; plus [databind#381] single-value
    // array unwrapping in both enabled and disabled configurations.
    @Test
    public void testLongPrimitive() throws Exception
    {
        // first, simple case:
        LongBean result = MAPPER.readValue("{\"v\":3}", LongBean.class);
        assertEquals(3, result._v);
        // JSON null maps to the primitive default (0) by default
        result = MAPPER.readValue("{\"v\":null}", LongBean.class);
        assertNotNull(result);
        assertEquals(0, result._v);
        // should work with arrays too..
        long[] array = MAPPER.readValue("[ null ]", long[].class);
        assertNotNull(array);
        assertEquals(1, array.length);
        assertEquals(0, array[0]);
        // [databind#381]
        // With unwrapping disabled, a single-element array is NOT accepted
        // in place of a scalar value:
        try {
            MAPPER.readerFor(LongBean.class)
                .without(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
                .readValue("{\"v\":[3]}");
            fail("Did not throw exception when reading a value from a single value array with the UNWRAP_SINGLE_VALUE_ARRAYS feature disabled");
        } catch (MismatchedInputException exp) {
            //Correctly threw exception
        }
        // With unwrapping enabled, [3] is treated as 3...
        ObjectReader unwrappingR = MAPPER.readerFor(LongBean.class)
                .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS);
        result = unwrappingR.readValue("{\"v\":[3]}");
        assertEquals(3, result._v);
        // ...even when the bean itself is wrapped in a single-element array
        result = unwrappingR.readValue("[{\"v\":[3]}]");
        assertEquals(3, result._v);
        // ...but multi-element arrays must still be rejected
        try {
            unwrappingR.readValue("[{\"v\":[3,3]}]");
            fail("Did not throw exception while reading a value from a multi value array with UNWRAP_SINGLE_VALUE_ARRAY feature enabled");
        } catch (MismatchedInputException exp) {
            //threw exception as required
        }
        // [null] unwraps to null, which again coerces to 0
        result = unwrappingR.readValue("{\"v\":[null]}");
        assertNotNull(result);
        assertEquals(0, result._v);
        // Same unwrapping applies per-element for long[]: [[null]] -> { 0 }
        array = MAPPER.readerFor(long[].class)
                .with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
                .readValue("[ [ null ] ]");
        assertNotNull(array);
        assertEquals(1, array.length);
        assertEquals(0, array[0]);
    }
/**
* Beyond simple case, let's also ensure that method overriding works as
* expected.
*/
@Test
public void testIntWithOverride() throws Exception
{
IntBean2 result = MAPPER.readValue("{\"v\":8}", IntBean2.class);
assertEquals(9, result._v);
}
/*
/**********************************************************
/* Scalar tests for floating point types
/**********************************************************
*/
@Test
public void testDoublePrimitive() throws Exception
{
// first, simple case:
// bit tricky with binary fps but...
final double value = 0.016;
DoubleBean result = MAPPER.readValue("{\"v\":"+value+"}", DoubleBean.class);
assertEquals(value, result._v);
// then [JACKSON-79]:
result = MAPPER.readValue("{\"v\":null}", DoubleBean.class);
assertNotNull(result);
assertEquals(0.0, result._v);
// should work with arrays too..
double[] array = MAPPER.readValue("[ null ]", double[].class);
assertNotNull(array);
assertEquals(1, array.length);
assertEquals(0.0, array[0]);
}
    /* Note: dealing with floating-point values is tricky; not sure if
     * we can really use equality tests here... JDK does have decent
     * conversions though, to retain accuracy and round-trippability.
     * But still...
     */
    @Test
    public void testFloatWrapper() throws Exception
    {
        // Also: should be able to coerce floats, strings:
        String[] STRS = new String[] {
            "1.0", "0.0", "-0.3", "0.7", "42.012", "-999.0", NAN_STRING
        };
        for (String str : STRS) {
            Float exp = Float.valueOf(str);
            Float result;
            // NOTE: deliberate identity (!=) comparison against the shared
            // NAN_STRING constant: the unquoted form is skipped for NaN
            // (presumably not a legal bare JSON token -- confirm)
            if (NAN_STRING != str) {
                // First, as regular floating point value
                result = MAPPER.readValue(str, Float.class);
                assertEquals(exp, result);
            }
            // and then as coerced String; works for NaN too, since
            // Float.equals() considers NaN equal to NaN
            result = MAPPER.readValue(" \""+str+"\"", Float.class);
            assertEquals(exp, result);
        }
    }
    @Test
    public void testDoubleWrapper() throws Exception
    {
        // Also: should be able to coerce doubles, strings:
        String[] STRS = new String[] {
            "1.0", "0.0", "-0.3", "0.7", "42.012", "-999.0", NAN_STRING
        };
        for (String str : STRS) {
            Double exp = Double.valueOf(str);
            Double result;
            // First, as regular double value
            // NOTE: deliberate identity (!=) comparison against the shared
            // NAN_STRING constant: the unquoted form is skipped for NaN
            // (presumably not a legal bare JSON token -- confirm)
            if (NAN_STRING != str) {
                result = MAPPER.readValue(str, Double.class);
                assertEquals(exp, result);
            }
            // and then as coerced String; works for NaN too, since
            // Double.equals() considers NaN equal to NaN
            result = MAPPER.readValue(" \""+str+"\"", Double.class);
            assertEquals(exp, result);
        }
    }
public void testDoubleAsArray() throws Exception
{
final double value = 0.016;
try {
MAPPER.readerFor(DoubleBean.class)
.without(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
.readValue("{\"v\":[" + value + "]}");
fail("Did not throw exception when reading a value from a single value array with the UNWRAP_SINGLE_VALUE_ARRAYS feature disabled");
} catch (MismatchedInputException e) {
verifyException(e, "value of type `double` from Array value");
}
ObjectReader unwrappingR = MAPPER.readerFor(DoubleBean.class)
.with(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS);
DoubleBean result = unwrappingR.readValue("{\"v\":[" + value + "]}");
assertEquals(value, result._v);
result = unwrappingR.readValue("[{\"v\":[" + value + "]}]");
assertEquals(value, result._v);
try {
unwrappingR.readValue("[{\"v\":[" + value + "," + value + "]}]");
fail("Did not throw exception while reading a value from a multi value array with UNWRAP_SINGLE_VALUE_ARRAY feature enabled");
} catch (MismatchedInputException e) {
verifyException(e, "Unexpected token (`JsonToken.VALUE_NUMBER_FLOAT`)");
}
result = unwrappingR.readValue("{\"v\":[null]}");
assertNotNull(result);
assertEquals(0d, result._v);
double[] array = unwrappingR.forType(double[].class)
.readValue("[ [ null ] ]");
assertNotNull(array);
assertEquals(1, array.length);
assertEquals(0d, array[0]);
}
/*
/**********************************************************
/* Scalar tests, other
/**********************************************************
*/
    // Verifies byte[] decoding against each of Jackson's Base64Variants;
    // all inputs decode to the same 73-byte payload but use variant-specific
    // formatting (padding, line breaks).
    @Test
    public void testBase64Variants() throws Exception
    {
        // 73 bytes (36+36+1) -- long enough to force a line break in
        // variants that wrap their output
        final byte[] INPUT = "abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz1234567890X".getBytes("UTF-8");
        // default encoding is "MIME, no linefeeds", so:
        assertArrayEquals(INPUT, MAPPER.readValue(
                q("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwWA=="),
                byte[].class));
        // Explicitly selecting the default variant must give the same result
        ObjectReader reader = MAPPER.readerFor(byte[].class);
        assertArrayEquals(INPUT, (byte[]) reader.with(Base64Variants.MIME_NO_LINEFEEDS).readValue(
                q("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwWA=="
        )));
        // but others should be slightly different
        // MIME proper: input contains an embedded linefeed (the "\n" escape)
        assertArrayEquals(INPUT, (byte[]) reader.with(Base64Variants.MIME).readValue(
                q("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwYWJjZGVmZ2hpamtsbW5vcHFyc3R1\\ndnd4eXoxMjM0NTY3ODkwWA=="
        )));
        // URL-safe variant: note the missing trailing "==" padding
        assertArrayEquals(INPUT, (byte[]) reader.with(Base64Variants.MODIFIED_FOR_URL).readValue(
                q("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwWA"
        )));
        // PEM mandates 64 char lines:
        assertArrayEquals(INPUT, (byte[]) reader.with(Base64Variants.PEM).readValue(
                q("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwYWJjZGVmZ2hpamts\\nbW5vcHFyc3R1dnd4eXoxMjM0NTY3ODkwWA=="
        )));
    }
/*
/**********************************************************
/* Sequence tests
/**********************************************************
*/
/**
* Then a unit test to verify that we can conveniently bind sequence of
* space-separated simple values
*/
@Test
public void testSequenceOfInts() throws Exception
{
final int NR_OF_INTS = 100;
StringBuilder sb = new StringBuilder();
for (int i = 0; i < NR_OF_INTS; ++i) {
sb.append(" ");
sb.append(i);
}
ObjectMapper mapper = jsonMapperBuilder()
.disable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS)
.build();
try (JsonParser p = mapper.createParser(sb.toString())) {
for (int i = 0; i < NR_OF_INTS; ++i) {
Integer result = mapper.readValue(p, Integer.class);
assertEquals(Integer.valueOf(i), result);
}
}
}
/*
/**********************************************************
/* Empty String coercion, handling
/**********************************************************
*/
@Test
public void testEmptyStringForIntegerWrappers() throws IOException
{
WrappersBean bean = MAPPER.readValue("{\"byteValue\":\"\"}", WrappersBean.class);
assertNull(bean.byteValue);
// char/Character is different... not sure if this should work or not:
bean = MAPPER.readValue("{\"charValue\":\"\"}", WrappersBean.class);
assertNull(bean.charValue);
bean = MAPPER.readValue("{\"shortValue\":\"\"}", WrappersBean.class);
assertNull(bean.shortValue);
bean = MAPPER.readValue("{\"intValue\":\"\"}", WrappersBean.class);
assertNull(bean.intValue);
bean = MAPPER.readValue("{\"longValue\":\"\"}", WrappersBean.class);
assertNull(bean.longValue);
}
@Test
public void testEmptyStringForFloatWrappers() throws IOException
{
WrappersBean bean = MAPPER.readValue("{\"floatValue\":\"\"}", WrappersBean.class);
assertNull(bean.floatValue);
bean = MAPPER.readValue("{\"doubleValue\":\"\"}", WrappersBean.class);
assertNull(bean.doubleValue);
}
@Test
public void testEmptyStringForBooleanPrimitive() throws IOException
{
PrimitivesBean bean = MAPPER.readValue("{\"booleanValue\":\"\"}", PrimitivesBean.class);
assertFalse(bean.booleanValue);
}
@Test
public void testEmptyStringForIntegerPrimitives() throws IOException
{
PrimitivesBean bean = MAPPER.readValue("{\"byteValue\":\"\"}", PrimitivesBean.class);
assertEquals((byte) 0, bean.byteValue);
bean = MAPPER.readValue("{\"charValue\":\"\"}", PrimitivesBean.class);
assertEquals((char) 0, bean.charValue);
bean = MAPPER.readValue("{\"shortValue\":\"\"}", PrimitivesBean.class);
assertEquals((short) 0, bean.shortValue);
bean = MAPPER.readValue("{\"intValue\":\"\"}", PrimitivesBean.class);
assertEquals(0, bean.intValue);
bean = MAPPER.readValue("{\"longValue\":\"\"}", PrimitivesBean.class);
assertEquals(0L, bean.longValue);
}
@Test
public void testEmptyStringForFloatPrimitives() throws IOException
{
PrimitivesBean bean = MAPPER.readValue("{\"floatValue\":\"\"}", PrimitivesBean.class);
assertEquals(0.0f, bean.floatValue);
bean = MAPPER.readValue("{\"doubleValue\":\"\"}", PrimitivesBean.class);
assertEquals(0.0, bean.doubleValue);
}
/*
/**********************************************************
/* Null handling for scalars in POJO
/**********************************************************
*/
@Test
public void testNullForPrimitivesDefault() throws IOException
{
// by default, ok to rely on defaults
PrimitivesBean bean = MAPPER.readValue(
"{\"intValue\":null, \"booleanValue\":null, \"doubleValue\":null}",
PrimitivesBean.class);
assertNotNull(bean);
assertEquals(0, bean.intValue);
assertFalse(bean.booleanValue);
assertEquals(0.0, bean.doubleValue);
bean = MAPPER.readValue("{\"byteValue\":null, \"longValue\":null, \"floatValue\":null}",
PrimitivesBean.class);
assertNotNull(bean);
assertEquals((byte) 0, bean.byteValue);
assertEquals(0L, bean.longValue);
assertEquals(0.0f, bean.floatValue);
}
    // With FAIL_ON_NULL_FOR_PRIMITIVES enabled, JSON null for an integral
    // primitive must fail with MismatchedInputException identifying both the
    // target type and the offending property path.
    @Test
    public void testNullForPrimitivesNotAllowedInts() throws IOException
    {
        final ObjectReader reader = MAPPER
                .readerFor(PrimitivesBean.class)
                .with(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES);
        try {
            reader.readValue("{\"byteValue\":null}");
            fail("Expected failure for byte + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `byte`");
            verifyPath(e, "byteValue");
        }
        try {
            reader.readValue("{\"shortValue\":null}");
            fail("Expected failure for short + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `short`");
            verifyPath(e, "shortValue");
        }
        try {
            reader.readValue("{\"intValue\":null}");
            fail("Expected failure for int + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `int`");
            verifyPath(e, "intValue");
        }
        try {
            reader.readValue("{\"longValue\":null}");
            fail("Expected failure for long + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `long`");
            verifyPath(e, "longValue");
        }
    }
    // Same as testNullForPrimitivesNotAllowedInts, for floating-point
    // primitives: null must fail with a type- and path-specific message.
    @Test
    public void testNullForPrimitivesNotAllowedFP() throws IOException
    {
        final ObjectReader reader = MAPPER
                .readerFor(PrimitivesBean.class)
                .with(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES);
        // float/double
        try {
            reader.readValue("{\"floatValue\":null}");
            fail("Expected failure for float + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `float`");
            verifyPath(e, "floatValue");
        }
        try {
            reader.readValue("{\"doubleValue\":null}");
            fail("Expected failure for double + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `double`");
            verifyPath(e, "doubleValue");
        }
    }
    // Same null-rejection check for the remaining primitives: boolean, char.
    @Test
    public void testNullForPrimitivesNotAllowedMisc() throws IOException
    {
        final ObjectReader reader = MAPPER
                .readerFor(PrimitivesBean.class)
                .with(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES);
        // boolean
        try {
            reader.readValue("{\"booleanValue\":null}");
            fail("Expected failure for boolean + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `boolean`");
            verifyPath(e, "booleanValue");
        }
        // char
        try {
            reader.readValue("{\"charValue\":null}");
            fail("Expected failure for char + null");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `char`");
            verifyPath(e, "charValue");
        }
    }
    // [databind#2101]: FAIL_ON_NULL_FOR_PRIMITIVES must also apply to values
    // passed via a creator parameter (here property "a" of
    // PrimitiveCreatorBean, an `int` per the expected message), not only to
    // field/setter injection.
    @Test
    public void testNullForPrimitivesViaCreator() throws IOException
    {
        try {
            /*PrimitiveCreatorBean bean =*/ MAPPER
                    .readerFor(PrimitiveCreatorBean.class)
                    .with(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES)
                    .readValue(a2q("{'a': null}"));
            fail("Expected failure for `int` and `null`");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot map `null` into type `int`");
            verifyPath(e, "a");
        }
    }
    // Helper: asserts that the exception's reference path contains exactly
    // one entry, pointing at the property with the given name.
    private void verifyPath(MismatchedInputException e, String propName) {
        assertEquals(1, e.getPath().size());
        assertEquals(propName, e.getPath().get(0).getPropertyName());
    }
@Test
public void testNullForPrimitiveArrays() throws IOException
{
_testNullForPrimitiveArrays(boolean[].class, Boolean.FALSE);
_testNullForPrimitiveArrays(byte[].class, Byte.valueOf((byte) 0));
_testNullForPrimitiveArrays(char[].class, Character.valueOf((char) 0), false);
_testNullForPrimitiveArrays(short[].class, Short.valueOf((short)0));
_testNullForPrimitiveArrays(int[].class, Integer.valueOf(0));
_testNullForPrimitiveArrays(long[].class, Long.valueOf(0L));
_testNullForPrimitiveArrays(float[].class, Float.valueOf(0f));
_testNullForPrimitiveArrays(double[].class, Double.valueOf(0d));
}
    // Convenience overload: also exercises the empty-String-element case.
    private void _testNullForPrimitiveArrays(Class<?> cls, Object defValue) throws IOException {
        _testNullForPrimitiveArrays(cls, defValue, true);
    }
    // Core helper for null-in-primitive-array handling:
    //  - by default, a null element becomes `defValue` (the primitive default)
    //  - with FAIL_ON_NULL_FOR_PRIMITIVES enabled, the same document must fail
    //    with a message naming the array type
    //  - if `testEmptyString`, additionally verifies that an empty-String
    //    element coerces to the same default (the char[] caller passes false
    //    -- presumably because char arrays read Strings directly; confirm)
    private void _testNullForPrimitiveArrays(Class<?> cls, Object defValue,
            boolean testEmptyString) throws IOException
    {
        final String EMPTY_STRING_JSON = "[ \"\" ]";
        final String JSON_WITH_NULL = "[ null ]";
        final String SIMPLE_NAME = "`"+cls.getSimpleName()+"`";
        final ObjectReader readerCoerceOk = MAPPER.readerFor(cls);
        final ObjectReader readerNoNulls = readerCoerceOk
                .with(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES);
        // Lenient reader: [ null ] becomes a 1-element array of the default
        Object ob = readerCoerceOk.forType(cls).readValue(JSON_WITH_NULL);
        assertEquals(1, Array.getLength(ob));
        assertEquals(defValue, Array.get(ob, 0));
        // Strict reader: the same document must be rejected
        try {
            readerNoNulls.readValue(JSON_WITH_NULL);
            fail("Should not pass");
        } catch (MismatchedInputException e) {
            verifyException(e, "Cannot coerce `null`");
            verifyException(e, "to element of "+SIMPLE_NAME);
        }
        if (testEmptyString) {
            // Lenient reader: [ "" ] also yields the default element
            ob = readerCoerceOk.forType(cls).readValue(EMPTY_STRING_JSON);
            assertEquals(1, Array.getLength(ob));
            assertEquals(defValue, Array.get(ob, 0));
        }
    }
// [databind#2197], [databind#2679]
@Test
public void testVoidDeser() throws Exception
{
// First, `Void` as bean property
VoidBean bean = MAPPER.readValue(a2q("{'value' : 123 }"),
VoidBean.class);
assertNull(bean.value);
// Then `Void` and `void` (Void.TYPE) as root values
assertNull(MAPPER.readValue("{}", Void.class));
assertNull(MAPPER.readValue("1234", Void.class));
assertNull(MAPPER.readValue("[ 1, true ]", Void.class));
assertNull(MAPPER.readValue("{}", Void.TYPE));
assertNull(MAPPER.readValue("1234", Void.TYPE));
assertNull(MAPPER.readValue("[ 1, true ]", Void.TYPE));
}
/*
/**********************************************************
/* Test for invalid String values
/**********************************************************
*/
    // Verifies that a non-numeric String element ("foobar") fails cleanly
    // for each primitive array type, with a type-specific error message.
    @Test
    public void testInvalidStringCoercionFail() throws IOException
    {
        _testInvalidStringCoercionFail(boolean[].class, "boolean");
        // byte[] uses the 1-arg overload: expected name derived from the class
        _testInvalidStringCoercionFail(byte[].class);
        // char[] is special, cannot use generalized test here
        // _testInvalidStringCoercionFail(char[].class);
        _testInvalidStringCoercionFail(short[].class, "short");
        _testInvalidStringCoercionFail(int[].class, "int");
        _testInvalidStringCoercionFail(long[].class, "long");
        _testInvalidStringCoercionFail(float[].class, "float");
        _testInvalidStringCoercionFail(double[].class, "double");
    }
    // Overload that derives the expected type name from the class itself
    // (e.g. "byte[]" for byte[].class).
    private void _testInvalidStringCoercionFail(Class<?> cls) throws IOException
    {
        _testInvalidStringCoercionFail(cls, cls.getSimpleName());
    }
private void _testInvalidStringCoercionFail(Class<?> cls, String targetTypeName)
throws IOException
{
final String JSON = "[ \"foobar\" ]";
try {
MAPPER.readerFor(cls).readValue(JSON);
fail("Should not pass");
} catch (MismatchedInputException e) {
verifyException(e, "Cannot deserialize value of type `"+targetTypeName+"` from String \"foobar\"");
}
}
/*
/**********************************************************
    /* Tests for mismatch: JSON Object used for scalar types
    /* (not supported)
/**********************************************************
*/
@Test
public void testFailForScalarFromObject() throws Exception
{
_testFailForNumberFromObject(Byte.TYPE);
_testFailForNumberFromObject(Short.TYPE);
_testFailForNumberFromObject(Long.TYPE);
_testFailForNumberFromObject(Float.TYPE);
_testFailForNumberFromObject(Double.TYPE);
_testFailForNumberFromObject(BigInteger.class);
_testFailForNumberFromObject(BigDecimal.class);
}
    // Helper: attempts to bind a JSON Object into the given scalar target
    // type, expecting a MismatchedInputException that mentions both the
    // source shape ("Object value") and the target type description.
    private void _testFailForNumberFromObject(Class<?> targetType) throws Exception
    {
        try {
            MAPPER.readValue(a2q("{'value':12}"), targetType);
            fail("Should not pass");
        } catch (MismatchedInputException e) {
            verifyException(e, "from Object value");
            verifyException(e, ClassUtil.getClassDescription(targetType));
        }
    }
}
| VoidBean |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/enums/AbfsBackoffMetricsEnum.java | {
"start": 1237,
"end": 4240
} | enum ____ {
NUMBER_OF_IOPS_THROTTLED_REQUESTS("numberOfIOPSThrottledRequests",
"Number of IOPS throttled requests", BASE, TYPE_COUNTER),
NUMBER_OF_BANDWIDTH_THROTTLED_REQUESTS("numberOfBandwidthThrottledRequests",
"Number of bandwidth throttled requests", BASE, TYPE_COUNTER),
NUMBER_OF_OTHER_THROTTLED_REQUESTS("numberOfOtherThrottledRequests",
"Number of other throttled requests", BASE, TYPE_COUNTER),
NUMBER_OF_NETWORK_FAILED_REQUESTS("numberOfNetworkFailedRequests",
"Number of network failed requests", BASE, TYPE_COUNTER),
MAX_RETRY_COUNT("maxRetryCount", "Max retry count", BASE, TYPE_COUNTER),
TOTAL_NUMBER_OF_REQUESTS("totalNumberOfRequests",
"Total number of requests", BASE, TYPE_COUNTER),
NUMBER_OF_REQUESTS_SUCCEEDED_WITHOUT_RETRYING("numberOfRequestsSucceededWithoutRetrying",
"Number of requests succeeded without retrying", BASE, TYPE_COUNTER),
NUMBER_OF_REQUESTS_FAILED("numberOfRequestsFailed",
"Number of requests failed", BASE, TYPE_COUNTER),
NUMBER_OF_REQUESTS_SUCCEEDED("numberOfRequestsSucceeded",
"Number of requests succeeded", RETRY, TYPE_COUNTER),
MIN_BACK_OFF("minBackOff", "Minimum backoff", RETRY, TYPE_GAUGE),
MAX_BACK_OFF("maxBackOff", "Maximum backoff", RETRY, TYPE_GAUGE),
TOTAL_BACK_OFF("totalBackoff", "Total backoff", RETRY, TYPE_GAUGE),
TOTAL_REQUESTS("totalRequests", "Total requests", RETRY, TYPE_COUNTER);
private final String name;
private final String description;
private final String type;
private final StatisticTypeEnum statisticType;
/**
* Constructor for AbfsBackoffMetricsEnum.
*
* @param name the name of the metric
* @param description the description of the metric
* @param type the type of the metric (BASE or RETRY)
* @param statisticType the statistic type of the metric (counter or gauge)
*/
AbfsBackoffMetricsEnum(String name,
String description,
String type,
StatisticTypeEnum statisticType) {
this.name = name;
this.description = description;
this.type = type;
this.statisticType = statisticType;
}
/**
* Gets the name of the metric.
*
* @return the name of the metric
*/
public String getName() {
return name;
}
/**
* Gets the description of the metric.
*
* @return the description of the metric
*/
public String getDescription() {
return description;
}
/**
* Gets the type of the metric.
*
* @return the type of the metric
*/
public String getType() {
return type;
}
/**
* Gets the statistic type of the metric.
*
* @return the statistic type of the metric
*/
public StatisticTypeEnum getStatisticType() {
return statisticType;
}
}
| AbfsBackoffMetricsEnum |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/AspectJAutoProxyCreatorTests.java | {
"start": 19367,
"end": 19711
} | class ____ extends StaticMethodMatcherPointcutAdvisor {
public int count;
public TestBeanAdvisor() {
setAdvice((MethodBeforeAdvice) (method, args, target) -> ++count);
}
@Override
public boolean matches(Method method, @Nullable Class<?> targetClass) {
return ITestBean.class.isAssignableFrom(targetClass);
}
}
abstract | TestBeanAdvisor |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/CheckpointStorageLoader.java | {
"start": 1551,
"end": 1650
} | class ____ utility methods to load checkpoint storage from configurations. */
@Internal
public | contains |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SealedTypesWithTypedDeserializationTest.java | {
"start": 562,
"end": 949
} | class ____
extends DatabindTestUtil
{
/*
/**********************************************************
/* Helper types
/**********************************************************
*/
/**
* Polymorphic base class
*/
@JsonTypeInfo(use=Id.CLASS, include=As.PROPERTY, property="@classy")
static sealed abstract | SealedTypesWithTypedDeserializationTest |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/JSONArray.java | {
"start": 1820,
"end": 11288
} | class ____ extends JSON implements List<Object>, Cloneable, RandomAccess, Serializable {
private static final long serialVersionUID = 1L;
private final List<Object> list;
protected transient Object relatedArray;
protected transient Type componentType;
public JSONArray(){
this.list = new ArrayList<Object>();
}
public JSONArray(List<Object> list){
if (list == null){
throw new IllegalArgumentException("list is null.");
}
this.list = list;
}
public JSONArray(int initialCapacity){
this.list = new ArrayList<Object>(initialCapacity);
}
/**
* @since 1.1.16
* @return
*/
public Object getRelatedArray() {
return relatedArray;
}
public void setRelatedArray(Object relatedArray) {
this.relatedArray = relatedArray;
}
public Type getComponentType() {
return componentType;
}
public void setComponentType(Type componentType) {
this.componentType = componentType;
}
public int size() {
return list.size();
}
public boolean isEmpty() {
return list.isEmpty();
}
public boolean contains(Object o) {
return list.contains(o);
}
public Iterator<Object> iterator() {
return list.iterator();
}
public Object[] toArray() {
return list.toArray();
}
public <T> T[] toArray(T[] a) {
return list.toArray(a);
}
public boolean add(Object e) {
return list.add(e);
}
public JSONArray fluentAdd(Object e) {
list.add(e);
return this;
}
public boolean remove(Object o) {
return list.remove(o);
}
public JSONArray fluentRemove(Object o) {
list.remove(o);
return this;
}
public boolean containsAll(Collection<?> c) {
return list.containsAll(c);
}
public boolean addAll(Collection<?> c) {
return list.addAll(c);
}
public JSONArray fluentAddAll(Collection<?> c) {
list.addAll(c);
return this;
}
public boolean addAll(int index, Collection<?> c) {
return list.addAll(index, c);
}
public JSONArray fluentAddAll(int index, Collection<?> c) {
list.addAll(index, c);
return this;
}
public boolean removeAll(Collection<?> c) {
return list.removeAll(c);
}
public JSONArray fluentRemoveAll(Collection<?> c) {
list.removeAll(c);
return this;
}
public boolean retainAll(Collection<?> c) {
return list.retainAll(c);
}
public JSONArray fluentRetainAll(Collection<?> c) {
list.retainAll(c);
return this;
}
public void clear() {
list.clear();
}
public JSONArray fluentClear() {
list.clear();
return this;
}
public Object set(int index, Object element) {
if (index == -1) {
list.add(element);
return null;
}
if (list.size() <= index) {
for (int i = list.size(); i < index; ++i) {
list.add(null);
}
list.add(element);
return null;
}
return list.set(index, element);
}
public JSONArray fluentSet(int index, Object element) {
set(index, element);
return this;
}
public void add(int index, Object element) {
list.add(index, element);
}
public JSONArray fluentAdd(int index, Object element) {
list.add(index, element);
return this;
}
public Object remove(int index) {
return list.remove(index);
}
public JSONArray fluentRemove(int index) {
list.remove(index);
return this;
}
public int indexOf(Object o) {
return list.indexOf(o);
}
public int lastIndexOf(Object o) {
return list.lastIndexOf(o);
}
public ListIterator<Object> listIterator() {
return list.listIterator();
}
public ListIterator<Object> listIterator(int index) {
return list.listIterator(index);
}
public List<Object> subList(int fromIndex, int toIndex) {
return list.subList(fromIndex, toIndex);
}
public Object get(int index) {
return list.get(index);
}
public JSONObject getJSONObject(int index) {
Object value = list.get(index);
if (value instanceof JSONObject) {
return (JSONObject) value;
}
if (value instanceof Map) {
return new JSONObject((Map) value);
}
return (JSONObject) toJSON(value);
}
public JSONArray getJSONArray(int index) {
Object value = list.get(index);
if (value instanceof JSONArray) {
return (JSONArray) value;
}
if (value instanceof List) {
return new JSONArray((List) value);
}
return (JSONArray) toJSON(value);
}
public <T> T getObject(int index, Class<T> clazz) {
Object obj = list.get(index);
return TypeUtils.castToJavaBean(obj, clazz);
}
public <T> T getObject(int index, Type type) {
Object obj = list.get(index);
if (type instanceof Class) {
return (T) TypeUtils.castToJavaBean(obj, (Class) type);
} else {
String json = JSON.toJSONString(obj);
return (T) JSON.parseObject(json, type);
}
}
public Boolean getBoolean(int index) {
Object value = get(index);
if (value == null) {
return null;
}
return castToBoolean(value);
}
public boolean getBooleanValue(int index) {
Object value = get(index);
if (value == null) {
return false;
}
return castToBoolean(value).booleanValue();
}
public Byte getByte(int index) {
Object value = get(index);
return castToByte(value);
}
public byte getByteValue(int index) {
Object value = get(index);
Byte byteVal = castToByte(value);
if (byteVal == null) {
return 0;
}
return byteVal;
}
public Short getShort(int index) {
Object value = get(index);
return castToShort(value);
}
public short getShortValue(int index) {
Object value = get(index);
Short shortVal = castToShort(value);
if (shortVal == null) {
return 0;
}
return shortVal;
}
public Integer getInteger(int index) {
Object value = get(index);
return castToInt(value);
}
public int getIntValue(int index) {
Object value = get(index);
Integer intVal = castToInt(value);
if (intVal == null) {
return 0;
}
return intVal;
}
public Long getLong(int index) {
Object value = get(index);
return castToLong(value);
}
public long getLongValue(int index) {
Object value = get(index);
Long longVal = castToLong(value);
if (longVal == null) {
return 0L;
}
return longVal;
}
public Float getFloat(int index) {
Object value = get(index);
return castToFloat(value);
}
public float getFloatValue(int index) {
Object value = get(index);
Float floatValue = castToFloat(value);
if (floatValue == null) {
return 0F;
}
return floatValue;
}
public Double getDouble(int index) {
Object value = get(index);
return castToDouble(value);
}
public double getDoubleValue(int index) {
Object value = get(index);
Double doubleValue = castToDouble(value);
if (doubleValue == null) {
return 0D;
}
return doubleValue;
}
public BigDecimal getBigDecimal(int index) {
Object value = get(index);
return castToBigDecimal(value);
}
public BigInteger getBigInteger(int index) {
Object value = get(index);
return castToBigInteger(value);
}
public String getString(int index) {
Object value = get(index);
return castToString(value);
}
public java.util.Date getDate(int index) {
Object value = get(index);
return castToDate(value);
}
public Object getSqlDate(int index) {
Object value = get(index);
return castToSqlDate(value);
}
public Object getTimestamp(int index) {
Object value = get(index);
return castToTimestamp(value);
}
/**
* @since 1.2.23
*/
public <T> List<T> toJavaList(Class<T> clazz) {
List<T> list = new ArrayList<T>(this.size());
ParserConfig config = ParserConfig.getGlobalInstance();
for (Object item : this) {
T classItem = (T) TypeUtils.cast(item, clazz, config);
list.add(classItem);
}
return list;
}
@Override
public Object clone() {
return new JSONArray(new ArrayList<Object>(list));
}
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof JSONArray) {
return this.list.equals(((JSONArray) obj).list);
}
return this.list.equals(obj);
}
public int hashCode() {
return this.list.hashCode();
}
}
| JSONArray |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobVertexBackPressureInfo.java | {
"start": 9916,
"end": 10303
} | enum ____ {
DEPRECATED("deprecated"),
OK("ok");
private final String status;
VertexBackPressureStatus(String status) {
this.status = status;
}
@JsonValue
@Override
public String toString() {
return status;
}
}
/** Level of vertex back-pressure. */
public | VertexBackPressureStatus |
java | quarkusio__quarkus | extensions/elasticsearch-rest-client/deployment/src/test/java/io/quarkus/elasticsearch/restclient/lowlevel/runtime/TestResource.java | {
"start": 2351,
"end": 2456
} | class ____ {
public String id;
public String name;
public String color;
}
}
| Fruit |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2DeviceCodeGrantTests.java | {
"start": 6117,
"end": 29156
} | class ____ {
private static final String DEFAULT_DEVICE_AUTHORIZATION_ENDPOINT_URI = "/oauth2/device_authorization";
private static final String DEFAULT_DEVICE_VERIFICATION_ENDPOINT_URI = "/oauth2/device_verification";
private static final String DEFAULT_TOKEN_ENDPOINT_URI = "/oauth2/token";
private static final OAuth2TokenType DEVICE_CODE_TOKEN_TYPE = new OAuth2TokenType(OAuth2ParameterNames.DEVICE_CODE);
private static final String USER_CODE = "ABCD-EFGH";
private static final String STATE = "123";
private static final String DEVICE_CODE = "abc-XYZ";
private static EmbeddedDatabase db;
private static JWKSource<SecurityContext> jwkSource;
private static NimbusJwtEncoder dPoPProofJwtEncoder;
private static final HttpMessageConverter<OAuth2DeviceAuthorizationResponse> deviceAuthorizationResponseHttpMessageConverter = new OAuth2DeviceAuthorizationResponseHttpMessageConverter();
private static final HttpMessageConverter<OAuth2AccessTokenResponse> accessTokenResponseHttpMessageConverter = new OAuth2AccessTokenResponseHttpMessageConverter();
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
private MockMvc mvc;
@Autowired
private JdbcOperations jdbcOperations;
@Autowired
private RegisteredClientRepository registeredClientRepository;
@Autowired
private OAuth2AuthorizationService authorizationService;
@Autowired
private OAuth2AuthorizationConsentService authorizationConsentService;
@BeforeAll
public static void init() {
JWKSet jwkSet = new JWKSet(TestJwks.DEFAULT_RSA_JWK);
jwkSource = (jwkSelector, securityContext) -> jwkSelector.select(jwkSet);
JWKSet clientJwkSet = new JWKSet(TestJwks.DEFAULT_EC_JWK);
JWKSource<SecurityContext> clientJwkSource = (jwkSelector, securityContext) -> jwkSelector.select(clientJwkSet);
dPoPProofJwtEncoder = new NimbusJwtEncoder(clientJwkSource);
// @formatter:off
db = new EmbeddedDatabaseBuilder()
.generateUniqueName(true)
.setType(EmbeddedDatabaseType.HSQL)
.setScriptEncoding("UTF-8")
.addScript("org/springframework/security/oauth2/server/authorization/oauth2-authorization-schema.sql")
.addScript("org/springframework/security/oauth2/server/authorization/oauth2-authorization-consent-schema.sql")
.addScript("org/springframework/security/oauth2/server/authorization/client/oauth2-registered-client-schema.sql")
.build();
// @formatter:on
}
@AfterEach
public void tearDown() {
this.jdbcOperations.update("truncate table oauth2_authorization");
this.jdbcOperations.update("truncate table oauth2_authorization_consent");
this.jdbcOperations.update("truncate table oauth2_registered_client");
}
@AfterAll
public static void destroy() {
db.shutdown();
}
@Test
public void requestWhenDeviceAuthorizationRequestNotAuthenticatedThenUnauthorized() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.CLIENT_ID, registeredClient.getClientId());
parameters.set(OAuth2ParameterNames.SCOPE,
StringUtils.collectionToDelimitedString(registeredClient.getScopes(), " "));
// @formatter:off
this.mvc.perform(post(DEFAULT_DEVICE_AUTHORIZATION_ENDPOINT_URI)
.params(parameters))
.andExpect(status().isUnauthorized());
// @formatter:on
}
@Test
public void requestWhenRegisteredClientMissingThenUnauthorized() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.CLIENT_ID, registeredClient.getClientId());
parameters.set(OAuth2ParameterNames.SCOPE,
StringUtils.collectionToDelimitedString(registeredClient.getScopes(), " "));
// @formatter:off
this.mvc.perform(post(DEFAULT_DEVICE_AUTHORIZATION_ENDPOINT_URI)
.params(parameters)
.headers(withClientAuth(registeredClient)))
.andExpect(status().isUnauthorized());
// @formatter:on
}
@Test
public void requestWhenDeviceAuthorizationRequestValidThenReturnDeviceAuthorizationResponse() throws Exception {
this.spring.register(AuthorizationServerConfigurationWithMultipleIssuersAllowed.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.CLIENT_ID, registeredClient.getClientId());
parameters.set(OAuth2ParameterNames.SCOPE,
StringUtils.collectionToDelimitedString(registeredClient.getScopes(), " "));
String issuer = "https://example.com:8443/issuer1";
// @formatter:off
MvcResult mvcResult = this.mvc.perform(post(issuer.concat(DEFAULT_DEVICE_AUTHORIZATION_ENDPOINT_URI))
.params(parameters)
.headers(withClientAuth(registeredClient)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.device_code").isNotEmpty())
.andExpect(jsonPath("$.user_code").isNotEmpty())
.andExpect(jsonPath("$.expires_in").isNumber())
.andExpect(jsonPath("$.verification_uri").isNotEmpty())
.andExpect(jsonPath("$.verification_uri_complete").isNotEmpty())
.andReturn();
// @formatter:on
MockHttpServletResponse servletResponse = mvcResult.getResponse();
MockClientHttpResponse httpResponse = new MockClientHttpResponse(servletResponse.getContentAsByteArray(),
HttpStatus.OK);
OAuth2DeviceAuthorizationResponse deviceAuthorizationResponse = deviceAuthorizationResponseHttpMessageConverter
.read(OAuth2DeviceAuthorizationResponse.class, httpResponse);
String userCode = deviceAuthorizationResponse.getUserCode().getTokenValue();
assertThat(userCode).matches("[A-Z]{4}-[A-Z]{4}");
assertThat(deviceAuthorizationResponse.getVerificationUri())
.isEqualTo("https://example.com:8443/oauth2/device_verification");
assertThat(deviceAuthorizationResponse.getVerificationUriComplete())
.isEqualTo("https://example.com:8443/oauth2/device_verification?user_code=" + userCode);
String deviceCode = deviceAuthorizationResponse.getDeviceCode().getTokenValue();
OAuth2Authorization authorization = this.authorizationService.findByToken(deviceCode, DEVICE_CODE_TOKEN_TYPE);
assertThat(authorization.getToken(OAuth2DeviceCode.class)).isNotNull();
assertThat(authorization.getToken(OAuth2UserCode.class)).isNotNull();
}
@Test
public void requestWhenDeviceVerificationRequestUnauthenticatedThenUnauthorized() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName(registeredClient.getClientId())
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt))
.attribute(OAuth2ParameterNames.SCOPE, registeredClient.getScopes())
.build();
// @formatter:on
this.authorizationService.save(authorization);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.USER_CODE, USER_CODE);
// @formatter:off
this.mvc.perform(get(DEFAULT_DEVICE_VERIFICATION_ENDPOINT_URI)
.queryParams(parameters))
.andExpect(status().isUnauthorized());
// @formatter:on
}
@Test
public void requestWhenDeviceVerificationRequestValidThenDisplaysConsentPage() throws Exception {
this.spring.register(AuthorizationServerConfigurationWithMultipleIssuersAllowed.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName(registeredClient.getClientId())
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt))
.attribute(OAuth2ParameterNames.SCOPE, registeredClient.getScopes())
.build();
// @formatter:on
this.authorizationService.save(authorization);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.USER_CODE, USER_CODE);
String issuer = "https://example.com:8443/issuer1";
// @formatter:off
MvcResult mvcResult = this.mvc.perform(get(issuer.concat(DEFAULT_DEVICE_VERIFICATION_ENDPOINT_URI))
.queryParams(parameters)
.with(user("user")))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.TEXT_HTML))
.andReturn();
// @formatter:on
String responseHtml = mvcResult.getResponse().getContentAsString();
assertThat(responseHtml).contains("Consent required");
OAuth2Authorization updatedAuthorization = this.authorizationService.findById(authorization.getId());
assertThat(updatedAuthorization.getPrincipalName()).isEqualTo("user");
assertThat(updatedAuthorization).isNotNull();
// @formatter:off
assertThat(updatedAuthorization.getToken(OAuth2UserCode.class))
.extracting(isInvalidated())
.isEqualTo(false);
// @formatter:on
}
@Test
public void requestWhenDeviceAuthorizationConsentRequestUnauthenticatedThenBadRequest() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName("user")
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt))
.attribute(OAuth2ParameterNames.SCOPE, registeredClient.getScopes())
.attribute(OAuth2ParameterNames.STATE, STATE)
.build();
// @formatter:on
this.authorizationService.save(authorization);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.USER_CODE, USER_CODE);
parameters.set(OAuth2ParameterNames.CLIENT_ID, registeredClient.getClientId());
parameters.set(OAuth2ParameterNames.SCOPE, registeredClient.getScopes().iterator().next());
parameters.set(OAuth2ParameterNames.STATE, STATE);
// @formatter:off
this.mvc.perform(post(DEFAULT_DEVICE_VERIFICATION_ENDPOINT_URI)
.params(parameters))
.andExpect(status().isBadRequest());
// @formatter:on
}
@Test
public void requestWhenDeviceAuthorizationConsentRequestValidThenRedirectsToSuccessPage() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName("user")
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt))
.attribute(OAuth2ParameterNames.SCOPE, registeredClient.getScopes())
.attribute(OAuth2ParameterNames.STATE, STATE)
.build();
// @formatter:on
this.authorizationService.save(authorization);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.USER_CODE, USER_CODE);
parameters.set(OAuth2ParameterNames.CLIENT_ID, registeredClient.getClientId());
parameters.set(OAuth2ParameterNames.SCOPE, registeredClient.getScopes().iterator().next());
parameters.set(OAuth2ParameterNames.STATE, STATE);
// @formatter:off
MvcResult mvcResult = this.mvc.perform(post(DEFAULT_DEVICE_VERIFICATION_ENDPOINT_URI)
.params(parameters)
.with(user("user")))
.andExpect(status().is3xxRedirection())
.andReturn();
// @formatter:on
assertThat(mvcResult.getResponse().getHeader(HttpHeaders.LOCATION)).isEqualTo("/?success");
OAuth2Authorization updatedAuthorization = this.authorizationService.findById(authorization.getId());
assertThat(updatedAuthorization).isNotNull();
// @formatter:off
assertThat(updatedAuthorization.getToken(OAuth2UserCode.class))
.extracting(isInvalidated())
.isEqualTo(true);
// @formatter:on
}
@Test
public void requestWhenAccessTokenRequestUnauthenticatedThenUnauthorized() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName(registeredClient.getClientId())
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt), withInvalidated())
.authorizedScopes(registeredClient.getScopes())
.attribute(Principal.class.getName(), new UsernamePasswordAuthenticationToken("user", null))
.build();
// @formatter:on
this.authorizationService.save(authorization);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.DEVICE_CODE.getValue());
parameters.set(OAuth2ParameterNames.DEVICE_CODE, DEVICE_CODE);
// @formatter:off
this.mvc.perform(post(DEFAULT_TOKEN_ENDPOINT_URI)
.params(parameters))
.andExpect(status().isUnauthorized());
// @formatter:on
}
@Test
public void requestWhenAccessTokenRequestValidThenReturnAccessTokenResponse() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName(registeredClient.getClientId())
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt), withInvalidated())
.authorizedScopes(registeredClient.getScopes())
.attribute(Principal.class.getName(), new UsernamePasswordAuthenticationToken("user", null))
.build();
// @formatter:on
this.authorizationService.save(authorization);
// @formatter:off
OAuth2AuthorizationConsent authorizationConsent =
OAuth2AuthorizationConsent.withId(registeredClient.getClientId(), "user")
.scope(registeredClient.getScopes().iterator().next())
.build();
// @formatter:on
this.authorizationConsentService.save(authorizationConsent);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.DEVICE_CODE.getValue());
parameters.set(OAuth2ParameterNames.DEVICE_CODE, DEVICE_CODE);
// @formatter:off
MvcResult mvcResult = this.mvc.perform(post(DEFAULT_TOKEN_ENDPOINT_URI)
.params(parameters)
.headers(withClientAuth(registeredClient)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.access_token").isNotEmpty())
.andExpect(jsonPath("$.refresh_token").isNotEmpty())
.andExpect(jsonPath("$.expires_in").isNumber())
.andExpect(jsonPath("$.scope").isNotEmpty())
.andExpect(jsonPath("$.token_type").isNotEmpty())
.andReturn();
// @formatter:on
OAuth2Authorization updatedAuthorization = this.authorizationService.findById(authorization.getId());
assertThat(updatedAuthorization).isNotNull();
assertThat(updatedAuthorization.getAccessToken()).isNotNull();
assertThat(updatedAuthorization.getRefreshToken()).isNotNull();
// @formatter:off
assertThat(updatedAuthorization.getToken(OAuth2DeviceCode.class))
.extracting(isInvalidated())
.isEqualTo(true);
// @formatter:on
MockHttpServletResponse servletResponse = mvcResult.getResponse();
MockClientHttpResponse httpResponse = new MockClientHttpResponse(servletResponse.getContentAsByteArray(),
HttpStatus.OK);
OAuth2AccessTokenResponse accessTokenResponse = accessTokenResponseHttpMessageConverter
.read(OAuth2AccessTokenResponse.class, httpResponse);
String accessToken = accessTokenResponse.getAccessToken().getTokenValue();
OAuth2Authorization accessTokenAuthorization = this.authorizationService.findByToken(accessToken,
OAuth2TokenType.ACCESS_TOKEN);
assertThat(accessTokenAuthorization).isEqualTo(updatedAuthorization);
}
@Test
public void requestWhenAccessTokenRequestWithDPoPProofThenReturnDPoPBoundAccessToken() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
RegisteredClient registeredClient = TestRegisteredClients.registeredClient()
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.build();
// @formatter:on
this.registeredClientRepository.save(registeredClient);
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plusSeconds(300);
// @formatter:off
OAuth2Authorization authorization = OAuth2Authorization.withRegisteredClient(registeredClient)
.principalName(registeredClient.getClientId())
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.token(new OAuth2DeviceCode(DEVICE_CODE, issuedAt, expiresAt))
.token(new OAuth2UserCode(USER_CODE, issuedAt, expiresAt), withInvalidated())
.authorizedScopes(registeredClient.getScopes())
.attribute(Principal.class.getName(), new UsernamePasswordAuthenticationToken("user", null))
.build();
// @formatter:on
this.authorizationService.save(authorization);
// @formatter:off
OAuth2AuthorizationConsent authorizationConsent =
OAuth2AuthorizationConsent.withId(registeredClient.getClientId(), "user")
.scope(registeredClient.getScopes().iterator().next())
.build();
// @formatter:on
this.authorizationConsentService.save(authorizationConsent);
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.DEVICE_CODE.getValue());
parameters.set(OAuth2ParameterNames.DEVICE_CODE, DEVICE_CODE);
String tokenEndpointUri = "http://localhost" + DEFAULT_TOKEN_ENDPOINT_URI;
String dPoPProof = generateDPoPProof(tokenEndpointUri);
// @formatter:off
this.mvc.perform(post(DEFAULT_TOKEN_ENDPOINT_URI)
.params(parameters)
.headers(withClientAuth(registeredClient))
.header(OAuth2AccessToken.TokenType.DPOP.getValue(), dPoPProof))
.andExpect(status().isOk())
.andExpect(jsonPath("$.token_type").value(OAuth2AccessToken.TokenType.DPOP.getValue()));
// @formatter:on
authorization = this.authorizationService.findById(authorization.getId());
assertThat(authorization.getAccessToken().getClaims()).containsKey("cnf");
@SuppressWarnings("unchecked")
Map<String, Object> cnfClaims = (Map<String, Object>) authorization.getAccessToken().getClaims().get("cnf");
assertThat(cnfClaims).containsKey("jkt");
String jwkThumbprintClaim = (String) cnfClaims.get("jkt");
assertThat(jwkThumbprintClaim).isEqualTo(TestJwks.DEFAULT_EC_JWK.toPublicJWK().computeThumbprint().toString());
}
private static String generateDPoPProof(String tokenEndpointUri) {
// @formatter:off
Map<String, Object> publicJwk = TestJwks.DEFAULT_EC_JWK
.toPublicJWK()
.toJSONObject();
JwsHeader jwsHeader = JwsHeader.with(SignatureAlgorithm.ES256)
.type("dpop+jwt")
.jwk(publicJwk)
.build();
JwtClaimsSet claims = JwtClaimsSet.builder()
.issuedAt(Instant.now())
.claim("htm", "POST")
.claim("htu", tokenEndpointUri)
.id(UUID.randomUUID().toString())
.build();
// @formatter:on
Jwt jwt = dPoPProofJwtEncoder.encode(JwtEncoderParameters.from(jwsHeader, claims));
return jwt.getTokenValue();
}
private static HttpHeaders withClientAuth(RegisteredClient registeredClient) {
HttpHeaders headers = new HttpHeaders();
headers.setBasicAuth(registeredClient.getClientId(), registeredClient.getClientSecret());
return headers;
}
private static Consumer<Map<String, Object>> withInvalidated() {
return (metadata) -> metadata.put(OAuth2Authorization.Token.INVALIDATED_METADATA_NAME, true);
}
private static Function<OAuth2Authorization.Token<? extends OAuth2Token>, Boolean> isInvalidated() {
return (token) -> token.getMetadata(OAuth2Authorization.Token.INVALIDATED_METADATA_NAME);
}
@EnableWebSecurity
@Configuration(proxyBeanMethods = false)
static | OAuth2DeviceCodeGrantTests |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/http/HttpConfigurationBuilder.java | {
"start": 46334,
"end": 47143
} | class ____
extends GrantedAuthorityDefaultsParserUtils.AbstractGrantedAuthorityDefaultsBeanFactory {
private SecurityContextHolderAwareRequestFilter filter = new SecurityContextHolderAwareRequestFilter();
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
@Override
public SecurityContextHolderAwareRequestFilter getBean() {
this.filter.setSecurityContextHolderStrategy(this.securityContextHolderStrategy);
this.filter.setRolePrefix(this.rolePrefix);
return this.filter;
}
void setSecurityContextHolderStrategy(SecurityContextHolderStrategy securityContextHolderStrategy) {
this.securityContextHolderStrategy = securityContextHolderStrategy;
}
}
static | SecurityContextHolderAwareRequestFilterBeanFactory |
java | quarkusio__quarkus | independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/catalog/ExtensionOrigin.java | {
"start": 165,
"end": 1118
} | interface ____ {
/**
* Origin ID. E.g. GAV of the descriptor.
*
* @return origin ID
*/
String getId();
/**
* BOM that should be imported by a project
* using extensions from this origin. This method normally won't return null.
* Given that any Quarkus project would typically be importing at least some version of
* io.quarkus:quarkus-bom even if extensions used in the project aren't managed by the quarkus-bom/
* the project
*
* @return BOM coordinates
*/
ArtifactCoords getBom();
/**
* Whether the origin represents a platform.
*
* @return true in case the origin is a platform, otherwise - false
*/
boolean isPlatform();
/**
* @return optional metadata attached to the origin
*/
Map<String, Object> getMetadata();
default Mutable mutable() {
return new ExtensionOriginImpl.Builder(this);
}
| ExtensionOrigin |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/deployment/GrpcServerProcessorTest.java | {
"start": 8192,
"end": 8428
} | class ____ extends OverridingTransactionalRoot {
static final Set<String> EXPECTED = ImmutableSet.of("method", "transactional", "another");
@Blocking
void method() {
}
}
}
| BlockingOverridingTransactional |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IgnoredPureGetterTest.java | {
"start": 8410,
"end": 8764
} | class ____ {
void test(TestProtoMessage message) {
message.getMessage();
message.hasMessage();
}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.errorprone.bugpatterns.proto.ProtoTest.TestProtoMessage;
| Test |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteWithInOnlyAndMultipleAcksTest.java | {
"start": 5681,
"end": 5995
} | class ____ {
private final String id;
public MyOrderServiceNotificationWithAckBean(String id) {
this.id = id;
}
public String handleOrderNotificationWithAck(String body) {
return "Ack-" + id + ":" + body;
}
}
}
| MyOrderServiceNotificationWithAckBean |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/SendRequestTransportException.java | {
"start": 708,
"end": 1137
} | class ____ extends ActionTransportException implements ElasticsearchWrapperException {
public SendRequestTransportException(DiscoveryNode node, String action, Throwable cause) {
super(node == null ? null : node.getName(), node == null ? null : node.getAddress(), action, cause);
}
public SendRequestTransportException(StreamInput in) throws IOException {
super(in);
}
}
| SendRequestTransportException |
java | quarkusio__quarkus | integration-tests/grpc-hibernate/src/test/java/com/example/grpc/hibernate/BlockingMutinyTestBase.java | {
"start": 449,
"end": 3214
} | class ____ {
public static final int NO_OF_ELTS = 100;
public static final int TIMEOUT = 60;
public static final TestOuterClass.Empty EMPTY = TestOuterClass.Empty.getDefaultInstance();
@GrpcClient
Test client;
Test getClient() {
return client;
}
@BeforeEach
void clear() {
getClient().clear(EMPTY).onFailure().invoke(e -> {
throw new RuntimeException("Failed to clear items", e);
}).await().atMost(Duration.ofSeconds(20));
}
@org.junit.jupiter.api.Test
@Timeout(TIMEOUT)
void shouldAddItems() {
List<String> expected = new ArrayList<>();
for (int i = 0; i < NO_OF_ELTS; i++) {
String text = "text " + i;
expected.add(text);
final int attempt = i;
getClient().add(TestOuterClass.Item.newBuilder().setText(text).build())
.onFailure().invoke(e -> {
throw new RuntimeException("Failed to add on attempt " + attempt, e);
})
.await().atMost(Duration.ofSeconds(5));
}
List<String> actual = new ArrayList<>();
Multi<TestOuterClass.Item> all = getClient().getAll(EMPTY);
all.subscribe().with(item -> actual.add(item.getText()));
await().atMost(Duration.ofSeconds(TIMEOUT / 2))
.until(() -> actual.size() == NO_OF_ELTS);
assertThat(actual).containsExactlyInAnyOrderElementsOf(expected);
}
@org.junit.jupiter.api.Test
@Timeout(TIMEOUT)
void shouldAddViaBidi() {
List<String> expected = new ArrayList<>();
List<String> echoed = new ArrayList<>();
List<String> actual = new ArrayList<>();
Multi<TestOuterClass.Item> request = Multi.createFrom().emitter(
m -> {
for (int i = 0; i < NO_OF_ELTS; i++) {
String text = "text " + i;
expected.add(text);
m.emit(TestOuterClass.Item.newBuilder().setText(text).build());
}
m.complete();
});
getClient().bidi(request).subscribe().with(item -> echoed.add(item.getText()));
await().atMost(Duration.ofSeconds(TIMEOUT / 2))
.until(() -> echoed.size() == NO_OF_ELTS);
assertThat(echoed).containsExactlyInAnyOrderElementsOf(expected);
Multi<TestOuterClass.Item> all = getClient().getAll(EMPTY);
all.subscribe().with(item -> actual.add(item.getText()));
await().atMost(Duration.ofSeconds(TIMEOUT / 2))
.until(() -> actual.size() == NO_OF_ELTS);
assertThat(actual).containsExactlyInAnyOrderElementsOf(expected);
}
}
| BlockingMutinyTestBase |
java | apache__camel | components/camel-threadpoolfactory-vertx/src/test/java/org/apache/camel/reactive/SplitCustomThreadPoolTest.java | {
"start": 1282,
"end": 3157
} | class ____ extends CamelTestSupport {
private final Vertx vertx = Vertx.vertx();
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
VertXThreadPoolFactory tpf = (VertXThreadPoolFactory) context.getExecutorServiceManager().getThreadPoolFactory();
tpf.setVertx(vertx);
return context;
}
@Test
public void testSplit() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("A,B,C,D,E,F,G,H,I,J");
getMockEndpoint("mock:split").expectedBodiesReceivedInAnyOrder("A", "B", "C", "D", "E", "F", "G", "H", "I", "J");
template.sendBody("direct:start", "A,B,C,D,E,F,G,H,I,J");
MockEndpoint.assertIsSatisfied(context);
vertx.close();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// register a custom thread pool profile with id myLowPool
context.getExecutorServiceManager().registerThreadPoolProfile(
new ThreadPoolProfileBuilder("myLowPool").poolSize(2).maxPoolSize(10).build());
from("direct:start")
.to("log:foo")
.split(body()).executorService("myLowPool")
.to("log:bar")
.process(e -> {
String name = Thread.currentThread().getName();
assertTrue(name.startsWith("Camel"), "Should use Camel thread");
})
.to("mock:split")
.end()
.to("log:result")
.to("mock:result");
}
};
}
}
| SplitCustomThreadPoolTest |
java | dropwizard__dropwizard | dropwizard-jackson/src/main/java/io/dropwizard/jackson/FuzzyEnumModule.java | {
"start": 1358,
"end": 1416
} | enum ____ is case insensitive.</li>
* </ul>
*/
public | values |
java | quarkusio__quarkus | extensions/reactive-mysql-client/deployment/src/test/java/io/quarkus/reactive/mysql/client/ConfigActiveFalseNamedDatasourceDynamicInjectionTest.java | {
"start": 632,
"end": 3126
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.overrideConfigKey("quarkus.datasource.ds-1.active", "false")
// We need at least one build-time property for the datasource,
// otherwise it's considered unconfigured at build time...
.overrideConfigKey("quarkus.datasource.ds-1.db-kind", "mysql");
@Inject
@ReactiveDataSource("ds-1")
InjectableInstance<Pool> pool;
@Inject
@ReactiveDataSource("ds-1")
InjectableInstance<io.vertx.mutiny.sqlclient.Pool> mutinyPool;
@Inject
@ReactiveDataSource("ds-1")
InjectableInstance<MySQLPool> vendorPool;
@Inject
@ReactiveDataSource("ds-1")
InjectableInstance<io.vertx.mutiny.mysqlclient.MySQLPool> mutinyVendorPool;
@Test
public void pool() {
doTest(pool, Pool::getConnection);
}
@Test
public void mutinyPool() {
doTest(mutinyPool, io.vertx.mutiny.sqlclient.Pool::getConnection);
}
@Test
public void vendorPool() {
doTest(vendorPool, Pool::getConnection);
}
@Test
public void mutinyVendorPool() {
doTest(mutinyVendorPool, io.vertx.mutiny.sqlclient.Pool::getConnection);
}
private <T> void doTest(InjectableInstance<T> instance, Consumer<T> action) {
// The bean is always available to be injected during static init
// since we don't know whether the datasource will be active at runtime.
// So the bean proxy cannot be null.
assertThat(instance.getHandle().getBean())
.isNotNull()
.returns(false, InjectableBean::isActive);
var pool = instance.get();
assertThat(pool).isNotNull();
// However, any attempt to use it at runtime will fail.
assertThatThrownBy(() -> action.accept(pool))
.isInstanceOf(InactiveBeanException.class)
.hasMessageContainingAll("Datasource 'ds-1' was deactivated through configuration properties.",
"To avoid this exception while keeping the bean inactive", // Message from Arc with generic hints
"To activate the datasource, set configuration property 'quarkus.datasource.\"ds-1\".active'"
+ " to 'true' and configure datasource 'ds-1'",
"Refer to https://quarkus.io/guides/datasource for guidance.");
}
}
| ConfigActiveFalseNamedDatasourceDynamicInjectionTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/id/uuid/GeneratedValueTest.java | {
"start": 3352,
"end": 3442
} | class ____ {
@Id
@Column( length = 16 )
@GeneratedValue
public UUID id;
}
}
| TheEntity |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java | {
"start": 2603,
"end": 3128
} | class ____
extends Mapper<LongWritable, Text, IntWritable, IntWritable> {
public void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
IntWritable outKey = new IntWritable();
IntWritable outValue = new IntWritable();
for (int j = 0; j < NUM_TESTS; j++) {
for (int i = 0; i < NUM_VALUES; i++) {
outKey.set(j);
outValue.set(i);
context.write(outKey, outValue);
}
}
}
}
public static | TestMapper |
java | playframework__playframework | documentation/manual/working/javaGuide/main/dependencyinjection/code/javaguide/di/guice/CircularDependencies.java | {
"start": 327,
"end": 422
} | class ____ {
@Inject
public Foo(Bar bar) {
// ...
}
}
public | Foo |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/ide-dev-mode-build-props/src/test/java/orc/acme/TestApp.java | {
"start": 62,
"end": 158
} | class ____ {
public static void main(String[] args) {
Quarkus.run(args);
}
}
| TestApp |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/jackson/OidcIdTokenMixin.java | {
"start": 1449,
"end": 1707
} | class ____ {
@JsonCreator
OidcIdTokenMixin(@JsonProperty("tokenValue") String tokenValue, @JsonProperty("issuedAt") Instant issuedAt,
@JsonProperty("expiresAt") Instant expiresAt, @JsonProperty("claims") Map<String, Object> claims) {
}
}
| OidcIdTokenMixin |
java | quarkusio__quarkus | extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/FailedExecutionTest.java | {
"start": 510,
"end": 1272
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(FailedExecutionTest.Jobs.class));
static final CountDownLatch ERROR_LATCH = new CountDownLatch(2);
static FailedExecution failedExecution;
@Test
public void testTriggerErrorStatus() throws InterruptedException {
assertTrue(ERROR_LATCH.await(5, TimeUnit.SECONDS));
assertInstanceOf(RuntimeException.class, failedExecution.getException());
}
void observeFailedExecution(@Observes FailedExecution failedExecution) {
FailedExecutionTest.failedExecution = failedExecution;
ERROR_LATCH.countDown();
}
static | FailedExecutionTest |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/ParameterizedRuleExecutor.java | {
"start": 395,
"end": 969
} | class ____<TreeType extends Node<TreeType>, Context> extends RuleExecutor<TreeType> {
private final Context context;
protected ParameterizedRuleExecutor(Context context) {
this.context = context;
}
protected Context context() {
return context;
}
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
protected Function<TreeType, TreeType> transform(Rule<?, TreeType> rule) {
return (rule instanceof ParameterizedRule pr) ? t -> (TreeType) pr.apply(t, context) : t -> rule.apply(t);
}
}
| ParameterizedRuleExecutor |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/AbstractDataToProcessWriter.java | {
"start": 1268,
"end": 14462
} | class ____ implements DataToProcessWriter {
private static final int TIME_FIELD_OUT_INDEX = 0;
private static final long MS_IN_SECOND = 1000;
private final boolean includeControlField;
private final boolean includeTokensField;
protected final AutodetectProcess autodetectProcess;
protected final DataDescription dataDescription;
protected final AnalysisConfig analysisConfig;
protected final DataCountsReporter dataCountsReporter;
private final Logger logger;
private final DateTransformer dateTransformer;
private final long bucketSpanMs;
private final long latencySeconds;
protected Map<String, Integer> inFieldIndexes;
protected List<InputOutputMap> inputOutputMap;
// epoch in seconds
private long latestEpochMs;
private long latestEpochMsThisUpload;
private Set<String> termFields;
protected AbstractDataToProcessWriter(
boolean includeControlField,
boolean includeTokensField,
AutodetectProcess autodetectProcess,
DataDescription dataDescription,
AnalysisConfig analysisConfig,
DataCountsReporter dataCountsReporter,
Logger logger
) {
this.includeControlField = includeControlField;
this.includeTokensField = includeTokensField;
this.autodetectProcess = Objects.requireNonNull(autodetectProcess);
this.dataDescription = Objects.requireNonNull(dataDescription);
this.analysisConfig = Objects.requireNonNull(analysisConfig);
this.dataCountsReporter = Objects.requireNonNull(dataCountsReporter);
this.logger = Objects.requireNonNull(logger);
this.latencySeconds = analysisConfig.getLatency() == null ? 0 : analysisConfig.getLatency().seconds();
this.bucketSpanMs = analysisConfig.getBucketSpan().getMillis();
this.termFields = analysisConfig.termFields();
Date date = dataCountsReporter.getLatestRecordTime();
latestEpochMsThisUpload = 0;
latestEpochMs = 0;
if (date != null) {
latestEpochMs = date.getTime();
}
boolean isDateFormatString = dataDescription.isTransformTime() && dataDescription.isEpochMs() == false;
if (isDateFormatString) {
dateTransformer = new DateFormatDateTransformer(dataDescription.getTimeFormat());
} else {
dateTransformer = new DoubleDateTransformer(dataDescription.isEpochMs());
}
}
public String maybeTruncateCatgeorizationField(String categorizationField) {
if (termFields.contains(analysisConfig.getCategorizationFieldName()) == false) {
return categorizationField.substring(0, Math.min(categorizationField.length(), AnalysisConfig.MAX_CATEGORIZATION_FIELD_LENGTH));
}
return categorizationField;
}
/**
* Set up the field index mappings. This must be called before
* {@linkplain DataToProcessWriter#write(InputStream, CategorizationAnalyzer, XContentType, BiConsumer)}
* <p>
* Finds the required input indexes in the <code>header</code> and sets the
* mappings to the corresponding output indexes.
*/
void buildFieldIndexMapping(String[] header) {
Collection<String> inputFields = inputFields();
inFieldIndexes = inputFieldIndexes(header, inputFields);
checkForMissingFields(inputFields, inFieldIndexes, header);
inputOutputMap = createInputOutputMap(inFieldIndexes);
// The time field doesn't count
dataCountsReporter.setAnalysedFieldsPerRecord(inputFields().size() - 1);
}
/**
* Write the header.
* The header is created from the list of analysis input fields, the time field and the control field.
*/
@Override
public void writeHeader() throws IOException {
Map<String, Integer> outFieldIndexes = outputFieldIndexes();
// header is all the analysis input fields + the time field + control field
int numFields = outFieldIndexes.size();
String[] record = new String[numFields];
for (Map.Entry<String, Integer> entry : outFieldIndexes.entrySet()) {
record[entry.getValue()] = entry.getKey();
}
// Write the header
autodetectProcess.writeRecord(record);
}
/**
* Tokenize the field that has been configured for categorization, and store the resulting list of tokens in CSV
* format in the appropriate field of the record to be sent to the process.
* @param categorizationAnalyzer The analyzer to use to convert the categorization field to a list of tokens
* @param categorizationFieldValue The value of the categorization field to be tokenized
* @param record The record to be sent to the process
*/
protected void tokenizeForCategorization(
CategorizationAnalyzer categorizationAnalyzer,
String categorizationFieldValue,
String[] record
) {
assert includeTokensField;
// -2 because last field is the control field, and last but one is the pre-tokenized tokens field
record[record.length - 2] = tokenizeForCategorization(
categorizationAnalyzer,
analysisConfig.getCategorizationFieldName(),
categorizationFieldValue
);
}
/**
* Accessible for testing only.
*/
static String tokenizeForCategorization(
CategorizationAnalyzer categorizationAnalyzer,
String categorizationFieldName,
String categorizationFieldValue
) {
StringBuilder builder = new StringBuilder();
boolean first = true;
for (String token : categorizationAnalyzer.tokenizeField(categorizationFieldName, categorizationFieldValue)) {
if (first) {
first = false;
} else {
builder.append(',');
}
if (needsEscaping(token)) {
builder.append('"');
for (int i = 0; i < token.length(); ++i) {
char c = token.charAt(i);
if (c == '"') {
builder.append('"');
}
builder.append(c);
}
builder.append('"');
} else {
builder.append(token);
}
}
return builder.toString();
}
private static boolean needsEscaping(String value) {
for (int i = 0; i < value.length(); ++i) {
char c = value.charAt(i);
if (c == '"' || c == ',' || c == '\n' || c == '\r') {
return true;
}
}
return false;
}
/**
* Transform the date in the input data and write all fields to the length encoded writer.
* <p>
* Fields must be copied from input to output before this function is called.
*
* @param record The record that will be written to the length encoded writer after the time has been transformed.
* This should be the same size as the number of output (analysis fields) i.e.
* the size of the map returned by {@linkplain #outputFieldIndexes()}
* @param numberOfFieldsRead The total number read not just those included in the analysis
*/
protected boolean transformTimeAndWrite(String[] record, long numberOfFieldsRead) throws IOException {
long epochMs;
try {
epochMs = dateTransformer.transform(record[TIME_FIELD_OUT_INDEX]);
} catch (CannotParseTimestampException e) {
dataCountsReporter.reportDateParseError(numberOfFieldsRead);
logger.error(e.getMessage());
return false;
}
record[TIME_FIELD_OUT_INDEX] = Long.toString(epochMs / MS_IN_SECOND);
final long latestBucketFloor = alignToFloor(latestEpochMs, bucketSpanMs);
// We care only about records that are older than the current bucket according to our latest timestamp
// The native side handles random order within the same bucket without issue
if (epochMs / MS_IN_SECOND < latestBucketFloor / MS_IN_SECOND - latencySeconds) {
// out of order
dataCountsReporter.reportOutOfOrderRecord(numberOfFieldsRead);
if (epochMs > latestEpochMsThisUpload) {
// record this timestamp even if the record won't be processed
latestEpochMsThisUpload = epochMs;
dataCountsReporter.reportLatestTimeIncrementalStats(latestEpochMsThisUpload);
}
return false;
}
latestEpochMs = Math.max(latestEpochMs, epochMs);
latestEpochMsThisUpload = latestEpochMs;
autodetectProcess.writeRecord(record);
dataCountsReporter.reportRecordWritten(numberOfFieldsRead, epochMs, latestEpochMs);
return true;
}
@Override
public void flushStream() throws IOException {
autodetectProcess.flushStream();
}
/**
* Get all the expected input fields i.e. all the fields we
* must see in the input
*/
final Collection<String> inputFields() {
Set<String> requiredFields = analysisConfig.analysisFields();
requiredFields.add(dataDescription.getTimeField());
requiredFields.remove(AnalysisConfig.ML_CATEGORY_FIELD);
return requiredFields;
}
/**
* Find the indexes of the input fields from the header
*/
protected static Map<String, Integer> inputFieldIndexes(String[] header, Collection<String> inputFields) {
List<String> headerList = Arrays.asList(header); // TODO header could be empty
Map<String, Integer> fieldIndexes = new HashMap<>();
for (String field : inputFields) {
int index = headerList.indexOf(field);
if (index >= 0) {
fieldIndexes.put(field, index);
}
}
return fieldIndexes;
}
Map<String, Integer> getInputFieldIndexes() {
return inFieldIndexes;
}
/**
* Create indexes of the output fields.
* This is the time field and all the fields configured for analysis
* and the control field.
* Time is the first field and the last is the control field
*/
protected final Map<String, Integer> outputFieldIndexes() {
Map<String, Integer> fieldIndexes = new HashMap<>();
// time field
fieldIndexes.put(dataDescription.getTimeField(), TIME_FIELD_OUT_INDEX);
int index = TIME_FIELD_OUT_INDEX + 1;
for (String field : analysisConfig.analysisFields()) {
if (AnalysisConfig.ML_CATEGORY_FIELD.equals(field) == false) {
fieldIndexes.put(field, index++);
}
}
// field for categorization tokens
if (includeTokensField) {
fieldIndexes.put(LengthEncodedWriter.PRETOKENISED_TOKEN_FIELD, index++);
}
// control field
if (includeControlField) {
fieldIndexes.put(LengthEncodedWriter.CONTROL_FIELD_NAME, index++);
}
return fieldIndexes;
}
/**
* The number of fields used in the analysis field,
* the time field and (sometimes) the control field
*/
protected int outputFieldCount() {
return inputFields().size() + (includeControlField ? 1 : 0) + (includeTokensField ? 1 : 0);
}
/**
* Create a map of input index to output index. This does not include the time or control fields.
*
* @param inFieldIndexes Map of field name to index in the input array
*/
private List<InputOutputMap> createInputOutputMap(Map<String, Integer> inFieldIndexes) {
List<InputOutputMap> inputOutputMap = new ArrayList<>();
int outIndex = TIME_FIELD_OUT_INDEX;
Integer inIndex = inFieldIndexes.get(dataDescription.getTimeField());
if (inIndex == null) {
throw new IllegalStateException(String.format(Locale.ROOT, "Input time field '%s' not found", dataDescription.getTimeField()));
}
inputOutputMap.add(new InputOutputMap(inIndex, outIndex));
for (String field : analysisConfig.analysisFields()) {
if (AnalysisConfig.ML_CATEGORY_FIELD.equals(field) == false) {
++outIndex;
inIndex = inFieldIndexes.get(field);
if (inIndex != null) {
inputOutputMap.add(new InputOutputMap(inIndex, outIndex));
}
}
}
return inputOutputMap;
}
protected List<InputOutputMap> getInputOutputMap() {
return inputOutputMap;
}
/**
* Check that all the fields are present in the header.
* Either return true or throw a MissingFieldException
* <p>
* Every input field should have an entry in <code>inputFieldIndexes</code>
* otherwise the field cannot be found.
*/
protected abstract boolean checkForMissingFields(
Collection<String> inputFields,
Map<String, Integer> inputFieldIndexes,
String[] header
);
/**
* Input and output array indexes map
*/
protected static | AbstractDataToProcessWriter |
java | spring-projects__spring-framework | spring-jdbc/src/test/java/org/springframework/jdbc/datasource/init/H2DatabasePopulatorTests.java | {
"start": 1334,
"end": 3679
} | class ____ extends AbstractDatabasePopulatorTests {
@Override
protected EmbeddedDatabaseType getEmbeddedDatabaseType() {
return EmbeddedDatabaseType.H2;
}
@Test // SPR-15896
void scriptWithH2Alias() {
databasePopulator.addScript(usersSchema());
databasePopulator.addScript(resource("db-test-data-h2-alias.sql"));
// Set statement separator to double newline so that ";" is not
// considered a statement separator within the source code of the
// aliased function 'REVERSE'.
databasePopulator.setSeparator("\n\n");
DatabasePopulatorUtils.execute(databasePopulator, db);
String sql = "select REVERSE(first_name) from users where last_name='Brannen'";
assertThat(jdbcTemplate.queryForObject(sql, String.class)).isEqualTo("maS");
}
@Test // gh-27008
void automaticallyCommitsIfAutoCommitIsDisabled() throws Exception {
EmbeddedDatabase database = null;
try {
EmbeddedDatabaseFactory databaseFactory = new EmbeddedDatabaseFactory();
databaseFactory.setDatabaseConfigurer(new AutoCommitDisabledH2EmbeddedDatabaseConfigurer());
database = databaseFactory.getDatabase();
assertAutoCommitDisabledPreconditions(database);
// Set up schema
databasePopulator.setScripts(usersSchema());
DatabasePopulatorUtils.execute(databasePopulator, database);
assertThat(selectFirstNames(database)).isEmpty();
// Insert data
databasePopulator.setScripts(resource("users-data.sql"));
DatabasePopulatorUtils.execute(databasePopulator, database);
assertThat(selectFirstNames(database)).containsExactly("Sam");
}
finally {
if (database != null) {
database.shutdown();
}
}
}
/**
* DatabasePopulatorUtils.execute() will obtain a new Connection, so we're
* really just testing the configuration of the database here.
*/
private void assertAutoCommitDisabledPreconditions(DataSource dataSource) throws Exception {
Connection connection = DataSourceUtils.getConnection(dataSource);
assertThat(connection.getAutoCommit()).as("auto-commit").isFalse();
assertThat(DataSourceUtils.isConnectionTransactional(connection, dataSource)).as("transactional").isFalse();
connection.close();
}
private List<String> selectFirstNames(DataSource dataSource) {
return new JdbcTemplate(dataSource).queryForList("select first_name from users", String.class);
}
}
| H2DatabasePopulatorTests |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java | {
"start": 41448,
"end": 43948
} | class ____ extends ParserRuleContext {
public EventFilterContext eventFilter() {
return getRuleContext(EventFilterContext.class, 0);
}
public TerminalNode RB() {
return getToken(EqlBaseParser.RB, 0);
}
public TerminalNode LB() {
return getToken(EqlBaseParser.LB, 0);
}
public TerminalNode MISSING_EVENT_OPEN() {
return getToken(EqlBaseParser.MISSING_EVENT_OPEN, 0);
}
public SubqueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override
public int getRuleIndex() {
return RULE_subquery;
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).enterSubquery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).exitSubquery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof EqlBaseVisitor) return ((EqlBaseVisitor<? extends T>) visitor).visitSubquery(this);
else return visitor.visitChildren(this);
}
}
public final SubqueryContext subquery() throws RecognitionException {
SubqueryContext _localctx = new SubqueryContext(_ctx, getState());
enterRule(_localctx, 24, RULE_subquery);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(166);
_la = _input.LA(1);
if (!(_la == LB || _la == MISSING_EVENT_OPEN)) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(167);
eventFilter();
setState(168);
match(RB);
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | SubqueryContext |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineAuthenticationFilterForV1.java | {
"start": 3114,
"end": 15302
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestTimelineAuthenticationFilterForV1.class);
private static final String FOO_USER = "foo";
private static final String BAR_USER = "bar";
private static final String HTTP_USER = "HTTP";
private static final String PRINCIPAL = HTTP_USER + "/localhost";
private static final File TEST_ROOT_DIR = new File(
System.getProperty("test.build.dir", "target/test-dir"),
TestTimelineAuthenticationFilterForV1.class.getName() + "-root");
private static final File httpSpnegoKeytabFile = new File(
KerberosTestUtils.getKeytabFile());
private static final String httpSpnegoPrincipal =
KerberosTestUtils.getServerPrincipal();
private static final String BASEDIR =
System.getProperty("test.build.dir", "target/test-dir") + "/"
+ TestTimelineAuthenticationFilterForV1.class.getSimpleName();
public static Collection<Object[]> withSsl() {
return Arrays.asList(new Object[][]{{false}, {true}});
}
private static MiniKdc testMiniKDC;
private static String keystoresDir;
private static String sslConfDir;
private static ApplicationHistoryServer testTimelineServer;
private static Configuration conf;
private static boolean withSsl;
public void initTestTimelineAuthenticationFilterForV1(boolean isSslEnabled) {
TestTimelineAuthenticationFilterForV1.withSsl = isSslEnabled;
}
@BeforeAll
public static void setup() {
try {
testMiniKDC = new MiniKdc(MiniKdc.createConf(), TEST_ROOT_DIR);
testMiniKDC.start();
testMiniKDC.createPrincipal(
httpSpnegoKeytabFile, PRINCIPAL);
} catch (Exception e) {
LOG.error("Failed to setup MiniKDC", e);
fail("Couldn't setup MiniKDC");
}
try {
testTimelineServer = new ApplicationHistoryServer();
conf = new Configuration(false);
conf.setStrings(TIMELINE_HTTP_AUTH_PREFIX + "type", "kerberos");
conf.set(TIMELINE_HTTP_AUTH_PREFIX +
KerberosAuthenticationHandler.PRINCIPAL, httpSpnegoPrincipal);
conf.set(TIMELINE_HTTP_AUTH_PREFIX +
KerberosAuthenticationHandler.KEYTAB,
httpSpnegoKeytabFile.getAbsolutePath());
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
conf.set(YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL,
httpSpnegoPrincipal);
conf.set(YarnConfiguration.TIMELINE_SERVICE_KEYTAB,
httpSpnegoKeytabFile.getAbsolutePath());
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE,
MemoryTimelineStore.class, TimelineStore.class);
conf.set(YarnConfiguration.TIMELINE_SERVICE_ADDRESS,
"localhost:10200");
conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
"localhost:8188");
conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS,
"localhost:8190");
conf.set("hadoop.proxyuser.HTTP.hosts", "*");
conf.set("hadoop.proxyuser.HTTP.users", FOO_USER);
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 1);
if (withSsl) {
conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY,
HttpConfig.Policy.HTTPS_ONLY.name());
File base = new File(BASEDIR);
FileUtil.fullyDelete(base);
base.mkdirs();
keystoresDir = new File(BASEDIR).getAbsolutePath();
sslConfDir = KeyStoreTestUtil.getClasspathDir(
TestTimelineAuthenticationFilterForV1.class);
KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
}
UserGroupInformation.setConfiguration(conf);
testTimelineServer.init(conf);
testTimelineServer.start();
} catch (Exception e) {
LOG.error("Failed to setup TimelineServer", e);
fail("Couldn't setup TimelineServer");
}
}
private TimelineClient createTimelineClientForUGI() {
TimelineClient client = TimelineClient.createTimelineClient();
client.init(conf);
client.start();
return client;
}
@AfterAll
public static void tearDown() throws Exception {
if (testMiniKDC != null) {
testMiniKDC.stop();
}
if (testTimelineServer != null) {
testTimelineServer.stop();
}
if (withSsl) {
KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
File base = new File(BASEDIR);
FileUtil.fullyDelete(base);
}
}
@MethodSource("withSsl")
@ParameterizedTest
void testPutTimelineEntities(boolean isSslEnabled) throws Exception {
initTestTimelineAuthenticationFilterForV1(isSslEnabled);
KerberosTestUtils.doAs(PRINCIPAL, new Callable<Void>() {
@Override
public Void call() throws Exception {
TimelineClient client = createTimelineClientForUGI();
TimelineEntity entityToStore = new TimelineEntity();
entityToStore.setEntityType(
TestTimelineAuthenticationFilterForV1.class.getName());
entityToStore.setEntityId("entity1");
entityToStore.setStartTime(0L);
TimelinePutResponse putResponse = client.putEntities(entityToStore);
if (putResponse.getErrors().size() > 0) {
LOG.error("putResponse errors: {}", putResponse.getErrors());
}
assertTrue(putResponse.getErrors().isEmpty(),
"There were some errors in the putResponse");
TimelineEntity entityToRead =
testTimelineServer.getTimelineStore().getEntity("entity1",
TestTimelineAuthenticationFilterForV1.class.getName(), null);
assertNotNull(entityToRead,
"Timeline entity should not be null");
return null;
}
});
}
@MethodSource("withSsl")
@ParameterizedTest
void testPutDomains(boolean isSslEnabled) throws Exception {
initTestTimelineAuthenticationFilterForV1(isSslEnabled);
KerberosTestUtils.doAs(PRINCIPAL, new Callable<Void>() {
@Override
public Void call() throws Exception {
TimelineClient client = createTimelineClientForUGI();
TimelineDomain domainToStore = new TimelineDomain();
domainToStore.setId(
TestTimelineAuthenticationFilterForV1.class.getName());
domainToStore.setReaders("*");
domainToStore.setWriters("*");
client.putDomain(domainToStore);
TimelineDomain domainToRead =
testTimelineServer.getTimelineStore().getDomain(
TestTimelineAuthenticationFilterForV1.class.getName());
assertNotNull(domainToRead,
"Timeline domain should not be null");
return null;
}
});
}
@MethodSource("withSsl")
@ParameterizedTest
void testDelegationTokenOperations(boolean isSslEnabled) throws Exception {
initTestTimelineAuthenticationFilterForV1(isSslEnabled);
TimelineClient httpUserClient =
KerberosTestUtils.doAs(PRINCIPAL,
new Callable<TimelineClient>() {
@Override
public TimelineClient call() throws Exception {
return createTimelineClientForUGI();
}
});
UserGroupInformation httpUser =
KerberosTestUtils.doAs(PRINCIPAL,
new Callable<UserGroupInformation>() {
@Override
public UserGroupInformation call() throws Exception {
return UserGroupInformation.getCurrentUser();
}
});
// Let HTTP user to get the delegation for itself
Token<TimelineDelegationTokenIdentifier> token =
httpUserClient.getDelegationToken(httpUser.getShortUserName());
assertNotNull(token, "Delegation token should not be null");
TimelineDelegationTokenIdentifier tDT = token.decodeIdentifier();
assertNotNull(tDT,
"Delegation token identifier should not be null");
assertEquals(new Text(HTTP_USER), tDT.getOwner(),
"Owner of delegation token identifier does not match");
// Renew token
assertFalse(token.getService().toString().isEmpty(),
"Service field of token should not be empty");
// Renew the token from the token service address
long renewTime1 = httpUserClient.renewDelegationToken(token);
Thread.sleep(100);
token.setService(new Text());
assertTrue(token.getService().toString().isEmpty(),
"Service field of token should be empty");
// If the token service address is not available, it still can be renewed
// from the configured address
long renewTime2 = httpUserClient.renewDelegationToken(token);
assertTrue(renewTime1 < renewTime2,
"renewTime2 should be later than renewTime1");
// Cancel token
assertTrue(token.getService().toString().isEmpty(),
"Service field of token should be empty");
// If the token service address is not available, it still can be canceled
// from the configured address
httpUserClient.cancelDelegationToken(token);
// Renew should not be successful because the token is canceled
try {
httpUserClient.renewDelegationToken(token);
fail("Renew of delegation token should not be successful");
} catch (Exception e) {
LOG.info("Exception while renewing delegation token", e);
assertTrue(e.getMessage().contains(
"Renewal request for unknown token"));
}
// Let HTTP user to get the delegation token for FOO user
UserGroupInformation fooUgi = UserGroupInformation.createProxyUser(
FOO_USER, httpUser);
TimelineClient fooUserClient = fooUgi.doAs(
new PrivilegedExceptionAction<TimelineClient>() {
@Override
public TimelineClient run() {
return createTimelineClientForUGI();
}
});
token = fooUserClient.getDelegationToken(httpUser.getShortUserName());
assertNotNull(token, "Delegation token should not be null");
tDT = token.decodeIdentifier();
assertNotNull(tDT,
"Delegation token identifier should not be null");
assertEquals(new Text(FOO_USER), tDT.getOwner(),
"Owner of delegation token is not the expected");
assertEquals(new Text(HTTP_USER), tDT.getRealUser(),
"Real user of delegation token is not the expected");
// Renew token as the renewer
final Token<TimelineDelegationTokenIdentifier> tokenToRenew = token;
renewTime1 = httpUserClient.renewDelegationToken(tokenToRenew);
renewTime2 = httpUserClient.renewDelegationToken(tokenToRenew);
assertTrue(renewTime1 < renewTime2,
"renewTime2 should be later than renewTime1");
// Cancel token
assertFalse(tokenToRenew.getService().toString().isEmpty(),
"Service field of token should not be empty");
// Cancel the token from the token service address
fooUserClient.cancelDelegationToken(tokenToRenew);
// Renew should not be successful because the token is canceled
try {
httpUserClient.renewDelegationToken(tokenToRenew);
fail("Renew of delegation token should not be successful");
} catch (Exception e) {
LOG.info("Exception while renewing delegation token", e);
assertTrue(
e.getMessage().contains("Renewal request for unknown token"));
}
// Let HTTP user to get the delegation token for BAR user
UserGroupInformation barUgi = UserGroupInformation.createProxyUser(
BAR_USER, httpUser);
TimelineClient barUserClient = barUgi.doAs(
new PrivilegedExceptionAction<TimelineClient>() {
@Override
public TimelineClient run() {
return createTimelineClientForUGI();
}
});
try {
barUserClient.getDelegationToken(httpUser.getShortUserName());
fail("Retrieval of delegation token should not be successful");
} catch (Exception e) {
LOG.info("Exception while retrieving delegation token", e);
assertTrue(e.getCause() instanceof AuthorizationException ||
e.getCause() instanceof AuthenticationException ||
e.getCause().getMessage().contains("AuthorizationException") ||
e.getCause().getMessage().contains("AuthenticationException"));
}
}
}
| TestTimelineAuthenticationFilterForV1 |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/matcher/AssertionMatcher_matches_Test.java | {
"start": 1324,
"end": 4232
} | class ____ {
private static final Integer ZERO = 0;
private static final Integer ONE = 1;
private final AssertionMatcher<Integer> isZeroMatcher = new AssertionMatcher<>() {
@Override
public void assertion(Integer actual) throws AssertionError {
assertThat(actual).isZero();
}
};
private boolean removeAssertJRelatedElementsFromStackTrace;
/**
* Stacktrace filtering must be disabled in order to check frames in
* {@link this#matcher_should_fill_description_when_assertion_fails()}.
* I use setUp and tearDown methods to ensure that it is set to original value after a test.
*/
@BeforeEach
public void setUp() {
removeAssertJRelatedElementsFromStackTrace = Failures.instance().isRemoveAssertJRelatedElementsFromStackTrace();
Failures.instance().setRemoveAssertJRelatedElementsFromStackTrace(false);
}
@AfterEach
public void tearDown() {
Failures.instance().setRemoveAssertJRelatedElementsFromStackTrace(removeAssertJRelatedElementsFromStackTrace);
}
@Test
void matcher_should_pass_when_assertion_passes() {
assertThat(isZeroMatcher.matches(ZERO)).isTrue();
}
@Test
void matcher_should_not_fill_description_when_assertion_passes() {
Description description = mock();
assertThat(isZeroMatcher.matches(ZERO)).isTrue();
isZeroMatcher.describeTo(description);
verifyNoInteractions(description);
}
@Test
void matcher_should_fail_when_assertion_fails() {
assertThat(isZeroMatcher.matches(ONE)).isFalse();
}
/**
* {@link Failures#removeAssertJRelatedElementsFromStackTrace} must be set to true
* in order for this test to pass. It is in {@link this#setUp()}.
*/
@Test
void matcher_should_fill_description_when_assertion_fails() {
// WHEN
assertThat(isZeroMatcher.matches(ONE)).isFalse();
// THEN
Description description = mock();
InOrder inOrder = inOrder(description);
isZeroMatcher.describeTo(description);
inOrder.verify(description).appendText("AssertionError with message: ");
inOrder.verify(description).appendText(shouldBeEqualMessage("1", "0"));
inOrder.verify(description).appendText("%n%nStacktrace was: ".formatted());
inOrder.verify(description).appendText(assertArg(s -> then(s).contains("%nexpected: 0%n but was: 1".formatted(),
"org.assertj.tests.core.matcher.AssertionMatcher_matches_Test$1.assertion(AssertionMatcher_matches_Test.java:",
"org.assertj.core.matcher.AssertionMatcher.matches(AssertionMatcher.java:",
"org.assertj.tests.core.matcher.AssertionMatcher_matches_Test.matcher_should_fill_description_when_assertion_fails(AssertionMatcher_matches_Test.java:")));
}
}
| AssertionMatcher_matches_Test |
java | apache__camel | components/camel-huawei/camel-huaweicloud-iam/src/test/java/org/apache/camel/component/huaweicloud/iam/UpdateUserJsonTest.java | {
"start": 1245,
"end": 2925
} | class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(UpdateUserJsonTest.class.getName());
TestConfiguration testConfiguration = new TestConfiguration();
@BindToRegistry("iamClient")
IAMMockClient mockClient = new IAMMockClient(null);
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:update_user")
.to("hwcloud-iam:updateUser?" +
"accessKey=" + testConfiguration.getProperty("accessKey") +
"&secretKey=" + testConfiguration.getProperty("secretKey") +
"®ion=" + testConfiguration.getProperty("region") +
"&userId=" + testConfiguration.getProperty("userId") +
"&ignoreSslVerification=true" +
"&iamClient=#iamClient")
.log("update user successful")
.to("mock:update_user_result");
}
};
}
@Test
public void testUpdateUser() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:update_user_result");
mock.expectedMinimumMessageCount(1);
template.sendBody("direct:update_user", "{\"description\":\"First\",\"name\":\"User 19\"}");
Exchange responseExchange = mock.getExchanges().get(0);
mock.assertIsSatisfied();
assertEquals("{\"description\":\"First\",\"name\":\"User 19\"}",
responseExchange.getIn().getBody(String.class));
}
}
| UpdateUserJsonTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/SqlScriptNestedTests.java | {
"start": 3475,
"end": 3646
} | class ____ extends AbstractSqlMergeModeTests {
@Nested
@NestedTestConfiguration(EnclosingConfiguration.INHERIT)
@SqlMergeMode(MergeMode.MERGE)
| NestedSqlMergeModeTests |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/socket/AbstractReactiveWebSocketIntegrationTests.java | {
"start": 6729,
"end": 6964
} | class ____ {
@Bean
public WebFilter contextFilter() {
return new ServerWebExchangeContextFilter();
}
@Bean
public DispatcherHandler webHandler() {
return new DispatcherHandler();
}
}
abstract static | DispatcherConfig |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng0294MergeGlobalAndUserSettingsTest.java | {
"start": 1034,
"end": 2025
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test merging of global- and user-level settings.xml files.
*
* @throws Exception in case of failure
*/
@Test
public void testitMNG294() throws Exception {
File testDir = extractResources("/mng-0294");
Verifier verifier = new Verifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("--settings");
verifier.addCliArgument("user-settings.xml");
// dedicated CLI option only available since MNG-3914
verifier.addCliArgument("--global-settings");
verifier.addCliArgument("global-settings.xml");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-touch:touch");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent("target/test.txt");
}
}
| MavenITmng0294MergeGlobalAndUserSettingsTest |
java | quarkusio__quarkus | extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/PermitAllWithPermissionsAllowedValidationFailureTest.java | {
"start": 738,
"end": 835
} | class ____ {
public void securedBean() {
// EMPTY
}
}
}
| SecuredBean |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/processors/UnicastProcessorTest.java | {
"start": 1401,
"end": 13651
} | class ____ extends FlowableProcessorTest<Object> {
@Override
protected FlowableProcessor<Object> create() {
return UnicastProcessor.create();
}
@Test
public void fusionLive() {
UnicastProcessor<Integer> ap = UnicastProcessor.create();
TestSubscriberEx<Integer> ts = new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);
ap.subscribe(ts);
ts
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC);
ts.assertNoValues().assertNoErrors().assertNotComplete();
ap.onNext(1);
ts.assertValue(1).assertNoErrors().assertNotComplete();
ap.onComplete();
ts.assertResult(1);
}
@Test
public void fusionOfflie() {
UnicastProcessor<Integer> ap = UnicastProcessor.create();
ap.onNext(1);
ap.onComplete();
TestSubscriberEx<Integer> ts = new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);
ap.subscribe(ts);
ts
.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(1);
}
@Test
public void failFast() {
UnicastProcessor<Integer> ap = UnicastProcessor.create(false);
ap.onNext(1);
ap.onError(new RuntimeException());
TestSubscriber<Integer> ts = TestSubscriber.create();
ap.subscribe(ts);
ts
.assertValueCount(0)
.assertError(RuntimeException.class);
}
@Test
public void failFastFusionOffline() {
UnicastProcessor<Integer> ap = UnicastProcessor.create(false);
ap.onNext(1);
ap.onError(new RuntimeException());
TestSubscriberEx<Integer> ts = new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);
ap.subscribe(ts);
ts
.assertValueCount(0)
.assertError(RuntimeException.class);
}
@Test
public void threeArgsFactory() {
Runnable noop = new Runnable() {
@Override
public void run() {
}
};
UnicastProcessor<Integer> ap = UnicastProcessor.create(16, noop, false);
ap.onNext(1);
ap.onError(new RuntimeException());
TestSubscriber<Integer> ts = TestSubscriber.create();
ap.subscribe(ts);
ts
.assertValueCount(0)
.assertError(RuntimeException.class);
}
@Test
public void onTerminateCalledWhenOnError() {
final AtomicBoolean didRunOnTerminate = new AtomicBoolean();
UnicastProcessor<Integer> up = UnicastProcessor.create(Observable.bufferSize(), new Runnable() {
@Override public void run() {
didRunOnTerminate.set(true);
}
});
assertFalse(didRunOnTerminate.get());
up.onError(new RuntimeException("some error"));
assertTrue(didRunOnTerminate.get());
}
@Test
public void onTerminateCalledWhenOnComplete() {
final AtomicBoolean didRunOnTerminate = new AtomicBoolean();
UnicastProcessor<Integer> up = UnicastProcessor.create(Observable.bufferSize(), new Runnable() {
@Override public void run() {
didRunOnTerminate.set(true);
}
});
assertFalse(didRunOnTerminate.get());
up.onComplete();
assertTrue(didRunOnTerminate.get());
}
@Test
public void onTerminateCalledWhenCanceled() {
final AtomicBoolean didRunOnTerminate = new AtomicBoolean();
UnicastProcessor<Integer> up = UnicastProcessor.create(Observable.bufferSize(), new Runnable() {
@Override public void run() {
didRunOnTerminate.set(true);
}
});
final Disposable subscribe = up.subscribe();
assertFalse(didRunOnTerminate.get());
subscribe.dispose();
assertTrue(didRunOnTerminate.get());
}
@Test(expected = NullPointerException.class)
public void nullOnTerminate() {
UnicastProcessor.create(5, null);
}
@Test(expected = IllegalArgumentException.class)
public void negativeCapacityHint() {
UnicastProcessor.create(-1);
}
@Test(expected = IllegalArgumentException.class)
public void zeroCapacityHint() {
UnicastProcessor.create(0);
}
@Test
public void completeCancelRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final int[] calls = { 0 };
final UnicastProcessor<Object> up = UnicastProcessor.create(100, new Runnable() {
@Override
public void run() {
calls[0]++;
}
});
final TestSubscriber<Object> ts = up.test();
Runnable r1 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
up.onComplete();
}
};
TestHelper.race(r1, r2);
assertEquals(1, calls[0]);
}
}
@Test
public void afterDone() {
UnicastProcessor<Object> p = UnicastProcessor.create();
p.onComplete();
BooleanSubscription bs = new BooleanSubscription();
p.onSubscribe(bs);
p.onNext(1);
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
p.onError(new TestException());
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
p.onComplete();
p.test().assertResult();
assertNull(p.getThrowable());
assertTrue(p.hasComplete());
assertFalse(p.hasThrowable());
}
@Test
public void onErrorStatePeeking() {
UnicastProcessor<Object> p = UnicastProcessor.create();
assertFalse(p.hasComplete());
assertFalse(p.hasThrowable());
assertNull(p.getThrowable());
TestException ex = new TestException();
p.onError(ex);
assertFalse(p.hasComplete());
assertTrue(p.hasThrowable());
assertSame(ex, p.getThrowable());
}
@Test
public void rejectSyncFusion() {
UnicastProcessor<Object> p = UnicastProcessor.create();
TestSubscriberEx<Object> ts = new TestSubscriberEx<>().setInitialFusionMode(QueueFuseable.SYNC);
p.subscribe(ts);
ts.assertFusionMode(QueueFuseable.NONE);
}
@Test
public void cancelOnArrival() {
UnicastProcessor.create()
.test(0L, true)
.assertEmpty();
}
@Test
public void multiSubscriber() {
UnicastProcessor<Object> p = UnicastProcessor.create();
TestSubscriber<Object> ts = p.test();
p.test()
.assertFailure(IllegalStateException.class);
p.onNext(1);
p.onComplete();
ts.assertResult(1);
}
@Test
public void fusedDrainCancel() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final UnicastProcessor<Object> p = UnicastProcessor.create();
final TestSubscriberEx<Object> ts = new TestSubscriberEx<>().setInitialFusionMode(QueueFuseable.ANY);
p.subscribe(ts);
Runnable r1 = new Runnable() {
@Override
public void run() {
p.onNext(1);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void subscribeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final UnicastProcessor<Integer> up = UnicastProcessor.create();
final TestSubscriberEx<Integer> ts1 = new TestSubscriberEx<>();
final TestSubscriberEx<Integer> ts2 = new TestSubscriberEx<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
up.subscribe(ts1);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
up.subscribe(ts2);
}
};
TestHelper.race(r1, r2);
if (ts1.errors().size() == 0) {
ts2.assertFailure(IllegalStateException.class);
} else
if (ts2.errors().size() == 0) {
ts1.assertFailure(IllegalStateException.class);
} else {
fail("Neither TestObserver failed");
}
}
}
@Test
public void hasObservers() {
UnicastProcessor<Integer> up = UnicastProcessor.create();
assertFalse(up.hasSubscribers());
TestSubscriber<Integer> ts = up.test();
assertTrue(up.hasSubscribers());
ts.cancel();
assertFalse(up.hasSubscribers());
}
@Test
public void drainFusedFailFast() {
UnicastProcessor<Integer> up = UnicastProcessor.create(false);
TestSubscriberEx<Integer> ts = up.to(TestHelper.<Integer>testSubscriber(1, QueueFuseable.ANY, false));
up.done = true;
up.drainFused(ts);
ts.assertResult();
}
@Test
public void drainFusedFailFastEmpty() {
UnicastProcessor<Integer> up = UnicastProcessor.create(false);
TestSubscriberEx<Integer> ts = up.to(TestHelper.<Integer>testSubscriber(1, QueueFuseable.ANY, false));
up.drainFused(ts);
ts.assertEmpty();
}
@Test
public void checkTerminatedFailFastEmpty() {
UnicastProcessor<Integer> up = UnicastProcessor.create(false);
TestSubscriberEx<Integer> ts = up.to(TestHelper.<Integer>testSubscriber(1, QueueFuseable.ANY, false));
up.checkTerminated(true, true, false, ts, up.queue);
ts.assertEmpty();
}
@Test
public void alreadyCancelled() {
UnicastProcessor<Integer> up = UnicastProcessor.create(false);
up.test().cancel();
BooleanSubscription bs = new BooleanSubscription();
up.onSubscribe(bs);
assertTrue(bs.isCancelled());
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
up.onError(new TestException());
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void unicastSubscriptionBadRequest() {
UnicastProcessor<Integer> up = UnicastProcessor.create(false);
UnicastProcessor<Integer>.UnicastQueueSubscription usc = (UnicastProcessor<Integer>.UnicastQueueSubscription)up.wip;
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
usc.request(-1);
TestHelper.assertError(errors, 0, IllegalArgumentException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void fusedNoConcurrentCleanDueToCancel() {
for (int j = 0; j < TestHelper.RACE_LONG_LOOPS; j++) {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final UnicastProcessor<Integer> up = UnicastProcessor.create();
TestObserver<Integer> to = up
.observeOn(Schedulers.io())
.map(Functions.<Integer>identity())
.observeOn(Schedulers.single())
.firstOrError()
.test();
for (int i = 0; up.hasSubscribers(); i++) {
up.onNext(i);
}
to
.awaitDone(5, TimeUnit.SECONDS)
;
if (!errors.isEmpty()) {
throw new CompositeException(errors);
}
to.assertResult(0);
} finally {
RxJavaPlugins.reset();
}
}
}
}
| UnicastProcessorTest |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/ServerCallImpl.java | {
"start": 9618,
"end": 13603
} | class ____<ReqT> implements ServerStreamListener {
private final ServerCallImpl<ReqT, ?> call;
private final ServerCall.Listener<ReqT> listener;
private final Context.CancellableContext context;
public ServerStreamListenerImpl(
ServerCallImpl<ReqT, ?> call, ServerCall.Listener<ReqT> listener,
Context.CancellableContext context) {
this.call = checkNotNull(call, "call");
this.listener = checkNotNull(listener, "listener must not be null");
this.context = checkNotNull(context, "context");
// Wire ourselves up so that if the context is cancelled, our flag call.cancelled also
// reflects the new state. Use a DirectExecutor so that it happens in the same thread
// as the caller of {@link Context#cancel}.
this.context.addListener(
new Context.CancellationListener() {
@Override
public void cancelled(Context context) {
// If the context has a cancellation cause then something exceptional happened
// and we should also mark the call as cancelled.
if (context.cancellationCause() != null) {
ServerStreamListenerImpl.this.call.cancelled = true;
}
}
},
MoreExecutors.directExecutor());
}
@Override
public void messagesAvailable(MessageProducer producer) {
try (TaskCloseable ignore = PerfMark.traceTask("ServerStreamListener.messagesAvailable")) {
PerfMark.attachTag(call.tag);
messagesAvailableInternal(producer);
}
}
@SuppressWarnings("Finally") // The code avoids suppressing the exception thrown from try
private void messagesAvailableInternal(final MessageProducer producer) {
if (call.cancelled) {
GrpcUtil.closeQuietly(producer);
return;
}
InputStream message;
try {
while ((message = producer.next()) != null) {
try {
listener.onMessage(call.method.parseRequest(message));
} catch (Throwable t) {
GrpcUtil.closeQuietly(message);
throw t;
}
message.close();
}
} catch (Throwable t) {
GrpcUtil.closeQuietly(producer);
Throwables.throwIfUnchecked(t);
throw new RuntimeException(t);
}
}
@Override
public void halfClosed() {
try (TaskCloseable ignore = PerfMark.traceTask("ServerStreamListener.halfClosed")) {
PerfMark.attachTag(call.tag);
if (call.cancelled) {
return;
}
listener.onHalfClose();
}
}
@Override
public void closed(Status status) {
try (TaskCloseable ignore = PerfMark.traceTask("ServerStreamListener.closed")) {
PerfMark.attachTag(call.tag);
closedInternal(status);
}
}
private void closedInternal(Status status) {
Throwable cancelCause = null;
try {
if (status.isOk()) {
listener.onComplete();
} else {
call.cancelled = true;
listener.onCancel();
// The status will not have a cause in all failure scenarios, but we want to make sure
// we always cancel the context with one to keep the context cancelled state consistent.
cancelCause = InternalStatus.asRuntimeExceptionWithoutStacktrace(
Status.CANCELLED.withDescription("RPC cancelled"), null);
}
} finally {
// Cancel context after delivering RPC closure notification to allow the application to
// clean up and update any state based on whether onComplete or onCancel was called.
context.cancel(cancelCause);
}
}
@Override
public void onReady() {
try (TaskCloseable ignore = PerfMark.traceTask("ServerStreamListener.onReady")) {
PerfMark.attachTag(call.tag);
if (call.cancelled) {
return;
}
listener.onReady();
}
}
}
}
| ServerStreamListenerImpl |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OpenshiftBuildsEndpointBuilderFactory.java | {
"start": 1431,
"end": 1573
} | interface ____ {
/**
* Builder for endpoint for the OpenShift Builds component.
*/
public | OpenshiftBuildsEndpointBuilderFactory |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/jdbc/SqlConfig.java | {
"start": 1303,
"end": 2856
} | class ____. When declared directly
* via the {@link Sql#config config} attribute of the {@code @Sql} annotation,
* {@code @SqlConfig} serves as <strong><em>local</em></strong> configuration
* for the SQL scripts declared within the enclosing {@code @Sql} annotation.
*
* <h3>Default Values</h3>
* <p>Every attribute in {@code @SqlConfig} has an <em>implicit</em> default value
* which is documented in the javadocs of the corresponding attribute. Due to the
* rules defined for annotation attributes in the Java Language Specification, it
* is unfortunately not possible to assign a value of {@code null} to an annotation
* attribute. Thus, in order to support overrides of <em>inherited</em> global
* configuration, {@code @SqlConfig} attributes have an <em>explicit</em>
* {@code default} value of either {@code ""} for Strings, <code>{}</code> for
* arrays, or {@code DEFAULT} for Enums. This approach allows local declarations
* of {@code @SqlConfig} to selectively override individual attributes from global
* declarations of {@code @SqlConfig} by providing a value other than {@code ""},
* <code>{}</code>, or {@code DEFAULT}.
*
* <h3>Inheritance and Overrides</h3>
* <p>Global {@code @SqlConfig} attributes are <em>inherited</em> whenever local
* {@code @SqlConfig} attributes do not supply an explicit value other than
* {@code ""}, <code>{}</code>, or {@code DEFAULT}. Explicit local configuration
* therefore <em>overrides</em> global configuration.
*
* <p>This annotation will be inherited from an enclosing test | hierarchy |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/jpa/internal/util/PersistenceUtilHelper.java | {
"start": 13333,
"end": 14232
} | class ____ implements Serializable {
private final ClassValue<ClassMetadataCache> metadataCacheClassValue;
public MetadataCache() {
this( new MetadataClassValue() );
}
//To help with serialization: no need to serialize the actual metadataCacheClassValue field
private MetadataCache(ClassValue<ClassMetadataCache> metadataCacheClassValue) {
this.metadataCacheClassValue = metadataCacheClassValue;
}
Object writeReplace() throws ObjectStreamException {
//Writing a different instance which doesn't include the cache
return new MetadataCache(null);
}
private Object readResolve() throws ObjectStreamException {
//Ensure we do instantiate a new cache instance on deserialization
return new MetadataCache();
}
ClassMetadataCache getClassMetadata(final Class<?> clazz) {
return metadataCacheClassValue.get( clazz );
}
}
private static final | MetadataCache |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/utils/UniqueTopicSerdeScope.java | {
"start": 4627,
"end": 5609
} | class ____<T> implements Deserializer<T> {
private final AtomicBoolean isKey;
private final Deserializer<T> delegate;
public UniqueTopicDeserializerDecorator(final AtomicBoolean isKey, final Deserializer<T> delegate) {
this.isKey = isKey;
this.delegate = delegate;
}
@Override
public void configure(final Map<String, ?> configs, final boolean isKey) {
delegate.configure(configs, isKey);
this.isKey.set(isKey);
}
@Override
public T deserialize(final String topic, final byte[] data) {
return delegate.deserialize(topic, data);
}
@Override
public T deserialize(final String topic, final Headers headers, final byte[] data) {
return delegate.deserialize(topic, headers, data);
}
@Override
public void close() {
delegate.close();
}
}
}
| UniqueTopicDeserializerDecorator |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/TestConfigDataBootstrap.java | {
"start": 2345,
"end": 3004
} | class ____ implements ConfigDataLoader<Resource> {
@Override
public ConfigData load(ConfigDataLoaderContext context, Resource location) throws IOException {
context.getBootstrapContext()
.registerIfAbsent(LoaderHelper.class,
(bootstrapContext) -> new LoaderHelper(location, () -> bootstrapContext.get(Binder.class)));
LoaderHelper helper = context.getBootstrapContext().get(LoaderHelper.class);
assertThat(helper).isNotNull();
context.getBootstrapContext().addCloseListener(helper);
return new ConfigData(
Collections.singleton(new MapPropertySource("loaded", Collections.singletonMap("test", "test"))));
}
}
static | Loader |
java | apache__camel | components/camel-coap/src/test/java/org/apache/camel/coap/CoAPMethodRestrictTest.java | {
"start": 1128,
"end": 3493
} | class ____ extends CoAPTestSupport {
@Test
void testDefaultCoAPMethodRestrict() {
Configuration.createStandardWithoutFile();
// All request methods should be valid on this endpoint
assertCoAPMethodRestrictResponse("/test", CoAPConstants.METHOD_RESTRICT_ALL, "GET: /test");
}
@Test
void testSpecifiedCoAPMethodRestrict() {
Configuration.createStandardWithoutFile();
// Only GET is valid for /test/a
assertCoAPMethodRestrictResponse("/test/a", "GET", "GET: /test/a");
// Only DELETE is valid for /test/a/b
assertCoAPMethodRestrictResponse("/test/a/b", "DELETE", "DELETE: /test/a/b");
// Only DELETE & GET are valid for /test/a/b/c
assertCoAPMethodRestrictResponse("/test/a/b/c", "DELETE,GET", "DELETE & GET: /test/a/b/c");
// Only GET is valid for /test/b
assertCoAPMethodRestrictResponse("/test/b", "GET", "GET: /test/b");
}
private void assertCoAPMethodRestrictResponse(String path, String methodRestrict, String expectedResponse) {
for (String method : CoAPConstants.METHOD_RESTRICT_ALL.split(",")) {
String result = template.requestBodyAndHeader("coap://localhost:" + PORT + path, null, CoAPConstants.COAP_METHOD,
method, String.class);
if (methodRestrict.contains(method)) {
assertEquals(expectedResponse, result);
} else {
assertArrayEquals(Bytes.EMPTY, result.getBytes());
}
}
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
fromF("coap://localhost:%d/test", PORT).setBody(constant("GET: /test"));
fromF("coap://localhost:%d/test/a?coapMethodRestrict=GET", PORT).setBody(constant("GET: /test/a"));
fromF("coap://localhost:%d/test/a/b?coapMethodRestrict=DELETE", PORT).setBody(constant("DELETE: /test/a/b"));
fromF("coap://localhost:%d/test/a/b/c?coapMethodRestrict=DELETE,GET", PORT)
.setBody(constant("DELETE & GET: /test/a/b/c"));
fromF("coap://localhost:%d/test/b?coapMethodRestrict=GET", PORT).setBody(constant("GET: /test/b"));
}
};
}
}
| CoAPMethodRestrictTest |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java | {
"start": 186917,
"end": 188002
} | class ____ extends ValueExpressionContext {
public PrimaryExpressionContext primaryExpression() {
return getRuleContext(PrimaryExpressionContext.class, 0);
}
public ValueExpressionDefaultContext(ValueExpressionContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterValueExpressionDefault(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitValueExpressionDefault(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitValueExpressionDefault(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static | ValueExpressionDefaultContext |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/FunctionResultTemplate.java | {
"start": 2058,
"end": 2988
} | class ____ implements FunctionResultTemplate {
private final DataType dataType;
private FunctionOutputTemplate(DataType dataType) {
this.dataType = dataType;
}
TypeStrategy toTypeStrategy() {
return TypeStrategies.explicit(dataType);
}
Class<?> toClass() {
return dataType.getConversionClass();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final FunctionOutputTemplate template = (FunctionOutputTemplate) o;
return Objects.equals(dataType, template.dataType);
}
@Override
public int hashCode() {
return Objects.hash(dataType);
}
}
@Internal
| FunctionOutputTemplate |
java | apache__camel | components/camel-servlet/src/test/java/org/apache/camel/component/servlet/ServletStreamingGzipChunkedManualTest.java | {
"start": 1405,
"end": 3616
} | class ____ extends ServletCamelRouterTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(ServletStreamingGzipChunkedManualTest.class);
private PipedOutputStream pos = new PipedOutputStream();
private PipedInputStream pis;
@Test
public void testManual() throws Exception {
LOG.info("Sleeping 10 sec");
Thread.sleep(10000);
// use background thread to write to stream that camel-servlet uses as response
context.getExecutorServiceManager().newSingleThreadExecutor(this, "writer").execute(() -> {
try {
File file = new File("src/test/data/big-payload.json");
FileInputStream fis = new FileInputStream(file);
LOG.info(">>>> sleeping <<<<");
Thread.sleep(1000);
LOG.info(">>>> writing <<<<");
int ch = 0;
int len = 0;
while (ch != -1) {
ch = fis.read();
pos.write(ch);
len++;
if (len % 1000 == 0) {
LOG.info(">>>> sleeping <<<<");
pos.flush();
Thread.sleep(250);
LOG.info(">>>> writing <<<<");
}
}
LOG.info(">>>> Payload size: {}", len);
LOG.info(">>>> writing EOL <<<<");
pos.flush();
} catch (Exception e) {
// ignore
} finally {
LOG.info(">>>> closing <<<<");
IOHelper.close(pos);
}
});
LOG.info("Sleeping 60 sec");
Thread.sleep(60000);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
pis = new PipedInputStream(pos);
from("servlet:/hello")
.setHeader(Exchange.CONTENT_ENCODING, constant("gzip"))
.setBody().constant(pis);
}
};
}
}
| ServletStreamingGzipChunkedManualTest |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtilsTests.java | {
"start": 784,
"end": 2611
} | class ____ extends ESTestCase {
public void testIndexName() {
final long timestamp = ZonedDateTime.of(2017, 8, 3, 13, 47, 58, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
DateFormatter formatter = DateFormatter.forPattern("yyyy.MM.dd").withZone(ZoneOffset.UTC);
assertThat(indexName(formatter, MonitoredSystem.ES, timestamp), equalTo(".monitoring-es-" + TEMPLATE_VERSION + "-2017.08.03"));
assertThat(
indexName(formatter, MonitoredSystem.KIBANA, timestamp),
equalTo(".monitoring-kibana-" + TEMPLATE_VERSION + "-2017.08.03")
);
assertThat(
indexName(formatter, MonitoredSystem.LOGSTASH, timestamp),
equalTo(".monitoring-logstash-" + TEMPLATE_VERSION + "-2017.08.03")
);
assertThat(
indexName(formatter, MonitoredSystem.BEATS, timestamp),
equalTo(".monitoring-beats-" + TEMPLATE_VERSION + "-2017.08.03")
);
formatter = DateFormatter.forPattern("yyyy-dd-MM-HH.mm.ss").withZone(ZoneOffset.UTC);
assertThat(
indexName(formatter, MonitoredSystem.ES, timestamp),
equalTo(".monitoring-es-" + TEMPLATE_VERSION + "-2017-03-08-13.47.58")
);
assertThat(
indexName(formatter, MonitoredSystem.KIBANA, timestamp),
equalTo(".monitoring-kibana-" + TEMPLATE_VERSION + "-2017-03-08-13.47.58")
);
assertThat(
indexName(formatter, MonitoredSystem.LOGSTASH, timestamp),
equalTo(".monitoring-logstash-" + TEMPLATE_VERSION + "-2017-03-08-13.47.58")
);
assertThat(
indexName(formatter, MonitoredSystem.BEATS, timestamp),
equalTo(".monitoring-beats-" + TEMPLATE_VERSION + "-2017-03-08-13.47.58")
);
}
}
| MonitoringTemplateUtilsTests |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/RocksDBCachingPriorityQueueSet.java | {
"start": 2432,
"end": 12599
} | class ____<E extends HeapPriorityQueueElement>
extends AbstractHeapPriorityQueueElement implements InternalPriorityQueue<E> {
/** Serialized empty value to insert into RocksDB. */
private static final byte[] DUMMY_BYTES = new byte[] {};
/** The RocksDB instance that serves as store. */
@Nonnull private final RocksDB db;
@Nonnull private final ReadOptions readOptions;
/** Handle to the column family of the RocksDB instance in which the elements are stored. */
@Nonnull private final ColumnFamilyHandle columnFamilyHandle;
/**
* Serializer for the contained elements. The lexicographical order of the bytes of serialized
* objects must be aligned with their logical order.
*/
@Nonnull private final TypeSerializer<E> byteOrderProducingSerializer;
/** Wrapper to batch all writes to RocksDB. */
@Nonnull private final RocksDBWriteBatchWrapper batchWrapper;
/** The key-group id in serialized form. */
@Nonnull private final byte[] groupPrefixBytes;
/** Output view that helps to serialize elements. */
@Nonnull private final DataOutputSerializer outputView;
/** Input view that helps to de-serialize elements. */
@Nonnull private final DataInputDeserializer inputView;
/** In memory cache that holds a head-subset of the elements stored in RocksDB. */
@Nonnull private final OrderedByteArraySetCache orderedCache;
/**
* This holds the key that we use to seek to the first element in RocksDB, to improve
* seek/iterator performance.
*/
@Nonnull private byte[] seekHint;
/** Cache for the head element in de-serialized form. */
@Nullable private E peekCache;
/** This flag is true iff all elements in RocksDB are also contained in the cache. */
private boolean allElementsInCache;
RocksDBCachingPriorityQueueSet(
@Nonnegative int keyGroupId,
@Nonnegative int keyGroupPrefixBytes,
@Nonnull RocksDB db,
@Nonnull ReadOptions readOptions,
@Nonnull ColumnFamilyHandle columnFamilyHandle,
@Nonnull TypeSerializer<E> byteOrderProducingSerializer,
@Nonnull DataOutputSerializer outputStream,
@Nonnull DataInputDeserializer inputStream,
@Nonnull RocksDBWriteBatchWrapper batchWrapper,
@Nonnull OrderedByteArraySetCache orderedByteArraySetCache) {
this.db = db;
this.readOptions = readOptions;
this.columnFamilyHandle = columnFamilyHandle;
this.byteOrderProducingSerializer = byteOrderProducingSerializer;
this.batchWrapper = batchWrapper;
this.outputView = outputStream;
this.inputView = inputStream;
this.orderedCache = orderedByteArraySetCache;
this.allElementsInCache = false;
this.groupPrefixBytes = createKeyGroupBytes(keyGroupId, keyGroupPrefixBytes);
this.seekHint = groupPrefixBytes;
}
@Nullable
@Override
public E peek() {
checkRefillCacheFromStore();
if (peekCache != null) {
return peekCache;
}
byte[] firstBytes = orderedCache.peekFirst();
if (firstBytes != null) {
peekCache = deserializeElement(firstBytes);
return peekCache;
} else {
return null;
}
}
@Nullable
@Override
public E poll() {
checkRefillCacheFromStore();
final byte[] firstBytes = orderedCache.pollFirst();
if (firstBytes == null) {
return null;
}
// write-through sync
removeFromRocksDB(firstBytes);
if (orderedCache.isEmpty()) {
seekHint = firstBytes;
}
if (peekCache != null) {
E fromCache = peekCache;
peekCache = null;
return fromCache;
} else {
return deserializeElement(firstBytes);
}
}
@Override
public boolean add(@Nonnull E toAdd) {
checkRefillCacheFromStore();
final byte[] toAddBytes = serializeElement(toAdd);
final boolean cacheFull = orderedCache.isFull();
if ((!cacheFull && allElementsInCache)
|| LEXICOGRAPHIC_BYTE_COMPARATOR.compare(toAddBytes, orderedCache.peekLast()) < 0) {
if (cacheFull) {
// we drop the element with lowest priority from the cache
orderedCache.pollLast();
// the dropped element is now only in the store
allElementsInCache = false;
}
if (orderedCache.add(toAddBytes)) {
// write-through sync
addToRocksDB(toAddBytes);
if (toAddBytes == orderedCache.peekFirst()) {
peekCache = null;
return true;
}
}
} else {
// we only added to the store
addToRocksDB(toAddBytes);
allElementsInCache = false;
}
return false;
}
@Override
public boolean remove(@Nonnull E toRemove) {
checkRefillCacheFromStore();
final byte[] oldHead = orderedCache.peekFirst();
if (oldHead == null) {
return false;
}
final byte[] toRemoveBytes = serializeElement(toRemove);
// write-through sync
removeFromRocksDB(toRemoveBytes);
orderedCache.remove(toRemoveBytes);
if (orderedCache.isEmpty()) {
seekHint = toRemoveBytes;
peekCache = null;
return true;
}
if (oldHead != orderedCache.peekFirst()) {
peekCache = null;
return true;
}
return false;
}
@Override
public void addAll(@Nullable Collection<? extends E> toAdd) {
if (toAdd == null) {
return;
}
for (E element : toAdd) {
add(element);
}
}
@Override
public boolean isEmpty() {
checkRefillCacheFromStore();
return orderedCache.isEmpty();
}
@Nonnull
@Override
public CloseableIterator<E> iterator() {
return new DeserializingIteratorWrapper(orderedBytesIterator());
}
/**
* This implementation comes at a relatively high cost per invocation. It should not be called
* repeatedly when it is clear that the value did not change. Currently this is only truly used
* to realize certain higher-level tests.
*/
@Override
public int size() {
if (allElementsInCache) {
return orderedCache.size();
} else {
int count = 0;
try (final RocksBytesIterator iterator = orderedBytesIterator()) {
while (iterator.hasNext()) {
iterator.next();
++count;
}
}
return count;
}
}
@Nonnull
private RocksBytesIterator orderedBytesIterator() {
flushWriteBatch();
return new RocksBytesIterator(
new RocksIteratorWrapper(db.newIterator(columnFamilyHandle, readOptions)));
}
/** Ensures that recent writes are flushed and reflect in the RocksDB instance. */
private void flushWriteBatch() {
try {
batchWrapper.flush();
} catch (RocksDBException e) {
throw new FlinkRuntimeException(e);
}
}
    /**
     * Inserts the serialized element as a key into the column family via the write batch.
     * Only the key carries information; the value is a shared dummy placeholder.
     */
    private void addToRocksDB(@Nonnull byte[] toAddBytes) {
        try {
            batchWrapper.put(columnFamilyHandle, toAddBytes, DUMMY_BYTES);
        } catch (RocksDBException e) {
            throw new FlinkRuntimeException(e);
        }
    }
    /** Deletes the serialized element (by key) from the column family via the write batch. */
    private void removeFromRocksDB(@Nonnull byte[] toRemoveBytes) {
        try {
            batchWrapper.remove(columnFamilyHandle, toRemoveBytes);
        } catch (RocksDBException e) {
            throw new FlinkRuntimeException(e);
        }
    }
    /**
     * Lazily refills the in-memory cache from RocksDB when the cache is empty but the store may
     * still hold elements. After the bulk load, {@code allElementsInCache} is true iff the store
     * iterator was exhausted, i.e. every remaining element now fits in the cache.
     */
    private void checkRefillCacheFromStore() {
        if (!allElementsInCache && orderedCache.isEmpty()) {
            try (final RocksBytesIterator iterator = orderedBytesIterator()) {
                orderedCache.bulkLoadFromOrderedIterator(iterator);
                // Iterator exhausted => the cache now mirrors the full store contents.
                allElementsInCache = !iterator.hasNext();
            } catch (Exception e) {
                throw new FlinkRuntimeException(
                        "Exception while refilling store from iterator.", e);
            }
        }
    }
private static boolean isPrefixWith(byte[] bytes, byte[] prefixBytes) {
for (int i = 0; i < prefixBytes.length; ++i) {
if (bytes[i] != prefixBytes[i]) {
return false;
}
}
return true;
}
    /**
     * Serializes the given key-group id into a fresh byte array of {@code numPrefixBytes}
     * length, using the shared {@code outputView} buffer.
     *
     * @param keyGroupId the key-group id to encode.
     * @param numPrefixBytes the number of bytes used to encode the key-group.
     * @return a copy of the encoded key-group prefix bytes.
     */
    @Nonnull
    private byte[] createKeyGroupBytes(int keyGroupId, int numPrefixBytes) {
        outputView.clear();
        try {
            CompositeKeySerializationUtils.writeKeyGroup(keyGroupId, numPrefixBytes, outputView);
        } catch (IOException e) {
            throw new FlinkRuntimeException("Could not write key-group bytes.", e);
        }
        return outputView.getCopyOfBuffer();
    }
    /**
     * Serializes the element into a fresh byte array, prefixed with the key-group bytes so that
     * all elements of this set share a common RocksDB key prefix. Reuses the shared
     * {@code outputView} buffer.
     *
     * @param element the element to serialize; must not be null.
     * @return a copy of the serialized bytes (prefix + element).
     */
    @Nonnull
    private byte[] serializeElement(@Nonnull E element) {
        try {
            outputView.clear();
            outputView.write(groupPrefixBytes);
            byteOrderProducingSerializer.serialize(element, outputView);
            return outputView.getCopyOfBuffer();
        } catch (IOException e) {
            throw new FlinkRuntimeException("Error while serializing the element.", e);
        }
    }
    /**
     * Deserializes an element from its serialized form, skipping the leading key-group prefix
     * bytes that {@code serializeElement} prepends.
     *
     * @param bytes the serialized bytes (prefix + element); must not be null.
     * @return the deserialized element.
     */
    @Nonnull
    private E deserializeElement(@Nonnull byte[] bytes) {
        try {
            final int numPrefixBytes = groupPrefixBytes.length;
            // Position the input view just past the key-group prefix.
            inputView.setBuffer(bytes, numPrefixBytes, bytes.length - numPrefixBytes);
            return byteOrderProducingSerializer.deserialize(inputView);
        } catch (IOException e) {
            throw new FlinkRuntimeException("Error while deserializing the element.", e);
        }
    }
/**
* Wraps an iterator over byte-arrays with deserialization logic, so that it iterates over
* deserialized objects.
*/
private | RocksDBCachingPriorityQueueSet |
java | apache__camel | components/camel-pulsar/src/main/java/org/apache/camel/component/pulsar/DefaultPulsarMessageReceipt.java | {
"start": 1055,
"end": 2156
} | class ____ implements PulsarMessageReceipt {
private final Consumer consumer;
private final MessageId messageId;
public DefaultPulsarMessageReceipt(Consumer consumer, MessageId messageId) {
this.consumer = consumer;
this.messageId = messageId;
}
@Override
public void acknowledge() throws PulsarClientException {
consumer.acknowledge(messageId);
}
@Override
public void acknowledgeCumulative() throws PulsarClientException {
consumer.acknowledgeCumulative(messageId);
}
@Override
public CompletableFuture<Void> acknowledgeAsync() {
return consumer.acknowledgeAsync(messageId);
}
@Override
public CompletableFuture<Void> acknowledgeCumulativeAsync() {
return consumer.acknowledgeCumulativeAsync(messageId);
}
@Override
public void negativeAcknowledge() {
consumer.negativeAcknowledge(messageId);
}
public Consumer getConsumer() {
return consumer;
}
public MessageId getMessageId() {
return messageId;
}
}
| DefaultPulsarMessageReceipt |
java | apache__logging-log4j2 | log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/resolver/TemplateResolverFactory.java | {
"start": 1148,
"end": 1667
} | interface ____<V, C extends TemplateResolverContext<V, C>> {
/**
* Main plugin category for {@link TemplateResolverFactory} implementations.
*/
String CATEGORY = "JsonTemplateResolverFactory";
/**
* The targeted value class.
*/
Class<V> getValueClass();
/**
* The targeted {@link TemplateResolverContext} class.
*/
Class<C> getContextClass();
String getName();
TemplateResolver<V> create(C context, TemplateResolverConfig config);
}
| TemplateResolverFactory |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/generated/java/org/apache/camel/component/salesforce/SalesforceEndpointConfigurer.java | {
"start": 737,
"end": 29766
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
SalesforceEndpoint target = (SalesforceEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allornone":
case "allOrNone": target.getConfiguration().setAllOrNone(property(camelContext, boolean.class, value)); return true;
case "apexmethod":
case "apexMethod": target.getConfiguration().setApexMethod(property(camelContext, java.lang.String.class, value)); return true;
case "apexqueryparams":
case "apexQueryParams": target.getConfiguration().setApexQueryParams(property(camelContext, java.util.Map.class, value)); return true;
case "apexurl":
case "apexUrl": target.getConfiguration().setApexUrl(property(camelContext, java.lang.String.class, value)); return true;
case "apiversion":
case "apiVersion": target.getConfiguration().setApiVersion(property(camelContext, java.lang.String.class, value)); return true;
case "backoffincrement":
case "backoffIncrement": target.getConfiguration().setBackoffIncrement(property(camelContext, java.time.Duration.class, value).toMillis()); return true;
case "batchid":
case "batchId": target.getConfiguration().setBatchId(property(camelContext, java.lang.String.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "compositemethod":
case "compositeMethod": target.getConfiguration().setCompositeMethod(property(camelContext, java.lang.String.class, value)); return true;
case "consumerworkerpoolenabled":
case "consumerWorkerPoolEnabled": target.setConsumerWorkerPoolEnabled(property(camelContext, boolean.class, value)); return true;
case "consumerworkerpoolexecutorservice":
case "consumerWorkerPoolExecutorService": target.setConsumerWorkerPoolExecutorService(property(camelContext, java.util.concurrent.ExecutorService.class, value)); return true;
case "consumerworkerpoolmaxsize":
case "consumerWorkerPoolMaxSize": target.setConsumerWorkerPoolMaxSize(property(camelContext, int.class, value)); return true;
case "consumerworkerpoolsize":
case "consumerWorkerPoolSize": target.setConsumerWorkerPoolSize(property(camelContext, int.class, value)); return true;
case "contenttype":
case "contentType": target.getConfiguration().setContentType(property(camelContext, org.apache.camel.component.salesforce.api.dto.bulk.ContentType.class, value)); return true;
case "defaultreplayid":
case "defaultReplayId": target.getConfiguration().setDefaultReplayId(property(camelContext, java.lang.Long.class, value)); return true;
case "eventname":
case "eventName": target.getConfiguration().setEventName(property(camelContext, java.lang.String.class, value)); return true;
case "eventschemaformat":
case "eventSchemaFormat": target.getConfiguration().setEventSchemaFormat(property(camelContext, org.apache.camel.component.salesforce.internal.dto.EventSchemaFormatEnum.class, value)); return true;
case "eventschemaid":
case "eventSchemaId": target.getConfiguration().setEventSchemaId(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "fallbackreplayid":
case "fallBackReplayId": target.getConfiguration().setFallBackReplayId(property(camelContext, java.lang.Long.class, value)); return true;
case "fallbacktolatestreplayid":
case "fallbackToLatestReplayId": target.getConfiguration().setFallbackToLatestReplayId(property(camelContext, boolean.class, value)); return true;
case "format": target.getConfiguration().setFormat(property(camelContext, org.apache.camel.component.salesforce.internal.PayloadFormat.class, value)); return true;
case "httpclient":
case "httpClient": target.getConfiguration().setHttpClient(property(camelContext, org.apache.camel.component.salesforce.SalesforceHttpClient.class, value)); return true;
case "includedetails":
case "includeDetails": target.getConfiguration().setIncludeDetails(property(camelContext, java.lang.Boolean.class, value)); return true;
case "initialreplayidmap":
case "initialReplayIdMap": target.getConfiguration().setInitialReplayIdMap(property(camelContext, java.util.Map.class, value)); return true;
case "instanceid":
case "instanceId": target.getConfiguration().setInstanceId(property(camelContext, java.lang.String.class, value)); return true;
case "jobid":
case "jobId": target.getConfiguration().setJobId(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "limit": target.getConfiguration().setLimit(property(camelContext, java.lang.Integer.class, value)); return true;
case "locator": target.getConfiguration().setLocator(property(camelContext, java.lang.String.class, value)); return true;
case "maxbackoff":
case "maxBackoff": target.getConfiguration().setMaxBackoff(property(camelContext, java.time.Duration.class, value).toMillis()); return true;
case "maxrecords":
case "maxRecords": target.getConfiguration().setMaxRecords(property(camelContext, java.lang.Integer.class, value)); return true;
case "notfoundbehaviour":
case "notFoundBehaviour": target.getConfiguration().setNotFoundBehaviour(property(camelContext, org.apache.camel.component.salesforce.NotFoundBehaviour.class, value)); return true;
case "notifyforfields":
case "notifyForFields": target.getConfiguration().setNotifyForFields(property(camelContext, org.apache.camel.component.salesforce.internal.dto.NotifyForFieldsEnum.class, value)); return true;
case "notifyforoperationcreate":
case "notifyForOperationCreate": target.getConfiguration().setNotifyForOperationCreate(property(camelContext, java.lang.Boolean.class, value)); return true;
case "notifyforoperationdelete":
case "notifyForOperationDelete": target.getConfiguration().setNotifyForOperationDelete(property(camelContext, java.lang.Boolean.class, value)); return true;
case "notifyforoperationundelete":
case "notifyForOperationUndelete": target.getConfiguration().setNotifyForOperationUndelete(property(camelContext, java.lang.Boolean.class, value)); return true;
case "notifyforoperationupdate":
case "notifyForOperationUpdate": target.getConfiguration().setNotifyForOperationUpdate(property(camelContext, java.lang.Boolean.class, value)); return true;
case "notifyforoperations":
case "notifyForOperations": target.getConfiguration().setNotifyForOperations(property(camelContext, org.apache.camel.component.salesforce.internal.dto.NotifyForOperationsEnum.class, value)); return true;
case "objectmapper":
case "objectMapper": target.getConfiguration().setObjectMapper(property(camelContext, com.fasterxml.jackson.databind.ObjectMapper.class, value)); return true;
case "pkchunking":
case "pkChunking": target.getConfiguration().setPkChunking(property(camelContext, java.lang.Boolean.class, value)); return true;
case "pkchunkingchunksize":
case "pkChunkingChunkSize": target.getConfiguration().setPkChunkingChunkSize(property(camelContext, java.lang.Integer.class, value)); return true;
case "pkchunkingparent":
case "pkChunkingParent": target.getConfiguration().setPkChunkingParent(property(camelContext, java.lang.String.class, value)); return true;
case "pkchunkingstartrow":
case "pkChunkingStartRow": target.getConfiguration().setPkChunkingStartRow(property(camelContext, java.lang.String.class, value)); return true;
case "pubsubbatchsize":
case "pubSubBatchSize": target.getConfiguration().setPubSubBatchSize(property(camelContext, int.class, value)); return true;
case "pubsubdeserializetype":
case "pubSubDeserializeType": target.getConfiguration().setPubSubDeserializeType(property(camelContext, org.apache.camel.component.salesforce.PubSubDeserializeType.class, value)); return true;
case "pubsubpojoclass":
case "pubSubPojoClass": target.getConfiguration().setPubSubPojoClass(property(camelContext, java.lang.String.class, value)); return true;
case "pubsubreplayid":
case "pubSubReplayId": target.setPubSubReplayId(property(camelContext, java.lang.String.class, value)); return true;
case "querylocator":
case "queryLocator": target.getConfiguration().setQueryLocator(property(camelContext, java.lang.String.class, value)); return true;
case "rawhttpheaders":
case "rawHttpHeaders": target.getConfiguration().setRawHttpHeaders(property(camelContext, java.lang.String.class, value)); return true;
case "rawmethod":
case "rawMethod": target.getConfiguration().setRawMethod(property(camelContext, java.lang.String.class, value)); return true;
case "rawpath":
case "rawPath": target.getConfiguration().setRawPath(property(camelContext, java.lang.String.class, value)); return true;
case "rawpayload":
case "rawPayload": target.getConfiguration().setRawPayload(property(camelContext, boolean.class, value)); return true;
case "rawqueryparameters":
case "rawQueryParameters": target.getConfiguration().setRawQueryParameters(property(camelContext, java.lang.String.class, value)); return true;
case "replayid":
case "replayId": target.setReplayId(property(camelContext, java.lang.Long.class, value)); return true;
case "replaypreset":
case "replayPreset": target.getConfiguration().setReplayPreset(property(camelContext, com.salesforce.eventbus.protobuf.ReplayPreset.class, value)); return true;
case "reportid":
case "reportId": target.getConfiguration().setReportId(property(camelContext, java.lang.String.class, value)); return true;
case "reportmetadata":
case "reportMetadata": target.getConfiguration().setReportMetadata(property(camelContext, org.apache.camel.component.salesforce.api.dto.analytics.reports.ReportMetadata.class, value)); return true;
case "resultid":
case "resultId": target.getConfiguration().setResultId(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectblobfieldname":
case "sObjectBlobFieldName": target.getConfiguration().setSObjectBlobFieldName(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectclass":
case "sObjectClass": target.getConfiguration().setSObjectClass(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectfields":
case "sObjectFields": target.getConfiguration().setSObjectFields(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectid":
case "sObjectId": target.getConfiguration().setSObjectId(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectidname":
case "sObjectIdName": target.getConfiguration().setSObjectIdName(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectidvalue":
case "sObjectIdValue": target.getConfiguration().setSObjectIdValue(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectname":
case "sObjectName": target.getConfiguration().setSObjectName(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectquery":
case "sObjectQuery": target.getConfiguration().setSObjectQuery(property(camelContext, java.lang.String.class, value)); return true;
case "sobjectsearch":
case "sObjectSearch": target.getConfiguration().setSObjectSearch(property(camelContext, java.lang.String.class, value)); return true;
case "streamqueryresult":
case "streamQueryResult": target.getConfiguration().setStreamQueryResult(property(camelContext, java.lang.Boolean.class, value)); return true;
case "updatetopic":
case "updateTopic": target.getConfiguration().setUpdateTopic(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "allornone":
case "allOrNone": return boolean.class;
case "apexmethod":
case "apexMethod": return java.lang.String.class;
case "apexqueryparams":
case "apexQueryParams": return java.util.Map.class;
case "apexurl":
case "apexUrl": return java.lang.String.class;
case "apiversion":
case "apiVersion": return java.lang.String.class;
case "backoffincrement":
case "backoffIncrement": return long.class;
case "batchid":
case "batchId": return java.lang.String.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "compositemethod":
case "compositeMethod": return java.lang.String.class;
case "consumerworkerpoolenabled":
case "consumerWorkerPoolEnabled": return boolean.class;
case "consumerworkerpoolexecutorservice":
case "consumerWorkerPoolExecutorService": return java.util.concurrent.ExecutorService.class;
case "consumerworkerpoolmaxsize":
case "consumerWorkerPoolMaxSize": return int.class;
case "consumerworkerpoolsize":
case "consumerWorkerPoolSize": return int.class;
case "contenttype":
case "contentType": return org.apache.camel.component.salesforce.api.dto.bulk.ContentType.class;
case "defaultreplayid":
case "defaultReplayId": return java.lang.Long.class;
case "eventname":
case "eventName": return java.lang.String.class;
case "eventschemaformat":
case "eventSchemaFormat": return org.apache.camel.component.salesforce.internal.dto.EventSchemaFormatEnum.class;
case "eventschemaid":
case "eventSchemaId": return java.lang.String.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "fallbackreplayid":
case "fallBackReplayId": return java.lang.Long.class;
case "fallbacktolatestreplayid":
case "fallbackToLatestReplayId": return boolean.class;
case "format": return org.apache.camel.component.salesforce.internal.PayloadFormat.class;
case "httpclient":
case "httpClient": return org.apache.camel.component.salesforce.SalesforceHttpClient.class;
case "includedetails":
case "includeDetails": return java.lang.Boolean.class;
case "initialreplayidmap":
case "initialReplayIdMap": return java.util.Map.class;
case "instanceid":
case "instanceId": return java.lang.String.class;
case "jobid":
case "jobId": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "limit": return java.lang.Integer.class;
case "locator": return java.lang.String.class;
case "maxbackoff":
case "maxBackoff": return long.class;
case "maxrecords":
case "maxRecords": return java.lang.Integer.class;
case "notfoundbehaviour":
case "notFoundBehaviour": return org.apache.camel.component.salesforce.NotFoundBehaviour.class;
case "notifyforfields":
case "notifyForFields": return org.apache.camel.component.salesforce.internal.dto.NotifyForFieldsEnum.class;
case "notifyforoperationcreate":
case "notifyForOperationCreate": return java.lang.Boolean.class;
case "notifyforoperationdelete":
case "notifyForOperationDelete": return java.lang.Boolean.class;
case "notifyforoperationundelete":
case "notifyForOperationUndelete": return java.lang.Boolean.class;
case "notifyforoperationupdate":
case "notifyForOperationUpdate": return java.lang.Boolean.class;
case "notifyforoperations":
case "notifyForOperations": return org.apache.camel.component.salesforce.internal.dto.NotifyForOperationsEnum.class;
case "objectmapper":
case "objectMapper": return com.fasterxml.jackson.databind.ObjectMapper.class;
case "pkchunking":
case "pkChunking": return java.lang.Boolean.class;
case "pkchunkingchunksize":
case "pkChunkingChunkSize": return java.lang.Integer.class;
case "pkchunkingparent":
case "pkChunkingParent": return java.lang.String.class;
case "pkchunkingstartrow":
case "pkChunkingStartRow": return java.lang.String.class;
case "pubsubbatchsize":
case "pubSubBatchSize": return int.class;
case "pubsubdeserializetype":
case "pubSubDeserializeType": return org.apache.camel.component.salesforce.PubSubDeserializeType.class;
case "pubsubpojoclass":
case "pubSubPojoClass": return java.lang.String.class;
case "pubsubreplayid":
case "pubSubReplayId": return java.lang.String.class;
case "querylocator":
case "queryLocator": return java.lang.String.class;
case "rawhttpheaders":
case "rawHttpHeaders": return java.lang.String.class;
case "rawmethod":
case "rawMethod": return java.lang.String.class;
case "rawpath":
case "rawPath": return java.lang.String.class;
case "rawpayload":
case "rawPayload": return boolean.class;
case "rawqueryparameters":
case "rawQueryParameters": return java.lang.String.class;
case "replayid":
case "replayId": return java.lang.Long.class;
case "replaypreset":
case "replayPreset": return com.salesforce.eventbus.protobuf.ReplayPreset.class;
case "reportid":
case "reportId": return java.lang.String.class;
case "reportmetadata":
case "reportMetadata": return org.apache.camel.component.salesforce.api.dto.analytics.reports.ReportMetadata.class;
case "resultid":
case "resultId": return java.lang.String.class;
case "sobjectblobfieldname":
case "sObjectBlobFieldName": return java.lang.String.class;
case "sobjectclass":
case "sObjectClass": return java.lang.String.class;
case "sobjectfields":
case "sObjectFields": return java.lang.String.class;
case "sobjectid":
case "sObjectId": return java.lang.String.class;
case "sobjectidname":
case "sObjectIdName": return java.lang.String.class;
case "sobjectidvalue":
case "sObjectIdValue": return java.lang.String.class;
case "sobjectname":
case "sObjectName": return java.lang.String.class;
case "sobjectquery":
case "sObjectQuery": return java.lang.String.class;
case "sobjectsearch":
case "sObjectSearch": return java.lang.String.class;
case "streamqueryresult":
case "streamQueryResult": return java.lang.Boolean.class;
case "updatetopic":
case "updateTopic": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
SalesforceEndpoint target = (SalesforceEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allornone":
case "allOrNone": return target.getConfiguration().isAllOrNone();
case "apexmethod":
case "apexMethod": return target.getConfiguration().getApexMethod();
case "apexqueryparams":
case "apexQueryParams": return target.getConfiguration().getApexQueryParams();
case "apexurl":
case "apexUrl": return target.getConfiguration().getApexUrl();
case "apiversion":
case "apiVersion": return target.getConfiguration().getApiVersion();
case "backoffincrement":
case "backoffIncrement": return target.getConfiguration().getBackoffIncrement();
case "batchid":
case "batchId": return target.getConfiguration().getBatchId();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "compositemethod":
case "compositeMethod": return target.getConfiguration().getCompositeMethod();
case "consumerworkerpoolenabled":
case "consumerWorkerPoolEnabled": return target.isConsumerWorkerPoolEnabled();
case "consumerworkerpoolexecutorservice":
case "consumerWorkerPoolExecutorService": return target.getConsumerWorkerPoolExecutorService();
case "consumerworkerpoolmaxsize":
case "consumerWorkerPoolMaxSize": return target.getConsumerWorkerPoolMaxSize();
case "consumerworkerpoolsize":
case "consumerWorkerPoolSize": return target.getConsumerWorkerPoolSize();
case "contenttype":
case "contentType": return target.getConfiguration().getContentType();
case "defaultreplayid":
case "defaultReplayId": return target.getConfiguration().getDefaultReplayId();
case "eventname":
case "eventName": return target.getConfiguration().getEventName();
case "eventschemaformat":
case "eventSchemaFormat": return target.getConfiguration().getEventSchemaFormat();
case "eventschemaid":
case "eventSchemaId": return target.getConfiguration().getEventSchemaId();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "fallbackreplayid":
case "fallBackReplayId": return target.getConfiguration().getFallBackReplayId();
case "fallbacktolatestreplayid":
case "fallbackToLatestReplayId": return target.getConfiguration().isFallbackToLatestReplayId();
case "format": return target.getConfiguration().getFormat();
case "httpclient":
case "httpClient": return target.getConfiguration().getHttpClient();
case "includedetails":
case "includeDetails": return target.getConfiguration().getIncludeDetails();
case "initialreplayidmap":
case "initialReplayIdMap": return target.getConfiguration().getInitialReplayIdMap();
case "instanceid":
case "instanceId": return target.getConfiguration().getInstanceId();
case "jobid":
case "jobId": return target.getConfiguration().getJobId();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "limit": return target.getConfiguration().getLimit();
case "locator": return target.getConfiguration().getLocator();
case "maxbackoff":
case "maxBackoff": return target.getConfiguration().getMaxBackoff();
case "maxrecords":
case "maxRecords": return target.getConfiguration().getMaxRecords();
case "notfoundbehaviour":
case "notFoundBehaviour": return target.getConfiguration().getNotFoundBehaviour();
case "notifyforfields":
case "notifyForFields": return target.getConfiguration().getNotifyForFields();
case "notifyforoperationcreate":
case "notifyForOperationCreate": return target.getConfiguration().getNotifyForOperationCreate();
case "notifyforoperationdelete":
case "notifyForOperationDelete": return target.getConfiguration().getNotifyForOperationDelete();
case "notifyforoperationundelete":
case "notifyForOperationUndelete": return target.getConfiguration().getNotifyForOperationUndelete();
case "notifyforoperationupdate":
case "notifyForOperationUpdate": return target.getConfiguration().getNotifyForOperationUpdate();
case "notifyforoperations":
case "notifyForOperations": return target.getConfiguration().getNotifyForOperations();
case "objectmapper":
case "objectMapper": return target.getConfiguration().getObjectMapper();
case "pkchunking":
case "pkChunking": return target.getConfiguration().getPkChunking();
case "pkchunkingchunksize":
case "pkChunkingChunkSize": return target.getConfiguration().getPkChunkingChunkSize();
case "pkchunkingparent":
case "pkChunkingParent": return target.getConfiguration().getPkChunkingParent();
case "pkchunkingstartrow":
case "pkChunkingStartRow": return target.getConfiguration().getPkChunkingStartRow();
case "pubsubbatchsize":
case "pubSubBatchSize": return target.getConfiguration().getPubSubBatchSize();
case "pubsubdeserializetype":
case "pubSubDeserializeType": return target.getConfiguration().getPubSubDeserializeType();
case "pubsubpojoclass":
case "pubSubPojoClass": return target.getConfiguration().getPubSubPojoClass();
case "pubsubreplayid":
case "pubSubReplayId": return target.getPubSubReplayId();
case "querylocator":
case "queryLocator": return target.getConfiguration().getQueryLocator();
case "rawhttpheaders":
case "rawHttpHeaders": return target.getConfiguration().getRawHttpHeaders();
case "rawmethod":
case "rawMethod": return target.getConfiguration().getRawMethod();
case "rawpath":
case "rawPath": return target.getConfiguration().getRawPath();
case "rawpayload":
case "rawPayload": return target.getConfiguration().isRawPayload();
case "rawqueryparameters":
case "rawQueryParameters": return target.getConfiguration().getRawQueryParameters();
case "replayid":
case "replayId": return target.getReplayId();
case "replaypreset":
case "replayPreset": return target.getConfiguration().getReplayPreset();
case "reportid":
case "reportId": return target.getConfiguration().getReportId();
case "reportmetadata":
case "reportMetadata": return target.getConfiguration().getReportMetadata();
case "resultid":
case "resultId": return target.getConfiguration().getResultId();
case "sobjectblobfieldname":
case "sObjectBlobFieldName": return target.getConfiguration().getSObjectBlobFieldName();
case "sobjectclass":
case "sObjectClass": return target.getConfiguration().getSObjectClass();
case "sobjectfields":
case "sObjectFields": return target.getConfiguration().getSObjectFields();
case "sobjectid":
case "sObjectId": return target.getConfiguration().getSObjectId();
case "sobjectidname":
case "sObjectIdName": return target.getConfiguration().getSObjectIdName();
case "sobjectidvalue":
case "sObjectIdValue": return target.getConfiguration().getSObjectIdValue();
case "sobjectname":
case "sObjectName": return target.getConfiguration().getSObjectName();
case "sobjectquery":
case "sObjectQuery": return target.getConfiguration().getSObjectQuery();
case "sobjectsearch":
case "sObjectSearch": return target.getConfiguration().getSObjectSearch();
case "streamqueryresult":
case "streamQueryResult": return target.getConfiguration().getStreamQueryResult();
case "updatetopic":
case "updateTopic": return target.getConfiguration().isUpdateTopic();
default: return null;
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "apexqueryparams":
case "apexQueryParams": return java.lang.Object.class;
case "initialreplayidmap":
case "initialReplayIdMap": return java.lang.Long.class;
default: return null;
}
}
}
| SalesforceEndpointConfigurer |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/IntArrayAssertBaseTest.java | {
"start": 807,
"end": 900
} | class ____ {@link IntArrayAssert} tests.
*
* @author Olivier Michallat
*/
public abstract | for |
java | apache__kafka | group-coordinator/src/main/java/org/apache/kafka/coordinator/group/GroupMetadataManager.java | {
"start": 20185,
"end": 31789
} | class ____ {
private LogContext logContext = null;
private SnapshotRegistry snapshotRegistry = null;
private Time time = null;
private CoordinatorTimer<Void, CoordinatorRecord> timer = null;
private CoordinatorExecutor<CoordinatorRecord> executor = null;
private GroupCoordinatorConfig config = null;
private GroupConfigManager groupConfigManager = null;
private CoordinatorMetadataImage metadataImage = null;
private ShareGroupPartitionAssignor shareGroupAssignor = null;
private GroupCoordinatorMetricsShard metrics;
private Optional<Plugin<Authorizer>> authorizerPlugin = null;
private List<TaskAssignor> streamsGroupAssignors = null;
Builder withLogContext(LogContext logContext) {
this.logContext = logContext;
return this;
}
Builder withSnapshotRegistry(SnapshotRegistry snapshotRegistry) {
this.snapshotRegistry = snapshotRegistry;
return this;
}
Builder withTime(Time time) {
this.time = time;
return this;
}
Builder withTimer(CoordinatorTimer<Void, CoordinatorRecord> timer) {
this.timer = timer;
return this;
}
Builder withExecutor(CoordinatorExecutor<CoordinatorRecord> executor) {
this.executor = executor;
return this;
}
Builder withConfig(GroupCoordinatorConfig config) {
this.config = config;
return this;
}
Builder withGroupConfigManager(GroupConfigManager groupConfigManager) {
this.groupConfigManager = groupConfigManager;
return this;
}
Builder withStreamsGroupAssignors(List<TaskAssignor> streamsGroupAssignors) {
this.streamsGroupAssignors = streamsGroupAssignors;
return this;
}
Builder withMetadataImage(CoordinatorMetadataImage metadataImage) {
this.metadataImage = metadataImage;
return this;
}
Builder withGroupCoordinatorMetricsShard(GroupCoordinatorMetricsShard metrics) {
this.metrics = metrics;
return this;
}
Builder withShareGroupAssignor(ShareGroupPartitionAssignor shareGroupAssignor) {
this.shareGroupAssignor = shareGroupAssignor;
return this;
}
Builder withAuthorizerPlugin(Optional<Plugin<Authorizer>> authorizerPlugin) {
this.authorizerPlugin = authorizerPlugin;
return this;
}
GroupMetadataManager build() {
if (logContext == null) logContext = new LogContext();
if (snapshotRegistry == null) snapshotRegistry = new SnapshotRegistry(logContext);
if (metadataImage == null) metadataImage = CoordinatorMetadataImage.EMPTY;
if (time == null) time = Time.SYSTEM;
if (authorizerPlugin == null) authorizerPlugin = Optional.empty();
if (timer == null)
throw new IllegalArgumentException("Timer must be set.");
if (executor == null)
throw new IllegalArgumentException("Executor must be set.");
if (config == null)
throw new IllegalArgumentException("Config must be set.");
if (shareGroupAssignor == null)
shareGroupAssignor = new SimpleAssignor();
if (metrics == null)
throw new IllegalArgumentException("GroupCoordinatorMetricsShard must be set.");
if (groupConfigManager == null)
throw new IllegalArgumentException("GroupConfigManager must be set.");
if (streamsGroupAssignors == null)
streamsGroupAssignors = List.of(new StickyTaskAssignor());
return new GroupMetadataManager(
snapshotRegistry,
logContext,
time,
timer,
executor,
metrics,
metadataImage,
config,
groupConfigManager,
shareGroupAssignor,
authorizerPlugin,
streamsGroupAssignors
);
}
}
/**
* The minimum amount of time between two consecutive refreshes of
* the regular expressions within a single group.
*
* Package private for setting the lower limit of the refresh interval.
*/
static final long REGEX_BATCH_REFRESH_MIN_INTERVAL_MS = 10_000L;
/**
* The log context.
*/
private final LogContext logContext;
/**
* The logger.
*/
private final Logger log;
/**
* The snapshot registry.
*/
private final SnapshotRegistry snapshotRegistry;
/**
* The system time.
*/
private final Time time;
/**
* The system timer.
*/
private final CoordinatorTimer<Void, CoordinatorRecord> timer;
/**
* The executor to executor asynchronous tasks.
*/
private final CoordinatorExecutor<CoordinatorRecord> executor;
/**
* The coordinator metrics.
*/
private final GroupCoordinatorMetricsShard metrics;
/**
* The group coordinator config.
*/
private final GroupCoordinatorConfig config;
/**
* The supported consumer group partition assignors keyed by their name.
*/
private final Map<String, ConsumerGroupPartitionAssignor> consumerGroupAssignors;
/**
* The default consumer group assignor used.
*/
private final ConsumerGroupPartitionAssignor defaultConsumerGroupAssignor;
/**
* The classic and consumer groups keyed by their name.
*/
private final TimelineHashMap<String, Group> groups;
/**
* The group ids keyed by topic names.
*/
private final TimelineHashMap<String, TimelineHashSet<String>> groupsByTopics;
/**
* The share group partition metadata info keyed by group id.
*/
private final TimelineHashMap<String, ShareGroupStatePartitionMetadataInfo> shareGroupStatePartitionMetadata;
/**
* The group manager.
*/
private final GroupConfigManager groupConfigManager;
/**
* The supported task assignors keyed by their name.
*/
private final Map<String, TaskAssignor> streamsGroupAssignors;
/**
* The metadata image.
*/
private CoordinatorMetadataImage metadataImage;
/**
* The cache for topic hash value by topic name.
* A topic hash is calculated when there is a group subscribes to it.
* A topic hash is removed when it's updated in CoordinatorMetadataImage or there is no group subscribes to it.
*/
private final Map<String, Long> topicHashCache;
/**
* This tracks the version of the last metadata image
* with newly created topics.
*/
private long lastMetadataImageWithNewTopics = -1L;
/**
* An empty result returned to the state machine. This means that
* there are no records to append to the log.
*
* Package private for testing.
*/
static final CoordinatorResult<Void, CoordinatorRecord> EMPTY_RESULT =
new CoordinatorResult<>(List.of(), CompletableFuture.completedFuture(null), false);
/**
* The share group partition assignor.
*/
private final ShareGroupPartitionAssignor shareGroupAssignor;
/**
* The authorizer to validate the regex subscription topics.
*/
private final Optional<Plugin<Authorizer>> authorizerPlugin;
private GroupMetadataManager(
SnapshotRegistry snapshotRegistry,
LogContext logContext,
Time time,
CoordinatorTimer<Void, CoordinatorRecord> timer,
CoordinatorExecutor<CoordinatorRecord> executor,
GroupCoordinatorMetricsShard metrics,
CoordinatorMetadataImage metadataImage,
GroupCoordinatorConfig config,
GroupConfigManager groupConfigManager,
ShareGroupPartitionAssignor shareGroupAssignor,
Optional<Plugin<Authorizer>> authorizerPlugin,
List<TaskAssignor> streamsGroupAssignors
) {
this.logContext = logContext;
this.log = logContext.logger(GroupMetadataManager.class);
this.snapshotRegistry = snapshotRegistry;
this.time = time;
this.timer = timer;
this.executor = executor;
this.metrics = metrics;
this.config = config;
this.metadataImage = metadataImage;
this.consumerGroupAssignors = config
.consumerGroupAssignors()
.stream()
.collect(Collectors.toMap(ConsumerGroupPartitionAssignor::name, Function.identity()));
this.defaultConsumerGroupAssignor = config.consumerGroupAssignors().get(0);
this.groups = new TimelineHashMap<>(snapshotRegistry, 0);
this.groupsByTopics = new TimelineHashMap<>(snapshotRegistry, 0);
this.shareGroupStatePartitionMetadata = new TimelineHashMap<>(snapshotRegistry, 0);
this.groupConfigManager = groupConfigManager;
this.shareGroupAssignor = shareGroupAssignor;
this.authorizerPlugin = authorizerPlugin;
this.streamsGroupAssignors = streamsGroupAssignors.stream().collect(Collectors.toMap(TaskAssignor::name, Function.identity()));
this.topicHashCache = new HashMap<>();
}
/**
* @return The current metadata image used by the group metadata manager.
*/
public CoordinatorMetadataImage image() {
return metadataImage;
}
/**
* @return The group corresponding to the group id or throw GroupIdNotFoundException.
*/
public Group group(String groupId) throws GroupIdNotFoundException {
Group group = groups.get(groupId, Long.MAX_VALUE);
if (group == null) {
throw new GroupIdNotFoundException(String.format("Group %s not found.", groupId));
}
return group;
}
/**
* @return The group corresponding to the group id at the given committed offset
* or throw GroupIdNotFoundException.
*/
public Group group(String groupId, long committedOffset) throws GroupIdNotFoundException {
Group group = groups.get(groupId, committedOffset);
if (group == null) {
throw new GroupIdNotFoundException(String.format("Group %s not found.", groupId));
}
return group;
}
/**
* Get the Group List.
*
* @param statesFilter The states of the groups we want to list.
* If empty, all groups are returned with their state.
* If invalid, no groups are returned.
* @param typesFilter The types of the groups we want to list.
* If empty, all groups are returned with their type.
* If invalid, no groups are returned.
* @param committedOffset A specified committed offset corresponding to this shard.
*
* @return A list containing the ListGroupsResponseData.ListedGroup
*/
public List<ListGroupsResponseData.ListedGroup> listGroups(
Set<String> statesFilter,
Set<String> typesFilter,
long committedOffset
) {
// Converts each state filter string to lower case for a case-insensitive comparison.
Set<String> caseInsensitiveFilterSet = statesFilter.stream()
.map(String::toLowerCase)
.map(String::trim)
.collect(Collectors.toSet());
// Converts each type filter string to a value in the GroupType | Builder |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/PriorityQueueSetFactory.java | {
"start": 1086,
"end": 2993
} | interface ____ {
/**
* Creates a {@link KeyGroupedInternalPriorityQueue}.
*
* @param stateName unique name for associated with this queue.
* @param byteOrderedElementSerializer a serializer that with a format that is lexicographically
* ordered in alignment with elementPriorityComparator.
* @param <T> type of the stored elements.
* @return the queue with the specified unique name.
*/
@Nonnull
<T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer);
/**
* Creates a {@link KeyGroupedInternalPriorityQueue}.
*
* @param stateName unique name for associated with this queue.
* @param byteOrderedElementSerializer a serializer that with a format that is lexicographically
* ordered in alignment with elementPriorityComparator.
* @param allowFutureMetadataUpdates whether allow metadata to update in the future or not.
* @param <T> type of the stored elements.
* @return the queue with the specified unique name.
*/
default <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer,
boolean allowFutureMetadataUpdates) {
if (allowFutureMetadataUpdates) {
throw new UnsupportedOperationException(
this.getClass().getName()
+ " doesn't support to allow to update future metadata.");
} else {
return create(stateName, byteOrderedElementSerializer);
}
}
}
| PriorityQueueSetFactory |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java | {
"start": 1295,
"end": 3542
} | class ____ extends InternalSingleBucketAggregationTestCase<InternalFilter> {
@Override
protected InternalFilter createTestInstance(
String name,
long docCount,
InternalAggregations aggregations,
Map<String, Object> metadata
) {
return new InternalFilter(name, docCount, aggregations, metadata);
}
@Override
protected boolean supportsSampling() {
return true;
}
@Override
protected void assertSampled(InternalFilter sampled, InternalFilter reduced, SamplingContext samplingContext) {
assertThat(sampled.getDocCount(), equalTo(samplingContext.scaleUp(reduced.getDocCount())));
}
@Override
protected void extraAssertReduced(InternalFilter reduced, List<InternalFilter> inputs) {
// Nothing extra to assert
}
public void testReducePipelinesReturnsSameInstanceWithoutPipelines() {
InternalFilter test = createTestInstance();
assertThat(test.reducePipelines(test, emptyReduceContextBuilder().forFinalReduction(), PipelineTree.EMPTY), sameInstance(test));
}
public void testReducePipelinesReducesBucketPipelines() {
/*
* Tests that a pipeline buckets by creating a mock pipeline that
* replaces "inner" with "dummy".
*/
InternalFilter dummy = createTestInstance();
InternalFilter inner = createTestInstance();
InternalAggregations sub = InternalAggregations.from(List.of(inner));
InternalFilter test = createTestInstance("test", randomNonNegativeLong(), sub, emptyMap());
PipelineAggregator mockPipeline = new PipelineAggregator(null, null, null) {
@Override
public InternalAggregation reduce(InternalAggregation aggregation, AggregationReduceContext reduceContext) {
return dummy;
}
};
PipelineTree tree = new PipelineTree(Map.of(inner.getName(), new PipelineTree(emptyMap(), List.of(mockPipeline))), emptyList());
InternalFilter reduced = (InternalFilter) test.reducePipelines(test, emptyReduceContextBuilder().forFinalReduction(), tree);
assertThat(reduced.getAggregations().get(dummy.getName()), sameInstance(dummy));
}
}
| InternalFilterTests |
java | elastic__elasticsearch | distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessUtils.java | {
"start": 886,
"end": 3022
} | class ____ {
/**
* Returns the java.io.tmpdir Elasticsearch should use, creating it if necessary.
*
* <p> On non-Windows OS, this will be created as a subdirectory of the default temporary directory.
* Note that this causes the created temporary directory to be a private temporary directory.
*/
public static Path setupTempDir(ProcessInfo processInfo) throws UserException {
final Path path;
String tmpDirOverride = processInfo.envVars().get("ES_TMPDIR");
if (tmpDirOverride != null) {
path = Paths.get(tmpDirOverride);
if (Files.exists(path) == false) {
throw new UserException(ExitCodes.CONFIG, "Temporary directory [" + path + "] does not exist or is not accessible");
}
if (Files.isDirectory(path) == false) {
throw new UserException(ExitCodes.CONFIG, "Temporary directory [" + path + "] is not a directory");
}
} else {
try {
if (processInfo.sysprops().get("os.name").startsWith("Windows")) {
/*
* On Windows, we avoid creating a unique temporary directory per invocation lest
* we pollute the temporary directory. On other operating systems, temporary directories
* will be cleaned automatically via various mechanisms (e.g., systemd, or restarts).
*/
path = Paths.get(processInfo.sysprops().get("java.io.tmpdir"), "elasticsearch");
Files.createDirectories(path);
} else {
path = createTempDirectory("elasticsearch-");
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
return path;
}
@SuppressForbidden(reason = "Files#createTempDirectory(String, FileAttribute...)")
private static Path createTempDirectory(final String prefix, final FileAttribute<?>... attrs) throws IOException {
return Files.createTempDirectory(prefix, attrs);
}
}
| ServerProcessUtils |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/sql/FunctionITCase.java | {
"start": 87150,
"end": 88084
} | class ____ {
@DataTypeHint(allowRawGlobally = HintFlag.TRUE)
public MapView<String, RawPojo> view = new MapView<>();
}
@Override
public AccWithRawView createAccumulator() {
return new AccWithRawView();
}
public void accumulate(AccWithRawView acc, String value) throws Exception {
if (value != null) {
acc.view.put(value, new RawPojo(value));
}
}
@Override
public String getValue(AccWithRawView acc) {
return acc.view.getMap().entrySet().stream()
.map(Objects::toString)
.sorted()
.collect(Collectors.joining(", "));
}
}
/**
* Synchronous table function that uses regular type inference for {@link LookupTableSource}.
*/
@DataTypeHint("ROW<s STRING, b BYTES>")
public static | AccWithRawView |
java | spring-projects__spring-boot | module/spring-boot-r2dbc/src/test/java/org/springframework/boot/r2dbc/autoconfigure/R2dbcProxyAutoConfigurationTests.java | {
"start": 1512,
"end": 2952
} | class ____ {
private final ApplicationContextRunner runner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(R2dbcProxyAutoConfiguration.class));
@Test
void shouldSupplyConnectionFactoryDecorator() {
this.runner.run((context) -> assertThat(context).hasSingleBean(ConnectionFactoryDecorator.class));
}
@Test
void shouldNotSupplyBeansIfR2dbcSpiIsNotOnClasspath() {
this.runner.withClassLoader(new FilteredClassLoader("io.r2dbc.spi"))
.run((context) -> assertThat(context).doesNotHaveBean(ConnectionFactoryDecorator.class));
}
@Test
void shouldNotSupplyBeansIfR2dbcProxyIsNotOnClasspath() {
this.runner.withClassLoader(new FilteredClassLoader("io.r2dbc.proxy"))
.run((context) -> assertThat(context).doesNotHaveBean(ConnectionFactoryDecorator.class));
}
@Test
void shouldApplyCustomizers() {
this.runner.withUserConfiguration(ProxyConnectionFactoryCustomizerConfig.class).run((context) -> {
ConnectionFactoryDecorator decorator = context.getBean(ConnectionFactoryDecorator.class);
ConnectionFactory connectionFactory = ConnectionFactoryBuilder
.withUrl("r2dbc:h2:mem:///" + UUID.randomUUID())
.build();
decorator.decorate(connectionFactory);
assertThat(context.getBean(ProxyConnectionFactoryCustomizerConfig.class).called).containsExactly("first",
"second");
});
}
@Configuration(proxyBeanMethods = false)
private static final | R2dbcProxyAutoConfigurationTests |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/support/TypeDiscoverer.java | {
"start": 9834,
"end": 10486
} | class ____ implements ResolvableType.VariableResolver {
private final Map<TypeVariable, Type> typeVariableMap;
public TypeVariableMapVariableResolver(Map<TypeVariable, Type> typeVariableMap) {
this.typeVariableMap = typeVariableMap;
}
@Override
public ResolvableType resolveVariable(TypeVariable<?> variable) {
Type type = this.typeVariableMap.get(variable);
return (type != null ? ResolvableType.forType(type) : null);
}
@Override
public Object getSource() {
return this.typeVariableMap;
}
}
}
| TypeVariableMapVariableResolver |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/beanvalidation/MethodValidationAdapter.java | {
"start": 18735,
"end": 19926
} | class ____ {
private final MethodParameter parameter;
private final @Nullable Object bean;
private final @Nullable Object container;
private final @Nullable Integer containerIndex;
private final @Nullable Object containerKey;
private final Errors errors;
private final Set<ConstraintViolation<Object>> violations = new LinkedHashSet<>();
public ParamErrorsBuilder(
MethodParameter param, @Nullable Object bean, @Nullable Object container,
@Nullable Integer containerIndex, @Nullable Object containerKey) {
this.parameter = param;
this.bean = bean;
this.container = container;
this.containerIndex = containerIndex;
this.containerKey = containerKey;
this.errors = createBindingResult(param, this.bean);
}
public void addViolation(ConstraintViolation<Object> violation) {
this.violations.add(violation);
}
public ParameterErrors build() {
validatorAdapter.get().processConstraintViolations(this.violations, this.errors);
return new ParameterErrors(
this.parameter, this.bean, this.errors, this.container,
this.containerIndex, this.containerKey);
}
}
@SuppressWarnings("serial")
private static | ParamErrorsBuilder |
java | grpc__grpc-java | api/src/test/java/io/grpc/ManagedChannelRegistryTest.java | {
"start": 12347,
"end": 12472
} | class ____ extends SocketAddress {
}
ManagedChannelRegistry registry = new ManagedChannelRegistry();
| SocketAddress1 |
java | netty__netty | testsuite/src/main/java/io/netty/testsuite/transport/udt/UDTClientServerConnectionTest.java | {
"start": 6839,
"end": 10836
} | class ____ implements Runnable {
static final Logger log = LoggerFactory.getLogger(Server.class);
final ChannelGroup group = new DefaultChannelGroup("server group", GlobalEventExecutor.INSTANCE);
private final InetSocketAddress address;
volatile Channel channel;
volatile boolean isRunning;
volatile boolean isShutdown;
Server(InetSocketAddress address) {
this.address = address;
}
@Override
public void run() {
final ServerBootstrap boot = new ServerBootstrap();
final ThreadFactory factory = new DefaultThreadFactory("udp");
final EventLoopGroup eventLoopGroup = new MultiThreadIoEventLoopGroup(1,
factory, NioIoHandler.newFactory(NioUdtProvider.BYTE_PROVIDER));
try {
boot.group(eventLoopGroup)
.channelFactory(NioUdtProvider.BYTE_ACCEPTOR)
.childHandler(new ChannelInitializer<UdtChannel>() {
@Override
protected void initChannel(final UdtChannel ch)
throws Exception {
final ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast("framer",
new DelimiterBasedFrameDecoder(8192,
Delimiters.lineDelimiter()));
pipeline.addLast("decoder", new StringDecoder(
CharsetUtil.UTF_8));
pipeline.addLast("encoder", new StringEncoder(
CharsetUtil.UTF_8));
pipeline.addLast("handler", new ServerHandler(
group));
}
});
channel = boot.bind(address).sync().channel();
isRunning = true;
log.info("Server ready.");
waitForRunning(false);
log.info("Server closing acceptor...");
channel.close().sync();
log.info("Server closing connectors...");
group.close().sync();
isShutdown = true;
log.info("Server is done.");
} catch (final Throwable e) {
log.error("Server failure.", e);
} finally {
eventLoopGroup.shutdownGracefully();
eventLoopGroup.terminationFuture().syncUninterruptibly();
}
}
void shutdown() {
isRunning = false;
}
void waitForActive(final boolean isActive) throws Exception {
for (int k = 0; k < WAIT_COUNT; k++) {
Thread.sleep(WAIT_SLEEP);
if (isActive) {
for (final Channel channel : group) {
final ServerHandler handler = channel.pipeline().get(
ServerHandler.class);
if (handler != null && handler.isActive) {
return;
}
}
} else {
if (group.isEmpty()) {
return;
}
}
}
}
void waitForRunning(final boolean isRunning) throws Exception {
for (int k = 0; k < WAIT_COUNT; k++) {
if (isRunning == this.isRunning) {
return;
}
Thread.sleep(WAIT_SLEEP);
}
}
void waitForShutdown() throws Exception {
for (int k = 0; k < WAIT_COUNT; k++) {
if (isShutdown) {
return;
}
Thread.sleep(WAIT_SLEEP);
}
}
}
static | Server |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnMissingBeanTests.java | {
"start": 35174,
"end": 35391
} | class ____ extends GenericExampleBean<String> {
CustomGenericExampleBean() {
super("custom subclass");
}
}
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@ | CustomGenericExampleBean |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/scheduling/SpeculativeExecutionITCase.java | {
"start": 29297,
"end": 30364
} | class ____
implements CommittingSinkWriter<Long, Tuple3<Integer, Integer, Map<Long, Long>>> {
private final int subTaskIndex;
private final int attemptNumber;
public DummyPrecommittingSinkWriter(int subTaskIndex, int attemptNumber) {
this.subTaskIndex = subTaskIndex;
this.attemptNumber = attemptNumber;
}
private final Map<Long, Long> numberCountResult = new HashMap<>();
@Override
public void write(Long value, Context context) throws IOException, InterruptedException {
numberCountResult.merge(value, 1L, Long::sum);
maybeSleep();
}
@Override
public void flush(boolean endOfInput) {}
@Override
public Collection<Tuple3<Integer, Integer, Map<Long, Long>>> prepareCommit() {
return Collections.singleton(Tuple3.of(subTaskIndex, attemptNumber, numberCountResult));
}
@Override
public void close() throws Exception {}
}
private static | DummyPrecommittingSinkWriter |
java | elastic__elasticsearch | x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java | {
"start": 2625,
"end": 29060
} | class ____ extends AbstractActiveDirectoryTestCase {
private static final String REALM_NAME = "ad-test";
private static final RealmConfig.RealmIdentifier REALM_ID = new RealmConfig.RealmIdentifier("active_directory", REALM_NAME);
private final SecureString SECURED_PASSWORD = new SecureString(PASSWORD);
private ThreadPool threadPool;
private static final String BRUCE_BANNER_DN = "cn=Bruce Banner,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
@Before
public void init() throws Exception {
threadPool = new TestThreadPool("ActiveDirectorySessionFactoryTests thread pool");
}
@After
public void shutdown() throws InterruptedException {
terminate(threadPool);
}
@Override
public boolean enableWarningsCheck() {
return false;
}
public void testAdAuth() throws Exception {
RealmConfig config = configureRealm(
"ad-test",
LdapRealmSettings.AD_TYPE,
buildAdSettings(smbFixture.getAdLdapUrl(), AD_DOMAIN, false)
);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String userName = "ironman";
try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("Geniuses"),
containsString("Billionaire"),
containsString("Playboy"),
containsString("Philanthropists"),
containsString("Avengers"),
containsString("SHIELD"),
containsString("CN=Users,CN=Builtin"),
containsString("Domain Users"),
containsString("Supers")
)
);
}
}
}
private RealmConfig configureRealm(String name, String type, Settings settings) {
final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier(type, name);
final Settings mergedSettings = Settings.builder()
.put(settings)
.normalizePrefix("xpack.security.authc.realms." + type + "." + name + ".")
.put(globalSettings)
.put(getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0)
.put(getFullSettingKey(identifier, SessionFactorySettings.TIMEOUT_RESPONSE_SETTING), "15s")
.build();
final Environment env = TestEnvironment.newEnvironment(mergedSettings);
this.sslService = new SSLService(env);
return new RealmConfig(identifier, mergedSettings, env, new ThreadContext(globalSettings));
}
public void testNetbiosAuth() throws Exception {
final String adUrl = randomFrom(smbFixture.getAdLdapUrl(), smbFixture.getAdLdapGcUrl());
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, buildAdSettings(adUrl, AD_DOMAIN, false));
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String userName = "ades\\ironman";
try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("Geniuses"),
containsString("Billionaire"),
containsString("Playboy"),
containsString("Philanthropists"),
containsString("Avengers"),
containsString("SHIELD"),
containsString("CN=Users,CN=Builtin"),
containsString("Domain Users"),
containsString("Supers")
)
);
}
}
}
public void testAdAuthAvengers() throws Exception {
RealmConfig config = configureRealm(
"ad-test",
LdapRealmSettings.AD_TYPE,
buildAdSettings(smbFixture.getAdLdapUrl(), AD_DOMAIN, false)
);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String[] users = new String[] { "cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow" };
for (String user : users) {
try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
assertThat("group avenger test for user " + user, groups(ldap), hasItem(containsString("Avengers")));
}
}
}
}
public void testAuthenticate() throws Exception {
Settings settings = buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.ONE_LEVEL,
false
);
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String userName = "hulk";
try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("Avengers"),
containsString("SHIELD"),
containsString("Geniuses"),
containsString("Philanthropists"),
containsString("CN=Users,CN=Builtin"),
containsString("Domain Users"),
containsString("Supers")
)
);
}
}
}
public void testAuthenticateBaseUserSearch() throws Exception {
Settings settings = buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Bruce Banner, CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.BASE,
false
);
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String userName = "hulk";
try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("Avengers"),
containsString("SHIELD"),
containsString("Geniuses"),
containsString("Philanthropists"),
containsString("CN=Users,CN=Builtin"),
containsString("Domain Users"),
containsString("Supers")
)
);
}
}
}
public void testAuthenticateBaseGroupSearch() throws Exception {
Settings settings = Settings.builder()
.put(
buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.ONE_LEVEL,
false
)
)
.put(
ActiveDirectorySessionFactorySettings.AD_GROUP_SEARCH_BASEDN_SETTING,
"CN=Avengers,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"
)
.put(ActiveDirectorySessionFactorySettings.AD_GROUP_SEARCH_SCOPE_SETTING, LdapSearchScope.BASE)
.build();
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String userName = "hulk";
try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(groups, hasItem(containsString("Avengers")));
}
}
}
public void testAuthenticateWithUserPrincipalName() throws Exception {
Settings settings = buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.ONE_LEVEL,
false
);
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
// Login with the UserPrincipalName
String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(ldap.userDn(), is(userDN));
assertThat(
groups,
containsInAnyOrder(containsString("Geniuses"), containsString("CN=Users,CN=Builtin"), containsString("Domain Users"))
);
}
}
}
public void testAuthenticateWithSAMAccountName() throws Exception {
Settings settings = buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.ONE_LEVEL,
false
);
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
// login with sAMAccountName
String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
try (LdapSession ldap = session(sessionFactory, "selvig", SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
assertThat(ldap.userDn(), is(userDN));
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(containsString("Geniuses"), containsString("CN=Users,CN=Builtin"), containsString("Domain Users"))
);
}
}
}
public void testCustomUserFilter() throws Exception {
Settings settings = Settings.builder()
.put(
buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.SUB_TREE,
false
)
)
.put(
getFullSettingKey(REALM_ID.getName(), ActiveDirectorySessionFactorySettings.AD_USER_SEARCH_FILTER_SETTING),
"(&(objectclass=user)(userPrincipalName={0}@ad.test.elasticsearch.com))"
)
.build();
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
// Login with the UserPrincipalName
try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("CN=Geniuses"),
containsString("CN=Domain Users"),
containsString("CN=Users,CN=Builtin")
)
);
}
}
}
public void testStandardLdapConnection() throws Exception {
String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";
String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier(LdapRealmSettings.LDAP_TYPE, "ad-as-ldap-test");
final Settings settings = Settings.builder()
.put(
LdapTestCase.buildLdapSettings(
realmId,
new String[] { smbFixture.getAdLdapUrl() },
new String[] { userTemplate },
groupSearchBase,
LdapSearchScope.SUB_TREE,
null,
false
)
)
.putList(RealmSettings.realmSslPrefix(realmId) + "certificate_authorities", certificatePaths)
.put(getFullSettingKey(realmId, SessionFactorySettings.FOLLOW_REFERRALS_SETTING), FOLLOW_REFERRALS)
.build();
RealmConfig config = configureRealm("ad-as-ldap-test", LdapRealmSettings.LDAP_TYPE, settings);
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
String user = "Bruce Banner";
try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("Avengers"),
containsString("SHIELD"),
containsString("Geniuses"),
containsString("Philanthropists")
)
);
}
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29840")
public void testHandlingLdapReferralErrors() throws Exception {
String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";
String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier(LdapRealmSettings.LDAP_TYPE, "ad-as-ldap-test");
Settings settings = Settings.builder()
.put(
LdapTestCase.buildLdapSettings(
realmId,
new String[] { smbFixture.getAdLdapUrl() },
new String[] { userTemplate },
groupSearchBase,
LdapSearchScope.SUB_TREE,
null,
false
)
)
.putList(RealmSettings.realmSslPrefix(realmId) + "certificate_authorities", certificatePaths)
.put(getFullSettingKey(realmId, SessionFactorySettings.FOLLOW_REFERRALS_SETTING), FOLLOW_REFERRALS)
.build();
RealmConfig config = configureRealm("ad-as-ldap-test", LdapRealmSettings.LDAP_TYPE, settings);
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
String user = "Bruce Banner";
try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) {
final UncategorizedExecutionException exception = expectThrows(UncategorizedExecutionException.class, () -> groups(ldap));
final Throwable cause = exception.getCause();
assertThat(cause, instanceOf(ExecutionException.class));
assertThat(cause.getCause(), instanceOf(LDAPException.class));
final LDAPException ldapException = (LDAPException) cause.getCause();
assertThat(ldapException.getResultCode(), is(ResultCode.INVALID_CREDENTIALS));
}
}
public void testStandardLdapWithAttributeGroups() throws Exception {
String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";
final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier(LdapRealmSettings.LDAP_TYPE, "ad-as-ldap-test");
Settings settings = Settings.builder()
.put(
LdapTestCase.buildLdapSettings(
realmId,
new String[] { smbFixture.getAdLdapUrl() },
new String[] { userTemplate },
groupSearchBase,
LdapSearchScope.SUB_TREE,
null,
false
)
)
.putList("ssl.certificate_authorities", certificatePaths)
.putList(RealmSettings.realmSslPrefix(realmId) + "certificate_authorities", certificatePaths)
.put(getFullSettingKey(realmId, SessionFactorySettings.FOLLOW_REFERRALS_SETTING), FOLLOW_REFERRALS)
.build();
RealmConfig config = configureRealm("ad-as-ldap-test", LdapRealmSettings.LDAP_TYPE, settings);
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
String user = "Bruce Banner";
try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
List<String> groups = groups(ldap);
assertThat(
groups,
containsInAnyOrder(
containsString("Avengers"),
containsString("SHIELD"),
containsString("Geniuses"),
containsString("Philanthropists")
)
);
}
}
public void testADLookup() throws Exception {
RealmConfig config = configureRealm(
"ad-test",
LdapRealmSettings.AD_TYPE,
buildAdSettings(smbFixture.getAdLdapUrl(), AD_DOMAIN, false, true)
);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
List<String> users = randomSubsetOf(
Arrays.asList(
"cap",
"hawkeye",
"hulk",
"ironman",
"thor",
"blackwidow",
"cap@ad.test.elasticsearch.com",
"hawkeye@ad.test.elasticsearch.com",
"hulk@ad.test.elasticsearch.com",
"ironman@ad.test.elasticsearch.com",
"thor@ad.test.elasticsearch.com",
"blackwidow@ad.test.elasticsearch.com",
"ADES\\cap",
"ADES\\hawkeye",
"ADES\\hulk",
"ADES\\ironman",
"ADES\\thor",
"ADES\\blackwidow"
)
);
for (String user : users) {
try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) {
assertConnectionCanReconnect(ldap.getConnection());
assertNotNull("ldap session was null for user " + user, ldap);
assertThat("group avenger test for user " + user, groups(ldap), hasItem(containsString("Avengers")));
}
}
}
}
@SuppressWarnings("unchecked")
public void testResolveTokenGroupsSID() throws Exception {
Settings settings = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.getFullSettingKey(REALM_ID, RealmSettings.ORDER_SETTING), 0)
.put(
buildAdSettings(
REALM_ID,
smbFixture.getAdLdapUrl(),
AD_DOMAIN,
"CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com",
LdapSearchScope.SUB_TREE,
false
)
)
.put(ActiveDirectorySessionFactorySettings.AD_GROUP_SEARCH_BASEDN_SETTING, "DC=ad,DC=test,DC=elasticsearch,DC=com")
.put(ActiveDirectorySessionFactorySettings.AD_GROUP_SEARCH_SCOPE_SETTING, LdapSearchScope.SUB_TREE)
.put(getFullSettingKey(REALM_ID, LdapMetadataResolverSettings.ADDITIONAL_METADATA_SETTING), "tokenGroups")
.build();
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings);
final PlainActionFuture<LdapMetadataResolver.LdapMetadataResult> future = new PlainActionFuture<>();
LdapMetadataResolver resolver = new LdapMetadataResolver(config, true);
try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) {
String userName = "hulk";
try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) {
assertConnectionCanReconnect(ldap.getConnection());
resolver.resolve(ldap.getConnection(), BRUCE_BANNER_DN, TimeValue.timeValueSeconds(1), logger, null, future);
Map<String, Object> metadataGroupSIDs = future.get().getMetaData();
assertThat(metadataGroupSIDs.size(), equalTo(1));
assertNotNull(metadataGroupSIDs.get("tokenGroups"));
List<String> SIDs = ((List<String>) metadataGroupSIDs.get("tokenGroups"));
assertThat(SIDs.size(), equalTo(7));
assertThat(SIDs, everyItem(matchesPattern("S-1-5-(?:21|32)-\\d+(?:-\\d+\\-\\d+\\-\\d+)?")));
}
}
}
private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean hostnameVerification) {
return buildAdSettings(ldapUrl, adDomainName, hostnameVerification, randomBoolean());
}
private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean hostnameVerification, boolean useBindUser) {
Settings.Builder builder = Settings.builder()
.put(getFullSettingKey(REALM_ID, SessionFactorySettings.URLS_SETTING), ldapUrl)
.put(getFullSettingKey(REALM_ID, ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING), adDomainName)
.put(getFullSettingKey(REALM_ID, SessionFactorySettings.FOLLOW_REFERRALS_SETTING), FOLLOW_REFERRALS);
if (randomBoolean()) {
builder.put(
getFullSettingKey(REALM_ID, SSLConfigurationSettings.VERIFICATION_MODE_SETTING_REALM),
hostnameVerification ? SslVerificationMode.FULL : SslVerificationMode.CERTIFICATE
);
} else {
builder.put(getFullSettingKey(REALM_ID, SessionFactorySettings.HOSTNAME_VERIFICATION_SETTING), hostnameVerification);
}
builder.putList(getFullSettingKey(REALM_ID, SSLConfigurationSettings.CAPATH_SETTING_REALM), certificatePaths);
if (useBindUser) {
final String user = randomFrom(
"cap",
"hawkeye",
"hulk",
"ironman",
"thor",
"blackwidow",
"cap@ad.test.elasticsearch.com",
"hawkeye@ad.test.elasticsearch.com",
"hulk@ad.test.elasticsearch.com",
"ironman@ad.test.elasticsearch.com",
"thor@ad.test.elasticsearch.com",
"blackwidow@ad.test.elasticsearch.com",
"ADES\\cap",
"ADES\\hawkeye",
"ADES\\hulk",
"ADES\\ironman",
"ADES\\thor",
"ADES\\blackwidow",
"CN=Bruce Banner,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"
);
final boolean poolingEnabled = randomBoolean();
builder.put("bind_dn", user).put("bind_password", PASSWORD).put("user_search.pool.enabled", poolingEnabled);
logger.info("using bind user [{}] with pooling enabled [{}]", user, poolingEnabled);
}
return builder.build();
}
private LdapSession session(SessionFactory factory, String username, SecureString password) {
PlainActionFuture<LdapSession> future = new PlainActionFuture<>();
factory.session(username, password, future);
return future.actionGet();
}
private LdapSession unauthenticatedSession(SessionFactory factory, String username) {
PlainActionFuture<LdapSession> future = new PlainActionFuture<>();
factory.unauthenticatedSession(username, future);
return future.actionGet();
}
private List<String> groups(LdapSession ldapSession) {
PlainActionFuture<List<String>> future = new PlainActionFuture<>();
ldapSession.groups(future);
return future.actionGet();
}
static ActiveDirectorySessionFactory getActiveDirectorySessionFactory(RealmConfig config, SSLService sslService, ThreadPool threadPool)
throws LDAPException {
ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool);
if (sessionFactory.getConnectionPool() != null) {
// don't use this in production
// used here to catch bugs that might get masked by an automatic retry
sessionFactory.getConnectionPool().setRetryFailedOperationsDueToInvalidConnections(false);
}
return sessionFactory;
}
}
| ActiveDirectorySessionFactoryTests |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/config/ConfigCenterConfig.java | {
"start": 2039,
"end": 10553
} | class ____ extends AbstractConfig {
private final AtomicBoolean initialized = new AtomicBoolean(false);
/**
* The protocol used for accessing the config center.
*/
private String protocol;
/**
* The address (URL or hostname) of the config center server.
*/
private String address;
/**
* The port number for the config center server.
*/
private Integer port;
/**
* The config center cluster, its actual meaning may vary depending on the specific config center product.
*/
private String cluster;
/**
* The namespace of the config center, generally used for multi-tenancy.
* Its actual meaning depends on the specific config center you use. Default value is CommonConstants.DUBBO.
*/
private String namespace;
/**
* The group of the config center, often used to identify an isolated space for a batch of config items.
* Its actual meaning depends on the specific config center you use. Default value is CommonConstants.DUBBO.
*/
private String group;
/**
* Username for authentication with the config center.
*/
private String username;
/**
* Password for authentication with the config center.
*/
private String password;
/**
* The timeout for accessing the config center. Default value is 30000L.
*/
private Long timeout;
/**
* If the config center should have the highest priority and override all other configurations.
* Deprecated and no longer used. Default value is true.
*/
private Boolean highestPriority;
/**
* Behavior when the initial connection attempt to the config center fails.
* 'true' means interrupt the whole process once a failure occurs. Default value is true.
*/
private Boolean check;
/**
* Key mapping for properties files. Most of the time, you do not need to change this parameter.
* Default value is CommonConstants.DEFAULT_DUBBO_PROPERTIES.
*/
private String configFile;
/**
* The properties file under 'configFile' is global shared, while '.properties' under this one is limited only to this application.
*/
private String appConfigFile;
/**
* Additional parameters specific to your config center product can be added here.
* For example, with XML:
* <dubbo:config-center>
* <dubbo:parameter key="{your key}" value="{your value}" />
* </dubbo:config-center>
*/
private Map<String, String> parameters;
/**
* External configuration for the config center.
*/
private Map<String, String> externalConfiguration;
/**
* Application-specific external configuration for the config center.
*/
private Map<String, String> appExternalConfiguration;
public ConfigCenterConfig() {}
public ConfigCenterConfig(ApplicationModel applicationModel) {
super(applicationModel);
}
@Override
protected void checkDefault() {
super.checkDefault();
if (namespace == null) {
namespace = CommonConstants.DUBBO;
}
if (group == null) {
group = CommonConstants.DUBBO;
}
if (timeout == null) {
timeout = 30000L;
}
if (check == null) {
check = true;
}
if (configFile == null) {
configFile = CommonConstants.DEFAULT_DUBBO_PROPERTIES;
}
}
public URL toUrl() {
Map<String, String> map = new HashMap<>();
appendParameters(map, this);
if (StringUtils.isEmpty(address)) {
address = ANYHOST_VALUE;
}
map.put(PATH_KEY, ConfigCenterConfig.class.getName());
// use 'zookeeper' as the default config center.
if (StringUtils.isEmpty(map.get(PROTOCOL_KEY))) {
map.put(PROTOCOL_KEY, ZOOKEEPER_PROTOCOL);
}
return UrlUtils.parseURL(address, map).setScopeModel(getScopeModel());
}
public boolean checkOrUpdateInitialized(boolean update) {
return initialized.compareAndSet(false, update);
}
public void setInitialized(boolean val) {
initialized.set(val);
}
public Map<String, String> getExternalConfiguration() {
return externalConfiguration;
}
public Map<String, String> getAppExternalConfiguration() {
return appExternalConfiguration;
}
public void setExternalConfig(Map<String, String> externalConfiguration) {
this.externalConfiguration = externalConfiguration;
}
public void setAppExternalConfig(Map<String, String> appExternalConfiguration) {
this.appExternalConfiguration = appExternalConfiguration;
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
@Parameter(excluded = true)
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
if (address != null) {
try {
URL url = URL.valueOf(address);
updatePropertyIfAbsent(this::getUsername, this::setUsername, url.getUsername());
updatePropertyIfAbsent(this::getPassword, this::setPassword, url.getPassword());
updatePropertyIfAbsent(this::getProtocol, this::setProtocol, url.getProtocol());
updatePropertyIfAbsent(this::getPort, this::setPort, url.getPort());
Map<String, String> params = url.getParameters();
if (CollectionUtils.isNotEmptyMap(params)) {
params.remove(BACKUP_KEY);
}
updateParameters(params);
} catch (Exception ignored) {
}
}
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public String getCluster() {
return cluster;
}
public void setCluster(String cluster) {
this.cluster = cluster;
}
public String getNamespace() {
return namespace;
}
public void setNamespace(String namespace) {
this.namespace = namespace;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public Boolean isCheck() {
return check;
}
public void setCheck(Boolean check) {
this.check = check;
}
@Deprecated
@Parameter(key = CONFIG_ENABLE_KEY)
public Boolean isHighestPriority() {
return highestPriority;
}
@Deprecated
public void setHighestPriority(Boolean highestPriority) {
this.highestPriority = highestPriority;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Long getTimeout() {
return timeout;
}
public void setTimeout(Long timeout) {
this.timeout = timeout;
}
@Parameter(key = CONFIG_CONFIGFILE_KEY)
public String getConfigFile() {
return configFile;
}
public void setConfigFile(String configFile) {
this.configFile = configFile;
}
@Parameter(excluded = true, key = CONFIG_APP_CONFIGFILE_KEY)
public String getAppConfigFile() {
return appConfigFile;
}
public void setAppConfigFile(String appConfigFile) {
this.appConfigFile = appConfigFile;
}
public Map<String, String> getParameters() {
return parameters;
}
public void setParameters(Map<String, String> parameters) {
this.parameters = parameters;
}
@Override
@Parameter(excluded = true, attribute = false)
public boolean isValid() {
if (StringUtils.isEmpty(address)) {
return false;
}
return address.contains("://") || StringUtils.isNotEmpty(protocol);
}
public void updateParameters(Map<String, String> parameters) {
if (CollectionUtils.isEmptyMap(parameters)) {
return;
}
if (this.parameters == null) {
this.parameters = parameters;
} else {
this.parameters.putAll(parameters);
}
}
}
| ConfigCenterConfig |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DockerEndpointBuilderFactory.java | {
"start": 80824,
"end": 81141
} | class ____ extends AbstractEndpointBuilder implements DockerEndpointBuilder, AdvancedDockerEndpointBuilder {
public DockerEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new DockerEndpointBuilderImpl(path);
}
} | DockerEndpointBuilderImpl |
java | google__guava | android/guava-tests/test/com/google/common/collect/TableCollectionTest.java | {
"start": 31695,
"end": 32526
} | class ____ extends MapTests {
ColumnTests(
boolean allowsNullValues,
boolean supportsPut,
boolean supportsRemove,
boolean supportsClear,
boolean supportsIteratorRemove) {
super(allowsNullValues, supportsPut, supportsRemove, supportsClear, supportsIteratorRemove);
}
abstract Table<String, Character, Integer> makeTable();
@Override
protected Map<String, Integer> makeEmptyMap() {
return makeTable().column('a');
}
@Override
protected Map<String, Integer> makePopulatedMap() {
Table<String, Character, Integer> table = makeTable();
table.put("one", 'a', 1);
table.put("two", 'a', 2);
table.put("three", 'a', 3);
table.put("four", 'b', 4);
return table.column('a');
}
}
private abstract static | ColumnTests |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/navigation/ClassBasedNavigableList_withDefault_Test.java | {
"start": 933,
"end": 964
} | class ____ a List property
*/
| with |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java | {
"start": 114733,
"end": 117130
} | class ____ extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan plan) {
return plan.transformUp(EsRelation.class, relation -> {
if (relation.indexMode() == IndexMode.LOOKUP) {
return relation;
}
return relation.transformExpressionsUp(FieldAttribute.class, f -> {
if (f.field() instanceof InvalidMappedField imf && imf.types().stream().allMatch(DataType::isDate)) {
HashMap<ResolveUnionTypes.TypeResolutionKey, Expression> typeResolutions = new HashMap<>();
var convert = new ToDateNanos(f.source(), f);
imf.types().forEach(type -> typeResolutions(f, convert, type, imf, typeResolutions));
var resolvedField = ResolveUnionTypes.resolvedMultiTypeEsField(f, typeResolutions);
return new FieldAttribute(
f.source(),
f.parentName(),
f.qualifier(),
f.name(),
resolvedField,
f.nullable(),
f.id(),
f.synthetic()
);
}
return f;
});
});
}
}
private static void typeResolutions(
FieldAttribute fieldAttribute,
ConvertFunction convert,
DataType type,
InvalidMappedField imf,
HashMap<ResolveUnionTypes.TypeResolutionKey, Expression> typeResolutions
) {
ResolveUnionTypes.TypeResolutionKey key = new ResolveUnionTypes.TypeResolutionKey(fieldAttribute.name(), type);
var concreteConvert = ResolveUnionTypes.typeSpecificConvert(convert, fieldAttribute.source(), type, imf);
typeResolutions.put(key, concreteConvert);
}
/**
* Take InvalidMappedFields in specific aggregations (min, max, sum, count, and avg) and if all original data types
* are aggregate metric double + any combination of numerics, implicitly cast them to the same type: aggregate metric
* double for count, and double for min, max, and sum. Avg gets replaced with its surrogate (Div(Sum, Count))
*/
private static | DateMillisToNanosInEsRelation |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_xujin2.java | {
"start": 798,
"end": 1570
} | class ____ implements ValueFilter {
private Set<String> needMaskFileds = new HashSet();
public IntEnumFilter() {
}
public IntEnumFilter(String... fileds) {
if(fileds != null) {
String[] arr$ = fileds;
int len$ = fileds.length;
for(int i$ = 0; i$ < len$; ++i$) {
String filed = arr$[i$];
this.needMaskFileds.add(filed);
}
}
}
public Object process(Object object, String name, Object value) {
return value == null?value:(this.needMaskFileds.contains(name) && value instanceof IntEnum ?Integer.valueOf(((IntEnum)value).getCode()):value);
}
}
public static | IntEnumFilter |
java | quarkusio__quarkus | independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/BuildDependencyGraphVisitor.java | {
"start": 812,
"end": 5647
} | class ____ {
private final MavenArtifactResolver resolver;
private final ApplicationModelBuilder appBuilder;
private final Consumer<String> buildTreeConsumer;
private final List<Boolean> depth;
private DependencyNode currentDeployment;
private DependencyNode currentRuntime;
private Artifact runtimeArtifactToFind;
public BuildDependencyGraphVisitor(MavenArtifactResolver resolver, ApplicationModelBuilder appBuilder,
Consumer<String> buildTreeConsumer) {
this.resolver = resolver;
this.appBuilder = appBuilder;
this.buildTreeConsumer = buildTreeConsumer;
if (buildTreeConsumer == null) {
depth = null;
} else {
depth = new ArrayList<>();
}
}
public void visit(DependencyNode node) throws BootstrapMavenException {
if (depth != null) {
consume(node);
}
final Dependency dep = node.getDependency();
final DependencyNode previousDeployment = currentDeployment;
final DependencyNode previousRuntime = currentRuntime;
final Artifact previousRuntimeArtifact = runtimeArtifactToFind;
final Artifact newRuntimeArtifact = ApplicationDependencyTreeResolver.getRuntimeArtifact(node);
if (newRuntimeArtifact != null) {
currentDeployment = node;
runtimeArtifactToFind = newRuntimeArtifact;
currentRuntime = null;
} else if (runtimeArtifactToFind != null && currentRuntime == null
&& runtimeArtifactToFind.equals(dep.getArtifact())) {
currentRuntime = node;
runtimeArtifactToFind = null;
}
final List<DependencyNode> children = node.getChildren();
final int childrenTotal = children.size();
if (childrenTotal > 0) {
if (childrenTotal == 1) {
if (depth != null) {
depth.add(false);
}
visit(children.get(0));
} else {
if (depth != null) {
depth.add(true);
}
int i = 0;
while (i < childrenTotal) {
visit(children.get(i++));
if (depth != null && i == childrenTotal - 1) {
depth.set(depth.size() - 1, false);
}
}
}
if (depth != null) {
depth.remove(depth.size() - 1);
}
}
visitLeave(node);
currentDeployment = previousDeployment;
currentRuntime = previousRuntime;
runtimeArtifactToFind = previousRuntimeArtifact;
}
private void consume(DependencyNode node) {
var buf = new StringBuilder();
if (!depth.isEmpty()) {
for (int i = 0; i < depth.size() - 1; ++i) {
if (depth.get(i)) {
//buf.append("| ");
buf.append('\u2502').append(" ");
} else {
buf.append(" ");
}
}
if (depth.get(depth.size() - 1)) {
//buf.append("|- ");
buf.append('\u251c').append('\u2500').append(' ');
} else {
//buf.append("\\- ");
buf.append('\u2514').append('\u2500').append(' ');
}
}
var a = node.getArtifact();
buf.append(a.getGroupId()).append(":").append(a.getArtifactId()).append(":");
if (!a.getClassifier().isEmpty()) {
buf.append(a.getClassifier()).append(":");
}
if (!ArtifactCoords.TYPE_JAR.equals(a.getExtension())) {
buf.append(a.getExtension()).append(":");
}
buf.append(a.getVersion());
if (!depth.isEmpty()) {
buf.append(" (").append(node.getDependency().getScope());
if (node.getDependency().isOptional()) {
buf.append(" optional");
}
buf.append(')');
}
buildTreeConsumer.accept(buf.toString());
}
private void visitLeave(DependencyNode node) throws BootstrapMavenException {
final Dependency dep = node.getDependency();
if (dep == null) {
return;
}
if (currentDeployment == null) {
return;
}
if (currentRuntime == null && appBuilder.getDependency(getKey(node.getArtifact())) == null) {
appBuilder.addDependency(newDependencyBuilder(node, resolver).setFlags(DependencyFlags.DEPLOYMENT_CP));
} else if (currentRuntime == node) {
currentRuntime = null;
runtimeArtifactToFind = null;
}
if (currentDeployment == node) {
currentDeployment = null;
}
}
}
| BuildDependencyGraphVisitor |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/SpecializedFunction.java | {
"start": 2923,
"end": 3354
} | interface ____ extends ExpressionEvaluatorFactory {
/** Returns the context of the current call. */
CallContext getCallContext();
/** Gives read-only access to the configuration of the current session. */
ReadableConfig getConfiguration();
/** Returns the classloader used to resolve built-in functions. */
ClassLoader getBuiltInClassLoader();
}
/** Helper | SpecializedContext |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpGetWithInvalidMessageTest.java | {
"start": 1471,
"end": 3714
} | class ____ extends CamelTestSupport {
private static final String REQUEST_STRING = "user: Willem\n"
+ "GET http://localhost:%s/test HTTP/1.1\n"
+ "another: value\n Host: localhost\n";
private int port1;
@BindToRegistry("string-decoder")
private final StringDecoder stringDecoder = new StringDecoder();
@BindToRegistry("string-encoder")
private final StringEncoder stringEncoder = new StringEncoder();
@BindToRegistry("encoders")
public List<ChannelHandler> addEncoders() {
List<ChannelHandler> encoders = new ArrayList<>();
encoders.add(stringEncoder);
return encoders;
}
@BindToRegistry("decoders")
public List<ChannelHandler> addDecoders() {
List<ChannelHandler> decoders = new ArrayList<>();
decoders.add(stringDecoder);
return decoders;
}
@Test
public void testNettyHttpServer() {
invokeService(port1);
}
//@Test
public void testJettyHttpServer() {
invokeService(port1);
}
private void invokeService(int port) {
Exchange out = template.request("netty:tcp://localhost:" + port + "?encoders=#encoders&decoders=#decoders&sync=true",
new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody(String.format(REQUEST_STRING, port));
}
});
assertNotNull(out);
String result = out.getMessage().getBody(String.class);
assertNotNull(result);
assertTrue(result.indexOf("404 Not Found") > 0, "We should get the 404 response.");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
port1 = AvailablePortFinder.getNextAvailable();
// set up a netty http proxy
from("netty-http:http://localhost:" + port1 + "/test")
.transform().simple("Bye ${header.user}.");
}
};
}
}
| NettyHttpGetWithInvalidMessageTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/model/ProcessorTypeConfigurationTest.java | {
"start": 1166,
"end": 1759
} | class ____ extends ContextTestSupport {
@Test
public void testProcessorRefMissConfigured() {
Exception e = assertThrows(Exception.class, () -> {
context.addRoutes(new RouteBuilder() {
public void configure() {
from("direct:in").process("hello");
}
});
}, "Should have thrown IllegalArgumentException");
assertEquals("No bean could be found in the registry for: hello of type: org.apache.camel.Processor",
e.getCause().getMessage());
}
}
| ProcessorTypeConfigurationTest |
java | apache__avro | lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java | {
"start": 29386,
"end": 29489
} | class ____ for a fixed
assertNotNull(MD5.class.getAnnotation(TestAnnotation.class));
// a | generated |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/processors/ReplayProcessorBoundedConcurrencyTest.java | {
"start": 1108,
"end": 10579
} | class ____ extends RxJavaTest {
@Test
public void replaySubjectConcurrentSubscribersDoingReplayDontBlockEachOther() throws InterruptedException {
final ReplayProcessor<Long> replay = ReplayProcessor.createUnbounded();
Thread source = new Thread(new Runnable() {
@Override
public void run() {
Flowable.unsafeCreate(new Publisher<Long>() {
@Override
public void subscribe(Subscriber<? super Long> subscriber) {
System.out.println("********* Start Source Data ***********");
for (long l = 1; l <= 10000; l++) {
subscriber.onNext(l);
}
System.out.println("********* Finished Source Data ***********");
subscriber.onComplete();
}
}).subscribe(replay);
}
});
source.start();
long v = replay.blockingLast();
assertEquals(10000, v);
// it's been played through once so now it will all be replays
final CountDownLatch slowLatch = new CountDownLatch(1);
Thread slowThread = new Thread(new Runnable() {
@Override
public void run() {
Subscriber<Long> slow = new DefaultSubscriber<Long>() {
@Override
public void onComplete() {
System.out.println("*** Slow Observer completed");
slowLatch.countDown();
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Long args) {
if (args == 1) {
System.out.println("*** Slow Observer STARTED");
}
try {
if (args % 10 == 0) {
Thread.sleep(1);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
};
replay.subscribe(slow);
try {
slowLatch.await();
} catch (InterruptedException e1) {
e1.printStackTrace();
}
}
});
slowThread.start();
Thread fastThread = new Thread(new Runnable() {
@Override
public void run() {
final CountDownLatch fastLatch = new CountDownLatch(1);
Subscriber<Long> fast = new DefaultSubscriber<Long>() {
@Override
public void onComplete() {
System.out.println("*** Fast Observer completed");
fastLatch.countDown();
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Long args) {
if (args == 1) {
System.out.println("*** Fast Observer STARTED");
}
}
};
replay.subscribe(fast);
try {
fastLatch.await();
} catch (InterruptedException e1) {
e1.printStackTrace();
}
}
});
fastThread.start();
fastThread.join();
// slow should not yet be completed when fast completes
assertEquals(1, slowLatch.getCount());
slowThread.join();
}
@Test
public void replaySubjectConcurrentSubscriptions() throws InterruptedException {
final ReplayProcessor<Long> replay = ReplayProcessor.createUnbounded();
Thread source = new Thread(new Runnable() {
@Override
public void run() {
Flowable.unsafeCreate(new Publisher<Long>() {
@Override
public void subscribe(Subscriber<? super Long> subscriber) {
System.out.println("********* Start Source Data ***********");
for (long l = 1; l <= 10000; l++) {
subscriber.onNext(l);
}
System.out.println("********* Finished Source Data ***********");
subscriber.onComplete();
}
}).subscribe(replay);
}
});
// used to collect results of each thread
final List<List<Long>> listOfListsOfValues = Collections.synchronizedList(new ArrayList<>());
final List<Thread> threads = Collections.synchronizedList(new ArrayList<>());
for (int i = 1; i <= 200; i++) {
final int count = i;
if (count == 20) {
// start source data after we have some already subscribed
// and while others are in process of subscribing
source.start();
}
if (count == 100) {
// wait for source to finish then keep adding after it's done
source.join();
}
Thread t = new Thread(new Runnable() {
@Override
public void run() {
List<Long> values = replay.toList().blockingGet();
listOfListsOfValues.add(values);
System.out.println("Finished thread: " + count);
}
});
t.start();
System.out.println("Started thread: " + i);
threads.add(t);
}
// wait for all threads to complete
for (Thread t : threads) {
t.join();
}
// assert all threads got the same results
List<Long> sums = new ArrayList<>();
for (List<Long> values : listOfListsOfValues) {
long v = 0;
for (long l : values) {
v += l;
}
sums.add(v);
}
long expected = sums.get(0);
boolean success = true;
for (long l : sums) {
if (l != expected) {
success = false;
System.out.println("FAILURE => Expected " + expected + " but got: " + l);
}
}
if (success) {
System.out.println("Success! " + sums.size() + " each had the same sum of " + expected);
} else {
throw new RuntimeException("Concurrency Bug");
}
}
/**
* Can receive timeout if subscribe never receives an onError/onComplete ... which reveals a race condition.
*/
@Test
public void subscribeCompletionRaceCondition() {
for (int i = 0; i < 50; i++) {
final ReplayProcessor<String> processor = ReplayProcessor.createUnbounded();
final AtomicReference<String> value1 = new AtomicReference<>();
processor.subscribe(new Consumer<String>() {
@Override
public void accept(String t1) {
try {
// simulate a slow observer
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
value1.set(t1);
}
});
Thread t1 = new Thread(new Runnable() {
@Override
public void run() {
processor.onNext("value");
processor.onComplete();
}
});
SubjectObserverThread t2 = new SubjectObserverThread(processor);
SubjectObserverThread t3 = new SubjectObserverThread(processor);
SubjectObserverThread t4 = new SubjectObserverThread(processor);
SubjectObserverThread t5 = new SubjectObserverThread(processor);
t2.start();
t3.start();
t1.start();
t4.start();
t5.start();
try {
t1.join();
t2.join();
t3.join();
t4.join();
t5.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertEquals("value", value1.get());
assertEquals("value", t2.value.get());
assertEquals("value", t3.value.get());
assertEquals("value", t4.value.get());
assertEquals("value", t5.value.get());
}
}
/**
* Make sure emission-subscription races are handled correctly.
* https://github.com/ReactiveX/RxJava/issues/1147
*/
@Test
public void raceForTerminalState() {
final List<Integer> expected = Arrays.asList(1);
for (int i = 0; i < 100000; i++) {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
Flowable.just(1).subscribeOn(Schedulers.computation()).cache().subscribe(ts);
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertValueSequence(expected);
ts.assertTerminated();
}
}
private static | ReplayProcessorBoundedConcurrencyTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/fs/AbstractAutoCloseableRegistryTest.java | {
"start": 7935,
"end": 8360
} | class ____ implements Closeable {
private final AtomicBoolean closed = new AtomicBoolean();
@Override
public void close() throws IOException {
assertThat(closed.compareAndSet(false, true))
.as("TestCloseable was already closed")
.isTrue();
}
public boolean isClosed() {
return closed.get();
}
}
}
| TestCloseable |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/utils/CatalogTableStatisticsConverter.java | {
"start": 2130,
"end": 8584
} | class ____ {
public static TableStats convertToTableStats(
CatalogTableStatistics tableStatistics, CatalogColumnStatistics columnStatistics) {
long rowCount;
if (tableStatistics != null && tableStatistics.getRowCount() >= 0) {
rowCount = tableStatistics.getRowCount();
} else {
rowCount = TableStats.UNKNOWN.getRowCount();
}
Map<String, ColumnStats> columnStatsMap;
if (columnStatistics != null) {
columnStatsMap = convertToColumnStatsMap(columnStatistics.getColumnStatisticsData());
} else {
columnStatsMap = new HashMap<>();
}
return new TableStats(rowCount, columnStatsMap);
}
public static TableStats convertToAccumulatedTableStates(
List<CatalogTableStatistics> tableStatisticsList,
List<CatalogColumnStatistics> catalogColumnStatisticsList,
Set<String> partitionKeys) {
Preconditions.checkState(
tableStatisticsList.size() == catalogColumnStatisticsList.size(),
String.format(
"The size of table statistic is %s, expect column statistic list has same size, but the size is %s.",
tableStatisticsList.size(), catalogColumnStatisticsList.size()));
List<TableStats> tableStats = new ArrayList<>();
for (int i = 0; i < tableStatisticsList.size(); i++) {
CatalogTableStatistics catalogTableStatistics = tableStatisticsList.get(i);
CatalogColumnStatistics catalogColumnStatistics = catalogColumnStatisticsList.get(i);
tableStats.add(
CatalogTableStatisticsConverter.convertToTableStats(
catalogTableStatistics, catalogColumnStatistics));
}
return tableStats.stream()
.reduce((s1, s2) -> s1.merge(s2, partitionKeys))
.orElse(TableStats.UNKNOWN);
}
@VisibleForTesting
public static Map<String, ColumnStats> convertToColumnStatsMap(
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData) {
Map<String, ColumnStats> columnStatsMap = new HashMap<>();
for (Map.Entry<String, CatalogColumnStatisticsDataBase> entry :
columnStatisticsData.entrySet()) {
if (entry.getValue() != null) {
ColumnStats columnStats = convertToColumnStats(entry.getValue());
columnStatsMap.put(entry.getKey(), columnStats);
}
}
return columnStatsMap;
}
private static ColumnStats convertToColumnStats(
CatalogColumnStatisticsDataBase columnStatisticsData) {
Long ndv = null;
Long nullCount = columnStatisticsData.getNullCount();
Double avgLen = null;
Integer maxLen = null;
Comparable<?> max = null;
Comparable<?> min = null;
if (columnStatisticsData instanceof CatalogColumnStatisticsDataBoolean) {
CatalogColumnStatisticsDataBoolean booleanData =
(CatalogColumnStatisticsDataBoolean) columnStatisticsData;
avgLen = 1.0;
maxLen = 1;
if (null == booleanData.getFalseCount() || null == booleanData.getTrueCount()) {
ndv = 2L;
} else if ((booleanData.getFalseCount() == 0 && booleanData.getTrueCount() > 0)
|| (booleanData.getFalseCount() > 0 && booleanData.getTrueCount() == 0)) {
ndv = 1L;
} else {
ndv = 2L;
}
} else if (columnStatisticsData instanceof CatalogColumnStatisticsDataLong) {
CatalogColumnStatisticsDataLong longData =
(CatalogColumnStatisticsDataLong) columnStatisticsData;
ndv = longData.getNdv();
avgLen = 8.0;
maxLen = 8;
max = longData.getMax();
min = longData.getMin();
} else if (columnStatisticsData instanceof CatalogColumnStatisticsDataDouble) {
CatalogColumnStatisticsDataDouble doubleData =
(CatalogColumnStatisticsDataDouble) columnStatisticsData;
ndv = doubleData.getNdv();
avgLen = 8.0;
maxLen = 8;
max = doubleData.getMax();
min = doubleData.getMin();
} else if (columnStatisticsData instanceof CatalogColumnStatisticsDataString) {
CatalogColumnStatisticsDataString strData =
(CatalogColumnStatisticsDataString) columnStatisticsData;
ndv = strData.getNdv();
avgLen = strData.getAvgLength();
maxLen = null == strData.getMaxLength() ? null : strData.getMaxLength().intValue();
} else if (columnStatisticsData instanceof CatalogColumnStatisticsDataBinary) {
CatalogColumnStatisticsDataBinary binaryData =
(CatalogColumnStatisticsDataBinary) columnStatisticsData;
avgLen = binaryData.getAvgLength();
maxLen =
null == binaryData.getMaxLength() ? null : binaryData.getMaxLength().intValue();
} else if (columnStatisticsData instanceof CatalogColumnStatisticsDataDate) {
CatalogColumnStatisticsDataDate dateData =
(CatalogColumnStatisticsDataDate) columnStatisticsData;
ndv = dateData.getNdv();
if (dateData.getMax() != null) {
max =
Date.valueOf(
DateTimeUtils.formatDate(
(int) dateData.getMax().getDaysSinceEpoch()));
}
if (dateData.getMin() != null) {
min =
Date.valueOf(
DateTimeUtils.formatDate(
(int) dateData.getMin().getDaysSinceEpoch()));
}
} else {
throw new TableException(
"Unsupported CatalogColumnStatisticsDataBase: "
+ columnStatisticsData.getClass().getCanonicalName());
}
return ColumnStats.Builder.builder()
.setNdv(ndv)
.setNullCount(nullCount)
.setAvgLen(avgLen)
.setMaxLen(maxLen)
.setMax(max)
.setMin(min)
.build();
}
}
| CatalogTableStatisticsConverter |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/notify/listener/Subscriber.java | {
"start": 784,
"end": 998
} | class ____ subscriber interface.
*
* @author <a href="mailto:liaochuntao@live.com">liaochuntao</a>
* @author zongtanghu
*/
@SuppressWarnings("PMD.AbstractClassShouldStartWithAbstractNamingRule")
public abstract | for |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/jdbc/TooManyRowsAffectedException.java | {
"start": 360,
"end": 833
} | class ____ extends HibernateException {
private final int expectedRowCount;
private final int actualRowCount;
public TooManyRowsAffectedException(String message, int expectedRowCount, int actualRowCount) {
super( message );
this.expectedRowCount = expectedRowCount;
this.actualRowCount = actualRowCount;
}
public int getExpectedRowCount() {
return expectedRowCount;
}
public int getActualRowCount() {
return actualRowCount;
}
}
| TooManyRowsAffectedException |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/eventbus/ReplyException.java | {
"start": 800,
"end": 3237
} | class ____ extends VertxException {
private final ReplyFailure failureType;
private final int failureCode;
/**
* Create a ReplyException with all attributes.
*
* @param failureType the failure type
* @param failureCode the failure code (e.g. 404)
* @param message the failure message
*/
public ReplyException(ReplyFailure failureType, int failureCode, String message, boolean noStackTrace) {
super(message, noStackTrace);
this.failureType = failureType;
this.failureCode = failureCode;
}
/**
* Create a ReplyException with all attributes, including cause.<br>
* Default {@link io.vertx.core.eventbus.impl.codecs.ReplyExceptionMessageCodec ReplyExceptionMessageCodec} doesn't support Cause!<br>
* This ctor is meant to be used for extension, together with a custom codec.
*
* @param failureType the failure type
* @param failureCode the failure code (e.g. 404)
* @param message the failure message
*/
protected ReplyException(ReplyFailure failureType, int failureCode, String message, Throwable cause, boolean noStackTrace) {
super(message, cause, noStackTrace);
this.failureType = failureType;
this.failureCode = failureCode;
}
/**
* Create a ReplyException
*
* @param failureType the failure type
* @param failureCode the failure code
* @param message the failure message
*/
public ReplyException(ReplyFailure failureType, int failureCode, String message) {
this(failureType, failureCode, message, true);
}
/**
* Create a ReplyException
*
* @param failureType the failure type
* @param message the failure message
*/
public ReplyException(ReplyFailure failureType, String message) {
this(failureType, -1, message);
}
/**
* Create a ReplyException
*
* @param failureType the failure type
*/
public ReplyException(ReplyFailure failureType) {
this(failureType, -1, null);
}
/**
* Get the failure type for the message
*
* @return the failure type
*/
public ReplyFailure failureType() {
return failureType;
}
/**
* Get the failure code for the message
*
* @return the failure code
*/
public int failureCode() {
return failureCode;
}
@Override
public String toString() {
String message = getMessage();
return "(" + failureType + "," + failureCode + ") " + (message != null ? message : "");
}
}
| ReplyException |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java | {
"start": 821,
"end": 1989
} | class ____ implements AggregatorFunctionSupplier {
public SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return SpatialExtentGeoShapeSourceValuesAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public SpatialExtentGeoShapeSourceValuesAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return SpatialExtentGeoShapeSourceValuesAggregatorFunction.create(driverContext, channels);
}
@Override
public SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction groupingAggregator(
DriverContext driverContext, List<Integer> channels) {
return SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "spatial_extent_geo_shape_source of valuess";
}
}
| SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier |
java | apache__camel | components/camel-debug/src/main/java/org/apache/camel/component/debug/CamelDebuggerFactory.java | {
"start": 1289,
"end": 3511
} | class ____ implements DebuggerFactory {
@Override
// Debugger is created and added as a service. This method always returns a null object.
public Debugger createDebugger(CamelContext camelContext) throws Exception {
// only create a debugger if none already exists
if (camelContext.hasService(BacklogDebugger.class) == null) {
// NOTE: the AutoCloseable object is added as a Service, hence it is closed by Camel context
// according to the object lifecycle.
BacklogDebugger backlog = DefaultBacklogDebugger.createDebugger(camelContext); // NOSONAR
backlog.setStandby(camelContext.isDebugStandby());
// must enable source location and history
// so debugger tooling knows to map breakpoints to source code
camelContext.setSourceLocationEnabled(true);
camelContext.setMessageHistory(true);
// enable debugger on camel
camelContext.setDebugging(true);
// to make debugging possible for tooling we need to make it possible to do remote JMX connection
DebuggerJmxConnectorService connector = new DebuggerJmxConnectorService();
connector.setCreateConnector(true);
camelContext.addService(connector);
// we need to enable debugger after context is started
camelContext.addLifecycleStrategy(new LifecycleStrategySupport() {
@Override
public void onContextStarted(CamelContext context) {
// only enable debugger if not in standby mode
if (!backlog.isStandby()) {
backlog.enableDebugger();
}
}
@Override
public void onContextStopping(CamelContext context) {
backlog.disableDebugger();
}
});
camelContext.addService(backlog, true, true);
}
// return null as we fool camel-core into using this backlog debugger as we added it as a service
return null;
}
@Override
public String toString() {
return "camel-debug";
}
}
| CamelDebuggerFactory |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/spdy/SpdyFrameDecoder.java | {
"start": 2977,
"end": 18775
} | enum ____ {
READ_COMMON_HEADER,
READ_DATA_FRAME,
READ_SYN_STREAM_FRAME,
READ_SYN_REPLY_FRAME,
READ_RST_STREAM_FRAME,
READ_SETTINGS_FRAME,
READ_SETTING,
READ_PING_FRAME,
READ_GOAWAY_FRAME,
READ_HEADERS_FRAME,
READ_WINDOW_UPDATE_FRAME,
READ_UNKNOWN_FRAME,
READ_HEADER_BLOCK,
DISCARD_FRAME,
FRAME_ERROR
}
/**
* Creates a new instance with the specified {@code version}
* and the default {@code maxChunkSize (8192)}.
*/
public SpdyFrameDecoder(SpdyVersion spdyVersion, SpdyFrameDecoderDelegate delegate) {
this(spdyVersion, delegate, 8192);
}
/**
* Creates a new instance with the specified parameters.
*/
public SpdyFrameDecoder(SpdyVersion spdyVersion, SpdyFrameDecoderDelegate delegate, int maxChunkSize) {
this.spdyVersion = ObjectUtil.checkNotNull(spdyVersion, "spdyVersion").version();
this.delegate = ObjectUtil.checkNotNull(delegate, "delegate");
this.maxChunkSize = ObjectUtil.checkPositive(maxChunkSize, "maxChunkSize");
state = State.READ_COMMON_HEADER;
}
public void decode(ByteBuf buffer) {
boolean last;
int statusCode;
while (true) {
switch(state) {
case READ_COMMON_HEADER:
if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
return;
}
int frameOffset = buffer.readerIndex();
int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
buffer.skipBytes(SPDY_HEADER_SIZE);
boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
int version;
if (control) {
// Decode control frame common header
version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
frameType = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
streamId = 0; // Default to session Stream-ID
} else {
// Decode data frame common header
version = spdyVersion; // Default to expected version
frameType = SPDY_DATA_FRAME;
streamId = getUnsignedInt(buffer, frameOffset);
}
flags = buffer.getByte(flagsOffset);
length = getUnsignedMedium(buffer, lengthOffset);
// Check version first then validity
if (version != spdyVersion) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SPDY Version");
} else if (!isValidFrameHeader(streamId, frameType, flags, length)) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid Frame Error");
} else if (isValidUnknownFrameHeader(streamId, frameType, flags, length)) {
state = State.READ_UNKNOWN_FRAME;
} else {
state = getNextState(frameType, length);
}
break;
case READ_DATA_FRAME:
if (length == 0) {
state = State.READ_COMMON_HEADER;
delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
break;
}
// Generate data frames that do not exceed maxChunkSize
int dataLength = Math.min(maxChunkSize, length);
// Wait until entire frame is readable
if (buffer.readableBytes() < dataLength) {
return;
}
ByteBuf data = buffer.readRetainedSlice(dataLength);
length -= dataLength;
if (length == 0) {
state = State.READ_COMMON_HEADER;
}
last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
delegate.readDataFrame(streamId, last, data);
break;
case READ_SYN_STREAM_FRAME:
if (buffer.readableBytes() < 10) {
return;
}
int offset = buffer.readerIndex();
streamId = getUnsignedInt(buffer, offset);
int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
last = hasFlag(flags, SPDY_FLAG_FIN);
boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
buffer.skipBytes(10);
length -= 10;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SYN_STREAM Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
}
break;
case READ_SYN_REPLY_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
last = hasFlag(flags, SPDY_FLAG_FIN);
buffer.skipBytes(4);
length -= 4;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SYN_REPLY Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readSynReplyFrame(streamId, last);
}
break;
case READ_RST_STREAM_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
if (streamId == 0 || statusCode == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid RST_STREAM Frame");
} else {
state = State.READ_COMMON_HEADER;
delegate.readRstStreamFrame(streamId, statusCode);
}
break;
case READ_SETTINGS_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
numSettings = getUnsignedInt(buffer, buffer.readerIndex());
buffer.skipBytes(4);
length -= 4;
// Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
if ((length & 0x07) != 0 || length >> 3 != numSettings) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid SETTINGS Frame");
} else {
state = State.READ_SETTING;
delegate.readSettingsFrame(clear);
}
break;
case READ_SETTING:
if (numSettings == 0) {
state = State.READ_COMMON_HEADER;
delegate.readSettingsEnd();
break;
}
if (buffer.readableBytes() < 8) {
return;
}
byte settingsFlags = buffer.getByte(buffer.readerIndex());
int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
int value = getSignedInt(buffer, buffer.readerIndex() + 4);
boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
buffer.skipBytes(8);
--numSettings;
delegate.readSetting(id, value, persistValue, persisted);
break;
case READ_PING_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
int pingId = getSignedInt(buffer, buffer.readerIndex());
buffer.skipBytes(4);
state = State.READ_COMMON_HEADER;
delegate.readPingFrame(pingId);
break;
case READ_GOAWAY_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
state = State.READ_COMMON_HEADER;
delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
break;
case READ_HEADERS_FRAME:
if (buffer.readableBytes() < 4) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
last = hasFlag(flags, SPDY_FLAG_FIN);
buffer.skipBytes(4);
length -= 4;
if (streamId == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid HEADERS Frame");
} else {
state = State.READ_HEADER_BLOCK;
delegate.readHeadersFrame(streamId, last);
}
break;
case READ_WINDOW_UPDATE_FRAME:
if (buffer.readableBytes() < 8) {
return;
}
streamId = getUnsignedInt(buffer, buffer.readerIndex());
int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
buffer.skipBytes(8);
if (deltaWindowSize == 0) {
state = State.FRAME_ERROR;
delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
} else {
state = State.READ_COMMON_HEADER;
delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
}
break;
case READ_UNKNOWN_FRAME:
if (decodeUnknownFrame(frameType, flags, length, buffer)) {
state = State.READ_COMMON_HEADER;
break;
}
return;
case READ_HEADER_BLOCK:
if (length == 0) {
state = State.READ_COMMON_HEADER;
delegate.readHeaderBlockEnd();
break;
}
if (!buffer.isReadable()) {
return;
}
int compressedBytes = Math.min(buffer.readableBytes(), length);
ByteBuf headerBlock = buffer.readRetainedSlice(compressedBytes);
length -= compressedBytes;
delegate.readHeaderBlock(headerBlock);
break;
case DISCARD_FRAME:
int numBytes = Math.min(buffer.readableBytes(), length);
buffer.skipBytes(numBytes);
length -= numBytes;
if (length == 0) {
state = State.READ_COMMON_HEADER;
break;
}
return;
case FRAME_ERROR:
buffer.skipBytes(buffer.readableBytes());
return;
default:
throw new Error("Unexpected state: " + state);
}
}
}
private static boolean hasFlag(byte flags, byte flag) {
return (flags & flag) != 0;
}
private static State getNextState(int type, int length) {
switch (type) {
case SPDY_DATA_FRAME:
return State.READ_DATA_FRAME;
case SPDY_SYN_STREAM_FRAME:
return State.READ_SYN_STREAM_FRAME;
case SPDY_SYN_REPLY_FRAME:
return State.READ_SYN_REPLY_FRAME;
case SPDY_RST_STREAM_FRAME:
return State.READ_RST_STREAM_FRAME;
case SPDY_SETTINGS_FRAME:
return State.READ_SETTINGS_FRAME;
case SPDY_PING_FRAME:
return State.READ_PING_FRAME;
case SPDY_GOAWAY_FRAME:
return State.READ_GOAWAY_FRAME;
case SPDY_HEADERS_FRAME:
return State.READ_HEADERS_FRAME;
case SPDY_WINDOW_UPDATE_FRAME:
return State.READ_WINDOW_UPDATE_FRAME;
default:
if (length != 0) {
return State.DISCARD_FRAME;
} else {
return State.READ_COMMON_HEADER;
}
}
}
/**
* Decode the unknown frame, returns true if parsed something, otherwise false.
*/
protected boolean decodeUnknownFrame(int frameType, byte flags, int length, ByteBuf buffer) {
if (length == 0) {
delegate.readUnknownFrame(frameType, flags, Unpooled.EMPTY_BUFFER);
return true;
}
if (buffer.readableBytes() < length) {
return false;
}
ByteBuf data = buffer.readRetainedSlice(length);
delegate.readUnknownFrame(frameType, flags, data);
return true;
}
/**
* Check whether the unknown frame is valid, if not, the frame will be discarded,
* otherwise, the frame will be passed to {@link #decodeUnknownFrame(int, byte, int, ByteBuf)}.
* */
protected boolean isValidUnknownFrameHeader(int streamId, int type, byte flags, int length) {
return false;
}
private static boolean isValidFrameHeader(int streamId, int type, byte flags, int length) {
switch (type) {
case SPDY_DATA_FRAME:
return streamId != 0;
case SPDY_SYN_STREAM_FRAME:
return length >= 10;
case SPDY_SYN_REPLY_FRAME:
return length >= 4;
case SPDY_RST_STREAM_FRAME:
return flags == 0 && length == 8;
case SPDY_SETTINGS_FRAME:
return length >= 4;
case SPDY_PING_FRAME:
return length == 4;
case SPDY_GOAWAY_FRAME:
return length == 8;
case SPDY_HEADERS_FRAME:
return length >= 4;
case SPDY_WINDOW_UPDATE_FRAME:
return length == 8;
default:
return true;
}
}
}
| State |
java | elastic__elasticsearch | libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java | {
"start": 1947,
"end": 2328
} | interface ____ {
void check$org_example_TestTargetClass$$staticMethod(Class<?> clazz, int arg0, String arg1, Object arg2);
void check$org_example_TestTargetClass$instanceMethodNoArgs(Class<?> clazz, TestTargetClass that);
void check$org_example_TestTargetClass$instanceMethodWithArgs(Class<?> clazz, TestTargetClass that, int x, int y);
}
| TestChecker |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ResourceMappings.java | {
"start": 1195,
"end": 1519
} | class ____ used to store assigned resource to a single container by
* resource types.
*
* Assigned resource could be list of String
*
* For example, we can assign container to:
* "numa": ["numa0"]
* "gpu": ["0", "1", "2", "3"]
* "fpga": ["1", "3"]
*
* This will be used for NM restart container recovery.
*/
public | is |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedVariableTest.java | {
"start": 9320,
"end": 9544
} | class ____ {
// BUG: Diagnostic contains: 'j' is never read
public void test(int i, int j) {
System.out.println(i);
}
}
private | Inner |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/assertion/RecursiveAssertionAssert_hasNoNullFields_Test.java | {
"start": 1845,
"end": 3395
} | class ____ {
@Test
void should_pass_when_asserting_not_null_over_graph_without_null_values() {
// GIVEN
Object testObject = objectGraphNoNulls();
// WHEN/THEN
thenNoException().isThrownBy(() -> assertThat(testObject).usingRecursiveAssertion().hasNoNullFields());
}
@Test
void should_fail_when_asserting_not_null_over_graph_with_null_values() {
// GIVEN
Object testObject = objectGraphWithNullValue();
// WHEN
var error = expectAssertionError(() -> assertThat(testObject).usingRecursiveAssertion().hasNoNullFields());
// THEN
then(error).hasMessageContaining("books.[0].authors.[1].books.[1].authors.[1].email");
}
Object objectGraphNoNulls() {
Author root = (Author) objectGraphWithNullValue();
root.books.getFirst().authors[1].books.get(1).authors[1].email = "k.beck@recursive.test";
return root;
}
Object objectGraphWithNullValue() {
Author pramodSadalage = new Author("Pramod Sadalage", "p.sadalage@recursive.test");
Author martinFowler = new Author("Martin Fowler", "m.fowler@recursive.test");
Author kentBeck = new Author("Kent Beck", null);
Book noSqlDistilled = new Book("NoSql Distilled", array(pramodSadalage, martinFowler));
pramodSadalage.books.add(noSqlDistilled);
martinFowler.books.add(noSqlDistilled);
Book refactoring = new Book("Refactoring", array(martinFowler, kentBeck));
martinFowler.books.add(refactoring);
kentBeck.books.add(refactoring);
return pramodSadalage;
}
| RecursiveAssertionAssert_hasNoNullFields_Test |
java | spring-projects__spring-framework | spring-core/src/testFixtures/java/org/springframework/core/testfixture/codec/AbstractDecoderTests.java | {
"start": 1712,
"end": 16831
} | class ____<D extends Decoder<?>> extends AbstractLeakCheckingTests {
/**
* The decoder to test.
*/
protected D decoder;
/**
* Construct a new {@code AbstractDecoderTests} instance for the given decoder.
* @param decoder the decoder
*/
protected AbstractDecoderTests(D decoder) {
Assert.notNull(decoder, "Encoder must not be null");
this.decoder = decoder;
}
/**
* Subclasses should implement this method to test {@link Decoder#canDecode}.
*/
@Test
protected abstract void canDecode() throws Exception;
/**
* Subclasses should implement this method to test {@link Decoder#decode}, possibly using
* {@link #testDecodeAll} or other helper methods.
*/
@Test
protected abstract void decode() throws Exception;
/**
* Subclasses should implement this method to test {@link Decoder#decodeToMono}, possibly using
* {@link #testDecodeToMonoAll}.
*/
@Test
protected abstract void decodeToMono() throws Exception;
// Flux
/**
* Helper method that tests for a variety of {@link Flux} decoding scenarios. This method
* invokes:
* <ul>
* <li>{@link #testDecode(Publisher, ResolvableType, Consumer, MimeType, Map)}</li>
* <li>{@link #testDecodeError(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testDecodeCancel(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testDecodeEmpty(ResolvableType, MimeType, Map)}</li>
* </ul>
*
* @param input the input to be provided to the decoder
* @param outputClass the desired output class
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
* @param <T> the output type
*/
protected <T> void testDecodeAll(Publisher<DataBuffer> input, Class<? extends T> outputClass,
Consumer<StepVerifier.FirstStep<T>> stepConsumer) {
testDecodeAll(input, ResolvableType.forClass(outputClass), stepConsumer, null, null);
}
/**
* Helper method that tests for a variety of {@link Flux} decoding scenarios. This method
* invokes:
* <ul>
* <li>{@link #testDecode(Publisher, ResolvableType, Consumer, MimeType, Map)}</li>
* <li>{@link #testDecodeError(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testDecodeCancel(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testDecodeEmpty(ResolvableType, MimeType, Map)}</li>
* </ul>
*
* @param input the input to be provided to the decoder
* @param outputType the desired output type
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
* @param <T> the output type
*/
protected <T> void testDecodeAll(Publisher<DataBuffer> input, ResolvableType outputType,
Consumer<StepVerifier.FirstStep<T>> stepConsumer,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
testDecode(input, outputType, stepConsumer, mimeType, hints);
testDecodeError(input, outputType, mimeType, hints);
testDecodeCancel(input, outputType, mimeType, hints);
testDecodeEmpty(outputType, mimeType, hints);
}
/**
* Test a standard {@link Decoder#decode decode} scenario. For example:
* <pre class="code">
* byte[] bytes1 = ...
* byte[] bytes2 = ...
*
* Flux<DataBuffer> input = Flux.concat(
* dataBuffer(bytes1),
* dataBuffer(bytes2));
*
* testDecodeAll(input, byte[].class, step -> step
* .consumeNextWith(expectBytes(bytes1))
* .consumeNextWith(expectBytes(bytes2))
* .verifyComplete());
* </pre>
*
* @param input the input to be provided to the decoder
* @param outputClass the desired output class
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
* @param <T> the output type
*/
protected <T> void testDecode(Publisher<DataBuffer> input, Class<? extends T> outputClass,
Consumer<StepVerifier.FirstStep<T>> stepConsumer) {
testDecode(input, ResolvableType.forClass(outputClass), stepConsumer, null, null);
}
/**
* Test a standard {@link Decoder#decode decode} scenario. For example:
* <pre class="code">
* byte[] bytes1 = ...
* byte[] bytes2 = ...
*
* Flux<DataBuffer> input = Flux.concat(
* dataBuffer(bytes1),
* dataBuffer(bytes2));
*
* testDecodeAll(input, byte[].class, step -> step
* .consumeNextWith(expectBytes(bytes1))
* .consumeNextWith(expectBytes(bytes2))
* .verifyComplete());
* </pre>
*
* @param input the input to be provided to the decoder
* @param outputType the desired output type
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
* @param <T> the output type
*/
@SuppressWarnings("unchecked")
protected <T> void testDecode(Publisher<DataBuffer> input, ResolvableType outputType,
Consumer<StepVerifier.FirstStep<T>> stepConsumer,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
Flux<T> result = (Flux<T>) this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.FirstStep<T> step = StepVerifier.create(result);
stepConsumer.accept(step);
}
/**
* Test a {@link Decoder#decode decode} scenario where the input stream contains an error.
* This test method will feed the first element of the {@code input} stream to the decoder,
* followed by an {@link InputException}.
* The result is expected to contain one "normal" element, followed by the error.
*
* @param input the input to be provided to the decoder
* @param outputType the desired output type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
* @see InputException
*/
protected void testDecodeError(Publisher<DataBuffer> input, ResolvableType outputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
Flux<DataBuffer> flux = Mono.from(input).concatWith(Flux.error(new InputException()));
assertThatExceptionOfType(InputException.class).isThrownBy(() ->
this.decoder.decode(flux, outputType, mimeType, hints).blockLast(Duration.ofSeconds(5)));
}
/**
* Test a {@link Decoder#decode decode} scenario where the input stream is canceled.
* This test method will feed the first element of the {@code input} stream to the decoder,
* followed by a cancel signal.
* The result is expected to contain one "normal" element.
*
* @param input the input to be provided to the decoder
* @param outputType the desired output type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
*/
protected void testDecodeCancel(Publisher<DataBuffer> input, ResolvableType outputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
Flux<?> result = this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.create(result).expectNextCount(1).thenCancel().verify();
}
/**
 * Test a {@link Decoder#decode decode} scenario with an empty input stream;
 * the decoded output is expected to complete without emitting any elements.
 *
 * @param outputType the desired output type
 * @param mimeType the mime type to use for decoding. May be {@code null}.
 * @param hints the hints used for decoding. May be {@code null}.
 */
protected void testDecodeEmpty(ResolvableType outputType, @Nullable MimeType mimeType,
		@Nullable Map<String, Object> hints) {

	Flux<?> decoded = this.decoder.decode(Flux.empty(), outputType, mimeType, hints);
	StepVerifier.create(decoded).verifyComplete();
}
// Mono

/**
 * Helper method that exercises the full set of {@link Mono} decoding scenarios
 * for the given output class. Delegates to
 * {@link #testDecodeToMonoAll(Publisher, ResolvableType, Consumer, MimeType, Map)}
 * with a {@code null} mime type and {@code null} hints.
 *
 * @param input the input to be provided to the decoder
 * @param outputClass the desired output class
 * @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
 * @param <T> the output type
 */
protected <T> void testDecodeToMonoAll(Publisher<DataBuffer> input,
		Class<? extends T> outputClass, Consumer<StepVerifier.FirstStep<T>> stepConsumer) {

	ResolvableType outputType = ResolvableType.forClass(outputClass);
	testDecodeToMonoAll(input, outputType, stepConsumer, null, null);
}
/**
 * Helper method that tests for a variety of {@link Mono} decoding scenarios. This method
 * invokes, in order:
 * <ul>
 * <li>{@link #testDecodeToMono(Publisher, ResolvableType, Consumer, MimeType, Map)}</li>
 * <li>{@link #testDecodeToMonoError(Publisher, ResolvableType, MimeType, Map)}</li>
 * <li>{@link #testDecodeToMonoCancel(Publisher, ResolvableType, MimeType, Map)}</li>
 * <li>{@link #testDecodeToMonoEmpty(ResolvableType, MimeType, Map)}</li>
 * </ul>
 * <p>Note that {@code input} is handed to three of the scenarios and therefore
 * subscribed to multiple times; it should be a publisher that can be replayed.
 *
 * @param input the input to be provided to the decoder
 * @param outputType the desired output type
 * @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
 * @param mimeType the mime type to use for decoding. May be {@code null}.
 * @param hints the hints used for decoding. May be {@code null}.
 * @param <T> the output type
 */
protected <T> void testDecodeToMonoAll(Publisher<DataBuffer> input, ResolvableType outputType,
		Consumer<StepVerifier.FirstStep<T>> stepConsumer,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
	testDecodeToMono(input, outputType, stepConsumer, mimeType, hints);
	testDecodeToMonoError(input, outputType, mimeType, hints);
	testDecodeToMonoCancel(input, outputType, mimeType, hints);
	testDecodeToMonoEmpty(outputType, mimeType, hints);
}
/**
 * Test a standard {@link Decoder#decodeToMono decode} scenario. For example:
 * <pre class="code">
 * byte[] bytes1 = ...
 * byte[] bytes2 = ...
 * byte[] allBytes = ... // bytes1 + bytes2
 *
 * Flux<DataBuffer> input = Flux.concat(
 *   dataBuffer(bytes1),
 *   dataBuffer(bytes2));
 *
 * testDecodeToMono(input, byte[].class, step -> step
 *   .consumeNextWith(expectBytes(allBytes))
 *   .verifyComplete());
 * </pre>
 *
 * @param input the input to be provided to the decoder
 * @param outputClass the desired output class
 * @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
 * @param <T> the output type
 */
protected <T> void testDecodeToMono(Publisher<DataBuffer> input,
		Class<? extends T> outputClass, Consumer<StepVerifier.FirstStep<T>> stepConsumer) {
	testDecodeToMono(input, ResolvableType.forClass(outputClass), stepConsumer, null, null);
}
/**
 * Test a standard {@link Decoder#decodeToMono decode} scenario. For example:
 * <pre class="code">
 * byte[] bytes1 = ...
 * byte[] bytes2 = ...
 * byte[] allBytes = ... // bytes1 + bytes2
 *
 * Flux<DataBuffer> input = Flux.concat(
 *   dataBuffer(bytes1),
 *   dataBuffer(bytes2));
 *
 * testDecodeToMono(input, byte[].class, step -> step
 *   .consumeNextWith(expectBytes(allBytes))
 *   .verifyComplete());
 * </pre>
 *
 * @param input the input to be provided to the decoder
 * @param outputType the desired output type
 * @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output;
 * the consumer is responsible for triggering verification (e.g. {@code verifyComplete()})
 * @param mimeType the mime type to use for decoding. May be {@code null}.
 * @param hints the hints used for decoding. May be {@code null}.
 * @param <T> the output type
 */
@SuppressWarnings("unchecked")
protected <T> void testDecodeToMono(Publisher<DataBuffer> input, ResolvableType outputType,
		Consumer<StepVerifier.FirstStep<T>> stepConsumer,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
	// Cast is unchecked because decodeToMono returns Mono<?>; T is chosen by the caller.
	Mono<T> result = (Mono<T>) this.decoder.decodeToMono(input, outputType, mimeType, hints);
	StepVerifier.FirstStep<T> step = StepVerifier.create(result);
	stepConsumer.accept(step);
}
/**
 * Test a {@link Decoder#decodeToMono decode} scenario where the input stream fails.
 * The decoder receives the first element of {@code input} followed by an
 * {@link InputException}; the resulting {@link Mono} is expected to terminate
 * with that error.
 *
 * @param input the input to be provided to the decoder
 * @param outputType the desired output type
 * @param mimeType the mime type to use for decoding. May be {@code null}.
 * @param hints the hints used for decoding. May be {@code null}.
 * @see InputException
 */
protected void testDecodeToMonoError(Publisher<DataBuffer> input, ResolvableType outputType,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {

	// First element only, then an error signal.
	Flux<DataBuffer> failing = Mono.from(input).concatWith(Flux.error(new InputException()));
	Mono<?> decoded = this.decoder.decodeToMono(failing, outputType, mimeType, hints);
	StepVerifier.create(decoded)
			.expectError(InputException.class)
			.verify();
}
/**
 * Test a {@link Decoder#decodeToMono decode} scenario where the subscription
 * to the output is canceled immediately, before any element is requested.
 *
 * @param input the input to be provided to the decoder
 * @param outputType the desired output type
 * @param mimeType the mime type to use for decoding. May be {@code null}.
 * @param hints the hints used for decoding. May be {@code null}.
 */
protected void testDecodeToMonoCancel(Publisher<DataBuffer> input, ResolvableType outputType,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {

	Mono<?> decoded = this.decoder.decodeToMono(input, outputType, mimeType, hints);
	StepVerifier.create(decoded)
			.thenCancel()
			.verify();
}
/**
 * Test a {@link Decoder#decodeToMono decode} scenario with an empty input
 * stream; the decoded {@link Mono} is expected to complete empty as well.
 *
 * @param outputType the desired output type
 * @param mimeType the mime type to use for decoding. May be {@code null}.
 * @param hints the hints used for decoding. May be {@code null}.
 */
protected void testDecodeToMonoEmpty(ResolvableType outputType, @Nullable MimeType mimeType,
		@Nullable Map<String, Object> hints) {

	Mono<?> decoded = this.decoder.decodeToMono(Flux.empty(), outputType, mimeType, hints);
	StepVerifier.create(decoded).verifyComplete();
}
/**
 * Creates a deferred {@link DataBuffer} containing the given bytes. The buffer
 * is allocated and written lazily, at subscription time, so each subscriber
 * gets a freshly allocated buffer.
 *
 * @param bytes the bytes that are to be stored in the buffer
 * @return the deferred buffer
 */
protected Mono<DataBuffer> dataBuffer(byte[] bytes) {
	return Mono.fromCallable(() -> {
		DataBuffer buffer = this.bufferFactory.allocateBuffer(bytes.length);
		buffer.write(bytes);
		return buffer;
	});
}
/**
* Exception used in {@link #testDecodeError} and {@link #testDecodeToMonoError}
*/
@SuppressWarnings("serial")
public static | AbstractDecoderTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.