language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | alibaba__nacos | test/core-test/src/test/java/com/alibaba/nacos/test/client/ConfigIntegrationV2MutualAuthCoreITCase.java | {
"start": 2551,
"end": 5248
} | class ____ {
public static AtomicInteger increment = new AtomicInteger(100);
@LocalServerPort
private int port;
@BeforeAll
static void beforeClass() throws IOException {
ConfigCleanUtils.changeToNewTestNacosHome(ConfigIntegrationV2MutualAuthCoreITCase.class.getSimpleName());
}
@AfterEach
void cleanClientCache() throws Exception {
ConfigCleanUtils.cleanClientCache();
}
@Test
@Disabled("TODO, fix the cert expired problem")
void testMutualAuth() throws Exception {
RpcClientTlsConfig tlsConfig = new RpcClientTlsConfig();
tlsConfig.setEnableTls(true);
tlsConfig.setMutualAuthEnable(true);
tlsConfig.setCertChainFile("test-client-cert.pem");
tlsConfig.setCertPrivateKey("test-client-key.pem");
tlsConfig.setTrustCollectionCertFile("test-ca-cert.pem");
RpcClient client = RpcClientFactory.createClient("testMutualAuth", ConnectionType.GRPC,
Collections.singletonMap("labelKey", "labelValue"), tlsConfig);
RpcClient.ServerInfo serverInfo = new RpcClient.ServerInfo();
serverInfo.setServerIp("127.0.0.1");
serverInfo.setServerPort(port);
Connection connection = client.connectToServer(serverInfo);
ConfigPublishRequest configPublishRequest = new ConfigPublishRequest();
String content = UUID.randomUUID().toString();
configPublishRequest.setContent(content);
configPublishRequest.setGroup("test-group" + increment.getAndIncrement());
configPublishRequest.setDataId("test-data" + increment.getAndIncrement());
configPublishRequest.setRequestId(content);
Response response = connection.request(configPublishRequest, TimeUnit.SECONDS.toMillis(5));
assertTrue(response.isSuccess());
connection.close();
}
@Test
void testServerMutualAuthOnly() throws Exception {
RpcClientTlsConfig tlsConfig = new RpcClientTlsConfig();
tlsConfig.setEnableTls(true);
tlsConfig.setTrustCollectionCertFile("test-ca-cert.pem");
RpcClient client = RpcClientFactory.createClient("testServerMutualAuthNoly", ConnectionType.GRPC,
Collections.singletonMap("labelKey", "labelValue"), tlsConfig);
RpcClient.ServerInfo serverInfo = new RpcClient.ServerInfo();
serverInfo.setServerIp("127.0.0.1");
serverInfo.setServerPort(port);
Connection connection = client.connectToServer(serverInfo);
assertNull(connection);
TimeUnit.SECONDS.sleep(3);
}
}
| ConfigIntegrationV2MutualAuthCoreITCase |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/util/Instantiator.java | {
"start": 4089,
"end": 4462
} | class ____ to instantiate
* @return a list of instantiated instances
* @since 2.4.8
*/
public List<T> instantiate(@Nullable ClassLoader classLoader, Collection<String> names) {
Assert.notNull(names, "'names' must not be null");
return instantiate(names.stream().map((name) -> TypeSupplier.forName(classLoader, name)));
}
/**
* Instantiate the given set of | names |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedThrottlingExceptionRoutePolicyTest.java | {
"start": 6475,
"end": 6775
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) throws Exception {
// need to sleep a little to cause last failure to be slow
Thread.sleep(50);
throw new IOException("boom!");
}
}
static | BoomProcess |
java | google__guava | guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java | {
"start": 24133,
"end": 24472
} | class ____ extends PassObject {
@Override
public void twoNullableArgs(@Nullable String s, @Nullable Integer i) {
checkNotNull(s); // ok to throw NPE?
}
}
public void testPassTwoNullableArgsFirstThrowsNpe() {
shouldPass(new PassTwoNullableArgsFirstThrowsNpe());
}
private static | PassTwoNullableArgsFirstThrowsNpe |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/XmlWithExplicitConvertAnnotationsTest.java | {
"start": 1489,
"end": 1895
} | class ____ {
// NOTE : essentially the same exact test as ExplicitDateConvertersTest, but here we will mix annotations and xml
static boolean convertToDatabaseColumnCalled = false;
static boolean convertToEntityAttributeCalled = false;
private void resetFlags() {
convertToDatabaseColumnCalled = false;
convertToEntityAttributeCalled = false;
}
public static | XmlWithExplicitConvertAnnotationsTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/mixed/FlowableConcatMapSinglePublisher.java | {
"start": 1303,
"end": 2047
} | class ____<T, R> extends Flowable<R> {
final Publisher<T> source;
final Function<? super T, ? extends SingleSource<? extends R>> mapper;
final ErrorMode errorMode;
final int prefetch;
public FlowableConcatMapSinglePublisher(Publisher<T> source,
Function<? super T, ? extends SingleSource<? extends R>> mapper,
ErrorMode errorMode, int prefetch) {
this.source = source;
this.mapper = mapper;
this.errorMode = errorMode;
this.prefetch = prefetch;
}
@Override
protected void subscribeActual(Subscriber<? super R> s) {
source.subscribe(new ConcatMapSingleSubscriber<>(s, mapper, prefetch, errorMode));
}
}
| FlowableConcatMapSinglePublisher |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformTests.java | {
"start": 1462,
"end": 4156
} | class ____ extends ESTestCase {
public void testSetTransformUpgradeMode() {
var threadPool = new TestThreadPool("testSetTransformUpgradeMode");
Client client = mock();
when(client.threadPool()).thenReturn(threadPool);
doAnswer(invocationOnMock -> {
ActionListener<AcknowledgedResponse> listener = invocationOnMock.getArgument(2);
listener.onResponse(AcknowledgedResponse.TRUE);
return null;
}).when(client).execute(same(SetTransformUpgradeModeAction.INSTANCE), any(), any());
try (var transformPlugin = new Transform(Settings.EMPTY)) {
SetOnce<Map<String, Object>> response = new SetOnce<>();
transformPlugin.prepareForIndicesMigration(emptyProject(), client, ActionTestUtils.assertNoFailureListener(response::set));
assertThat(response.get(), equalTo(Collections.singletonMap("already_in_upgrade_mode", false)));
verify(client).execute(
same(SetTransformUpgradeModeAction.INSTANCE),
eq(new SetUpgradeModeActionRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, true)),
any()
);
transformPlugin.indicesMigrationComplete(
response.get(),
client,
ActionTestUtils.assertNoFailureListener(ESTestCase::assertTrue)
);
verify(client).execute(
same(SetTransformUpgradeModeAction.INSTANCE),
eq(new SetUpgradeModeActionRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, false)),
any()
);
} finally {
terminate(threadPool);
}
}
public void testIgnoreSetTransformUpgradeMode() {
final var project = ProjectMetadata.builder(randomProjectIdOrDefault())
.putCustom(TransformMetadata.TYPE, new TransformMetadata.Builder().upgradeMode(true).build())
.build();
Client client = mock();
try (var transformPlugin = new Transform(Settings.EMPTY)) {
SetOnce<Map<String, Object>> response = new SetOnce<>();
transformPlugin.prepareForIndicesMigration(project, client, ActionTestUtils.assertNoFailureListener(response::set));
assertThat(response.get(), equalTo(Collections.singletonMap("already_in_upgrade_mode", true)));
verifyNoMoreInteractions(client);
transformPlugin.indicesMigrationComplete(
response.get(),
client,
ActionTestUtils.assertNoFailureListener(ESTestCase::assertTrue)
);
verifyNoMoreInteractions(client);
}
}
}
| TransformTests |
java | alibaba__nacos | naming/src/test/java/com/alibaba/nacos/naming/BaseTest.java | {
"start": 1428,
"end": 3499
} | class ____ {
protected static final String TEST_CLUSTER_NAME = "test-cluster";
protected static final String TEST_SERVICE_NAME = "DEFAULT_GROUP@@test-service";
protected static final String TEST_GROUP_NAME = "test-group-name";
protected static final String TEST_NAMESPACE = "test-namespace";
protected static final String TEST_IP = "1.1.1.1";
protected static final String TEST_METADATA = "{\"label\":\"123\"}";
protected static final String TEST_INSTANCE_INFO_LIST = "[{\"instanceId\":\"123\",\"ip\":\"1.1.1.1\","
+ "\"port\":9870,\"weight\":2.0,\"healthy\":true,\"enabled\":true,\"ephemeral\":true"
+ ",\"clusterName\":\"clusterName\",\"serviceName\":\"serviceName\",\"metadata\":{}}]";
@Spy
protected ConfigurableApplicationContext context;
@Mock
protected DistroMapper distroMapper;
@Spy
protected SwitchDomain switchDomain;
@Mock
protected UdpPushService pushService;
@Spy
protected MockEnvironment environment;
@BeforeEach
public void before() {
EnvUtil.setEnvironment(environment);
ApplicationUtils.injectContext(context);
}
protected MockHttpServletRequestBuilder convert(Object simpleOb, MockHttpServletRequestBuilder builder) throws IllegalAccessException {
Field[] declaredFields = simpleOb.getClass().getDeclaredFields();
for (Field declaredField : declaredFields) {
declaredField.setAccessible(true);
builder.param(declaredField.getName(), String.valueOf(declaredField.get(simpleOb)));
}
return builder;
}
protected void mockInjectPushServer() {
doReturn(pushService).when(context).getBean(UdpPushService.class);
}
protected void mockInjectSwitchDomain() {
doReturn(switchDomain).when(context).getBean(SwitchDomain.class);
}
protected void mockInjectDistroMapper() {
doReturn(distroMapper).when(context).getBean(DistroMapper.class);
}
}
| BaseTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java | {
"start": 1707,
"end": 8980
} | class ____ implements DataExtractor {
private static final Logger LOGGER = LogManager.getLogger(AbstractAggregationDataExtractor.class);
protected final Client client;
protected final AggregationDataExtractorContext context;
private final DatafeedTimingStatsReporter timingStatsReporter;
private boolean hasNext;
private volatile boolean isCancelled;
private AggregationToJsonProcessor aggregationToJsonProcessor;
private final ByteArrayOutputStream outputStream;
AbstractAggregationDataExtractor(
Client client,
AggregationDataExtractorContext dataExtractorContext,
DatafeedTimingStatsReporter timingStatsReporter
) {
this.client = Objects.requireNonNull(client);
this.context = Objects.requireNonNull(dataExtractorContext);
this.timingStatsReporter = Objects.requireNonNull(timingStatsReporter);
this.hasNext = true;
this.isCancelled = false;
this.outputStream = new ByteArrayOutputStream();
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public boolean isCancelled() {
return isCancelled;
}
@Override
public void cancel() {
LOGGER.debug("[{}] Data extractor received cancel request", context.jobId);
isCancelled = true;
hasNext = false;
}
@Override
public void destroy() {
cancel();
}
@Override
public long getEndTime() {
return context.queryContext.end;
}
@Override
public Result next() throws IOException {
if (hasNext() == false) {
throw new NoSuchElementException();
}
SearchInterval searchInterval = new SearchInterval(context.queryContext.start, context.queryContext.end);
if (aggregationToJsonProcessor == null) {
InternalAggregations aggs = search();
if (aggs == null) {
hasNext = false;
return new Result(searchInterval, Optional.empty());
}
initAggregationProcessor(aggs);
}
outputStream.reset();
// We can cancel immediately as we process whole date_histogram buckets at a time
aggregationToJsonProcessor.writeAllDocsCancellable(_timestamp -> isCancelled, outputStream);
// We process the whole search. So, if we are chunking or not, we have nothing more to process given the current query
hasNext = false;
return new Result(
searchInterval,
aggregationToJsonProcessor.getKeyValueCount() > 0
? Optional.of(new ByteArrayInputStream(outputStream.toByteArray()))
: Optional.empty()
);
}
private InternalAggregations search() {
LOGGER.debug("[{}] Executing aggregated search", context.jobId);
ActionRequestBuilder<SearchRequest, SearchResponse> searchRequest = buildSearchRequest(buildBaseSearchSource());
assert searchRequest.request().allowPartialSearchResults() == false;
SearchResponse searchResponse = executeSearchRequest(client, context.queryContext, searchRequest);
try {
LOGGER.debug("[{}] Search response was obtained", context.jobId);
timingStatsReporter.reportSearchDuration(searchResponse.getTook());
return validateAggs(searchResponse.getAggregations());
} finally {
searchResponse.decRef();
}
}
private void initAggregationProcessor(InternalAggregations aggs) throws IOException {
aggregationToJsonProcessor = new AggregationToJsonProcessor(
context.queryContext.timeField,
context.fields,
context.includeDocCount,
context.queryContext.start,
null
);
aggregationToJsonProcessor.process(aggs);
}
static SearchResponse executeSearchRequest(
Client client,
DataExtractorQueryContext context,
ActionRequestBuilder<SearchRequest, SearchResponse> searchRequestBuilder
) {
SearchResponse searchResponse = ClientHelper.executeWithHeaders(
context.headers,
ClientHelper.ML_ORIGIN,
client,
searchRequestBuilder::get
);
boolean success = false;
try {
DataExtractorUtils.checkForSkippedClusters(searchResponse);
success = true;
} finally {
if (success == false) {
searchResponse.decRef();
}
}
return searchResponse;
}
private SearchSourceBuilder buildBaseSearchSource() {
// For derivative aggregations the first bucket will always be null
// so query one extra histogram bucket back and hope there is data
// in that bucket
long histogramSearchStartTime = Math.max(
0,
context.queryContext.start - DatafeedConfigUtils.getHistogramIntervalMillis(context.aggs)
);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0)
.query(
DataExtractorUtils.wrapInTimeRangeQuery(
context.queryContext.query,
context.queryContext.timeField,
histogramSearchStartTime,
context.queryContext.end
)
);
if (context.queryContext.runtimeMappings.isEmpty() == false) {
searchSourceBuilder.runtimeMappings(context.queryContext.runtimeMappings);
}
context.aggs.getAggregatorFactories().forEach(searchSourceBuilder::aggregation);
context.aggs.getPipelineAggregatorFactories().forEach(searchSourceBuilder::aggregation);
return searchSourceBuilder;
}
protected abstract ActionRequestBuilder<SearchRequest, SearchResponse> buildSearchRequest(SearchSourceBuilder searchRequestBuilder);
private static InternalAggregations validateAggs(@Nullable InternalAggregations aggs) {
if (aggs == null) {
return null;
}
List<InternalAggregation> aggsAsList = aggs.asList();
if (aggsAsList.isEmpty()) {
return null;
}
if (aggsAsList.size() > 1) {
throw new IllegalArgumentException(
"Multiple top level aggregations not supported; found: " + aggsAsList.stream().map(Aggregation::getName).toList()
);
}
return aggs;
}
public AggregationDataExtractorContext getContext() {
return context;
}
@Override
public DataSummary getSummary() {
ActionRequestBuilder<SearchRequest, SearchResponse> searchRequestBuilder = buildSearchRequest(
DataExtractorUtils.getSearchSourceBuilderForSummary(context.queryContext)
);
SearchResponse searchResponse = executeSearchRequest(client, context.queryContext, searchRequestBuilder);
try {
LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId);
timingStatsReporter.reportSearchDuration(searchResponse.getTook());
return DataExtractorUtils.getDataSummary(searchResponse);
} finally {
searchResponse.decRef();
}
}
}
| AbstractAggregationDataExtractor |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/block/OracleBlockTest9.java | {
"start": 977,
"end": 3162
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "DECLARE" +
" my_emp_id NUMBER(6);" +
" my_job_id VARCHAR2(10);" +
" my_sal NUMBER(8,2);" +
" CURSOR c1 IS" +
" SELECT employee_id, job_id, salary" +
" FROM employees FOR UPDATE;" +
"BEGIN" +
" OPEN c1;" +
" LOOP" +
" FETCH c1 INTO my_emp_id, my_job_id, my_sal;" +
" IF my_job_id = 'SA_REP' THEN" +
" UPDATE employees" +
" SET salary = salary * 1.02" +
" WHERE CURRENT OF c1;" +
" END IF;" +
" EXIT WHEN c1%NOTFOUND;" +
" END LOOP;" +
"END;";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
print(statementList);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
for (SQLStatement statement : statementList) {
statement.accept(visitor);
}
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("employees")));
assertEquals(3, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "salary")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "employee_id")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "job_id")));
}
}
| OracleBlockTest9 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/fetch/depth/SysModule.java | {
"start": 539,
"end": 1102
} | class ____ {
@Id
private Integer id;
@Column(name = "name")
private String name;
// @OneToMany( cascade = CascadeType.PERSIST, fetch = FetchType.EAGER )
// @JoinColumn( name = "target_mod_fk" )
@ManyToMany( targetEntity = SysModule.class, cascade = { CascadeType.PERSIST }, fetch = FetchType.EAGER )
@JoinTable(
name = "sys_group_mod",
joinColumns = @JoinColumn(name = "src_fk", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "target_fk", referencedColumnName = "id")
)
private Set<SysModule> targetModules;
}
| SysModule |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java | {
"start": 1085,
"end": 3643
} | class ____ extends ESTestCase {
public static final Logger logger = LogManager.getLogger(ChunkedLoggingStreamTests.class);
@TestLogging(reason = "testing logging", value = "org.elasticsearch.common.logging.ChunkedLoggingStreamTests:DEBUG")
public void testLogMessageChunking() {
// bugs are most likely near chunk boundaries, so test sizes that are within +/- 3 bytes of 0, 1, and 2 chunks:
IntStream.rangeClosed(-3, 3)
.flatMap(i -> IntStream.iterate(i, j -> j + ChunkedLoggingStream.CHUNK_SIZE).limit(3))
.filter(i -> i >= 0)
.sorted()
.forEach(ChunkedLoggingStreamTests::runChunkingTest);
}
private static void runChunkingTest(int size) {
final var bytes = new byte[size];
Arrays.fill(bytes, (byte) '.');
final var expectedBody = new String(bytes, StandardCharsets.ISO_8859_1);
final var prefix = randomAlphaOfLength(10);
final var level = randomFrom(Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR);
final var referenceDocs = randomFrom(ReferenceDocs.values());
assertEquals(expectedBody, ChunkedLoggingStreamTestUtils.getLoggedBody(logger, level, prefix, referenceDocs, () -> {
try (var stream = new ChunkedLoggingStream(logger, level, prefix, referenceDocs)) {
writeRandomly(stream, bytes);
}
}));
}
@TestLogging(reason = "testing logging", value = "org.elasticsearch.common.logging.ChunkedLoggingStreamTests:DEBUG")
public void testEncodingRoundTrip() {
final var bytes = randomByteArrayOfLength(between(0, 10000));
final var level = randomFrom(Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR);
final var referenceDocs = randomFrom(ReferenceDocs.values());
assertThat(ChunkedLoggingStreamTestUtils.getDecodedLoggedBody(logger, level, "prefix", referenceDocs, () -> {
try (var stream = ChunkedLoggingStream.create(logger, level, "prefix", referenceDocs)) {
writeRandomly(stream, bytes);
}
}), equalBytes(new BytesArray(bytes)));
}
private static void writeRandomly(OutputStream stream, byte[] bytes) throws IOException {
for (var pos = 0; pos < bytes.length;) {
if (randomBoolean()) {
stream.write(bytes[pos++]);
} else {
var len = between(1, bytes.length - pos);
stream.write(bytes, pos, len);
pos += len;
}
}
}
}
| ChunkedLoggingStreamTests |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/app/BeanConstructorDefinition.java | {
"start": 1201,
"end": 1770
} | class ____ {
@XmlAttribute
private Integer index;
@XmlAttribute(required = true)
private String value;
public Integer getIndex() {
return index;
}
/**
* Constructor argument index. The first argument must use index 0.
*/
public void setIndex(Integer index) {
this.index = index;
}
public String getValue() {
return value;
}
/**
* The argument value for the constructor.
*/
public void setValue(String value) {
this.value = value;
}
}
| BeanConstructorDefinition |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/TypeReference.java | {
"start": 9305,
"end": 9401
} | interface ____ the 'implements' clause of a
* class.
*
* @param itfIndex the index of an | of |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/hashtable/BinaryHashTableTest.java | {
"start": 48535,
"end": 49055
} | class ____ implements Projection<RowData, BinaryRowData> {
BinaryRowData innerRow = new BinaryRowData(1);
BinaryRowWriter writer = new BinaryRowWriter(innerRow);
@Override
public BinaryRowData apply(RowData row) {
writer.reset();
if (row.isNullAt(0)) {
writer.setNullAt(0);
} else {
writer.writeInt(0, row.getInt(0));
}
writer.complete();
return innerRow;
}
}
}
| MyProjection |
java | redisson__redisson | redisson/src/main/java/org/redisson/connection/ConnectionManager.java | {
"start": 1128,
"end": 3422
} | interface ____ {
void connect();
PublishSubscribeService getSubscribeService();
RedisURI getLastClusterNode();
int calcSlot(String key);
int calcSlot(ByteBuf key);
int calcSlot(byte[] key);
Collection<MasterSlaveEntry> getEntrySet();
MasterSlaveEntry getEntry(String name);
MasterSlaveEntry getEntry(int slot);
MasterSlaveEntry getWriteEntry(int slot);
MasterSlaveEntry getReadEntry(int slot);
MasterSlaveEntry getEntry(InetSocketAddress address);
MasterSlaveEntry getEntry(RedisURI addr);
RedisClient createClient(NodeType type, InetSocketAddress address, RedisURI uri, String sslHostname);
RedisClient createClient(NodeType type, RedisURI address, String sslHostname);
MasterSlaveEntry getEntry(RedisClient redisClient);
void shutdown();
void shutdown(long quietPeriod, long timeout, TimeUnit unit);
ServiceManager getServiceManager();
CommandAsyncExecutor createCommandExecutor(RedissonObjectBuilder objectBuilder,
RedissonObjectBuilder.ReferenceType referenceType);
static ConnectionManager create(Config configCopy) {
BaseConfig<?> cfg = ConfigSupport.getConfig(configCopy);
ConnectionManager cm = null;
if (cfg instanceof MasterSlaveServersConfig) {
cm = new MasterSlaveConnectionManager((MasterSlaveServersConfig) cfg, configCopy);
} else if (cfg instanceof SingleServerConfig) {
cm = new SingleConnectionManager((SingleServerConfig) cfg, configCopy);
} else if (cfg instanceof SentinelServersConfig) {
cm = new SentinelConnectionManager((SentinelServersConfig) cfg, configCopy);
} else if (cfg instanceof ClusterServersConfig) {
cm = new ClusterConnectionManager((ClusterServersConfig) cfg, configCopy);
} else if (cfg instanceof ReplicatedServersConfig) {
cm = new ReplicatedConnectionManager((ReplicatedServersConfig) cfg, configCopy);
}
if (cm == null) {
throw new IllegalArgumentException("server(s) address(es) not defined!");
}
if (!configCopy.isLazyInitialization()) {
cm.connect();
}
return cm;
}
}
| ConnectionManager |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/ClassificationType.java | {
"start": 805,
"end": 3509
} | class ____ implements EnhancedUserType<Classification>, ValueExtractor<Classification> {
@Override
public int getSqlType() {
return Types.TINYINT;
}
@Override
public Class<Classification> returnedClass() {
return Classification.class;
}
@Override
public boolean equals(Classification x, Classification y) throws HibernateException {
if ( x == null && y == null ) {
return false;
}
else if ( x != null ) {
return x.equals( y );
}
else {
return y.equals( x );
}
}
@Override
public int hashCode(Classification x) throws HibernateException {
return x.hashCode();
}
@Override
public Classification nullSafeGet(ResultSet rs, int position, WrapperOptions options)
throws SQLException {
final int intValue = rs.getInt( position );
if ( rs.wasNull() ) {
return null;
}
return Classification.valueOf( intValue );
}
@Override
public void nullSafeSet(PreparedStatement st, Classification value, int index, WrapperOptions options)
throws SQLException {
if ( value == null ) {
st.setNull( index, Types.INTEGER );
}
else {
st.setInt( index, value.ordinal() );
}
}
@Override
public Classification deepCopy(Classification value) throws HibernateException {
return value;
}
@Override
public boolean isMutable() {
return false;
}
@Override
public Serializable disassemble(Classification value) throws HibernateException {
return ( Classification ) value;
}
@Override
public Classification assemble(Serializable cached, Object owner) throws HibernateException {
return (Classification) cached;
}
@Override
public Classification replace(Classification original, Classification target, Object owner) throws HibernateException {
return original;
}
@Override
public String toSqlLiteral(Classification value) {
return Integer.toString( value.ordinal() );
}
@Override
public String toString(Classification value) throws HibernateException {
return value.name();
}
@Override
public Classification fromStringValue(CharSequence sequence) {
return Classification.valueOf( sequence.toString() );
}
@Override
public Classification extract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return Classification.valueOf( rs.getInt( paramIndex ) );
}
@Override
public Classification extract(CallableStatement statement, int paramIndex, WrapperOptions options) throws SQLException {
return Classification.valueOf( statement.getInt( paramIndex ) );
}
@Override
public Classification extract(CallableStatement statement, String paramName, WrapperOptions options) throws SQLException {
return Classification.valueOf( statement.getInt( paramName ) );
}
}
| ClassificationType |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/FailableTest.java | {
"start": 94159,
"end": 94696
} | interface ____ properly defined to throw any exception using the top level generic types
* Object and Throwable.
*/
@Test
void testThrows_FailableLongBinaryOperator_Throwable() {
assertThrows(IOException.class, () -> new FailableLongBinaryOperator<Throwable>() {
@Override
public long applyAsLong(final long left, final long right) throws Throwable {
throw new IOException("test");
}
}.applyAsLong(0, 0));
}
/**
* Tests that our failable | is |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/plugin/DirectoryBasedPluginFinder.java | {
"start": 2003,
"end": 3522
} | class ____ implements PluginFinder {
/** Pattern to match jar files in a directory. */
private static final String JAR_MATCHER_PATTERN = "glob:**.jar";
/** Root directory to the plugin folders. */
private final Path pluginsRootDir;
/** Matcher for jar files in the filesystem of the root folder. */
private final PathMatcher jarFileMatcher;
public DirectoryBasedPluginFinder(Path pluginsRootDir) {
this.pluginsRootDir = pluginsRootDir;
this.jarFileMatcher = pluginsRootDir.getFileSystem().getPathMatcher(JAR_MATCHER_PATTERN);
}
@Override
public Collection<PluginDescriptor> findPlugins() throws IOException {
if (!Files.isDirectory(pluginsRootDir)) {
throw new IOException(
"Plugins root directory [" + pluginsRootDir + "] does not exist!");
}
try (Stream<Path> stream = Files.list(pluginsRootDir)) {
return stream.filter((Path path) -> Files.isDirectory(path))
.map(
FunctionUtils.uncheckedFunction(
this::createPluginDescriptorForSubDirectory))
.collect(Collectors.toList());
}
}
private PluginDescriptor createPluginDescriptorForSubDirectory(Path subDirectory)
throws IOException {
URL[] urls = createJarURLsFromDirectory(subDirectory);
Arrays.sort(urls, Comparator.comparing(URL::toString));
// TODO: This | DirectoryBasedPluginFinder |
java | google__guava | android/guava/src/com/google/common/graph/Graphs.java | {
"start": 13637,
"end": 15240
} | class ____<N> extends ForwardingGraph<N> {
private final Graph<N> graph;
TransposedGraph(Graph<N> graph) {
this.graph = graph;
}
@Override
Graph<N> delegate() {
return graph;
}
@Override
public Set<N> predecessors(N node) {
return delegate().successors(node); // transpose
}
@Override
public Set<N> successors(N node) {
return delegate().predecessors(node); // transpose
}
@Override
public Set<EndpointPair<N>> incidentEdges(N node) {
return new IncidentEdgeSet<N>(this, node, IncidentEdgeSet.EdgeType.BOTH) {
@Override
public Iterator<EndpointPair<N>> iterator() {
return Iterators.transform(
delegate().incidentEdges(node).iterator(),
edge -> EndpointPair.of(delegate(), edge.nodeV(), edge.nodeU()));
}
};
}
@Override
public int inDegree(N node) {
return delegate().outDegree(node); // transpose
}
@Override
public int outDegree(N node) {
return delegate().inDegree(node); // transpose
}
@Override
public boolean hasEdgeConnecting(N nodeU, N nodeV) {
return delegate().hasEdgeConnecting(nodeV, nodeU); // transpose
}
@Override
public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
return delegate().hasEdgeConnecting(transpose(endpoints));
}
}
// NOTE: this should work as long as the delegate graph's implementation of edges() (like that of
// AbstractValueGraph) derives its behavior from calling successors().
private static final | TransposedGraph |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportedConfigurationClassEnhancementTests.java | {
"start": 3795,
"end": 3874
} | class ____ {
@Autowired TestBean testBean;
}
}
| ConfigThatImportsNonConfigClass |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/IdClassJpaAnnotation.java | {
"start": 460,
"end": 1338
} | class ____ implements IdClass {
private java.lang.Class<?> value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public IdClassJpaAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public IdClassJpaAnnotation(IdClass annotation, ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public IdClassJpaAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (Class<?>) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return IdClass.class;
}
@Override
public java.lang.Class<?> value() {
return value;
}
public void value(java.lang.Class<?> value) {
this.value = value;
}
}
| IdClassJpaAnnotation |
java | apache__camel | components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConfiguration.java | {
"start": 2162,
"end": 6443
} | class ____ for structured output using response format")
private String outputClass;
@UriParam
@Metadata(description = "JSON schema for structured output validation", supportFileReference = true, largeInput = true,
inputLanguage = "json")
private String jsonSchema;
@UriParam(defaultValue = "false")
@Metadata(description = "Enable conversation memory per Exchange")
private boolean conversationMemory = false;
@UriParam(defaultValue = "CamelOpenAIConversationHistory")
@Metadata(description = "Exchange property name for storing conversation history")
private String conversationHistoryProperty = "CamelOpenAIConversationHistory";
@UriParam
@Metadata(description = "Default user message text to use when no prompt is provided", largeInput = true)
private String userMessage;
@UriParam
@Metadata(description = "System message to prepend. When set and conversationMemory is enabled, the conversation history is reset.",
largeInput = true)
private String systemMessage;
@UriParam
@Metadata(description = "Developer message to prepend before user messages", largeInput = true)
private String developerMessage;
@UriParam(defaultValue = "false")
@Metadata(description = "Store the full response in the exchange property 'CamelOpenAIResponse' in non-streaming mode")
private boolean storeFullResponse = false;
public String getApiKey() {
return apiKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
public String getBaseUrl() {
return baseUrl;
}
public void setBaseUrl(String baseUrl) {
this.baseUrl = baseUrl;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public Double getTemperature() {
return temperature;
}
public void setTemperature(Double temperature) {
this.temperature = temperature;
}
public Double getTopP() {
return topP;
}
public void setTopP(Double topP) {
this.topP = topP;
}
public Integer getMaxTokens() {
return maxTokens;
}
public void setMaxTokens(Integer maxTokens) {
this.maxTokens = maxTokens;
}
public boolean isStreaming() {
return streaming;
}
public void setStreaming(boolean streaming) {
this.streaming = streaming;
}
public String getOutputClass() {
return outputClass;
}
public void setOutputClass(String outputClass) {
this.outputClass = outputClass;
}
public String getJsonSchema() {
return jsonSchema;
}
public void setJsonSchema(String jsonSchema) {
this.jsonSchema = jsonSchema;
}
public boolean isConversationMemory() {
return conversationMemory;
}
public void setConversationMemory(boolean conversationMemory) {
this.conversationMemory = conversationMemory;
}
public String getConversationHistoryProperty() {
return conversationHistoryProperty;
}
public void setConversationHistoryProperty(String conversationHistoryProperty) {
this.conversationHistoryProperty = conversationHistoryProperty;
}
public String getUserMessage() {
return userMessage;
}
public void setUserMessage(String userMessage) {
this.userMessage = userMessage;
}
public String getSystemMessage() {
return systemMessage;
}
public void setSystemMessage(String systemMessage) {
this.systemMessage = systemMessage;
}
public String getDeveloperMessage() {
return developerMessage;
}
public void setDeveloperMessage(String developerMessage) {
this.developerMessage = developerMessage;
}
public boolean isStoreFullResponse() {
return storeFullResponse;
}
public void setStoreFullResponse(boolean storeFullResponse) {
this.storeFullResponse = storeFullResponse;
}
public OpenAIConfiguration copy() {
try {
return (OpenAIConfiguration) clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
}
}
| name |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java | {
"start": 4423,
"end": 4564
} | class ____ {
@Metric("My metric description")
public int getMyMetric() {
return 42;
}
}</pre></td><td>
<pre>
| MyStat |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/runtime/operators/coordination/OperatorEventSendingCheckpointITCase.java | {
"start": 15314,
"end": 17647
} | class ____ implements RpcService {
private final RpcService rpcService;
public InterceptingRpcService(RpcService rpcService) {
this.rpcService = rpcService;
}
@Override
public String getAddress() {
return rpcService.getAddress();
}
@Override
public int getPort() {
return rpcService.getPort();
}
@Override
public <C extends RpcGateway> C getSelfGateway(
Class<C> selfGatewayType, RpcServer rpcServer) {
return rpcService.getSelfGateway(selfGatewayType, rpcServer);
}
@Override
public <C extends RpcGateway> CompletableFuture<C> connect(String address, Class<C> clazz) {
final CompletableFuture<C> future = rpcService.connect(address, clazz);
return clazz == TaskExecutorGateway.class ? decorateTmGateway(future) : future;
}
@Override
public <F extends Serializable, C extends FencedRpcGateway<F>> CompletableFuture<C> connect(
String address, F fencingToken, Class<C> clazz) {
return rpcService.connect(address, fencingToken, clazz);
}
@Override
public <C extends RpcEndpoint & RpcGateway> RpcServer startServer(
C rpcEndpoint, Map<String, String> loggingContext) {
return rpcService.startServer(rpcEndpoint, Collections.emptyMap());
}
@Override
public void stopServer(RpcServer selfGateway) {
rpcService.stopServer(selfGateway);
}
@Override
public CompletableFuture<Void> closeAsync() {
return rpcService.closeAsync();
}
@Override
public ScheduledExecutor getScheduledExecutor() {
return rpcService.getScheduledExecutor();
}
@SuppressWarnings("unchecked")
private <C extends RpcGateway> CompletableFuture<C> decorateTmGateway(
CompletableFuture<C> future) {
final CompletableFuture<TaskExecutorGateway> wrapped =
future.thenApply(
(gateway) -> new OpEventRpcInterceptor((TaskExecutorGateway) gateway));
return (CompletableFuture<C>) wrapped;
}
}
private static | InterceptingRpcService |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 78434,
"end": 85314
} | class ____ {
private final String description;
final Supplier<TypeInference> typeInferenceExtraction;
@Nullable List<StaticArgument> expectedStaticArguments;
LinkedHashMap<String, StateTypeStrategy> expectedStateStrategies;
Map<InputTypeStrategy, TypeStrategy> expectedOutputStrategies;
@Nullable String expectedErrorMessage;
private TestSpec(String description, Supplier<TypeInference> typeInferenceExtraction) {
this.description = description;
this.typeInferenceExtraction = typeInferenceExtraction;
this.expectedStateStrategies = new LinkedHashMap<>();
this.expectedOutputStrategies = new LinkedHashMap<>();
}
static TestSpec forScalarFunction(Class<? extends ScalarFunction> function) {
return forScalarFunction(null, function);
}
static TestSpec forScalarFunction(
String description, Class<? extends ScalarFunction> function) {
return new TestSpec(
description == null ? function.getSimpleName() : description,
() ->
TypeInferenceExtractor.forScalarFunction(
new DataTypeFactoryMock(), function));
}
static TestSpec forAsyncScalarFunction(Class<? extends AsyncScalarFunction> function) {
return forAsyncScalarFunction(null, function);
}
static TestSpec forAsyncScalarFunction(
String description, Class<? extends AsyncScalarFunction> function) {
return new TestSpec(
description == null ? function.getSimpleName() : description,
() ->
TypeInferenceExtractor.forAsyncScalarFunction(
new DataTypeFactoryMock(), function));
}
@SuppressWarnings("rawtypes")
static TestSpec forAsyncTableFunction(Class<? extends AsyncTableFunction<?>> function) {
return forAsyncTableFunction(null, function);
}
@SuppressWarnings("rawtypes")
static TestSpec forAsyncTableFunction(
String description, Class<? extends AsyncTableFunction<?>> function) {
return new TestSpec(
description == null ? function.getSimpleName() : description,
() ->
TypeInferenceExtractor.forAsyncTableFunction(
new DataTypeFactoryMock(), function));
}
static TestSpec forAggregateFunction(Class<? extends AggregateFunction<?, ?>> function) {
return new TestSpec(
function.getSimpleName(),
() ->
TypeInferenceExtractor.forAggregateFunction(
new DataTypeFactoryMock(), function));
}
static TestSpec forTableFunction(Class<? extends TableFunction<?>> function) {
return forTableFunction(null, function);
}
static TestSpec forTableFunction(
String description, Class<? extends TableFunction<?>> function) {
return new TestSpec(
description == null ? function.getSimpleName() : description,
() ->
TypeInferenceExtractor.forTableFunction(
new DataTypeFactoryMock(), function));
}
static TestSpec forTableAggregateFunction(
Class<? extends TableAggregateFunction<?, ?>> function) {
return new TestSpec(
function.getSimpleName(),
() ->
TypeInferenceExtractor.forTableAggregateFunction(
new DataTypeFactoryMock(), function));
}
static TestSpec forProcessTableFunction(Class<? extends ProcessTableFunction<?>> function) {
return new TestSpec(
function.getSimpleName(),
() ->
TypeInferenceExtractor.forProcessTableFunction(
new DataTypeFactoryMock(), function));
}
static TestSpec forProcedure(Class<? extends Procedure> procedure) {
return forProcedure(null, procedure);
}
static TestSpec forProcedure(
@Nullable String description, Class<? extends Procedure> procedure) {
return new TestSpec(
description == null ? procedure.getSimpleName() : description,
() ->
TypeInferenceExtractor.forProcedure(
new DataTypeFactoryMock(), procedure));
}
TestSpec expectEmptyStaticArguments() {
this.expectedStaticArguments = new ArrayList<>();
return this;
}
TestSpec expectStaticArgument(StaticArgument argument) {
if (this.expectedStaticArguments == null) {
this.expectedStaticArguments = new ArrayList<>();
}
this.expectedStaticArguments.add(argument);
return this;
}
TestSpec expectAccumulator(TypeStrategy typeStrategy) {
expectState("acc", typeStrategy);
return this;
}
TestSpec expectState(String name, TypeStrategy typeStrategy) {
return expectState(name, typeStrategy, null);
}
TestSpec expectState(String name, TypeStrategy typeStrategy, @Nullable Duration ttl) {
this.expectedStateStrategies.put(name, StateTypeStrategy.of(typeStrategy, ttl));
return this;
}
TestSpec expectOutputMapping(InputTypeStrategy validator, TypeStrategy outputStrategy) {
this.expectedOutputStrategies.put(validator, outputStrategy);
return this;
}
TestSpec expectOutput(TypeStrategy outputStrategy) {
this.expectedOutputStrategies.put(InputTypeStrategies.WILDCARD, outputStrategy);
return this;
}
TestSpec expectErrorMessage(String expectedErrorMessage) {
this.expectedErrorMessage = expectedErrorMessage;
return this;
}
@Override
public String toString() {
return description;
}
}
// --------------------------------------------------------------------------------------------
// Test classes for extraction
// --------------------------------------------------------------------------------------------
@FunctionHint(
input = {@DataTypeHint("INT"), @DataTypeHint("STRING")},
argumentNames = {"i", "s"},
output = @DataTypeHint("BOOLEAN"))
private static | TestSpec |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java | {
"start": 69280,
"end": 71820
} | class ____ extends ParserRuleContext {
public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); }
public List<TerminalNode> UNQUOTED_SOURCE() { return getTokens(EsqlBaseParser.UNQUOTED_SOURCE); }
public TerminalNode UNQUOTED_SOURCE(int i) {
return getToken(EsqlBaseParser.UNQUOTED_SOURCE, i);
}
public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(EsqlBaseParser.COMMA, i);
}
@SuppressWarnings("this-escape")
public MetadataContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_metadata; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetadata(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetadata(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitMetadata(this);
else return visitor.visitChildren(this);
}
}
public final MetadataContext metadata() throws RecognitionException {
MetadataContext _localctx = new MetadataContext(_ctx, getState());
enterRule(_localctx, 44, RULE_metadata);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(343);
match(METADATA);
setState(344);
match(UNQUOTED_SOURCE);
setState(349);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,13,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
setState(345);
match(COMMA);
setState(346);
match(UNQUOTED_SOURCE);
}
}
}
setState(351);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,13,_ctx);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | MetadataContext |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/xml/BaseShop.java | {
"start": 144,
"end": 410
} | class ____ {
private int id;
private int version;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
}
| BaseShop |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtilClient.java | {
"start": 1243,
"end": 2539
} | class ____ {
@Test
public void testToStringArray() {
List<String> strList = new ArrayList<String>(Arrays.asList("aaa", "bbb", "ccc"));
String[] strArr = JsonUtilClient.toStringArray(strList);
assertEquals(3, strArr.length, "Expected 3 items in the array");
assertEquals("aaa", strArr[0]);
assertEquals("bbb", strArr[1]);
assertEquals("ccc", strArr[2]);
}
@Test
public void testToBlockLocationArray() throws Exception {
BlockLocation blockLocation = new BlockLocation(
new String[] {"127.0.0.1:62870"},
new String[] {"127.0.0.1"},
null,
new String[] {"/default-rack/127.0.0.1:62870"},
null,
new StorageType[] {StorageType.DISK},
0, 1, false);
Map<String, Object> blockLocationsMap =
JsonUtil.toJsonMap(new BlockLocation[] {blockLocation});
String json = JsonUtil.toJsonString("BlockLocations", blockLocationsMap);
assertNotNull(json);
Map<?, ?> jsonMap = JsonSerialization.mapReader().readValue(json);
BlockLocation[] deserializedBlockLocations =
JsonUtilClient.toBlockLocationArray(jsonMap);
assertEquals(1, deserializedBlockLocations.length);
assertEquals(blockLocation.toString(),
deserializedBlockLocations[0].toString());
}
}
| TestJsonUtilClient |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/Pattern.java | {
"start": 2213,
"end": 2346
} | interface ____ {
/**
* Specifies the exchange pattern to be used for this method
*/
ExchangePattern value();
}
| Pattern |
java | google__guice | core/src/com/google/inject/Module.java | {
"start": 1314,
"end": 1678
} | interface ____ {
/**
* Contributes bindings and other configurations for this module to {@code binder}.
*
* <p><strong>Do not invoke this method directly</strong> to install submodules. Instead use
* {@link Binder#install(Module)}, which ensures that {@link Provides provider methods} are
* discovered.
*/
void configure(Binder binder);
}
| Module |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/support/PropertyBindingSupportMapTest.java | {
"start": 6158,
"end": 7099
} | class ____ {
private int age;
private boolean rider;
private Map<String, Company> works; // should auto-create this via the
// setter
private boolean goldCustomer;
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public boolean isRider() {
return rider;
}
public void setRider(boolean rider) {
this.rider = rider;
}
public Map<String, Company> getWorks() {
return works;
}
public void setWorks(Map<String, Company> works) {
this.works = works;
}
public boolean isGoldCustomer() {
return goldCustomer;
}
public void setGoldCustomer(boolean goldCustomer) {
this.goldCustomer = goldCustomer;
}
}
}
| Bar |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java | {
"start": 2533,
"end": 24620
} | class ____ extends ClientBaseWithFixes {
private Configuration conf;
private MiniZKFCCluster cluster;
// Set up ZK digest-based credentials for the purposes of the tests,
// to make sure all of our functionality works with auth and ACLs
// present.
private static final String DIGEST_USER_PASS="test-user:test-password";
private static final String TEST_AUTH_GOOD =
"digest:" + DIGEST_USER_PASS;
private static final String DIGEST_USER_HASH;
static {
try {
DIGEST_USER_HASH = DigestAuthenticationProvider.generateDigest(
DIGEST_USER_PASS);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private static final String TEST_ACL =
"digest:" + DIGEST_USER_HASH + ":rwcda";
static {
GenericTestUtils.setLogLevel(ActiveStandbyElector.LOG, Level.TRACE);
}
@BeforeEach
public void setupConfAndServices() {
conf = new Configuration();
conf.set(ZKFailoverController.ZK_ACL_KEY, TEST_ACL);
conf.set(ZKFailoverController.ZK_AUTH_KEY, TEST_AUTH_GOOD);
conf.set(ZKFailoverController.ZK_QUORUM_KEY, hostPort);
this.cluster = new MiniZKFCCluster(conf, getServer(serverFactory));
}
@AfterEach
public void teardown() {
if (cluster != null) {
try {
cluster.stop();
} catch (Exception e) {
LOG.warn("When stopping the cluster", e);
}
}
}
/**
* Test that the various command lines for formatting the ZK directory
* function correctly.
*/
@Test
public void testFormatZK() throws Exception {
DummyHAService svc = cluster.getService(1);
// Run without formatting the base dir,
// should barf
assertEquals(ZKFailoverController.ERR_CODE_NO_PARENT_ZNODE,
runFC(svc));
// Format the base dir, should succeed
assertEquals(0, runFC(svc, "-formatZK"));
// Should fail to format if already formatted
assertEquals(ZKFailoverController.ERR_CODE_FORMAT_DENIED,
runFC(svc, "-formatZK", "-nonInteractive"));
// Unless '-force' is on
assertEquals(0, runFC(svc, "-formatZK", "-force"));
}
/**
* Test that if ZooKeeper is not running, the correct error
* code is returned.
*/
@Test
public void testNoZK() throws Exception {
stopServer();
DummyHAService svc = cluster.getService(1);
assertEquals(ZKFailoverController.ERR_CODE_NO_ZK,
runFC(svc));
}
@Test
public void testPolicyProviderForZKFCRpcServer() throws Exception {
Configuration myconf = new Configuration();
myconf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
true);
DummyHAService dummyHAService = new DummyHAService(HAServiceState.ACTIVE,
new InetSocketAddress(0), false);
MiniZKFCCluster.DummyZKFC dummyZKFC =
new MiniZKFCCluster.DummyZKFC(myconf, dummyHAService);
// initialize ZKFCRpcServer with null policy
LambdaTestUtils.intercept(HadoopIllegalArgumentException.class,
CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
+ "is configured to true but service-level"
+ "authorization security policy is null.",
() -> new ZKFCRpcServer(myconf, new InetSocketAddress(0),
dummyZKFC, null));
// initialize ZKFCRpcServer with dummy policy
PolicyProvider dummyPolicy = new PolicyProvider() {
private final Service[] services = new Service[] {
new Service(CommonConfigurationKeys.SECURITY_ZKFC_PROTOCOL_ACL,
ZKFCProtocol.class),
new Service(
CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_POLICY,
RefreshAuthorizationPolicyProtocol.class),
};
@Override
public Service[] getServices() {
return this.services;
}
};
ZKFCRpcServer server = new ZKFCRpcServer(myconf,
new InetSocketAddress(0), dummyZKFC, dummyPolicy);
server.start();
server.stopAndJoin();
}
@Test
public void testFormatOneClusterLeavesOtherClustersAlone() throws Exception {
DummyHAService svc = cluster.getService(1);
DummyZKFC zkfcInOtherCluster = new DummyZKFC(conf, cluster.getService(1)) {
@Override
protected String getScopeInsideParentNode() {
return "other-scope";
}
};
// Run without formatting the base dir,
// should barf
assertEquals(ZKFailoverController.ERR_CODE_NO_PARENT_ZNODE,
runFC(svc));
// Format the base dir, should succeed
assertEquals(0, runFC(svc, "-formatZK"));
// Run the other cluster without formatting, should barf because
// it uses a different parent znode
assertEquals(ZKFailoverController.ERR_CODE_NO_PARENT_ZNODE,
zkfcInOtherCluster.run(new String[]{}));
// Should succeed in formatting the second cluster
assertEquals(0, zkfcInOtherCluster.run(new String[]{"-formatZK"}));
// But should not have deleted the original base node from the first
// cluster
assertEquals(ZKFailoverController.ERR_CODE_FORMAT_DENIED,
runFC(svc, "-formatZK", "-nonInteractive"));
}
/**
* Test that automatic failover won't run against a target that hasn't
* explicitly enabled the feature.
*/
@Test
public void testWontRunWhenAutoFailoverDisabled() throws Exception {
DummyHAService svc = cluster.getService(1);
svc = spy(svc);
doReturn(false).when(svc).isAutoFailoverEnabled();
assertEquals(ZKFailoverController.ERR_CODE_AUTO_FAILOVER_NOT_ENABLED,
runFC(svc, "-formatZK"));
assertEquals(ZKFailoverController.ERR_CODE_AUTO_FAILOVER_NOT_ENABLED,
runFC(svc));
}
/**
* Test that, if ACLs are specified in the configuration, that
* it sets the ACLs when formatting the parent node.
*/
@Test
public void testFormatSetsAcls() throws Exception {
// Format the base dir, should succeed
DummyHAService svc = cluster.getService(1);
assertEquals(0, runFC(svc, "-formatZK"));
ZooKeeper otherClient = createClient();
try {
// client without auth should not be able to read it
Stat stat = new Stat();
otherClient.getData(ZKFailoverController.ZK_PARENT_ZNODE_DEFAULT,
false, stat);
fail("Was able to read data without authenticating!");
} catch (KeeperException.NoAuthException nae) {
// expected
}
}
/**
* Test that the ZKFC won't run if fencing is not configured for the
* local service.
*/
@Test
public void testFencingMustBeConfigured() throws Exception {
DummyHAService svc = spy(cluster.getService(0));
doThrow(new BadFencingConfigurationException("no fencing"))
.when(svc).checkFencingConfigured();
// Format the base dir, should succeed
assertEquals(0, runFC(svc, "-formatZK"));
// Try to run the actual FC, should fail without a fencer
assertEquals(ZKFailoverController.ERR_CODE_NO_FENCER,
runFC(svc));
}
/**
* Test that, when the health monitor indicates bad health status,
* failover is triggered. Also ensures that graceful active->standby
* transition is used when possible, falling back to fencing when
* the graceful approach fails.
*/
@Test
public void testAutoFailoverOnBadHealth() throws Exception {
cluster.start();
DummyHAService svc1 = cluster.getService(1);
LOG.info("Faking svc0 unhealthy, should failover to svc1");
cluster.setHealthy(0, false);
LOG.info("Waiting for svc0 to enter initializing state");
cluster.waitForHAState(0, HAServiceState.INITIALIZING);
cluster.waitForHAState(1, HAServiceState.ACTIVE);
LOG.info("Allowing svc0 to be healthy again, making svc1 unreachable " +
"and fail to gracefully go to standby");
cluster.setUnreachable(1, true);
cluster.setHealthy(0, true);
// Should fail back to svc0 at this point
cluster.waitForHAState(0, HAServiceState.ACTIVE);
// and fence svc1
verify(svc1.fencer).fence(same(svc1));
}
/**
* Test that, when the health monitor indicates bad health status,
* failover is triggered. Also ensures that graceful active->standby
* transition is used when possible, falling back to fencing when
* the graceful approach fails.
*/
@Test
public void testAutoFailoverOnBadState() throws Exception {
cluster.start();
DummyHAService svc0 = cluster.getService(0);
LOG.info("Faking svc0 to change the state, should failover to svc1");
svc0.state = HAServiceState.STANDBY;
// Should fail back to svc0 at this point
cluster.waitForHAState(1, HAServiceState.ACTIVE);
}
@Test
public void testAutoFailoverOnLostZKSession() throws Exception {
cluster.start();
// Expire svc0, it should fail over to svc1
cluster.expireAndVerifyFailover(0, 1);
// Expire svc1, it should fail back to svc0
cluster.expireAndVerifyFailover(1, 0);
LOG.info("======= Running test cases second time to test " +
"re-establishment =========");
// Expire svc0, it should fail over to svc1
cluster.expireAndVerifyFailover(0, 1);
// Expire svc1, it should fail back to svc0
cluster.expireAndVerifyFailover(1, 0);
}
/**
* Test that the local node is observer.
*/
@Test
public void testVerifyObserverState()
throws Exception {
cluster.start(3);
DummyHAService svc2 = cluster.getService(2);
svc2.state = HAServiceState.OBSERVER;
// Verify svc2 is observer
LOG.info("Waiting for svc2 to enter observer state");
cluster.waitForHAState(2, HAServiceState.OBSERVER);
}
/**
* Test that, if the standby node is unhealthy, it doesn't try to become
* active
*/
@Test
public void testDontFailoverToUnhealthyNode() throws Exception {
cluster.start();
// Make svc1 unhealthy, and wait for its FC to notice the bad health.
cluster.setHealthy(1, false);
cluster.waitForHealthState(1, HealthMonitor.State.SERVICE_UNHEALTHY);
// Expire svc0
cluster.getElector(0).preventSessionReestablishmentForTests();
try {
cluster.expireActiveLockHolder(0);
LOG.info("Expired svc0's ZK session. Waiting a second to give svc1" +
" a chance to take the lock, if it is ever going to.");
Thread.sleep(1000);
// Ensure that no one holds the lock.
cluster.waitForActiveLockHolder(null);
} finally {
LOG.info("Allowing svc0's elector to re-establish its connection");
cluster.getElector(0).allowSessionReestablishmentForTests();
}
// svc0 should get the lock again
cluster.waitForActiveLockHolder(0);
}
/**
* Test that the ZKFC successfully quits the election when it fails to
* become active. This allows the old node to successfully fail back.
*/
@Test
public void testBecomingActiveFails() throws Exception {
cluster.start();
DummyHAService svc1 = cluster.getService(1);
LOG.info("Making svc1 fail to become active");
cluster.setFailToBecomeActive(1, true);
LOG.info("Faking svc0 unhealthy, should NOT successfully " +
"failover to svc1");
cluster.setHealthy(0, false);
cluster.waitForHealthState(0, State.SERVICE_UNHEALTHY);
cluster.waitForActiveLockHolder(null);
verify(svc1.proxy, timeout(2000).atLeastOnce())
.transitionToActive(Mockito.<StateChangeRequestInfo>any());
cluster.waitForHAState(0, HAServiceState.INITIALIZING);
cluster.waitForHAState(1, HAServiceState.STANDBY);
LOG.info("Faking svc0 healthy again, should go back to svc0");
cluster.setHealthy(0, true);
cluster.waitForHAState(0, HAServiceState.ACTIVE);
cluster.waitForHAState(1, HAServiceState.STANDBY);
cluster.waitForActiveLockHolder(0);
// Ensure that we can fail back to svc1 once it it is able
// to become active (e.g the admin has restarted it)
LOG.info("Allowing svc1 to become active, expiring svc0");
svc1.failToBecomeActive = false;
cluster.expireAndVerifyFailover(0, 1);
}
/**
* Test that, when ZooKeeper fails, the system remains in its
* current state, without triggering any failovers, and without
* causing the active node to enter standby state.
*/
@Test
public void testZooKeeperFailure() throws Exception {
cluster.start();
// Record initial ZK sessions
long session0 = cluster.getElector(0).getZKSessionIdForTests();
long session1 = cluster.getElector(1).getZKSessionIdForTests();
LOG.info("====== Stopping ZK server");
stopServer();
waitForServerDown(hostPort, CONNECTION_TIMEOUT);
LOG.info("====== Waiting for services to enter NEUTRAL mode");
cluster.waitForElectorState(0,
ActiveStandbyElector.State.NEUTRAL);
cluster.waitForElectorState(1,
ActiveStandbyElector.State.NEUTRAL);
LOG.info("====== Checking that the services didn't change HA state");
assertEquals(HAServiceState.ACTIVE, cluster.getService(0).state);
assertEquals(HAServiceState.STANDBY, cluster.getService(1).state);
LOG.info("====== Restarting server");
startServer();
waitForServerUp(hostPort, CONNECTION_TIMEOUT);
// Nodes should go back to their original states, since they re-obtain
// the same sessions.
cluster.waitForElectorState(0, ActiveStandbyElector.State.ACTIVE);
cluster.waitForElectorState(1, ActiveStandbyElector.State.STANDBY);
// Check HA states didn't change.
cluster.waitForHAState(0, HAServiceState.ACTIVE);
cluster.waitForHAState(1, HAServiceState.STANDBY);
// Check they re-used the same sessions and didn't spuriously reconnect
assertEquals(session0,
cluster.getElector(0).getZKSessionIdForTests());
assertEquals(session1,
cluster.getElector(1).getZKSessionIdForTests());
}
/**
* Test that the ZKFC can gracefully cede its active status.
*/
@Test
public void testCedeActive() throws Exception {
cluster.start();
DummyZKFC zkfc = cluster.getZkfc(0);
// It should be in active to start.
assertEquals(ActiveStandbyElector.State.ACTIVE,
zkfc.getElectorForTests().getStateForTests());
// Ask it to cede active for 3 seconds. It should respond promptly
// (i.e. the RPC itself should not take 3 seconds!)
ZKFCProtocol proxy = zkfc.getLocalTarget().getZKFCProxy(conf, 5000);
long st = Time.now();
proxy.cedeActive(3000);
long et = Time.now();
assertTrue(et - st < 1000,
"RPC to cedeActive took " + (et - st) + " ms");
// Should be in "INIT" state since it's not in the election
// at this point.
assertEquals(ActiveStandbyElector.State.INIT,
zkfc.getElectorForTests().getStateForTests());
// After the prescribed 3 seconds, should go into STANDBY state,
// since the other node in the cluster would have taken ACTIVE.
cluster.waitForElectorState(0, ActiveStandbyElector.State.STANDBY);
long et2 = Time.now();
assertTrue(et2 - et > 2800,
"Should take ~3 seconds to rejoin. Only took " + (et2 - et) +
"ms before rejoining.");
}
@Test
public void testGracefulFailover() throws Exception {
cluster.start();
cluster.waitForActiveLockHolder(0);
cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
cluster.waitForActiveLockHolder(1);
cluster.getService(0).getZKFCProxy(conf, 5000).gracefulFailover();
cluster.waitForActiveLockHolder(0);
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
return cluster.getService(0).fenceCount == 0 &&
cluster.getService(1).fenceCount == 0 &&
cluster.getService(0).activeTransitionCount == 2 &&
cluster.getService(1).activeTransitionCount == 1;
}
}, 100, 60 * 1000);
}
@Test
public void testGracefulFailoverToUnhealthy() throws Exception {
cluster.start();
cluster.waitForActiveLockHolder(0);
// Mark it unhealthy, wait for it to exit election
cluster.setHealthy(1, false);
cluster.waitForElectorState(1, ActiveStandbyElector.State.INIT);
// Ask for failover, it should fail, because it's unhealthy
try {
cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
fail("Did not fail to graceful failover to unhealthy service!");
} catch (ServiceFailedException sfe) {
GenericTestUtils.assertExceptionContains(
cluster.getService(1).toString() +
" is not currently healthy.", sfe);
}
}
@Test
public void testObserverExitGracefulFailover() throws Exception {
cluster.start(3);
cluster.waitForActiveLockHolder(0);
// Mark it become observer, wait for it to exit election
DummyHAService svc2 = cluster.getService(2);
svc2.state = HAServiceState.OBSERVER;
cluster.waitForHAState(2, HAServiceState.OBSERVER);
cluster.setFailToBecomeActive(2, true);
cluster.setFailToBecomeStandby(2, true);
cluster.setFailToBecomeObserver(2, true);
cluster.waitForElectorState(2, ActiveStandbyElector.State.INIT);
// Ask for failover, it should fail, because it's observer
try {
cluster.getService(2).getZKFCProxy(conf, 5000).gracefulFailover();
fail("Did not fail to graceful failover to observer!");
} catch (ServiceFailedException sfe) {
GenericTestUtils.assertExceptionContains(
cluster.getService(2).toString() +
" is in observer state.", sfe);
}
}
@Test
public void testGracefulFailoverFailBecomingActive() throws Exception {
cluster.start();
cluster.waitForActiveLockHolder(0);
cluster.setFailToBecomeActive(1, true);
// Ask for failover, it should fail and report back to user.
try {
cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
fail("Did not fail to graceful failover when target failed " +
"to become active!");
} catch (ServiceFailedException sfe) {
GenericTestUtils.assertExceptionContains(
"Couldn't make " + cluster.getService(1) + " active", sfe);
GenericTestUtils.assertExceptionContains(
"injected failure", sfe);
}
// No fencing
assertEquals(0, cluster.getService(0).fenceCount);
assertEquals(0, cluster.getService(1).fenceCount);
// Service 0 should go back to being active after the failed failover
cluster.waitForActiveLockHolder(0);
}
@Test
public void testGracefulFailoverFailBecomingStandby() throws Exception {
cluster.start();
cluster.waitForActiveLockHolder(0);
// Ask for failover when old node fails to transition to standby.
// This should trigger fencing, since the cedeActive() command
// still works, but leaves the breadcrumb in place.
cluster.setFailToBecomeStandby(0, true);
cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
// Check that the old node was fenced
assertEquals(1, cluster.getService(0).fenceCount);
}
@Test
public void testGracefulFailoverFailBecomingStandbyAndFailFence()
throws Exception {
cluster.start();
cluster.waitForActiveLockHolder(0);
// Ask for failover when old node fails to transition to standby.
// This should trigger fencing, since the cedeActive() command
// still works, but leaves the breadcrumb in place.
cluster.setFailToBecomeStandby(0, true);
cluster.setFailToFence(0, true);
try {
cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
fail("Failover should have failed when old node wont fence");
} catch (ServiceFailedException sfe) {
GenericTestUtils.assertExceptionContains(
"Unable to fence " + cluster.getService(0), sfe);
}
}
/**
* Test which exercises all of the inputs into ZKFC. This is particularly
* useful for running under jcarder to check for lock order violations.
*/
@Test
public void testOneOfEverything() throws Exception {
cluster.start();
// Failover by session expiration
LOG.info("====== Failing over by session expiration");
cluster.expireAndVerifyFailover(0, 1);
cluster.expireAndVerifyFailover(1, 0);
// Restart ZK
LOG.info("====== Restarting server");
stopServer();
waitForServerDown(hostPort, CONNECTION_TIMEOUT);
startServer();
waitForServerUp(hostPort, CONNECTION_TIMEOUT);
// Failover by bad health
cluster.setHealthy(0, false);
cluster.waitForHAState(0, HAServiceState.INITIALIZING);
cluster.waitForHAState(1, HAServiceState.ACTIVE);
cluster.setHealthy(1, true);
cluster.setHealthy(0, false);
cluster.waitForHAState(1, HAServiceState.ACTIVE);
cluster.waitForHAState(0, HAServiceState.INITIALIZING);
cluster.setHealthy(0, true);
cluster.waitForHealthState(0, State.SERVICE_HEALTHY);
// Graceful failovers
cluster.getZkfc(1).gracefulFailoverToYou();
cluster.getZkfc(0).gracefulFailoverToYou();
}
@Test
public void testGracefulFailoverMultipleZKfcs() throws Exception {
cluster.start(3);
cluster.waitForActiveLockHolder(0);
// failover to first
cluster.getService(1).getZKFCProxy(conf, 5000).gracefulFailover();
cluster.waitForActiveLockHolder(1);
// failover to second
cluster.getService(2).getZKFCProxy(conf, 5000).gracefulFailover();
cluster.waitForActiveLockHolder(2);
// failover back to original
cluster.getService(0).getZKFCProxy(conf, 5000).gracefulFailover();
cluster.waitForActiveLockHolder(0);
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
return cluster.getService(0).fenceCount == 0 &&
cluster.getService(1).fenceCount == 0 &&
cluster.getService(2).fenceCount == 0 &&
cluster.getService(0).activeTransitionCount == 2 &&
cluster.getService(1).activeTransitionCount == 1 &&
cluster.getService(2).activeTransitionCount == 1;
}
}, 100, 60 * 1000);
}
private int runFC(DummyHAService target, String ... args) throws Exception {
DummyZKFC zkfc = new DummyZKFC(conf, target);
return zkfc.run(args);
}
}
| TestZKFailoverController |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/exception/AbstractExceptionContextTest.java | {
"start": 1737,
"end": 7940
} | class ____ {
@Override
public String toString() {
throw new RuntimeException("Crap");
}
}
protected static final String TEST_MESSAGE_2 = "This is monotonous";
protected static final String TEST_MESSAGE = "Test Message";
protected T exceptionContext;
@BeforeEach
public void setUp() throws Exception {
exceptionContext
.addContextValue("test1", null)
.addContextValue("test2", "some value")
.addContextValue("test Date", new Date())
.addContextValue("test Nbr", Integer.valueOf(5))
.addContextValue("test Poorly written obj", new ObjectWithFaultyToString());
}
@Test
void testAddContextValue() {
final String message = exceptionContext.getFormattedExceptionMessage(TEST_MESSAGE);
assertTrue(message.contains(TEST_MESSAGE));
assertTrue(message.contains("test1"));
assertTrue(message.contains("test2"));
assertTrue(message.contains("test Date"));
assertTrue(message.contains("test Nbr"));
assertTrue(message.contains("some value"));
assertTrue(message.contains("5"));
assertNull(exceptionContext.getFirstContextValue("test1"));
assertEquals("some value", exceptionContext.getFirstContextValue("test2"));
assertEquals(5, exceptionContext.getContextLabels().size());
assertTrue(exceptionContext.getContextLabels().contains("test1"));
assertTrue(exceptionContext.getContextLabels().contains("test2"));
assertTrue(exceptionContext.getContextLabels().contains("test Date"));
assertTrue(exceptionContext.getContextLabels().contains("test Nbr"));
exceptionContext.addContextValue("test2", "different value");
assertEquals(5, exceptionContext.getContextLabels().size());
assertTrue(exceptionContext.getContextLabels().contains("test2"));
final String contextMessage = exceptionContext.getFormattedExceptionMessage(null);
assertFalse(contextMessage.contains(TEST_MESSAGE));
}
@Test
void testGetContextEntries() {
assertEquals(5, exceptionContext.getContextEntries().size());
exceptionContext.addContextValue("test2", "different value");
final List<Pair<String, Object>> entries = exceptionContext.getContextEntries();
assertEquals(6, entries.size());
assertEquals("test1", entries.get(0).getKey());
assertEquals("test2", entries.get(1).getKey());
assertEquals("test Date", entries.get(2).getKey());
assertEquals("test Nbr", entries.get(3).getKey());
assertEquals("test Poorly written obj", entries.get(4).getKey());
assertEquals("test2", entries.get(5).getKey());
}
@Test
void testGetContextLabels() {
assertEquals(5, exceptionContext.getContextEntries().size());
exceptionContext.addContextValue("test2", "different value");
final Set<String> labels = exceptionContext.getContextLabels();
assertEquals(6, exceptionContext.getContextEntries().size());
assertEquals(5, labels.size());
assertTrue(labels.contains("test1"));
assertTrue(labels.contains("test2"));
assertTrue(labels.contains("test Date"));
assertTrue(labels.contains("test Nbr"));
}
@Test
void testGetContextValues() {
exceptionContext.addContextValue("test2", "different value");
assertEquals(Collections.singletonList(null), exceptionContext.getContextValues("test1"));
assertEquals(Arrays.asList("some value", "different value"), exceptionContext.getContextValues("test2"));
exceptionContext.setContextValue("test2", "another");
assertEquals("another", exceptionContext.getFirstContextValue("test2"));
}
@Test
void testGetFirstContextValue() {
exceptionContext.addContextValue("test2", "different value");
assertNull(exceptionContext.getFirstContextValue("test1"));
assertEquals("some value", exceptionContext.getFirstContextValue("test2"));
assertNull(exceptionContext.getFirstContextValue("crap"));
exceptionContext.setContextValue("test2", "another");
assertEquals("another", exceptionContext.getFirstContextValue("test2"));
}
@Test
void testJavaSerialization() {
exceptionContext.setContextValue("test Poorly written obj", "serializable replacement");
final T clone = SerializationUtils.deserialize(SerializationUtils.serialize(exceptionContext));
assertEquals(exceptionContext.getFormattedExceptionMessage(null), clone.getFormattedExceptionMessage(null));
}
@Test
void testSetContextValue() {
exceptionContext.addContextValue("test2", "different value");
exceptionContext.setContextValue("test3", "3");
final String message = exceptionContext.getFormattedExceptionMessage(TEST_MESSAGE);
assertTrue(message.contains(TEST_MESSAGE));
assertTrue(message.contains("test Poorly written obj"));
assertTrue(message.contains("Crap"));
assertNull(exceptionContext.getFirstContextValue("crap"));
assertInstanceOf(ObjectWithFaultyToString.class, exceptionContext.getFirstContextValue("test Poorly written obj"));
assertEquals(7, exceptionContext.getContextEntries().size());
assertEquals(6, exceptionContext.getContextLabels().size());
assertTrue(exceptionContext.getContextLabels().contains("test Poorly written obj"));
assertFalse(exceptionContext.getContextLabels().contains("crap"));
exceptionContext.setContextValue("test Poorly written obj", "replacement");
assertEquals(7, exceptionContext.getContextEntries().size());
assertEquals(6, exceptionContext.getContextLabels().size());
exceptionContext.setContextValue("test2", "another");
assertEquals(6, exceptionContext.getContextEntries().size());
assertEquals(6, exceptionContext.getContextLabels().size());
final String contextMessage = exceptionContext.getFormattedExceptionMessage(null);
assertFalse(contextMessage.contains(TEST_MESSAGE));
}
}
| ObjectWithFaultyToString |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFromFuture.java | {
"start": 982,
"end": 1987
} | class ____<T> extends Flowable<T> {
final Future<? extends T> future;
final long timeout;
final TimeUnit unit;
public FlowableFromFuture(Future<? extends T> future, long timeout, TimeUnit unit) {
this.future = future;
this.timeout = timeout;
this.unit = unit;
}
@Override
public void subscribeActual(Subscriber<? super T> s) {
DeferredScalarSubscription<T> deferred = new DeferredScalarSubscription<>(s);
s.onSubscribe(deferred);
T v;
try {
v = unit != null ? future.get(timeout, unit) : future.get();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
if (!deferred.isCancelled()) {
s.onError(ex);
}
return;
}
if (v == null) {
s.onError(ExceptionHelper.createNullPointerException("The future returned a null value."));
} else {
deferred.complete(v);
}
}
}
| FlowableFromFuture |
java | elastic__elasticsearch | x-pack/plugin/write-load-forecaster/src/test/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecasterTests.java | {
"start": 23320,
"end": 27854
} | enum ____ implements IntToIntFunction {
INCREASE(1, 15) {
@Override
public int apply(int originalShardCount) {
return randomIntBetween(originalShardCount + 1, originalShardCount * 3);
}
public Matcher<Integer> expectedChangeFromOriginal(int originalShardCount) {
return greaterThan(originalShardCount);
}
},
DECREASE(10, 30) {
@Override
public int apply(int originalShardCount) {
return randomIntBetween(1, originalShardCount - 1);
}
public Matcher<Integer> expectedChangeFromOriginal(int originalShardCount) {
return lessThan(originalShardCount);
}
};
private final int originalMinimumShardCount;
private final int originalMaximumShardCount;
ShardCountChange(int originalMinimumShardCount, int originalMaximumShardCount) {
this.originalMinimumShardCount = originalMinimumShardCount;
this.originalMaximumShardCount = originalMaximumShardCount;
}
public int originalShardCount() {
return randomIntBetween(originalMinimumShardCount, originalMaximumShardCount);
}
abstract Matcher<Integer> expectedChangeFromOriginal(int originalShardCount);
}
private ProjectMetadata.Builder updateWriteIndexShardCount(
String dataStreamName,
ProjectMetadata.Builder originalMetadata,
ShardCountChange shardCountChange
) {
final ProjectMetadata.Builder updatedShardCountMetadata = ProjectMetadata.builder(originalMetadata.getId());
final DataStream originalDataStream = originalMetadata.dataStream(dataStreamName);
final Index existingWriteIndex = Objects.requireNonNull(originalDataStream.getWriteIndex());
final IndexMetadata originalWriteIndexMetadata = originalMetadata.getSafe(existingWriteIndex);
// Copy all non-write indices over unchanged
final List<IndexMetadata> backingIndexMetadatas = originalDataStream.getIndices()
.stream()
.filter(index -> index != existingWriteIndex)
.map(originalMetadata::getSafe)
.collect(Collectors.toList());
// Create a new write index with an updated shard count
final IndexMetadata writeIndexMetadata = createIndexMetadata(
DataStream.getDefaultBackingIndexName(dataStreamName, backingIndexMetadatas.size()),
shardCountChange.apply(originalWriteIndexMetadata.getNumberOfShards()),
null,
System.currentTimeMillis()
);
backingIndexMetadatas.add(writeIndexMetadata);
backingIndexMetadatas.forEach(indexMetadata -> updatedShardCountMetadata.put(indexMetadata, false));
final DataStream dataStream = createDataStream(
dataStreamName,
backingIndexMetadatas.stream().map(IndexMetadata::getIndex).toList()
);
updatedShardCountMetadata.put(dataStream);
return updatedShardCountMetadata;
}
private ProjectMetadata.Builder createMetadataBuilderWithDataStream(
String dataStreamName,
int numberOfBackingIndices,
int numberOfShards,
TimeValue maxIndexAge
) {
final ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
final List<Index> backingIndices = new ArrayList<>();
for (int i = 0; i < numberOfBackingIndices; i++) {
final IndexMetadata indexMetadata = createIndexMetadata(
DataStream.getDefaultBackingIndexName(dataStreamName, i),
numberOfShards,
randomIndexWriteLoad(numberOfShards),
System.currentTimeMillis() - (maxIndexAge.millis() / 2)
);
backingIndices.add(indexMetadata.getIndex());
metadataBuilder.put(indexMetadata, false);
}
final IndexMetadata writeIndexMetadata = createIndexMetadata(
DataStream.getDefaultBackingIndexName(dataStreamName, numberOfBackingIndices),
numberOfShards,
null,
System.currentTimeMillis()
);
backingIndices.add(writeIndexMetadata.getIndex());
metadataBuilder.put(writeIndexMetadata, false);
final DataStream dataStream = createDataStream(dataStreamName, backingIndices);
metadataBuilder.put(dataStream);
return metadataBuilder;
}
}
| ShardCountChange |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/prefetch/BlockOperations.java | {
"start": 1533,
"end": 1684
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(BlockOperations.class);
/**
* Operation kind.
*/
public | BlockOperations |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/simp/broker/package-info.java | {
"start": 88,
"end": 271
} | class ____ other supporting types such as a registry for subscriptions.
*/
@NullMarked
package org.springframework.messaging.simp.broker;
import org.jspecify.annotations.NullMarked;
| and |
java | apache__kafka | connect/json/src/test/java/org/apache/kafka/connect/json/JsonConverterTest.java | {
"start": 2681,
"end": 58666
} | class ____ {
private static final String TOPIC = "topic";
private final ObjectMapper objectMapper = new ObjectMapper()
.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)
.setNodeFactory(new JsonNodeFactory(true));
private final JsonConverter converter = new JsonConverter();
@BeforeEach
public void setUp() {
converter.configure(Map.of(), false);
}
// Schema metadata
@Test
public void testConnectSchemaMetadataTranslation() {
// this validates the non-type fields are translated and handled properly
assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, true), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes()));
assertEquals(new SchemaAndValue(Schema.OPTIONAL_BOOLEAN_SCHEMA, null), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": true }, \"payload\": null }".getBytes()));
assertEquals(new SchemaAndValue(SchemaBuilder.bool().defaultValue(true).build(), true),
converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"default\": true }, \"payload\": null }".getBytes()));
assertEquals(new SchemaAndValue(SchemaBuilder.bool().required().name("bool").version(2).doc("the documentation").parameter("foo", "bar").build(), true),
converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": false, \"name\": \"bool\", \"version\": 2, \"doc\": \"the documentation\", \"parameters\": { \"foo\": \"bar\" }}, \"payload\": true }".getBytes()));
}
// Schema types
@Test
public void booleanToConnect() {
assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, true), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes()));
assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, false), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": false }".getBytes()));
}
@Test
public void byteToConnect() {
assertEquals(new SchemaAndValue(Schema.INT8_SCHEMA, (byte) 12), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int8\" }, \"payload\": 12 }".getBytes()));
}
@Test
public void shortToConnect() {
assertEquals(new SchemaAndValue(Schema.INT16_SCHEMA, (short) 12), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int16\" }, \"payload\": 12 }".getBytes()));
}
@Test
public void intToConnect() {
assertEquals(new SchemaAndValue(Schema.INT32_SCHEMA, 12), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int32\" }, \"payload\": 12 }".getBytes()));
}
@Test
public void longToConnect() {
assertEquals(new SchemaAndValue(Schema.INT64_SCHEMA, 12L), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int64\" }, \"payload\": 12 }".getBytes()));
assertEquals(new SchemaAndValue(Schema.INT64_SCHEMA, 4398046511104L), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int64\" }, \"payload\": 4398046511104 }".getBytes()));
}
@Test
public void numberWithLeadingZerosToConnect() {
assertEquals(new SchemaAndValue(Schema.INT8_SCHEMA, (byte) 12), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int8\" }, \"payload\": 0012 }".getBytes()));
assertEquals(new SchemaAndValue(Schema.INT16_SCHEMA, (short) 123), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int16\" }, \"payload\": 000123 }".getBytes()));
assertEquals(new SchemaAndValue(Schema.INT32_SCHEMA, 12345), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int32\" }, \"payload\": 000012345 }".getBytes()));
assertEquals(new SchemaAndValue(Schema.INT64_SCHEMA, 123456789L), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int64\" }, \"payload\": 00000123456789 }".getBytes()));
}
@Test
public void floatToConnect() {
assertEquals(new SchemaAndValue(Schema.FLOAT32_SCHEMA, 12.34f), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"float\" }, \"payload\": 12.34 }".getBytes()));
}
@Test
public void doubleToConnect() {
assertEquals(new SchemaAndValue(Schema.FLOAT64_SCHEMA, 12.34), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"double\" }, \"payload\": 12.34 }".getBytes()));
}
@Test
public void bytesToConnect() {
ByteBuffer reference = ByteBuffer.wrap(Utils.utf8("test-string"));
String msg = "{ \"schema\": { \"type\": \"bytes\" }, \"payload\": \"dGVzdC1zdHJpbmc=\" }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
ByteBuffer converted = ByteBuffer.wrap((byte[]) schemaAndValue.value());
assertEquals(reference, converted);
}
@Test
public void stringToConnect() {
assertEquals(new SchemaAndValue(Schema.STRING_SCHEMA, "foo-bar-baz"), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\" }".getBytes()));
}
@Test
public void arrayToConnect() {
byte[] arrayJson = "{ \"schema\": { \"type\": \"array\", \"items\": { \"type\" : \"int32\" } }, \"payload\": [1, 2, 3] }".getBytes();
assertEquals(new SchemaAndValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), List.of(1, 2, 3)), converter.toConnectData(TOPIC, arrayJson));
}
@Test
public void mapToConnectStringKeys() {
byte[] mapJson = "{ \"schema\": { \"type\": \"map\", \"keys\": { \"type\" : \"string\" }, \"values\": { \"type\" : \"int32\" } }, \"payload\": { \"key1\": 12, \"key2\": 15} }".getBytes();
Map<String, Integer> expected = new HashMap<>();
expected.put("key1", 12);
expected.put("key2", 15);
assertEquals(new SchemaAndValue(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build(), expected), converter.toConnectData(TOPIC, mapJson));
}
@Test
public void mapToConnectNonStringKeys() {
byte[] mapJson = "{ \"schema\": { \"type\": \"map\", \"keys\": { \"type\" : \"int32\" }, \"values\": { \"type\" : \"int32\" } }, \"payload\": [ [1, 12], [2, 15] ] }".getBytes();
Map<Integer, Integer> expected = new HashMap<>();
expected.put(1, 12);
expected.put(2, 15);
assertEquals(new SchemaAndValue(SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build(), expected), converter.toConnectData(TOPIC, mapJson));
}
@Test
public void structToConnect() {
byte[] structJson = "{ \"schema\": { \"type\": \"struct\", \"fields\": [{ \"field\": \"field1\", \"type\": \"boolean\" }, { \"field\": \"field2\", \"type\": \"string\" }] }, \"payload\": { \"field1\": true, \"field2\": \"string\" } }".getBytes();
Schema expectedSchema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).field("field2", Schema.STRING_SCHEMA).build();
Struct expected = new Struct(expectedSchema).put("field1", true).put("field2", "string");
SchemaAndValue converted = converter.toConnectData(TOPIC, structJson);
assertEquals(new SchemaAndValue(expectedSchema, expected), converted);
}
@Test
public void structWithOptionalFieldToConnect() {
byte[] structJson = "{ \"schema\": { \"type\": \"struct\", \"fields\": [{ \"field\":\"optional\", \"type\": \"string\", \"optional\": true }, { \"field\": \"required\", \"type\": \"string\" }] }, \"payload\": { \"required\": \"required\" } }".getBytes();
Schema expectedSchema = SchemaBuilder.struct().field("optional", Schema.OPTIONAL_STRING_SCHEMA).field("required", Schema.STRING_SCHEMA).build();
Struct expected = new Struct(expectedSchema).put("required", "required");
SchemaAndValue converted = converter.toConnectData(TOPIC, structJson);
assertEquals(new SchemaAndValue(expectedSchema, expected), converted);
}
@Test
public void nullToConnect() {
// When schemas are enabled, trying to decode a tombstone should be an empty envelope
// the behavior is the same as when the json is "{ "schema": null, "payload": null }"
// to keep compatibility with the record
SchemaAndValue converted = converter.toConnectData(TOPIC, null);
assertEquals(SchemaAndValue.NULL, converted);
}
/**
* When schemas are disabled, empty data should be decoded to an empty envelope.
* This test verifies the case where `schemas.enable` configuration is set to false, and
* {@link JsonConverter} converts empty bytes to {@link SchemaAndValue#NULL}.
*/
@Test
public void emptyBytesToConnect() {
// This characterizes the messages with empty data when Json schemas is disabled
Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true);
SchemaAndValue converted = converter.toConnectData(TOPIC, "".getBytes());
assertEquals(SchemaAndValue.NULL, converted);
}
/**
* When schemas are disabled, fields are mapped to Connect maps.
*/
@Test
public void schemalessWithEmptyFieldValueToConnect() {
// This characterizes the messages with empty data when Json schemas is disabled
Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true);
String input = "{ \"a\": \"\", \"b\": null}";
SchemaAndValue converted = converter.toConnectData(TOPIC, input.getBytes());
Map<String, String> expected = new HashMap<>();
expected.put("a", "");
expected.put("b", null);
assertEquals(new SchemaAndValue(null, expected), converted);
}
@Test
public void nullSchemaPrimitiveToConnect() {
SchemaAndValue converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": null }".getBytes());
assertEquals(SchemaAndValue.NULL, converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": true }".getBytes());
assertEquals(new SchemaAndValue(null, true), converted);
// Integers: Connect has more data types, and JSON unfortunately mixes all number types. We try to preserve
// info as best we can, so we always use the largest integer and floating point numbers we can and have Jackson
// determine if it's an integer or not
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": 12 }".getBytes());
assertEquals(new SchemaAndValue(null, 12L), converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": 12.24 }".getBytes());
assertEquals(new SchemaAndValue(null, 12.24), converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": \"a string\" }".getBytes());
assertEquals(new SchemaAndValue(null, "a string"), converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": [1, \"2\", 3] }".getBytes());
assertEquals(new SchemaAndValue(null, List.of(1L, "2", 3L)), converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": { \"field1\": 1, \"field2\": 2} }".getBytes());
Map<String, Long> obj = new HashMap<>();
obj.put("field1", 1L);
obj.put("field2", 2L);
assertEquals(new SchemaAndValue(null, obj), converted);
}
@Test
public void decimalToConnect() {
Schema schema = Decimal.schema(2);
BigDecimal reference = new BigDecimal(new BigInteger("156"), 2);
// Payload is base64 encoded byte[]{0, -100}, which is the two's complement encoding of 156.
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }, \"payload\": \"AJw=\" }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
BigDecimal converted = (BigDecimal) schemaAndValue.value();
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, converted);
}
@Test
public void decimalToConnectOptional() {
Schema schema = Decimal.builder(2).optional().schema();
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"optional\": true, \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertNull(schemaAndValue.value());
}
@Test
public void decimalToConnectWithDefaultValue() {
BigDecimal reference = new BigDecimal(new BigInteger("156"), 2);
Schema schema = Decimal.builder(2).defaultValue(reference).build();
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"default\": \"AJw=\", \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void decimalToConnectOptionalWithDefaultValue() {
BigDecimal reference = new BigDecimal(new BigInteger("156"), 2);
Schema schema = Decimal.builder(2).optional().defaultValue(reference).build();
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"optional\": true, \"default\": \"AJw=\", \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void numericDecimalToConnect() {
BigDecimal reference = new BigDecimal(new BigInteger("156"), 2);
Schema schema = Decimal.schema(2);
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }, \"payload\": 1.56 }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void numericDecimalWithTrailingZerosToConnect() {
BigDecimal reference = new BigDecimal(new BigInteger("15600"), 4);
Schema schema = Decimal.schema(4);
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"4\" } }, \"payload\": 1.5600 }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void highPrecisionNumericDecimalToConnect() {
// this number is too big to be kept in a float64!
BigDecimal reference = new BigDecimal("1.23456789123456789");
Schema schema = Decimal.schema(17);
String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"17\" } }, \"payload\": 1.23456789123456789 }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void dateToConnect() {
Schema schema = Date.SCHEMA;
GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
calendar.add(Calendar.DATE, 10000);
java.util.Date reference = calendar.getTime();
String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1 }, \"payload\": 10000 }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
java.util.Date converted = (java.util.Date) schemaAndValue.value();
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, converted);
}
@Test
public void dateToConnectOptional() {
Schema schema = Date.builder().optional().schema();
String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1, \"optional\": true }, \"payload\": null }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertNull(schemaAndValue.value());
}
@Test
public void dateToConnectWithDefaultValue() {
java.util.Date reference = new java.util.Date(0);
Schema schema = Date.builder().defaultValue(reference).schema();
String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1, \"default\": 0 }, \"payload\": null }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void dateToConnectOptionalWithDefaultValue() {
java.util.Date reference = new java.util.Date(0);
Schema schema = Date.builder().optional().defaultValue(reference).schema();
String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1, \"optional\": true, \"default\": 0 }, \"payload\": null }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, schemaAndValue.value());
}
@Test
public void timeToConnect() {
Schema schema = Time.SCHEMA;
GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
calendar.add(Calendar.MILLISECOND, 14400000);
java.util.Date reference = calendar.getTime();
String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1 }, \"payload\": 14400000 }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
java.util.Date converted = (java.util.Date) schemaAndValue.value();
assertEquals(schema, schemaAndValue.schema());
assertEquals(reference, converted);
}
@Test
public void timeToConnectOptional() {
    // Optional time schema with no default: a null payload deserializes to null.
    Schema expectedSchema = Time.builder().optional().schema();
    String json = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1, \"optional\": true }, \"payload\": null }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    assertEquals(expectedSchema, result.schema());
    assertNull(result.value());
}
@Test
public void timeToConnectWithDefaultValue() {
    // A null payload should deserialize to the schema's declared default time.
    java.util.Date expected = new java.util.Date(0);
    Schema expectedSchema = Time.builder().defaultValue(expected).schema();
    String json = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1, \"default\": 0 }, \"payload\": null }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    assertEquals(expectedSchema, result.schema());
    assertEquals(expected, result.value());
}
@Test
public void timeToConnectOptionalWithDefaultValue() {
    // Optional time schema with a default: a null payload still yields the default.
    java.util.Date expected = new java.util.Date(0);
    Schema expectedSchema = Time.builder().optional().defaultValue(expected).schema();
    String json = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1, \"optional\": true, \"default\": 0 }, \"payload\": null }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    assertEquals(expectedSchema, result.schema());
    assertEquals(expected, result.value());
}
@Test
public void timestampToConnect() {
    // 4,000,000,000 ms exceeds Integer.MAX_VALUE, so the reference is built in two calendar steps.
    GregorianCalendar cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
    cal.setTimeZone(TimeZone.getTimeZone("UTC"));
    cal.add(Calendar.MILLISECOND, 2000000000);
    cal.add(Calendar.MILLISECOND, 2000000000);
    java.util.Date expected = cal.getTime();
    String json = "{ \"schema\": { \"type\": \"int64\", \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1 }, \"payload\": 4000000000 }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    java.util.Date actual = (java.util.Date) result.value();
    assertEquals(Timestamp.SCHEMA, result.schema());
    assertEquals(expected, actual);
}
@Test
public void timestampToConnectOptional() {
    // Optional timestamp schema with no default: a null payload deserializes to null.
    Schema expectedSchema = Timestamp.builder().optional().schema();
    String json = "{ \"schema\": { \"type\": \"int64\", \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1, \"optional\": true }, \"payload\": null }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    assertEquals(expectedSchema, result.schema());
    assertNull(result.value());
}
@Test
public void timestampToConnectWithDefaultValue() {
    // A null payload should deserialize to the schema's declared default timestamp.
    java.util.Date expected = new java.util.Date(42);
    Schema expectedSchema = Timestamp.builder().defaultValue(expected).schema();
    String json = "{ \"schema\": { \"type\": \"int64\", \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1, \"default\": 42 }, \"payload\": null }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    assertEquals(expectedSchema, result.schema());
    assertEquals(expected, result.value());
}
@Test
public void timestampToConnectOptionalWithDefaultValue() {
    // Optional timestamp schema with a default: a null payload still yields the default.
    java.util.Date expected = new java.util.Date(42);
    Schema expectedSchema = Timestamp.builder().optional().defaultValue(expected).schema();
    String json = "{ \"schema\": { \"type\": \"int64\", \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1, \"optional\": true, \"default\": 42 }, \"payload\": null }";
    SchemaAndValue result = converter.toConnectData(TOPIC, json.getBytes());
    assertEquals(expectedSchema, result.schema());
    assertEquals(expected, result.value());
}
// Schema metadata
@Test
public void testJsonSchemaMetadataTranslation() {
    // required schema
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
    // optional schema, null value
    envelope = parse(converter.fromConnectData(TOPIC, Schema.OPTIONAL_BOOLEAN_SCHEMA, null));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": true }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isNull());
    // schema with a default value
    envelope = parse(converter.fromConnectData(TOPIC, SchemaBuilder.bool().defaultValue(true).build(), true));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false, \"default\": true }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
    // schema carrying full metadata: name, version, doc and parameters
    envelope = parse(converter.fromConnectData(TOPIC, SchemaBuilder.bool().required().name("bool").version(3).doc("the documentation").parameter("foo", "bar").build(), true));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false, \"name\": \"bool\", \"version\": 3, \"doc\": \"the documentation\", \"parameters\": { \"foo\": \"bar\" }}"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
}
@Test
public void testCacheSchemaToConnectConversion() {
    assertEquals(0, converter.sizeOfToConnectSchemaCache());
    // First conversion populates the cache; an identical schema must not add a new entry.
    converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes());
    assertEquals(1, converter.sizeOfToConnectSchemaCache());
    converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes());
    assertEquals(1, converter.sizeOfToConnectSchemaCache());
    // Different schema should also get cached
    converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": true }, \"payload\": true }".getBytes());
    assertEquals(2, converter.sizeOfToConnectSchemaCache());
    // Even equivalent, but different JSON encoding of schema, should get different cache entry
    converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": false }, \"payload\": true }".getBytes());
    assertEquals(3, converter.sizeOfToConnectSchemaCache());
}
// Schema types
@Test
public void booleanToJson() {
    // Round-trip a boolean through the converter and inspect the serialized envelope.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
}
@Test
public void byteToJson() {
    // int8 values serialize as plain JSON numbers under an "int8" schema.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.INT8_SCHEMA, (byte) 12));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int8\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(12, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
}
@Test
public void shortToJson() {
    // int16 values serialize as plain JSON numbers under an "int16" schema.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.INT16_SCHEMA, (short) 12));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int16\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(12, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
}
@Test
public void intToJson() {
    // int32 values serialize as plain JSON numbers under an "int32" schema.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.INT32_SCHEMA, 12));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int32\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(12, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
}
@Test
public void longToJson() {
    // Use a value beyond the int range to exercise 64-bit serialization.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.INT64_SCHEMA, 4398046511104L));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int64\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(4398046511104L, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).longValue());
}
@Test
public void floatToJson() {
    // float32 round-trips through JSON within a small tolerance.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.FLOAT32_SCHEMA, 12.34f));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"float\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(12.34f, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).floatValue(), 0.001);
}
@Test
public void doubleToJson() {
    // float64 round-trips through JSON within a small tolerance.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.FLOAT64_SCHEMA, 12.34));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"double\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(12.34, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).doubleValue(), 0.001);
}
@Test
public void bytesToJson() throws IOException {
    // Byte arrays are serialized as base64 text; decode and compare the raw bytes.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.BYTES_SCHEMA, "test-string".getBytes()));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(ByteBuffer.wrap("test-string".getBytes()),
        ByteBuffer.wrap(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).binaryValue()));
}
@Test
public void stringToJson() {
    // Strings serialize as JSON text under a "string" schema.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, "test-string"));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"string\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals("test-string", envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).textValue());
}
@Test
public void arrayToJson() {
    // An int32 array serializes as a JSON array with the item schema embedded.
    Schema arraySchema = SchemaBuilder.array(Schema.INT32_SCHEMA).build();
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, arraySchema, List.of(1, 2, 3)));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"array\", \"items\": { \"type\": \"int32\", \"optional\": false }, \"optional\": false }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add(2).add(3),
        envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
@Test
public void mapToJsonStringKeys() {
    // Maps with string keys serialize as a JSON object.
    Schema mapSchema = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build();
    Map<String, Integer> source = new HashMap<>();
    source.put("key1", 12);
    source.put("key2", 15);
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, mapSchema, source));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"map\", \"keys\": { \"type\" : \"string\", \"optional\": false }, \"values\": { \"type\" : \"int32\", \"optional\": false }, \"optional\": false }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(JsonNodeFactory.instance.objectNode().put("key1", 12).put("key2", 15),
        envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
@Test
public void mapToJsonNonStringKeys() {
    // Maps with non-string keys cannot serialize as a JSON object, so the converter
    // emits an array of [key, value] pairs instead.
    Schema intIntMap = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build();
    Map<Integer, Integer> input = new HashMap<>();
    input.put(1, 12);
    input.put(2, 15);
    JsonNode converted = parse(converter.fromConnectData(TOPIC, intIntMap, input));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"map\", \"keys\": { \"type\" : \"int32\", \"optional\": false }, \"values\": { \"type\" : \"int32\", \"optional\": false }, \"optional\": false }"),
        converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isArray());
    ArrayNode payload = (ArrayNode) converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
    assertEquals(2, payload.size());
    // Entry order is unspecified, so compare the pairs as a set.
    Set<JsonNode> payloadEntries = new HashSet<>();
    payload.forEach(payloadEntries::add);
    assertEquals(Set.of(JsonNodeFactory.instance.arrayNode().add(1).add(12),
            JsonNodeFactory.instance.arrayNode().add(2).add(15)),
        payloadEntries
    );
}
@Test
public void structToJson() {
    // A struct serializes as a JSON object with one field per declared schema field.
    Schema structSchema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).field("field2", Schema.STRING_SCHEMA).field("field3", Schema.STRING_SCHEMA).field("field4", Schema.BOOLEAN_SCHEMA).build();
    Struct value = new Struct(structSchema).put("field1", true).put("field2", "string2").put("field3", "string3").put("field4", false);
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, structSchema, value));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"struct\", \"optional\": false, \"fields\": [{ \"field\": \"field1\", \"type\": \"boolean\", \"optional\": false }, { \"field\": \"field2\", \"type\": \"string\", \"optional\": false }, { \"field\": \"field3\", \"type\": \"string\", \"optional\": false }, { \"field\": \"field4\", \"type\": \"boolean\", \"optional\": false }] }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(JsonNodeFactory.instance.objectNode()
            .put("field1", true)
            .put("field2", "string2")
            .put("field3", "string3")
            .put("field4", false),
        envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
@Test
public void structSchemaIdentical() {
    // Two independently built but structurally identical schemas should compare equal
    // after the struct is serialized.
    Schema expectedSchema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA)
        .field("field2", Schema.STRING_SCHEMA)
        .field("field3", Schema.STRING_SCHEMA)
        .field("field4", Schema.BOOLEAN_SCHEMA).build();
    Schema actualSchema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA)
        .field("field2", Schema.STRING_SCHEMA)
        .field("field3", Schema.STRING_SCHEMA)
        .field("field4", Schema.BOOLEAN_SCHEMA).build();
    Struct value = new Struct(actualSchema).put("field1", true).put("field2", "string2").put("field3", "string3").put("field4", false);
    assertStructSchemaEqual(expectedSchema, value);
}
@Test
public void decimalToJson() throws IOException {
    // By default Decimal values serialize as base64-encoded unscaled bytes.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Decimal.schema(2), new BigDecimal(new BigInteger("156"), 2)));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isTextual(), "expected node to be base64 text");
    assertArrayEquals(new byte[]{0, -100}, envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).binaryValue());
}
@Test
public void decimalToNumericJson() {
    // With DECIMAL_FORMAT=NUMERIC, Decimal values serialize as JSON numbers instead of base64.
    converter.configure(Map.of(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name()), false);
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Decimal.schema(2), new BigDecimal(new BigInteger("156"), 2)));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isNumber(), "expected node to be numeric");
    assertEquals(new BigDecimal("1.56"), envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).decimalValue());
}
@Test
public void decimalWithTrailingZerosToNumericJson() {
    // Trailing zeros must be preserved: scale 4 on 15600 should yield 1.5600, not 1.56.
    converter.configure(Map.of(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name()), false);
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Decimal.schema(4), new BigDecimal(new BigInteger("15600"), 4)));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"4\" } }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isNumber(), "expected node to be numeric");
    assertEquals(new BigDecimal("1.5600"), envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).decimalValue());
}
@Test
public void decimalToJsonWithoutSchema() {
    // A BigDecimal cannot be serialized schemalessly; the converter must reject it.
    BigDecimal value = new BigDecimal(new BigInteger("156"), 2);
    assertThrows(
        DataException.class,
        () -> converter.fromConnectData(TOPIC, null, value),
        "expected data exception when serializing BigDecimal without schema");
}
@Test
public void dateToJson() {
    // Dates serialize as the number of days since the epoch (int32).
    GregorianCalendar cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
    cal.setTimeZone(TimeZone.getTimeZone("UTC"));
    cal.add(Calendar.DATE, 10000);
    java.util.Date value = cal.getTime();
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Date.SCHEMA, value));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int32\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1 }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    JsonNode payload = envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
    assertTrue(payload.isInt());
    assertEquals(10000, payload.intValue());
}
@Test
public void timeToJson() {
    // Times serialize as milliseconds past midnight (int32).
    GregorianCalendar cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
    cal.setTimeZone(TimeZone.getTimeZone("UTC"));
    cal.add(Calendar.MILLISECOND, 14400000);
    java.util.Date value = cal.getTime();
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Time.SCHEMA, value));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int32\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1 }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    JsonNode payload = envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
    assertTrue(payload.isInt());
    assertEquals(14400000, payload.longValue());
}
@Test
public void timestampToJson() {
    // Timestamps serialize as epoch milliseconds (int64); value exceeds the int range.
    GregorianCalendar cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
    cal.setTimeZone(TimeZone.getTimeZone("UTC"));
    cal.add(Calendar.MILLISECOND, 2000000000);
    cal.add(Calendar.MILLISECOND, 2000000000);
    java.util.Date value = cal.getTime();
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, Timestamp.SCHEMA, value));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"int64\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1 }"),
        envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    JsonNode payload = envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
    assertTrue(payload.isLong());
    assertEquals(4000000000L, payload.longValue());
}
@Test
public void nullSchemaAndPrimitiveToJson() {
    // This still needs to do conversion of data, null schema means "anything goes"
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, null, true));
    validateEnvelopeNullSchema(envelope);
    assertTrue(envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
    assertTrue(envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
}
@Test
public void nullSchemaAndArrayToJson() {
    // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
    // types to verify conversion still works.
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, null, List.of(1, "string", true)));
    validateEnvelopeNullSchema(envelope);
    assertTrue(envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
    assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add("string").add(true),
        envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
@Test
public void nullSchemaAndMapToJson() {
    // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
    // types to verify conversion still works.
    Map<String, Object> source = new HashMap<>();
    source.put("key1", 12);
    source.put("key2", "string");
    source.put("key3", true);
    JsonNode envelope = parse(converter.fromConnectData(TOPIC, null, source));
    validateEnvelopeNullSchema(envelope);
    assertTrue(envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
    assertEquals(JsonNodeFactory.instance.objectNode().put("key1", 12).put("key2", "string").put("key3", true),
        envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
@Test
public void nullSchemaAndMapNonStringKeysToJson() {
    // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
    // types to verify conversion still works. Non-string keys force the array-of-pairs encoding.
    Map<Object, Object> input = new HashMap<>();
    input.put("string", 12);
    input.put(52, "string");
    input.put(false, true);
    JsonNode converted = parse(converter.fromConnectData(TOPIC, null, input));
    validateEnvelopeNullSchema(converted);
    assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
    assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isArray());
    ArrayNode payload = (ArrayNode) converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
    assertEquals(3, payload.size());
    // Entry order is unspecified, so compare the [key, value] pairs as a set.
    Set<JsonNode> payloadEntries = new HashSet<>();
    payload.forEach(payloadEntries::add);
    assertEquals(Set.of(JsonNodeFactory.instance.arrayNode().add("string").add(12),
            JsonNodeFactory.instance.arrayNode().add(52).add("string"),
            JsonNodeFactory.instance.arrayNode().add(false).add(true)),
        payloadEntries
    );
}
@Test
public void nullSchemaAndNullValueToJson() {
    // This characterizes the production of tombstone messages when Json schemas is enabled
    converter.configure(Map.of("schemas.enable", true), true);
    byte[] serialized = converter.fromConnectData(TOPIC, null, null);
    assertNull(serialized);
}
@Test
public void nullValueToJson() {
    // This characterizes the production of tombstone messages when Json schemas is not enabled
    converter.configure(Map.of("schemas.enable", false), true);
    byte[] serialized = converter.fromConnectData(TOPIC, null, null);
    assertNull(serialized);
}
@Test
public void mismatchSchemaJson() {
    // If we have mismatching schema info, we should properly convert to a DataException
    assertThrows(DataException.class,
        () -> converter.fromConnectData(TOPIC, Schema.FLOAT64_SCHEMA, true));
}
@Test
public void noSchemaToConnect() {
    // With schemas disabled, the raw JSON value deserializes with a null schema.
    converter.configure(Map.of("schemas.enable", false), true);
    assertEquals(new SchemaAndValue(null, true), converter.toConnectData(TOPIC, "true".getBytes()));
}
@Test
public void noSchemaToJson() {
    // With schemas disabled, the serialized output is the bare value with no envelope.
    converter.configure(Map.of("schemas.enable", false), true);
    JsonNode serialized = parse(converter.fromConnectData(TOPIC, null, true));
    assertTrue(serialized.isBoolean());
    assertTrue(serialized.booleanValue());
}
@Test
public void testCacheSchemaToJsonConversion() {
    assertEquals(0, converter.sizeOfFromConnectSchemaCache());
    // Repeated conversion of the same schema, even if the schema object is different should return the same Java
    // object
    converter.fromConnectData(TOPIC, SchemaBuilder.bool().build(), true);
    assertEquals(1, converter.sizeOfFromConnectSchemaCache());
    converter.fromConnectData(TOPIC, SchemaBuilder.bool().build(), true);
    assertEquals(1, converter.sizeOfFromConnectSchemaCache());
    // Validate that a similar, but different schema correctly returns a different schema.
    converter.fromConnectData(TOPIC, SchemaBuilder.bool().optional().build(), true);
    assertEquals(2, converter.sizeOfFromConnectSchemaCache());
}
@Test
public void testJsonSchemaCacheSizeFromConfigFile() throws URISyntaxException, IOException {
    // Loads worker properties from the test resource and verifies configure() accepts them.
    URL resource = Objects.requireNonNull(getClass().getResource("/connect-test.properties"));
    String propsPath = new File(resource.toURI()).getAbsolutePath();
    Map<String, String> workerProps = propsPath.isEmpty()
        ? Map.of()
        : Utils.propsToStringMap(Utils.loadProps(propsPath));
    JsonConverter freshConverter = new JsonConverter();
    freshConverter.configure(workerProps, false);
}
// Note: the header conversion methods delegates to the data conversion methods, which are tested above.
// The following simply verify that the delegation works.
@Test
public void testStringHeaderToJson() {
    // Header serialization delegates to the data path; verify the envelope matches.
    JsonNode envelope = parse(converter.fromConnectHeader(TOPIC, "headerName", Schema.STRING_SCHEMA, "test-string"));
    validateEnvelope(envelope);
    assertEquals(parse("{ \"type\": \"string\", \"optional\": false }"), envelope.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals("test-string", envelope.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).textValue());
}
@Test
public void stringHeaderToConnect() {
    // Header deserialization delegates to the data path.
    byte[] serialized = "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\" }".getBytes();
    assertEquals(new SchemaAndValue(Schema.STRING_SCHEMA, "foo-bar-baz"), converter.toConnectHeader(TOPIC, "headerName", serialized));
}
@Test
public void serializeNullToDefault() {
    // With replace-null-with-default enabled, a null value serializes as the schema default.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
    Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
    JsonNode actual = parse(converter.fromConnectData(TOPIC, schema, null));
    JsonNode expected = parse("{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":\"default\"}");
    assertEquals(expected, actual);
}
@Test
public void serializeNullToNull() {
    // With replace-null-with-default disabled, a null value stays null in the payload.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
    Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
    JsonNode actual = parse(converter.fromConnectData(TOPIC, schema, null));
    JsonNode expected = parse("{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}");
    assertEquals(expected, actual);
}
@Test
public void deserializeNullToDefault() {
    // With replace-null-with-default enabled, a null payload deserializes to the schema default.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
    String json = "{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}";
    SchemaAndValue result = converter.toConnectData(TOPIC, null, json.getBytes());
    assertEquals("default", result.value());
}
@Test
public void deserializeNullToNull() {
    // With replace-null-with-default disabled, a null payload deserializes to null.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
    String json = "{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}";
    SchemaAndValue result = converter.toConnectData(TOPIC, null, json.getBytes());
    assertNull(result.value());
}
@Test
public void serializeFieldNullToDefault() {
    // An unset struct field serializes as its default when replace-null-with-default is enabled.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
    Schema fieldSchema = SchemaBuilder.string().optional().defaultValue("default").build();
    Schema structSchema = SchemaBuilder.struct().field("field1", fieldSchema).build();
    JsonNode actual = parse(converter.fromConnectData(TOPIC, structSchema, new Struct(structSchema)));
    JsonNode expected = parse("{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":\"default\"}}");
    assertEquals(expected, actual);
}
@Test
public void serializeFieldNullToNull() {
    // An unset struct field serializes as null when replace-null-with-default is disabled.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
    Schema fieldSchema = SchemaBuilder.string().optional().defaultValue("default").build();
    Schema structSchema = SchemaBuilder.struct().field("field1", fieldSchema).build();
    JsonNode actual = parse(converter.fromConnectData(TOPIC, structSchema, new Struct(structSchema)));
    JsonNode expected = parse("{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}");
    assertEquals(expected, actual);
}
@Test
public void deserializeFieldNullToDefault() {
    // A null struct field deserializes to its default when replace-null-with-default is enabled.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
    String json = "{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}";
    SchemaAndValue result = converter.toConnectData(TOPIC, null, json.getBytes());
    Schema fieldSchema = SchemaBuilder.string().optional().defaultValue("default").build();
    Schema structSchema = SchemaBuilder.struct().field("field1", fieldSchema).build();
    assertEquals(new Struct(structSchema).put("field1", "default"), result.value());
}
@Test
public void deserializeFieldNullToNull() {
    // A null struct field stays unset when replace-null-with-default is disabled.
    converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
    String json = "{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}";
    SchemaAndValue result = converter.toConnectData(TOPIC, null, json.getBytes());
    Schema fieldSchema = SchemaBuilder.string().optional().defaultValue("default").build();
    Schema structSchema = SchemaBuilder.struct().field("field1", fieldSchema).build();
    assertEquals(new Struct(structSchema), result.value());
}
@Test
public void testVersionRetrievedFromAppInfoParser() {
    // The converter should report the same version string as AppInfoParser.
    String expectedVersion = AppInfoParser.getVersion();
    assertEquals(expectedVersion, converter.version());
}
@Test
public void testSchemaContentIsNull() {
    // A null schema.content config falls back to the normal envelope-based deserialization.
    Map<String, Object> config = new HashMap<>();
    config.put(JsonConverterConfig.SCHEMA_CONTENT_CONFIG, null);
    converter.configure(config, false);
    byte[] serialized = "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\" }".getBytes();
    SchemaAndValue result = converter.toConnectData(TOPIC, serialized);
    assertEquals(new SchemaAndValue(Schema.STRING_SCHEMA, "foo-bar-baz"), result);
}
@Test
public void testSchemaContentIsEmptyString() {
    // An empty schema.content config behaves the same as no configured schema.
    converter.configure(Map.of(JsonConverterConfig.SCHEMA_CONTENT_CONFIG, ""), false);
    byte[] serialized = "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\" }".getBytes();
    assertEquals(new SchemaAndValue(Schema.STRING_SCHEMA, "foo-bar-baz"), converter.toConnectData(TOPIC, serialized));
}
@Test
public void testSchemaContentValidSchema() {
    // With a configured schema, payloads are bare JSON values (no envelope).
    converter.configure(Map.of(JsonConverterConfig.SCHEMA_CONTENT_CONFIG, "{ \"type\": \"string\" }"), false);
    byte[] serialized = "\"foo-bar-baz\"".getBytes();
    assertEquals(new SchemaAndValue(Schema.STRING_SCHEMA, "foo-bar-baz"), converter.toConnectData(TOPIC, serialized));
}
@Test
public void testSchemaContentInValidSchema() {
    // Malformed schema JSON in the config must be rejected at configure() time.
    assertThrows(
        DataException.class,
        () -> converter.configure(Map.of(JsonConverterConfig.SCHEMA_CONTENT_CONFIG, "{ \"string\" }"), false),
        " Provided schema is invalid , please recheck the schema you have provided");
}
@Test
public void testSchemaContentLooksLikeSchema() {
    // Input that resembles an envelope must still be treated as a plain payload
    // when an explicit schema is configured.
    converter.configure(Map.of(JsonConverterConfig.SCHEMA_CONTENT_CONFIG, "{ \"type\": \"struct\", \"fields\": [{\"field\": \"schema\", \"type\": \"struct\",\"fields\": [{\"field\": \"type\", \"type\": \"string\" }]}, {\"field\": \"payload\", \"type\": \"string\"}]}"), false);
    SchemaAndValue result = converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\" }".getBytes());
    assertEquals("foo-bar-baz", ((Struct) result.value()).getString("payload"));
}
@ParameterizedTest
@ValueSource(strings = {
    "{ }",
    "{ \"wrong\": \"schema\" }",
    "{ \"schema\": { \"type\": \"string\" } }",
    "{ \"payload\": \"foo-bar-baz\" }",
    "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\", \"extra\": \"field\" }",
})
public void testNullSchemaContentWithWrongConnectDataValue(String value) {
    // Envelopes that are missing or malformed must be rejected with a DataException.
    converter.configure(Map.of(), false);
    byte[] serialized = value.getBytes();
    assertThrows(
        DataException.class,
        () -> converter.toConnectData(TOPIC, serialized));
}
/**
 * Parses raw JSON bytes into a tree, failing the test on any parse error.
 */
private JsonNode parse(byte[] json) {
    try {
        return objectMapper.readTree(json);
    } catch (IOException e) {
        fail("IOException during JSON parse: " + e.getMessage());
        // fail() always throws, but the compiler cannot prove it; keep the original
        // exception attached as the cause instead of discarding it.
        throw new RuntimeException("JSON parse failed", e);
    }
}
/**
 * Parses a JSON string into a tree, failing the test on any parse error.
 */
private JsonNode parse(String json) {
    try {
        return objectMapper.readTree(json);
    } catch (IOException e) {
        fail("IOException during JSON parse: " + e.getMessage());
        // fail() always throws, but the compiler cannot prove it; keep the original
        // exception attached as the cause instead of discarding it.
        throw new RuntimeException("JSON parse failed", e);
    }
}
/**
 * Asserts that {@code env} is a two-field envelope object whose schema field is
 * itself a JSON object and whose payload field is present.
 */
private void validateEnvelope(JsonNode env) {
    assertNotNull(env);
    assertTrue(env.isObject());
    assertEquals(2, env.size());
    assertTrue(env.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(env.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isObject());
    assertTrue(env.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
/**
 * Asserts that {@code env} is a two-field envelope object whose schema field is
 * JSON null and whose payload field is present.
 */
private void validateEnvelopeNullSchema(JsonNode env) {
    assertNotNull(env);
    assertTrue(env.isObject());
    assertEquals(2, env.size());
    assertTrue(env.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(env.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
    assertTrue(env.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
}
private void assertStructSchemaEqual(Schema schema, Struct struct) {
converter.fromConnectData(TOPIC, schema, struct);
assertEquals(schema, struct.schema());
}
}
| JsonConverterTest |
java | alibaba__nacos | auth/src/main/java/com/alibaba/nacos/auth/serveridentity/ServerIdentityChecker.java | {
"start": 884,
"end": 1358
} | interface ____ {
/**
* Do init checker.
*
* @param authConfig config for nacos auth.
*/
void init(NacosAuthConfig authConfig);
/**
* Do check nacos server identity.
*
* @param serverIdentity server identity
* @param secured secured api secured annotation
* @return result of checking server identity
*/
ServerIdentityResult check(ServerIdentity serverIdentity, Secured secured);
}
| ServerIdentityChecker |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/AbstractContainersLauncher.java | {
"start": 1417,
"end": 1684
} | interface ____ extends Service,
EventHandler<ContainersLauncherEvent> {
void init(Context context, Dispatcher dispatcher,
ContainerExecutor exec, LocalDirsHandlerService dirsHandler,
ContainerManagerImpl containerManager);
}
| AbstractContainersLauncher |
java | dropwizard__dropwizard | dropwizard-testing/src/test/java/io/dropwizard/testing/junit5/DropwizardExtensionsSupportTest.java | {
"start": 8664,
"end": 9359
} | class ____ extends ParentClass_ParentHasExtension {
@Test
void childClassTestMethod() {
// when, then
delayedAssertions.add(() -> {
assertThat(extension.getBeforeInvocations()).isEqualTo(1);
assertThat(extension.getAfterInvocations()).isEqualTo(1);
});
}
@Test
@Override
public void parentClassTestMethod() {
// when, then
delayedAssertions.add(() -> {
assertThat(extension.getBeforeInvocations()).isEqualTo(1);
assertThat(extension.getAfterInvocations()).isEqualTo(1);
});
}
}
}
| NestedClass_Inheriting |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/TopNSetTestCase.java | {
"start": 1095,
"end": 7246
} | class ____<T extends Releasable, V extends Comparable<V>> extends ESTestCase {
/**
* Build a {@link T} to test. Sorts built by this method shouldn't need scores.
*/
protected abstract T build(BigArrays bigArrays, SortOrder sortOrder, int limit);
private T build(SortOrder sortOrder, int limit) {
return build(bigArrays(), sortOrder, limit);
}
/**
* A random value for testing, with the appropriate precision for the type we're testing.
*/
protected abstract V randomValue();
/**
* Returns a list of 3 values, in ascending order.
*/
protected abstract List<V> threeSortedValues();
/**
* Collect a value into the top.
*
* @param value value to collect, always sent as double just to have
* a number to test. Subclasses should cast to their favorite types
*/
protected abstract void collect(T sort, V value);
protected abstract void reduceLimitByOne(T sort);
protected abstract V getWorstValue(T sort);
protected abstract int getCount(T sort);
public final void testNeverCalled() {
SortOrder sortOrder = randomFrom(SortOrder.values());
int limit = randomIntBetween(0, 10);
try (T sort = build(sortOrder, limit)) {
assertResults(sort, sortOrder, limit, List.of());
}
}
public final void testLimit0() {
SortOrder sortOrder = randomFrom(SortOrder.values());
int limit = 0;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(0));
collect(sort, values.get(1));
assertResults(sort, sortOrder, limit, List.of());
}
}
public final void testSingleValue() {
SortOrder sortOrder = randomFrom(SortOrder.values());
int limit = 1;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(0));
assertResults(sort, sortOrder, limit, List.of(values.get(0)));
}
}
public final void testNonCompetitive() {
SortOrder sortOrder = SortOrder.DESC;
int limit = 1;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(1));
collect(sort, values.get(0));
assertResults(sort, sortOrder, limit, List.of(values.get(1)));
}
}
public final void testCompetitive() {
SortOrder sortOrder = SortOrder.DESC;
int limit = 1;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(0));
collect(sort, values.get(1));
assertResults(sort, sortOrder, limit, List.of(values.get(1)));
}
}
public final void testTwoHitsDesc() {
SortOrder sortOrder = SortOrder.DESC;
int limit = 2;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(0));
collect(sort, values.get(1));
collect(sort, values.get(2));
assertResults(sort, sortOrder, limit, List.of(values.get(2), values.get(1)));
}
}
public final void testTwoHitsAsc() {
SortOrder sortOrder = SortOrder.ASC;
int limit = 2;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(0));
collect(sort, values.get(1));
collect(sort, values.get(2));
assertResults(sort, sortOrder, limit, List.of(values.get(0), values.get(1)));
}
}
public final void testReduceLimit() {
SortOrder sortOrder = randomFrom(SortOrder.values());
int limit = 3;
try (T sort = build(sortOrder, limit)) {
var values = threeSortedValues();
collect(sort, values.get(0));
collect(sort, values.get(1));
collect(sort, values.get(2));
assertResults(sort, sortOrder, limit, values);
reduceLimitByOne(sort);
collect(sort, values.get(2));
assertResults(sort, sortOrder, limit - 1, values);
}
}
public final void testCrankyBreaker() {
BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService());
SortOrder sortOrder = randomFrom(SortOrder.values());
int limit = randomIntBetween(0, 3);
try (T sort = build(bigArrays, sortOrder, limit)) {
List<V> values = new ArrayList<>();
for (int i = 0; i < randomIntBetween(0, 4); i++) {
V value = randomValue();
values.add(value);
collect(sort, value);
}
if (randomBoolean() && limit > 0) {
reduceLimitByOne(sort);
limit--;
V value = randomValue();
values.add(value);
collect(sort, value);
}
assertResults(sort, sortOrder, limit, values);
} catch (CircuitBreakingException e) {
assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
}
assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L));
}
protected void assertResults(T sort, SortOrder sortOrder, int limit, List<V> values) {
var sortedUniqueValues = values.stream()
.distinct()
.sorted(sortOrder == SortOrder.ASC ? Comparator.naturalOrder() : Comparator.reverseOrder())
.limit(limit)
.toList();
assertEquals(sortedUniqueValues.size(), getCount(sort));
if (sortedUniqueValues.isEmpty() == false) {
assertEquals(sortedUniqueValues.getLast(), getWorstValue(sort));
}
}
private BigArrays bigArrays() {
return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
}
}
| TopNSetTestCase |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/placement/VariableContext.java | {
"start": 1614,
"end": 10461
} | class ____ {
/**
* This is our actual variable store.
*/
private Map<String, String> variables = new HashMap<>();
private Map<String, String> originalVariables = new HashMap<>();
/**
* This is our conditional variable store.
*/
private Map<String, MappingRuleConditionalVariable> conditionalVariables =
new HashMap<>();
/**
* This set contains the names of the immutable variables if null it is
* ignored.
*/
private Set<String> immutableNames;
/**
* Some matchers may need to find a data in a set, which is not usable
* as a variable in substitutions, this store is for those sets.
*/
private Map<String, Set<String>> extraDataset = new HashMap<>();
/**
* Checks if the provided variable is immutable.
* @param name Name of the variable to check
* @return true if the variable is immutable
*/
public boolean isImmutable(String name) {
return (immutableNames != null && immutableNames.contains(name));
}
/**
* Can be used to provide a set which contains the name of the variables which
* should be immutable.
* @param variableNames Set containing the names of the immutable variables
* @throws IllegalStateException if the immutable set is already provided.
* @return same instance of VariableContext for daisy chaining.
*/
public VariableContext setImmutables(Set<String> variableNames) {
if (this.immutableNames != null) {
throw new IllegalStateException("Immutable variables are already defined,"
+ " variable immutability cannot be changed once set!");
}
this.immutableNames = ImmutableSet.copyOf(variableNames);
return this;
}
/**
* Can be used to provide an array of strings which contains the names of the
* variables which should be immutable. An immutable set will be created
* from the array.
* @param variableNames Set containing the names of the immutable variables
* @throws IllegalStateException if the immutable set is already provided.
* @return same instance of VariableContext for daisy chaining.
*/
public VariableContext setImmutables(String... variableNames) {
if (this.immutableNames != null) {
throw new IllegalStateException("Immutable variables are already defined,"
+ " variable immutability cannot be changed once set!");
}
this.immutableNames = ImmutableSet.copyOf(variableNames);
return this;
}
/**
* Adds a variable with value to the context or overrides an already existing
* one. If the variable is already set and immutable an IllegalStateException
* is thrown.
* @param name Name of the variable to be added to the context
* @param value Value of the variable
* @throws IllegalStateException if the variable is immutable and already set
* @return same instance of VariableContext for daisy chaining.
*/
public VariableContext put(String name, String value) {
if (variables.containsKey(name) && isImmutable(name)) {
throw new IllegalStateException(
"Variable '" + name + "' is immutable, cannot update it's value!");
}
if (conditionalVariables.containsKey(name)) {
throw new IllegalStateException(
"Variable '" + name + "' is already defined as a conditional" +
" variable, cannot change it's value!");
}
variables.put(name, value);
return this;
}
public void putOriginal(String name, String value) {
originalVariables.put(name, value);
}
/**
* This method is used to add a conditional variable to the variable context.
* @param name Name of the variable
* @param variable The conditional variable evaluator
* @return VariableContext for daisy chaining
*/
public VariableContext putConditional(String name,
MappingRuleConditionalVariable variable) {
if (conditionalVariables.containsKey(name)) {
throw new IllegalStateException(
"Variable '" + name + "' is conditional, cannot update it's value!");
}
conditionalVariables.put(name, variable);
return this;
}
/**
* Returns the value of a variable, null values are replaced with "".
* @param name Name of the variable
* @return The value of the variable
*/
public String get(String name) {
String ret = variables.get(name);
return ret == null ? "" : ret;
}
public String getOriginal(String name) {
return originalVariables.get(name);
}
/**
* Adds a set to the context, each name can only be added once. The extra
* dataset is different from the regular variables because it cannot be
* referenced via tokens in the paths or any other input. However matchers
* and actions can explicitly access these datasets and can make decisions
* based on them.
* @param name Name which can be used to reference the collection
* @param set The dataset to be stored
*/
public void putExtraDataset(String name, Set<String> set) {
if (extraDataset.containsKey(name)) {
throw new IllegalStateException(
"Dataset '" + name + "' is already set!");
}
extraDataset.put(name, set);
}
/**
* Returns the dataset referenced by the name.
* @param name Name of the set to be returned.
* @return the dataset referenced by the name.
*/
public Set<String> getExtraDataset(String name) {
return extraDataset.get(name);
}
/**
* Check if a variable is part of the context.
* @param name Name of the variable to be checked
* @return True if the variable is added to the context, false otherwise
*/
public boolean containsKey(String name) {
return variables.containsKey(name);
}
/**
* This method replaces all variables in the provided string. The variables
* are reverse ordered by the length of their names in order to avoid partial
* replaces when a shorter named variable is a substring of a longer named
* variable.
* All variables will be replaced in the string.
* Null values will be considered as empty strings during the replace.
* If the input is null, null will be returned.
* @param input The string with variables
* @return A string with all the variables substituted with their respective
* values.
*/
public String replaceVariables(String input) {
if (input == null) {
return null;
}
String[] keys = variables.keySet().toArray(new String[]{});
//Replacing variables starting longest first, to avoid collision when a
//shorter variable name matches the beginning of a longer one.
//e.g. %user_something, if %user is defined it may replace the %user before
//we would reach the %user_something variable, so we start with the longer
//names first
Arrays.sort(keys, (a, b) -> b.length() - a.length());
String ret = input;
for (String key : keys) {
//we cannot match for null, so we just skip if we have a variable "name"
//with null
if (key == null) {
continue;
}
ret = ret.replace(key, get(key));
}
return ret;
}
/**
* This method will consider the input as a queue path, which is a String
* separated by dot ('.') characters. The input will be split along the dots
* and all parts will be replaced individually. Replace only occur if a part
* exactly matches a variable name, no composite names or additional
* characters are supported.
* e.g. With variables %user and %default "%user.%default" will be substituted
* while "%user%default.something" won't.
* Null values will be considered as empty strings during the replace.
* If the input is null, null will be returned.
* @param input The string with variables
* @return A string with all the variable only path parts substituted with
* their respective values.
*/
public String replacePathVariables(String input) {
if (input == null) {
return null;
}
String[] parts = input.split("\\.");
for (int i = 0; i < parts.length; i++) {
String newVal = parts[i];
//if the part is a variable it should be in either the variable or the
//conditional variable map, otherwise we keep it's original value.
//This means undefined variables will return the name of the variable,
//but this is working as intended.
if (variables.containsKey(parts[i])) {
newVal = variables.get(parts[i]);
} else if (conditionalVariables.containsKey(parts[i])) {
MappingRuleConditionalVariable condVariable =
conditionalVariables.get(parts[i]);
if (condVariable != null) {
newVal = condVariable.evaluateInPath(parts, i);
}
}
//if a variable's value is null, we use empty string instead
if (newVal == null) {
newVal = "";
}
parts[i] = newVal;
}
return String.join(".", parts);
}
}
| VariableContext |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/Service.java | {
"start": 1164,
"end": 1244
} | interface ____ extends Closeable {
/**
* Service states
*/
public | Service |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MongoDbEndpointBuilderFactory.java | {
"start": 157729,
"end": 168765
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final MongoDbHeaderNameBuilder INSTANCE = new MongoDbHeaderNameBuilder();
/**
* The operation this endpoint will execute against MongoDB.
*
* The option is a: {@code
* org.apache.camel.component.mongodb.MongoDbOperation or String} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbOperation}.
*/
public String mongoDbOperation() {
return "CamelMongoDbOperation";
}
/**
* Number of objects matching the query. This does not take limit/skip
* into consideration.
*
* The option is a: {@code Integer} type.
*
* Group: producer findAll
*
* @return the name of the header {@code MongoDbResultTotalSize}.
*/
public String mongoDbResultTotalSize() {
return "CamelMongoDbResultTotalSize";
}
/**
* Number of objects matching the query. This does not take limit/skip
* into consideration.
*
* The option is a: {@code Integer} type.
*
* Group: producer findAll
*
* @return the name of the header {@code MongoDbResultPageSize}.
*/
public String mongoDbResultPageSize() {
return "CamelMongoDbResultPageSize";
}
/**
* The query to execute against MongoDB.
*
* The option is a: {@code org.bson.conversions.Bson} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbCriteria}.
*/
public String mongoDbCriteria() {
return "CamelMongoDbCriteria";
}
/**
* The project document.
*
* The option is a: {@code org.bson.conversions.Bson} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbFieldsProjection}.
*/
public String mongoDbFieldsProjection() {
return "CamelMongoDbFieldsProjection";
}
/**
* The number of documents per batch.
*
* The option is a: {@code Integer} type.
*
* Group: producer findAll aggregate
*
* @return the name of the header {@code MongoDbBatchSize}.
*/
public String mongoDbBatchSize() {
return "CamelMongoDbBatchSize";
}
/**
* Discards a given number of elements at the beginning of the cursor.
*
* The option is a: {@code Integer} type.
*
* Group: producer findAll
*
* @return the name of the header {@code MongoDbNumToSkip}.
*/
public String mongoDbNumToSkip() {
return "CamelMongoDbNumToSkip";
}
/**
* If the update should be applied to all objects matching. See
* http://www.mongodb.org/display/DOCS/AtomicOperationsAtomic
* Operations.
*
* The option is a: {@code Boolean} type.
*
* Group: producer update
*
* @return the name of the header {@code MongoDbMultiUpdate}.
*/
public String mongoDbMultiUpdate() {
return "CamelMongoDbMultiUpdate";
}
/**
* If the database should create the element if it does not exist.
*
* The option is a: {@code Boolean} type.
*
* Group: producer update
*
* @return the name of the header {@code MongoDbUpsert}.
*/
public String mongoDbUpsert() {
return "CamelMongoDbUpsert";
}
/**
* The number of modified or deleted records.
*
* The option is a: {@code long} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbRecordsAffected}.
*/
public String mongoDbRecordsAffected() {
return "CamelMongoDbRecordsAffected";
}
/**
* The number of documents matched by the query.
*
* The option is a: {@code long} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbRecordsMatched}.
*/
public String mongoDbRecordsMatched() {
return "CamelMongoDbRecordsMatched";
}
/**
* The sort criteria.
*
* The option is a: {@code Bson or Document} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbSortBy}.
*/
public String mongoDbSortBy() {
return "CamelMongoDbSortBy";
}
/**
* The name of the MongoDB database to target.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code MongoDbDatabase}.
*/
public String mongoDbDatabase() {
return "CamelMongoDbDatabase";
}
/**
* The name of the MongoDB collection to bind to this endpoint.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code MongoDbCollection}.
*/
public String mongoDbCollection() {
return "CamelMongoDbCollection";
}
/**
* The list of dynamic indexes to create on the fly.
*
* The option is a: {@code List<Bson>} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbCollectionIndex}.
*/
public String mongoDbCollectionIndex() {
return "CamelMongoDbCollectionIndex";
}
/**
* Limits the number of elements returned.
*
* The option is a: {@code Integer} type.
*
* Group: producer findAll
*
* @return the name of the header {@code MongoDbLimit}.
*/
public String mongoDbLimit() {
return "CamelMongoDbLimit";
}
/**
* Is from tailable.
*
* The option is a: {@code Boolean} type.
*
* Group: consumer
*
* @return the name of the header {@code MongoDbTailable}.
*/
public String mongoDbTailable() {
return "CamelMongoDbTailable";
}
/**
* The result of the write operation.
*
* The option is a: {@code Object} type.
*
* Group: producer
*
* @return the name of the header {@code MongoWriteResult}.
*/
public String mongoWriteResult() {
return "CamelMongoWriteResult";
}
/**
* The OID(s) of the inserted record(s).
*
* The option is a: {@code Object or List<Object>} type.
*
* Group: producer
*
* @return the name of the header {@code MongoOid}.
*/
public String mongoOid() {
return "CamelMongoOid";
}
/**
* The specified field name fow which we want to get the distinct
* values.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code MongoDbDistinctQueryField}.
*/
public String mongoDbDistinctQueryField() {
return "CamelMongoDbDistinctQueryField";
}
/**
* Sets allowDiskUse MongoDB flag. This is supported since MongoDB
* Server 4.3.1. Using this header with older MongoDB Server version can
* cause query to fail.
*
* The option is a: {@code Boolean} type.
*
* Group: producer findAll aggregate
*
* @return the name of the header {@code MongoDbAllowDiskUse}.
*/
public String mongoDbAllowDiskUse() {
return "CamelMongoDbAllowDiskUse";
}
/**
* Perform an ordered or unordered operation execution.
*
* The option is a: {@code Boolean} type.
*
* Default: TRUE
* Group: producer bulkWrite
*
* @return the name of the header {@code MongoDbBulkOrdered}.
*/
public String mongoDbBulkOrdered() {
return "CamelMongoDbBulkOrdered";
}
/**
* A document that contains the _id of the document created or modified
* by the insert, replace, delete, update operations (i.e. CRUD
* operations). For sharded collections, also displays the full shard
* key for the document. The _id field is not repeated if it is already
* a part of the shard key.
*
* The option is a: {@code org.bson.types.ObjectId} type.
*
* Group: consumer changeStreams
*
* @return the name of the header {@code _id}.
*/
public String id() {
return "_id";
}
/**
* The type of operation that occurred. Can be any of the following
* values: insert, delete, replace, update, drop, rename, dropDatabase,
* invalidate.
*
* The option is a: {@code String} type.
*
* Group: consumer changeStreams
*
* @return the name of the header {@code MongoDbStreamOperationType}.
*/
public String mongoDbStreamOperationType() {
return "CamelMongoDbStreamOperationType";
}
/**
* Indicates which document to return, the document before or after an
* update and return atomic operation.
*
* The option is a: {@code com.mongodb.client.model.ReturnDocument}
* type.
*
* Group: producer update one and return
*
* @return the name of the header {@code MongoDbReturnDocumentType}.
*/
public String mongoDbReturnDocumentType() {
return "CamelMongoDbReturnDocumentType";
}
/**
* Options to use. When set, options set in the headers will be ignored.
*
* The option is a: {@code Object} type.
*
* Group: producer update one and options
*
* @return the name of the header {@code MongoDbOperationOption}.
*/
public String mongoDbOperationOption() {
return "CamelMongoDbOperationOption";
}
}
static MongoDbEndpointBuilder endpointBuilder(String componentName, String path) {
| MongoDbHeaderNameBuilder |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TempDirectoryCleanupTests.java | {
"start": 14463,
"end": 15008
} | class ____ {
@Test
@Order(1)
void testOnSuccessFailingParameter(@TempDir(cleanup = ON_SUCCESS) Path onSuccessFailingParameterDir) {
TempDirParameterTests.onSuccessFailingParameterDir = onSuccessFailingParameterDir;
fail();
}
@Test
@Order(2)
void testOnSuccessPassingParameter(@TempDir(cleanup = ON_SUCCESS) Path onSuccessPassingParameterDir) {
TempDirParameterTests.onSuccessPassingParameterDir = onSuccessPassingParameterDir;
}
}
}
@Nested
@EnabledOnOs(WINDOWS)
| OnSuccessFailingThenPassingParameterCase |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java | {
"start": 18119,
"end": 19305
} | class ____ extends AbstractTransportRequest {
private final DiscoveryNode sender;
LeaderCheckRequest(final DiscoveryNode sender) {
this.sender = sender;
}
LeaderCheckRequest(final StreamInput in) throws IOException {
super(in);
sender = new DiscoveryNode(in);
}
@Override
public void writeTo(final StreamOutput out) throws IOException {
super.writeTo(out);
sender.writeTo(out);
}
public DiscoveryNode getSender() {
return sender;
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final LeaderCheckRequest that = (LeaderCheckRequest) o;
return Objects.equals(sender, that.sender);
}
@Override
public int hashCode() {
return Objects.hash(sender);
}
@Override
public String toString() {
return "LeaderCheckRequest{" + "sender=" + sender + '}';
}
}
@FunctionalInterface
| LeaderCheckRequest |
java | apache__maven | impl/maven-impl/src/test/java/org/apache/maven/impl/cache/AbstractRequestCacheTest.java | {
"start": 1696,
"end": 7044
} | class ____ {
private TestRequestCache cache;
@BeforeEach
void setUp() {
cache = new TestRequestCache();
}
@Test
void testBatchRequestExceptionIncludesSuppressedExceptions() {
// Create mock requests and results
TestRequest request1 = createTestRequest("request1");
TestRequest request2 = createTestRequest("request2");
TestRequest request3 = createTestRequest("request3");
// Create specific exceptions with different messages and stack traces
RuntimeException exception1 = new RuntimeException("Error processing request1");
IllegalArgumentException exception2 = new IllegalArgumentException("Invalid argument in request2");
IllegalStateException exception3 = new IllegalStateException("Invalid state in request3");
// Set up the cache to return failures for all requests
cache.addFailure(request1, exception1);
cache.addFailure(request2, exception2);
cache.addFailure(request3, exception3);
List<TestRequest> requests = Arrays.asList(request1, request2, request3);
// Create a supplier that should not be called since we're simulating cached failures
Function<List<TestRequest>, List<TestResult>> supplier = reqs -> {
throw new AssertionError("Supplier should not be called in this test");
};
// Execute the batch request and expect BatchRequestException
BatchRequestException batchException =
assertThrows(BatchRequestException.class, () -> cache.requests(requests, supplier));
// Verify the main exception message
assertEquals("One or more requests failed", batchException.getMessage());
// Verify that all individual exceptions are included as suppressed exceptions
Throwable[] suppressedExceptions = batchException.getSuppressed();
assertNotNull(suppressedExceptions);
assertEquals(3, suppressedExceptions.length);
// Verify each suppressed exception
assertTrue(Arrays.asList(suppressedExceptions).contains(exception1));
assertTrue(Arrays.asList(suppressedExceptions).contains(exception2));
assertTrue(Arrays.asList(suppressedExceptions).contains(exception3));
// Verify the results contain the correct error information
List<RequestResult<?, ?>> results = batchException.getResults();
assertEquals(3, results.size());
for (RequestResult<?, ?> result : results) {
assertNotNull(result.error());
assertInstanceOf(RuntimeException.class, result.error());
}
}
@Test
void testBatchRequestWithMixedSuccessAndFailure() {
TestRequest successRequest = createTestRequest("success");
TestRequest failureRequest = createTestRequest("failure");
RuntimeException failureException = new RuntimeException("Processing failed");
// Set up mixed success/failure scenario
cache.addFailure(failureRequest, failureException);
List<TestRequest> requests = Arrays.asList(successRequest, failureRequest);
Function<List<TestRequest>, List<TestResult>> supplier = reqs -> {
// Only the success request should reach the supplier
assertEquals(1, reqs.size());
assertEquals(successRequest, reqs.get(0));
return List.of(new TestResult(successRequest));
};
BatchRequestException batchException =
assertThrows(BatchRequestException.class, () -> cache.requests(requests, supplier));
// Verify only the failure exception is suppressed
Throwable[] suppressedExceptions = batchException.getSuppressed();
assertEquals(1, suppressedExceptions.length);
assertEquals(failureException, suppressedExceptions[0]);
// Verify results: one success, one failure
List<RequestResult<?, ?>> results = batchException.getResults();
assertEquals(2, results.size());
RequestResult<?, ?> result1 = results.get(0);
RequestResult<?, ?> result2 = results.get(1);
// One should be success, one should be failure
boolean hasSuccess = (result1.error() == null) || (result2.error() == null);
boolean hasFailure = (result1.error() != null) || (result2.error() != null);
assertTrue(hasSuccess);
assertTrue(hasFailure);
}
@Test
void testSuccessfulBatchRequestDoesNotThrowException() {
TestRequest request1 = createTestRequest("success1");
TestRequest request2 = createTestRequest("success2");
List<TestRequest> requests = Arrays.asList(request1, request2);
Function<List<TestRequest>, List<TestResult>> supplier =
reqs -> reqs.stream().map(TestResult::new).toList();
// Should not throw any exception
List<TestResult> results = cache.requests(requests, supplier);
assertEquals(2, results.size());
assertEquals(request1, results.get(0).getRequest());
assertEquals(request2, results.get(1).getRequest());
}
// Helper methods and test classes
private TestRequest createTestRequest(String id) {
ProtoSession session = mock(ProtoSession.class);
return new TestRequestImpl(id, session);
}
// Test implementations
| AbstractRequestCacheTest |
java | junit-team__junit5 | junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/execution/NamespaceAwareStore.java | {
"start": 1034,
"end": 5041
} | class ____ implements Store {
private final NamespacedHierarchicalStore<Namespace> valuesStore;
private final Namespace namespace;
public NamespaceAwareStore(NamespacedHierarchicalStore<Namespace> valuesStore, Namespace namespace) {
this.valuesStore = valuesStore;
this.namespace = namespace;
}
@Override
public @Nullable Object get(Object key) {
Preconditions.notNull(key, "key must not be null");
Supplier<@Nullable Object> action = () -> this.valuesStore.get(this.namespace, key);
return this.<@Nullable Object> accessStore(action);
}
@Override
public <T> @Nullable T get(Object key, Class<T> requiredType) {
Preconditions.notNull(key, "key must not be null");
Preconditions.notNull(requiredType, "requiredType must not be null");
Supplier<@Nullable T> action = () -> this.valuesStore.get(this.namespace, key, requiredType);
return this.<@Nullable T> accessStore(action);
}
@SuppressWarnings("deprecation")
@Override
public <K, V extends @Nullable Object> @Nullable Object getOrComputeIfAbsent(K key,
Function<? super K, ? extends V> defaultCreator) {
Preconditions.notNull(key, "key must not be null");
Preconditions.notNull(defaultCreator, "defaultCreator function must not be null");
Supplier<@Nullable Object> action = () -> this.valuesStore.getOrComputeIfAbsent(this.namespace, key,
defaultCreator);
return this.<@Nullable Object> accessStore(action);
}
@SuppressWarnings("deprecation")
@Override
public <K, V extends @Nullable Object> @Nullable V getOrComputeIfAbsent(K key,
Function<? super K, ? extends V> defaultCreator, Class<V> requiredType) {
Preconditions.notNull(key, "key must not be null");
Preconditions.notNull(defaultCreator, "defaultCreator function must not be null");
Preconditions.notNull(requiredType, "requiredType must not be null");
Supplier<@Nullable V> action = () -> this.valuesStore.getOrComputeIfAbsent(this.namespace, key, defaultCreator,
requiredType);
return this.<@Nullable V> accessStore(action);
}
@Override
public <K, V> Object computeIfAbsent(K key, Function<? super K, ? extends V> defaultCreator) {
Preconditions.notNull(key, "key must not be null");
Preconditions.notNull(defaultCreator, "defaultCreator function must not be null");
Supplier<Object> action = () -> this.valuesStore.computeIfAbsent(this.namespace, key, defaultCreator);
return accessStore(action);
}
@Override
public <K, V> V computeIfAbsent(K key, Function<? super K, ? extends V> defaultCreator, Class<V> requiredType) {
Preconditions.notNull(key, "key must not be null");
Preconditions.notNull(defaultCreator, "defaultCreator function must not be null");
Preconditions.notNull(requiredType, "requiredType must not be null");
Supplier<V> action = () -> this.valuesStore.computeIfAbsent(this.namespace, key, defaultCreator, requiredType);
return accessStore(action);
}
@Override
public void put(Object key, @Nullable Object value) {
Preconditions.notNull(key, "key must not be null");
Supplier<@Nullable Object> action = () -> this.valuesStore.put(this.namespace, key, value);
this.<@Nullable Object> accessStore(action);
}
@Override
public @Nullable Object remove(Object key) {
Preconditions.notNull(key, "key must not be null");
Supplier<@Nullable Object> action = () -> this.valuesStore.remove(this.namespace, key);
return this.<@Nullable Object> accessStore(action);
}
@Override
public <T> @Nullable T remove(Object key, Class<T> requiredType) {
Preconditions.notNull(key, "key must not be null");
Preconditions.notNull(requiredType, "requiredType must not be null");
Supplier<@Nullable T> action = () -> this.valuesStore.remove(this.namespace, key, requiredType);
return this.<@Nullable T> accessStore(action);
}
private <T extends @Nullable Object> T accessStore(Supplier<T> action) {
try {
return action.get();
}
catch (NamespacedHierarchicalStoreException e) {
throw new ExtensionContextException(e.getMessage(), e);
}
}
}
| NamespaceAwareStore |
java | spring-projects__spring-boot | module/spring-boot-graphql-test/src/test/java/org/springframework/boot/graphql/test/autoconfigure/GraphQlTypeExcludeFilterTests.java | {
"start": 7384,
"end": 7427
} | class ____ {
}
@Service
static | Controller2 |
java | google__dagger | javatests/dagger/internal/codegen/MembersInjectionTest.java | {
"start": 19254,
"end": 19907
} | interface ____ {",
" void inject(foo target);",
"}");
CompilerTests.daggerCompiler(foo, fooModule, fooComponent)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSourceFileWithPath("test/foo_MembersInjector.java");
});
}
@Test
public void fieldInjectionForShadowedMember() {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"import javax.inject.Inject;",
"",
" | fooComponent |
java | quarkusio__quarkus | test-framework/junit5-component/src/main/java/io/quarkus/test/component/ComponentContainer.java | {
"start": 40038,
"end": 45081
} | class
____ testPath = testClass.getClassLoader().getResource(testClassResourceName).toString();
// file:/some/path/to/project/target/test-classes/org/acme/Foo.class -> file:/some/path/to/project/target/test-classes
String testClassesRootPath = testPath.substring(0, testPath.length() - testClassResourceName.length() - 1);
// resolve back to File instance
testOutputDirectory = new File(URI.create(testClassesRootPath));
}
if (!testOutputDirectory.canWrite()) {
throw new IllegalStateException("Invalid test output directory: " + testOutputDirectory);
}
return testOutputDirectory;
}
private static boolean isSatisfied(Type requiredType, Set<AnnotationInstance> qualifiers, InjectionPointInfo injectionPoint,
Iterable<BeanInfo> beans, BeanDeployment beanDeployment, QuarkusComponentTestConfiguration configuration) {
for (BeanInfo bean : beans) {
if (Beans.matches(bean, requiredType, qualifiers)) {
LOG.debugf("Injection point %s satisfied by %s", injectionPoint.getTargetInfo(),
bean.toString());
return true;
}
}
for (MockBeanConfiguratorImpl<?> mock : configuration.mockConfigurators) {
if (mock.matches(beanDeployment.getBeanResolver(), requiredType, qualifiers)) {
LOG.debugf("Injection point %s satisfied by %s", injectionPoint.getTargetInfo(),
mock);
return true;
}
}
return false;
}
private static void processTestInterceptorMethods(Class<?> testClass,
BeanRegistrar.RegistrationContext registrationContext, Set<String> interceptorBindings,
Map<String, String[]> interceptorMethods) {
List<Class<? extends Annotation>> annotations = List.of(AroundInvoke.class, PostConstruct.class, PreDestroy.class,
AroundConstruct.class);
Predicate<Method> predicate = m -> {
for (Class<? extends Annotation> annotation : annotations) {
if (m.isAnnotationPresent(annotation)) {
return true;
}
}
return false;
};
for (Method method : findMethods(testClass, predicate)) {
Set<Annotation> bindings = findBindings(method, interceptorBindings);
if (bindings.isEmpty()) {
throw new IllegalStateException("No bindings declared on a test interceptor method: " + method);
}
validateTestInterceptorMethod(method);
String key = UUID.randomUUID().toString();
interceptorMethods.put(key, InterceptorMethodCreator.descriptor(method));
InterceptionType interceptionType;
if (method.isAnnotationPresent(AroundInvoke.class)) {
interceptionType = InterceptionType.AROUND_INVOKE;
} else if (method.isAnnotationPresent(PostConstruct.class)) {
interceptionType = InterceptionType.POST_CONSTRUCT;
} else if (method.isAnnotationPresent(PreDestroy.class)) {
interceptionType = InterceptionType.PRE_DESTROY;
} else if (method.isAnnotationPresent(AroundConstruct.class)) {
interceptionType = InterceptionType.AROUND_CONSTRUCT;
} else {
// This should never happen
throw new IllegalStateException("No interceptor annotation declared on: " + method);
}
int priority = 1;
Priority priorityAnnotation = method.getAnnotation(Priority.class);
if (priorityAnnotation != null) {
priority = priorityAnnotation.value();
}
registrationContext.configureInterceptor(interceptionType)
.identifier(key)
.priority(priority)
.bindings(bindings.stream().map(Annotations::jandexAnnotation)
.toArray(AnnotationInstance[]::new))
.param(InterceptorMethodCreator.CREATE_KEY, key)
.creator(InterceptorMethodCreator.class);
}
}
private static void validateTestInterceptorMethod(Method method) {
Parameter[] params = method.getParameters();
if (params.length != 1 || !InvocationContext.class.isAssignableFrom(params[0].getType())) {
throw new IllegalStateException("A test interceptor method must declare exactly one InvocationContext parameter:"
+ Arrays.toString(params));
}
}
private static Set<Annotation> findBindings(Method method, Set<String> bindings) {
return Arrays.stream(method.getAnnotations()).filter(a -> bindings.contains(a.annotationType().getName()))
.collect(Collectors.toSet());
}
@SuppressWarnings("unchecked")
static <T> T cast(Object obj) {
return (T) obj;
}
public static | String |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ReflectionUtils.java | {
"start": 3220,
"end": 3250
} | class ____.
*/
public | hierarchy |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/standard/SpelExpressionTestUtils.java | {
"start": 925,
"end": 1295
} | class ____ {
public static void assertIsCompiled(Expression expression) {
try {
Field field = SpelExpression.class.getDeclaredField("compiledAst");
field.setAccessible(true);
Object object = field.get(expression);
assertThat(object).isNotNull();
}
catch (Exception ex) {
throw new AssertionError(ex.getMessage(), ex);
}
}
}
| SpelExpressionTestUtils |
java | quarkusio__quarkus | extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/runtime/telemetry/WebSocketTelemetryProviderBuilder.java | {
"start": 334,
"end": 413
} | class ____ to build {@link WebSocketTelemetryProvider}.
*/
public final | internally |
java | apache__spark | mllib/src/test/java/org/apache/spark/ml/param/JavaTestParams.java | {
"start": 987,
"end": 3334
} | class ____ extends JavaParams {
public JavaTestParams() {
this.uid_ = Identifiable$.MODULE$.randomUID("javaTestParams");
init();
}
public JavaTestParams(String uid) {
this.uid_ = uid;
init();
}
private String uid_;
@Override
public String uid() {
return uid_;
}
private IntParam myIntParam_;
public IntParam myIntParam() {
return myIntParam_;
}
public int getMyIntParam() {
return (Integer) getOrDefault(myIntParam_);
}
public JavaTestParams setMyIntParam(int value) {
set(myIntParam_, value);
return this;
}
private DoubleParam myDoubleParam_;
public DoubleParam myDoubleParam() {
return myDoubleParam_;
}
public double getMyDoubleParam() {
return (Double) getOrDefault(myDoubleParam_);
}
public JavaTestParams setMyDoubleParam(double value) {
set(myDoubleParam_, value);
return this;
}
private Param<String> myStringParam_;
public Param<String> myStringParam() {
return myStringParam_;
}
public String getMyStringParam() {
return getOrDefault(myStringParam_);
}
public JavaTestParams setMyStringParam(String value) {
set(myStringParam_, value);
return this;
}
private DoubleArrayParam myDoubleArrayParam_;
public DoubleArrayParam myDoubleArrayParam() {
return myDoubleArrayParam_;
}
public double[] getMyDoubleArrayParam() {
return getOrDefault(myDoubleArrayParam_);
}
public JavaTestParams setMyDoubleArrayParam(double[] value) {
set(myDoubleArrayParam_, value);
return this;
}
private void init() {
myIntParam_ = new IntParam(this, "myIntParam", "this is an int param", ParamValidators.gt(0));
myDoubleParam_ = new DoubleParam(this, "myDoubleParam", "this is a double param",
ParamValidators.inRange(0.0, 1.0));
List<String> validStrings = Arrays.asList("a", "b");
myStringParam_ = new Param<>(this, "myStringParam", "this is a string param",
ParamValidators.inArray(validStrings));
myDoubleArrayParam_ =
new DoubleArrayParam(this, "myDoubleArrayParam", "this is a double param");
setDefault(myIntParam(), 1);
setDefault(myDoubleParam(), 0.5);
setDefault(myDoubleArrayParam(), new double[]{1.0, 2.0});
}
@Override
public JavaTestParams copy(ParamMap extra) {
return defaultCopy(extra);
}
}
| JavaTestParams |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/AnnotationLiteralProcessor.java | {
"start": 20412,
"end": 20636
} | class ____ {
/**
* Name of the generated annotation literal class.
*/
final String generatedClassName;
/**
* Whether the generated annotation literal | AnnotationLiteralClassInfo |
java | apache__camel | components/camel-milo/src/test/java/org/apache/camel/component/milo/WriteClientTest.java | {
"start": 1530,
"end": 6786
} | class ____ extends AbstractMiloServerTest {
private static final String DIRECT_START_1 = "direct:start1";
private static final String DIRECT_START_2 = "direct:start2";
private static final String DIRECT_START_3 = "direct:start3";
private static final String DIRECT_START_4 = "direct:start4";
private static final String MILO_SERVER_ITEM_1 = "milo-server:myitem1";
private static final String MILO_SERVER_ITEM_2 = "milo-server:myitem2";
private static final String MILO_CLIENT_BASE_C1 = "milo-client:opc.tcp://foo:bar@localhost:@@port@@";
private static final String MILO_CLIENT_BASE_C2 = "milo-client:opc.tcp://foo2:bar2@localhost:@@port@@";
private static final String MILO_CLIENT_ITEM_C1_1
= MILO_CLIENT_BASE_C1 + "?node=" + nodeValue(MiloServerComponent.DEFAULT_NAMESPACE_URI, "myitem1")
+ "&overrideHost=true";
private static final String MILO_CLIENT_ITEM_C1_2
= MILO_CLIENT_BASE_C1 + "?node=" + nodeValue(MiloServerComponent.DEFAULT_NAMESPACE_URI, "myitem2")
+ "&overrideHost=true";
private static final String MILO_CLIENT_ITEM_C2_1
= MILO_CLIENT_BASE_C2 + "?node=" + nodeValue(MiloServerComponent.DEFAULT_NAMESPACE_URI, "myitem1")
+ "&overrideHost=true";
private static final String MILO_CLIENT_ITEM_C2_2
= MILO_CLIENT_BASE_C2 + "?node=" + nodeValue(MiloServerComponent.DEFAULT_NAMESPACE_URI, "myitem2")
+ "&overrideHost=true";
private static final String MOCK_TEST_1 = "mock:test1";
private static final String MOCK_TEST_2 = "mock:test2";
private static final Logger LOG = LoggerFactory.getLogger(WriteClientTest.class);
@EndpointInject(MOCK_TEST_1)
protected MockEndpoint test1Endpoint;
@EndpointInject(MOCK_TEST_2)
protected MockEndpoint test2Endpoint;
@Produce(DIRECT_START_1)
protected ProducerTemplate producer1;
@Produce(DIRECT_START_2)
protected ProducerTemplate producer2;
@Produce(DIRECT_START_3)
protected ProducerTemplate producer3;
@Produce(DIRECT_START_4)
protected ProducerTemplate producer4;
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(MILO_SERVER_ITEM_1).to(MOCK_TEST_1);
from(MILO_SERVER_ITEM_2).to(MOCK_TEST_2);
from(DIRECT_START_1).to(resolve(MILO_CLIENT_ITEM_C1_1));
from(DIRECT_START_2).to(resolve(MILO_CLIENT_ITEM_C1_2));
from(DIRECT_START_3).to(resolve(MILO_CLIENT_ITEM_C2_1));
from(DIRECT_START_4).to(resolve(MILO_CLIENT_ITEM_C2_2));
}
};
}
@BeforeEach
public void setup(TestInfo testInfo) {
final var displayName = testInfo.getDisplayName();
LOG.info("********************************************************************************");
LOG.info(displayName);
LOG.info("********************************************************************************");
}
@Test
public void testWrite1() throws Exception {
// item 1
this.test1Endpoint.setExpectedCount(2);
testBody(this.test1Endpoint.message(0), assertGoodValue("Foo1"));
testBody(this.test1Endpoint.message(1), assertGoodValue("Foo2"));
// item 2
this.test2Endpoint.setExpectedCount(0);
// send
sendValue(this.producer1, new Variant("Foo1"));
sendValue(this.producer1, new Variant("Foo2"));
// assert
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testWrite2() throws Exception {
// item 1
this.test1Endpoint.setExpectedCount(0);
// item 2
this.test2Endpoint.setExpectedCount(2);
testBody(this.test2Endpoint.message(0), assertGoodValue("Foo1"));
testBody(this.test2Endpoint.message(1), assertGoodValue("Foo2"));
// send
sendValue(this.producer2, new Variant("Foo1"));
sendValue(this.producer2, new Variant("Foo2"));
// assert
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testWrite3() throws Exception {
// item 1
this.test1Endpoint.setExpectedCount(2);
testBody(this.test1Endpoint.message(0), assertGoodValue("Foo1"));
testBody(this.test1Endpoint.message(1), assertGoodValue("Foo3"));
// item 1
this.test2Endpoint.setExpectedCount(2);
testBody(this.test2Endpoint.message(0), assertGoodValue("Foo2"));
testBody(this.test2Endpoint.message(1), assertGoodValue("Foo4"));
// send
sendValue(this.producer1, new Variant("Foo1"));
sendValue(this.producer2, new Variant("Foo2"));
sendValue(this.producer3, new Variant("Foo3"));
sendValue(this.producer4, new Variant("Foo4"));
// assert
MockEndpoint.assertIsSatisfied(context);
}
private static void sendValue(final ProducerTemplate producerTemplate, final Variant variant) {
// we always write synchronously since we do need the message order
producerTemplate.sendBodyAndHeader(variant, "await", true);
}
}
| WriteClientTest |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/result_handler_type/ObjectFactory.java | {
"start": 918,
"end": 1416
} | class ____ extends DefaultObjectFactory {
private static final long serialVersionUID = -8855120656740914948L;
@Override
protected Class<?> resolveInterface(Class<?> type) {
Class<?> classToCreate;
if (type == Map.class) {
classToCreate = LinkedHashMap.class;
} else if (type == List.class || type == Collection.class) {
classToCreate = LinkedList.class;
} else {
classToCreate = super.resolveInterface(type);
}
return classToCreate;
}
}
| ObjectFactory |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringAiEmbeddingsEndpointBuilderFactory.java | {
"start": 10585,
"end": 10962
} | class ____ extends AbstractEndpointBuilder implements SpringAiEmbeddingsEndpointBuilder, AdvancedSpringAiEmbeddingsEndpointBuilder {
public SpringAiEmbeddingsEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new SpringAiEmbeddingsEndpointBuilderImpl(path);
}
} | SpringAiEmbeddingsEndpointBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/OrderedBySQLTest.java | {
"start": 2229,
"end": 3072
} | class ____ {
@Id
private Long id;
private String name;
@OneToMany(
mappedBy = "person",
cascade = CascadeType.ALL
)
@SQLOrder("CHAR_LENGTH(name) DESC")
private List<Article> articles = new ArrayList<>();
//Getters and setters are omitted for brevity
//end::collections-customizing-ordered-by-sql-clause-mapping-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<Article> getArticles() {
return articles;
}
public void addArticle(Article article) {
article.setPerson(this);
articles.add(article);
}
//tag::collections-customizing-ordered-by-sql-clause-mapping-example[]
}
@Entity(name = "Article")
public static | Person |
java | elastic__elasticsearch | qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterClientYamlTestSuiteIT.java | {
"start": 931,
"end": 1356
} | class ____ extends ESClientYamlSuiteTestCase {
public MixedClusterClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return createParameters();
}
@Override
protected boolean randomizeContentType() {
return false;
}
}
| MixedClusterClientYamlTestSuiteIT |
java | google__guava | android/guava/src/com/google/common/collect/ImmutableAsList.java | {
"start": 1917,
"end": 2597
} | class ____ implements Serializable {
final ImmutableCollection<?> collection;
SerializedForm(ImmutableCollection<?> collection) {
this.collection = collection;
}
Object readResolve() {
return collection.asList();
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
@GwtIncompatible
@J2ktIncompatible
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
throw new InvalidObjectException("Use SerializedForm");
}
@GwtIncompatible
@J2ktIncompatible
@Override
Object writeReplace() {
return new SerializedForm(delegateCollection());
}
}
| SerializedForm |
java | apache__camel | core/camel-api/src/generated/java/org/apache/camel/spi/UriEndpoint.java | {
"start": 7491,
"end": 8074
} | enum ____ {
* {@literal @}Metadata
* FOO("fooKey");
*
* public final String headerName;
*
* SomeEnum(final String str) {
* this.headerName = str;
* }
* }
* </code>
* </pre>
*/
String headersNameProvider() default "";
/**
* Whether the component does remote communication such as connecting to an external system over the network. Set
* this to false for internal components such as log, message transformations and other kinds.
*/
boolean remote() default true;
}
| SomeEnum |
java | apache__camel | components/camel-mina/src/test/java/org/apache/camel/component/mina/MinaInOutRouteTest.java | {
"start": 1189,
"end": 2320
} | class ____ extends BaseMinaTest {
@Test
public void testInOutUsingMina() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Bye Chad");
// we should preserve headers
mock.expectedHeaderReceived("city", "Woodbine");
mock.setResultWaitTime(5000);
Object out = template.requestBodyAndHeader("direct:in", "Chad", "city", "Woodbine");
MockEndpoint.assertIsSatisfied(context);
assertEquals("Bye Chad", out);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
fromF("mina:tcp://localhost:%1$s?sync=true", getPort()).process(exchange -> {
String body = exchange.getIn().getBody(String.class);
exchange.getMessage().setBody("Bye " + body);
});
from("direct:in").toF("mina:tcp://localhost:%1$s?sync=true&lazySessionCreation=true", getPort())
.to("mock:result");
}
};
}
}
| MinaInOutRouteTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassInitializationDeadlockTest.java | {
"start": 884,
"end": 1197
} | class ____ {
private final CompilationTestHelper testHelper =
CompilationTestHelper.newInstance(ClassInitializationDeadlock.class, getClass());
@Test
public void positive() {
testHelper
.addSourceLines(
"A.java",
"""
public | ClassInitializationDeadlockTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/cache/CacheReproTests.java | {
"start": 22667,
"end": 22904
} | interface ____ {
@Cacheable(value = "itemCache", sync = true)
Optional<TestBean> findById(String id);
@CachePut(cacheNames = "itemCache", key = "#item.name")
TestBean insertItem(TestBean item);
}
public static | Spr15271Interface |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java | {
"start": 8351,
"end": 35280
} | class ____ generated. Edit {@code " + getClass().getSimpleName() + "} instead.");
builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL);
builder.addSuperinterface(AGGREGATOR_FUNCTION);
builder.addField(
FieldSpec.builder(LIST_AGG_FUNC_DESC, "INTERMEDIATE_STATE_DESC", Modifier.PRIVATE, Modifier.STATIC, Modifier.FINAL)
.initializer(initInterState())
.build()
);
if (warnExceptions.isEmpty() == false) {
builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL);
}
builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL);
builder.addField(aggState.type, "state", Modifier.PRIVATE, Modifier.FINAL);
builder.addField(LIST_INTEGER, "channels", Modifier.PRIVATE, Modifier.FINAL);
for (Parameter p : createParameters) {
builder.addField(p.type(), p.name(), Modifier.PRIVATE, Modifier.FINAL);
}
builder.addMethod(create());
builder.addMethod(ctor());
builder.addMethod(intermediateStateDesc());
builder.addMethod(intermediateBlockCount());
builder.addMethod(addRawInput());
builder.addMethod(addRawInputExploded(true));
builder.addMethod(addRawInputExploded(false));
if (tryToUseVectors) {
builder.addMethod(addRawVector(false));
builder.addMethod(addRawVector(true));
}
builder.addMethod(addRawBlock(false));
builder.addMethod(addRawBlock(true));
builder.addMethod(addIntermediateInput());
builder.addMethod(evaluateIntermediate());
builder.addMethod(evaluateFinal());
builder.addMethod(toStringMethod());
builder.addMethod(close());
return builder.build();
}
private MethodSpec create() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("create");
builder.addModifiers(Modifier.PUBLIC, Modifier.STATIC).returns(implementation);
if (warnExceptions.isEmpty() == false) {
builder.addParameter(WARNINGS, "warnings");
}
builder.addParameter(DRIVER_CONTEXT, "driverContext");
builder.addParameter(LIST_INTEGER, "channels");
for (Parameter p : createParameters) {
builder.addParameter(p.type(), p.name());
}
if (createParameters.isEmpty()) {
builder.addStatement(
"return new $T($LdriverContext, channels, $L)",
implementation,
warnExceptions.isEmpty() ? "" : "warnings, ",
callInit()
);
} else {
builder.addStatement(
"return new $T($LdriverContext, channels, $L, $L)",
implementation,
warnExceptions.isEmpty() ? "" : "warnings, ",
callInit(),
createParameters.stream().map(p -> p.name()).collect(joining(", "))
);
}
return builder.build();
}
private CodeBlock callInit() {
String initParametersCall = init.getParameters()
.stream()
.map(p -> TypeName.get(p.asType()).equals(BIG_ARRAYS) ? "driverContext.bigArrays()" : p.getSimpleName().toString())
.collect(joining(", "));
CodeBlock.Builder builder = CodeBlock.builder();
if (aggState.declaredType().isPrimitive()) {
builder.add("new $T($T.$L($L))", aggState.type(), declarationType, init.getSimpleName(), initParametersCall);
} else {
builder.add("$T.$L($L)", declarationType, init.getSimpleName(), initParametersCall);
}
return builder.build();
}
private CodeBlock initInterState() {
CodeBlock.Builder builder = CodeBlock.builder();
builder.add("List.of(");
boolean addComma = false;
for (var interState : intermediateState) {
if (addComma) builder.add(",");
builder.add("$Wnew $T($S, $T." + interState.elementType() + ")", INTERMEDIATE_STATE_DESC, interState.name(), ELEMENT_TYPE);
addComma = true;
}
builder.add("$W$W)");
return builder.build();
}
private MethodSpec ctor() {
MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC);
if (warnExceptions.isEmpty() == false) {
builder.addParameter(WARNINGS, "warnings");
}
builder.addParameter(DRIVER_CONTEXT, "driverContext");
builder.addParameter(LIST_INTEGER, "channels");
builder.addParameter(aggState.type, "state");
if (warnExceptions.isEmpty() == false) {
builder.addStatement("this.warnings = warnings");
}
builder.addStatement("this.driverContext = driverContext");
builder.addStatement("this.channels = channels");
builder.addStatement("this.state = state");
for (Parameter p : createParameters()) {
p.buildCtor(builder);
}
return builder.build();
}
private MethodSpec intermediateStateDesc() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("intermediateStateDesc");
builder.addModifiers(Modifier.PUBLIC, Modifier.STATIC).returns(LIST_AGG_FUNC_DESC);
builder.addStatement("return INTERMEDIATE_STATE_DESC");
return builder.build();
}
private MethodSpec intermediateBlockCount() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("intermediateBlockCount");
builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(TypeName.INT);
builder.addStatement("return INTERMEDIATE_STATE_DESC.size()");
return builder.build();
}
private MethodSpec addRawInput() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput");
builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page").addParameter(BOOLEAN_VECTOR, "mask");
if (aggState.hasFailed()) {
builder.beginControlFlow("if (state.failed())");
builder.addStatement("return");
builder.endControlFlow();
}
builder.beginControlFlow("if (mask.allFalse())");
builder.addComment("Entire page masked away");
builder.nextControlFlow("else if (mask.allTrue())");
builder.addStatement("$L(page)", addRawInputExplodedName(false));
builder.nextControlFlow("else");
builder.addStatement("$L(page, mask)", addRawInputExplodedName(true));
builder.endControlFlow();
return builder.build();
}
private String addRawInputExplodedName(boolean hasMask) {
return hasMask ? "addRawInputMasked" : "addRawInputNotMasked";
}
private MethodSpec addRawInputExploded(boolean hasMask) {
MethodSpec.Builder builder = MethodSpec.methodBuilder(addRawInputExplodedName(hasMask));
builder.addModifiers(Modifier.PRIVATE).addParameter(PAGE, "page");
if (hasMask) {
builder.addParameter(BOOLEAN_VECTOR, "mask");
}
for (int i = 0; i < aggParams.size(); i++) {
Argument a = aggParams.get(i);
builder.addStatement("$T $L = page.getBlock(channels.get($L))", a.dataType(true), a.blockName(), i);
}
if (tryToUseVectors) {
for (Argument a : aggParams) {
String rawBlock = "addRawBlock("
+ aggParams.stream().map(arg -> arg.blockName()).collect(joining(", "))
+ (hasMask ? ", mask" : "")
+ ")";
a.resolveVectors(builder, rawBlock, "return");
}
}
builder.addStatement(invokeAddRaw(tryToUseVectors == false, hasMask));
return builder.build();
}
private String invokeAddRaw(boolean blockStyle, boolean hasMask) {
return addRawName(blockStyle)
+ "("
+ aggParams.stream().map(a -> blockStyle ? a.blockName() : a.vectorName()).collect(joining(", "))
+ (hasMask ? ", mask" : "")
+ ")";
}
private String addRawName(boolean blockStyle) {
return blockStyle ? "addRawBlock" : "addRawVector";
}
private MethodSpec addRawVector(boolean masked) {
MethodSpec.Builder builder = initAddRaw(false, masked);
if (aggParams.getFirst() instanceof BlockArgument) {
throw new IllegalStateException("The BlockArgument type does not support vectors because all values are multi-valued");
}
if (first != null) {
builder.addComment("Find the first value up front in the Vector path which is more complex but should be faster");
builder.addStatement("int valuesPosition = 0");
addRawVectorWithFirst(builder, true, masked);
addRawVectorWithFirst(builder, false, masked);
return builder.build();
}
if (aggState.hasSeen()) {
builder.addStatement("state.seen(true)");
}
builder.beginControlFlow(
"for (int valuesPosition = 0; valuesPosition < $L.getPositionCount(); valuesPosition++)",
aggParams.getFirst().vectorName()
);
{
if (masked) {
builder.beginControlFlow("if (mask.getBoolean(valuesPosition) == false)").addStatement("continue").endControlFlow();
}
for (Argument a : aggParams) {
a.read(builder, a.vectorName(), "valuesPosition");
}
combineRawInput(builder, false);
}
builder.endControlFlow();
return builder.build();
}
private void addRawVectorWithFirst(MethodSpec.Builder builder, boolean firstPass, boolean masked) {
builder.beginControlFlow(
firstPass
? "while (state.seen() == false && valuesPosition < $L.getPositionCount())"
: "while (valuesPosition < $L.getPositionCount())",
aggParams.getFirst().vectorName()
);
{
if (masked) {
builder.beginControlFlow("if (mask.getBoolean(valuesPosition) == false)");
builder.addStatement("valuesPosition++");
builder.addStatement("continue");
builder.endControlFlow();
}
for (Argument a : aggParams) {
a.read(builder, a.vectorName(), "valuesPosition");
}
combineRawInput(builder, firstPass);
builder.addStatement("valuesPosition++");
if (firstPass) {
builder.addStatement("state.seen(true)");
builder.addStatement("break");
}
}
builder.endControlFlow();
}
private MethodSpec addRawBlock(boolean masked) {
MethodSpec.Builder builder = initAddRaw(true, masked);
builder.beginControlFlow("for (int p = 0; p < $L.getPositionCount(); p++)", aggParams.getFirst().blockName());
{
if (masked) {
builder.beginControlFlow("if (mask.getBoolean(p) == false)").addStatement("continue").endControlFlow();
}
for (Argument a : aggParams) {
a.addContinueIfPositionHasNoValueBlock(builder);
}
if (hasOnlyBlockArguments == false) {
if (first == null && aggState.hasSeen()) {
builder.addStatement("state.seen(true)");
}
}
for (Argument a : aggParams) {
a.startBlockProcessingLoop(builder);
}
if (hasOnlyBlockArguments) {
String params = aggParams.stream().map(Argument::blockName).collect(joining(", "));
warningsBlock(builder, () -> builder.addStatement("$T.combine(state, p, $L)", declarationType, params));
} else {
if (first != null) {
builder.addComment("Check seen in every iteration to save on complexity in the Block path");
builder.beginControlFlow("if (state.seen())");
{
combineRawInput(builder, false);
}
builder.nextControlFlow("else");
{
builder.addStatement("state.seen(true)");
combineRawInput(builder, true);
}
builder.endControlFlow();
} else {
combineRawInput(builder, false);
}
}
for (int i = aggParams.size() - 1; i >= 0; --i) {
Argument a = aggParams.get(i);
a.endBlockProcessingLoop(builder);
}
}
builder.endControlFlow();
return builder.build();
}
private MethodSpec.Builder initAddRaw(boolean blockStyle, boolean masked) {
MethodSpec.Builder builder = MethodSpec.methodBuilder(addRawName(blockStyle));
builder.addModifiers(Modifier.PRIVATE);
for (Argument a : aggParams) {
a.declareProcessParameter(builder, blockStyle);
}
if (masked) {
builder.addParameter(BOOLEAN_VECTOR, "mask");
}
for (Argument a : aggParams) {
if (a.scratchType() != null) {
// Add scratch var that will be used for some blocks/vectors, e.g. for bytes_ref
builder.addStatement("$T $L = new $T()", a.scratchType(), a.scratchName(), a.scratchType());
}
}
return builder;
}
private void combineRawInput(MethodSpec.Builder builder, boolean useFirst) {
TypeName returnType = TypeName.get(combine.getReturnType());
warningsBlock(builder, () -> invokeCombineRawInput(returnType, builder, useFirst));
}
private void invokeCombineRawInput(TypeName returnType, MethodSpec.Builder builder, boolean useFirst) {
StringBuilder pattern = new StringBuilder();
List<Object> params = new ArrayList<>();
if (returnType.isPrimitive()) {
if (useFirst) {
throw new IllegalArgumentException("[first] not supported with primitive");
}
pattern.append("state.$TValue($T.combine(state.$TValue()");
params.add(returnType);
params.add(declarationType);
params.add(returnType);
} else if (returnType == TypeName.VOID) {
pattern.append("$T.$L(state");
params.add(declarationType);
params.add(useFirst ? first.getSimpleName() : combine.getSimpleName());
} else {
throw new IllegalArgumentException("combine must return void or a primitive");
}
for (Argument a : aggParams) {
pattern.append(", $L");
params.add(a.valueName());
}
if (returnType.isPrimitive()) {
pattern.append(")");
}
pattern.append(")");
builder.addStatement(pattern.toString(), params.toArray());
}
private void warningsBlock(MethodSpec.Builder builder, Runnable block) {
if (warnExceptions.isEmpty() == false) {
builder.beginControlFlow("try");
}
block.run();
if (warnExceptions.isEmpty() == false) {
String catchPattern = "catch (" + warnExceptions.stream().map(m -> "$T").collect(Collectors.joining(" | ")) + " e)";
builder.nextControlFlow(catchPattern, warnExceptions.stream().map(TypeName::get).toArray());
builder.addStatement("warnings.registerException(e)");
builder.addStatement("state.failed(true)");
builder.addStatement("return");
builder.endControlFlow();
}
}
private MethodSpec addIntermediateInput() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("addIntermediateInput");
builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page");
builder.addStatement("assert channels.size() == intermediateBlockCount()");
builder.addStatement("assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size()");
for (int i = 0; i < intermediateState.size(); i++) {
var interState = intermediateState.get(i);
interState.assignToVariable(builder, i);
builder.addStatement("assert $L.getPositionCount() == 1", interState.name());
}
if (aggState.declaredType().isPrimitive()) {
if (warnExceptions.isEmpty()) {
assert intermediateState.size() == 2;
assert intermediateState.get(1).name().equals("seen");
builder.beginControlFlow("if (seen.getBoolean(0))");
} else {
assert intermediateState.size() == 3;
assert intermediateState.get(1).name().equals("seen");
assert intermediateState.get(2).name().equals("failed");
builder.beginControlFlow("if (failed.getBoolean(0))");
{
builder.addStatement("state.failed(true)");
builder.addStatement("state.seen(true)");
}
builder.nextControlFlow("else if (seen.getBoolean(0))");
}
warningsBlock(builder, () -> {
var primitiveStateMethod = switch (aggState.declaredType().toString()) {
case "boolean" -> "booleanValue";
case "int" -> "intValue";
case "long" -> "longValue";
case "double" -> "doubleValue";
case "float" -> "floatValue";
default -> throw new IllegalArgumentException("Unexpected primitive type: [" + aggState.declaredType() + "]");
};
var state = intermediateState.get(0);
var s = "state.$L($T.combine(state.$L(), " + state.name() + "." + vectorAccessorName(state.elementType()) + "(0)))";
builder.addStatement(s, primitiveStateMethod, declarationType, primitiveStateMethod);
builder.addStatement("state.seen(true)");
});
builder.endControlFlow();
} else {
requireStaticMethod(
declarationType,
requireVoidType(),
requireName("combineIntermediate"),
requireArgs(
Stream.concat(
Stream.of(aggState.declaredType()), // aggState
intermediateState.stream().map(IntermediateStateDesc::combineArgType) // intermediate state
).map(Methods::requireType).toArray(TypeMatcher[]::new)
)
);
for (IntermediateStateDesc interState : intermediateState) {
interState.addScratchDeclaration(builder);
}
builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType);
}
return builder.build();
}
String intermediateStateRowAccess() {
return intermediateState.stream().map(desc -> desc.access("0")).collect(joining(", "));
}
private MethodSpec evaluateIntermediate() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("evaluateIntermediate");
builder.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.addParameter(BLOCK_ARRAY, "blocks")
.addParameter(TypeName.INT, "offset")
.addParameter(DRIVER_CONTEXT, "driverContext");
builder.addStatement("state.toIntermediate(blocks, offset, driverContext)");
return builder.build();
}
private MethodSpec evaluateFinal() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("evaluateFinal");
builder.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.addParameter(BLOCK_ARRAY, "blocks")
.addParameter(TypeName.INT, "offset")
.addParameter(DRIVER_CONTEXT, "driverContext");
if (aggState.hasSeen() || aggState.hasFailed()) {
builder.beginControlFlow(
"if ($L)",
Stream.concat(
Stream.of("state.seen() == false").filter(c -> aggState.hasSeen()),
Stream.of("state.failed()").filter(c -> aggState.hasFailed())
).collect(joining(" || "))
);
builder.addStatement("blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1)", BLOCK);
builder.addStatement("return");
builder.endControlFlow();
}
if (aggState.declaredType().isPrimitive()) {
builder.addStatement(switch (aggState.declaredType().toString()) {
case "boolean" -> "blocks[offset] = driverContext.blockFactory().newConstantBooleanBlockWith(state.booleanValue(), 1)";
case "int" -> "blocks[offset] = driverContext.blockFactory().newConstantIntBlockWith(state.intValue(), 1)";
case "long" -> "blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1)";
case "double" -> "blocks[offset] = driverContext.blockFactory().newConstantDoubleBlockWith(state.doubleValue(), 1)";
case "float" -> "blocks[offset] = driverContext.blockFactory().newConstantFloatBlockWith(state.floatValue(), 1)";
default -> throw new IllegalArgumentException("Unexpected primitive type: [" + aggState.declaredType() + "]");
});
} else {
requireStaticMethod(
declarationType,
requireType(BLOCK),
requireName("evaluateFinal"),
requireArgs(requireType(aggState.declaredType()), requireType(DRIVER_CONTEXT))
);
builder.addStatement("blocks[offset] = $T.evaluateFinal(state, driverContext)", declarationType);
}
return builder.build();
}
private MethodSpec toStringMethod() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("toString");
builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(String.class);
builder.addStatement("$T sb = new $T()", StringBuilder.class, StringBuilder.class);
builder.addStatement("sb.append(getClass().getSimpleName()).append($S)", "[");
builder.addStatement("sb.append($S).append(channels)", "channels=");
builder.addStatement("sb.append($S)", "]");
builder.addStatement("return sb.toString()");
return builder.build();
}
private MethodSpec close() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("close");
builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC);
builder.addStatement("state.close()");
return builder.build();
}
record IntermediateStateDesc(String name, String elementType, boolean block) {
static IntermediateStateDesc newIntermediateStateDesc(IntermediateState state) {
String type = state.type();
boolean block = false;
if (type.toUpperCase(Locale.ROOT).endsWith("_BLOCK")) {
type = type.substring(0, type.length() - "_BLOCK".length());
block = true;
}
return new IntermediateStateDesc(state.name(), type, block);
}
public String access(String position) {
if (block) {
return name();
}
String s = name() + ".";
if (vectorType(elementType) != null) {
s += vectorAccessorName(elementType()) + "(" + position;
} else {
s += getMethod(fromString(elementType())) + "(" + name() + ".getFirstValueIndex(" + position + ")";
}
if (scratchType(elementType()) != null) {
s += ", " + name() + "Scratch";
}
return s + ")";
}
public void addScratchDeclaration(MethodSpec.Builder builder) {
ClassName scratchType = scratchType(elementType());
if (scratchType != null) {
builder.addStatement("$T $L = new $T()", scratchType, name() + "Scratch", scratchType);
}
}
public void assignToVariable(MethodSpec.Builder builder, int offset) {
builder.addStatement("Block $L = page.getBlock(channels.get($L))", name + "Uncast", offset);
ClassName blockType = blockType(elementType());
builder.beginControlFlow("if ($L.areAllValuesNull())", name + "Uncast");
{
builder.addStatement("return");
builder.endControlFlow();
}
if (block || vectorType(elementType) == null) {
builder.addStatement("$T $L = ($T) $L", blockType, name, blockType, name + "Uncast");
} else {
builder.addStatement("$T $L = (($T) $L).asVector()", vectorType(elementType), name, blockType, name + "Uncast");
}
}
public TypeName combineArgType() {
var type = Types.fromString(elementType);
return block ? blockType(type) : type;
}
}
/**
* This represents the type returned by init method used to keep aggregation state
* @param declaredType declared state type as returned by init method
* @param type actual type used (we have some predefined state types for primitive values)
*/
public record AggregationState(TypeName declaredType, TypeName type, boolean hasSeen, boolean hasFailed) {
public static AggregationState create(Elements elements, TypeMirror mirror, boolean hasFailures, boolean isArray) {
var declaredType = TypeName.get(mirror);
var stateType = declaredType.isPrimitive()
? ClassName.get("org.elasticsearch.compute.aggregation", primitiveStateStoreClassname(declaredType, hasFailures, isArray))
: declaredType;
return new AggregationState(
declaredType,
stateType,
hasMethod(elements, stateType, "seen()"),
hasMethod(elements, stateType, "failed()")
);
}
private static String primitiveStateStoreClassname(TypeName declaredType, boolean hasFailures, boolean isArray) {
var name = capitalize(declaredType.toString());
if (hasFailures) {
name += "Fallible";
}
if (isArray) {
name += "Array";
}
return name + "State";
}
}
private static boolean hasMethod(Elements elements, TypeName type, String name) {
return elements.getAllMembers(elements.getTypeElement(type.toString())).stream().anyMatch(e -> e.toString().equals(name));
}
}
| is |
java | apache__camel | core/camel-main/src/generated/java/org/apache/camel/main/HealthConfigurationPropertiesConfigurer.java | {
"start": 707,
"end": 5185
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("ConsumersEnabled", java.lang.Boolean.class);
map.put("Enabled", java.lang.Boolean.class);
map.put("ExcludePattern", java.lang.String.class);
map.put("ExposureLevel", java.lang.String.class);
map.put("InitialState", java.lang.String.class);
map.put("ProducersEnabled", java.lang.Boolean.class);
map.put("RegistryEnabled", java.lang.Boolean.class);
map.put("RoutesEnabled", java.lang.Boolean.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.main.HealthConfigurationProperties target = (org.apache.camel.main.HealthConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "consumersenabled":
case "consumersEnabled": target.setConsumersEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "enabled": target.setEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "excludepattern":
case "excludePattern": target.setExcludePattern(property(camelContext, java.lang.String.class, value)); return true;
case "exposurelevel":
case "exposureLevel": target.setExposureLevel(property(camelContext, java.lang.String.class, value)); return true;
case "initialstate":
case "initialState": target.setInitialState(property(camelContext, java.lang.String.class, value)); return true;
case "producersenabled":
case "producersEnabled": target.setProducersEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "registryenabled":
case "registryEnabled": target.setRegistryEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "routesenabled":
case "routesEnabled": target.setRoutesEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "consumersenabled":
case "consumersEnabled": return java.lang.Boolean.class;
case "enabled": return java.lang.Boolean.class;
case "excludepattern":
case "excludePattern": return java.lang.String.class;
case "exposurelevel":
case "exposureLevel": return java.lang.String.class;
case "initialstate":
case "initialState": return java.lang.String.class;
case "producersenabled":
case "producersEnabled": return java.lang.Boolean.class;
case "registryenabled":
case "registryEnabled": return java.lang.Boolean.class;
case "routesenabled":
case "routesEnabled": return java.lang.Boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.main.HealthConfigurationProperties target = (org.apache.camel.main.HealthConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "consumersenabled":
case "consumersEnabled": return target.getConsumersEnabled();
case "enabled": return target.getEnabled();
case "excludepattern":
case "excludePattern": return target.getExcludePattern();
case "exposurelevel":
case "exposureLevel": return target.getExposureLevel();
case "initialstate":
case "initialState": return target.getInitialState();
case "producersenabled":
case "producersEnabled": return target.getProducersEnabled();
case "registryenabled":
case "registryEnabled": return target.getRegistryEnabled();
case "routesenabled":
case "routesEnabled": return target.getRoutesEnabled();
default: return null;
}
}
}
| HealthConfigurationPropertiesConfigurer |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/commands/RunOnlyOnceServerCommandIntegrationTests.java | {
"start": 1015,
"end": 3772
} | class ____ extends TestSupport {
private final RedisClient client;
private final RedisCommands<String, String> redis;
@Inject
RunOnlyOnceServerCommandIntegrationTests(RedisClient client, StatefulRedisConnection<String, String> connection) {
this.client = client;
this.redis = connection.sync();
}
/**
* Executed in order: 1 this test causes a stop of the redis. This means, you cannot repeat the test without restarting your
* redis.
*/
@Test
@Disabled
@Order(1)
void debugSegfault() {
assumeTrue(CanConnect.to(host(), port(1)));
final RedisURI redisURI = RedisURI.Builder.redis(host(), port(1)).build();
try (StatefulRedisConnection<String, String> connection = client.connect(redisURI)) {
final RedisAsyncCommands<String, String> commands = connection.async();
commands.debugSegfault();
Wait.untilTrue(() -> !connection.isOpen()).waitOrTimeout();
assertThat(connection.isOpen()).isFalse();
}
}
/**
* Executed in order: 2
*/
@Test
@Order(2)
void migrate() {
assumeTrue(CanConnect.to(host(), port(7)));
redis.set(key, value);
String result = redis.migrate("localhost", TestSettings.port(7), key, 0, 10);
assertThat(result).isEqualTo("OK");
}
/**
* Executed in order: 3
*/
@Test
@Order(3)
void migrateCopyReplace() {
assumeTrue(CanConnect.to(host(), port(7)));
redis.set(key, value);
redis.set("key2", value);
redis.set("key3", value);
String result = redis.migrate("localhost", TestSettings.port(7), 0, 10, MigrateArgs.Builder.keys(key).copy().replace());
assertThat(result).isEqualTo("OK");
result = redis.migrate("localhost", TestSettings.port(7), 0, 10,
MigrateArgs.Builder.keys(Arrays.asList("key1", "key2")).replace());
assertThat(result).isEqualTo("OK");
}
/**
* Executed in order: 4 this test causes a stop of the redis. This means, you cannot repeat the test without restarting your
* redis.
*/
@Test
@Order(4)
void shutdown() {
assumeTrue(CanConnect.to(host(), port(7)));
final RedisURI redisURI = RedisURI.Builder.redis(host(), port(2)).build();
try (StatefulRedisConnection<String, String> cnxn = client.connect(redisURI)) {
final RedisAsyncCommands<String, String> commands = cnxn.async();
commands.shutdown(true);
commands.shutdown(false);
Wait.untilTrue(() -> !cnxn.isOpen()).waitOrTimeout();
assertThat(cnxn.isOpen()).isFalse();
}
}
}
| RunOnlyOnceServerCommandIntegrationTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/Crc32PerformanceTest.java | {
"start": 1996,
"end": 2465
} | class ____ implements Crc32 {
@Override
public void verifyChunked(ByteBuffer data, int bytesPerSum,
ByteBuffer sums, String fileName, long basePos)
throws ChecksumException {
NativeCrc32.verifyChunkedSums(bytesPerSum, DataChecksum.Type.CRC32.id,
sums, data, fileName, basePos);
}
@Override
public DataChecksum.Type crcType() {
return DataChecksum.Type.CRC32;
}
}
final | Native |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/component/sql/SqlProducerInMultiQueryEndpointTest.java | {
"start": 898,
"end": 1558
} | class ____ extends SqlProducerInMultiTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// required for the sql component
getContext().getComponent("sql", SqlComponent.class).setDataSource(db);
from("direct:query")
.to("sql:select * from projects where project in (:#in:names) and license in (:#in:licenses) order by id")
.to("log:query")
.to("mock:query");
}
};
}
}
| SqlProducerInMultiQueryEndpointTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/CollectionIdType.java | {
"start": 644,
"end": 1059
} | interface ____ {
/**
* The custom type implementor class
*
* @see Type#value
*/
Class<? extends UserType<?>> value();
/**
* Parameters to be injected into the custom type after
* it is instantiated.
*
* The type should implement {@link org.hibernate.usertype.ParameterizedType}
* to receive the parameters
*
* @see Type#parameters
*/
Parameter[] parameters() default {};
}
| CollectionIdType |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLBinaryOperator.java | {
"start": 724,
"end": 4683
} | enum ____ {
Union("UNION", 0),
COLLATE("COLLATE", 20),
BitwiseXor("^", 50),
BitwiseXorEQ("^=", 110),
Multiply("*", 60),
Divide("/", 60),
DIV("DIV", 60), // mysql integer division
Modulus("%", 60),
Mod("MOD", 60),
Add("+", 70),
Subtract("-", 70),
SubGt("->", 20),
SubGtGt("->>", 20),
PoundGt("#>", 20),
PoundGtGt("#>>", 20),
QuesQues("??", 20),
QuesBar("?|", 20),
QuesQuesBar("??|", 20),
QuesAmp("?&", 20),
LeftShift("<<", 80),
LeftShiftUnsigned("<<<", 80),
RightShift(">>", 80),
RightShiftUnsigned(">>>", 80),
BitwiseAnd("&", 90),
BitwiseOr("|", 100),
GreaterThan(">", 110),
GreaterThanOrEqual(">=", 110),
Is("IS", 110),
LessThan("<", 110),
LessThanOrEqual("<=", 110),
LessThanOrEqualOrGreaterThan("<=>", 110),
LessThanOrGreater("<>", 110),
IsDistinctFrom("IS DISTINCT FROM", 110),
IsNotDistinctFrom("IS NOT DISTINCT FROM", 110),
Like("LIKE", 110),
SoudsLike("SOUNDS LIKE", 110),
NotLike("NOT LIKE", 110),
ILike("ILIKE", 110),
NotILike("NOT ILIKE", 110),
AT_AT("@@", 110), // postgresql textsearch
SIMILAR_TO("SIMILAR TO", 110),
POSIX_Regular_Match("~", 110),
POSIX_Regular_Match_Insensitive("~*", 110),
POSIX_Regular_Not_Match("!~", 110),
POSIX_Regular_Not_Match_POSIX_Regular_Match_Insensitive("!~*", 110),
Array_Contains("@>", 110),
Array_ContainedBy("<@", 110),
SAME_AS("~=", 110),
JSONContains("?", 110),
RLike("RLIKE", 110),
NotRLike("NOT RLIKE", 110),
NotEqual("!=", 110),
NotLessThan("!<", 110),
NotGreaterThan("!>", 110),
IsNot("IS NOT", 110),
Escape("ESCAPE", 110),
RegExp("REGEXP", 110),
NotRegExp("NOT REGEXP", 110),
Equality("=", 110),
EqEq("==", 110),
BitwiseNot("!", 130),
Concat("||", 140),
BooleanAnd("AND", 140),
BooleanXor("XOR", 150),
BooleanOr("OR", 160),
Assignment(":=", 169),
Blank("", 170),
PG_And("&&", 140),
PG_ST_DISTANCE("<->", 20);
public static int getPriority(SQLBinaryOperator operator) {
return 0;
}
public final String name;
public final String nameLCase;
public final int priority;
SQLBinaryOperator(String name, int priority) {
this.name = name;
this.nameLCase = name.toLowerCase();
this.priority = priority;
}
public String getName() {
return this.name;
}
public int getPriority() {
return this.priority;
}
public boolean isRelational() {
switch (this) {
case Equality:
case Like:
case SoudsLike:
case NotEqual:
case GreaterThan:
case GreaterThanOrEqual:
case LessThan:
case LessThanOrEqual:
case LessThanOrGreater:
case NotLike:
case NotLessThan:
case NotGreaterThan:
case RLike:
case NotRLike:
case RegExp:
case NotRegExp:
case Is:
case IsNot:
return true;
default:
return false;
}
}
public boolean isLogical() {
return this == BooleanAnd || this == BooleanOr || this == BooleanXor;
}
public boolean isArithmetic() {
switch (this) {
case Add:
case Subtract:
case Multiply:
case Divide:
case DIV:
case Modulus:
case Mod:
return true;
default:
return false;
}
}
public static SQLBinaryOperator from(String str) {
if (str == null) {
return null;
}
for (SQLBinaryOperator value : values()) {
if (value.name.equals(str)) {
return value;
}
}
return valueOf(str);
}
}
| SQLBinaryOperator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JavaInstantGetSecondsGetNanoTest.java | {
"start": 7555,
"end": 8403
} | class ____ {
private static final Instant INSTANT = Instant.EPOCH;
public static void foo() {
long seconds = INSTANT.getEpochSecond();
Object obj =
new Object() {
@Override
public String toString() {
// BUG: Diagnostic contains: JavaInstantGetSecondsGetNano
return String.valueOf(INSTANT.getNano());
}
};
}
}
""")
.doTest();
}
@Test
public void getNanoInInnerClassGetSecondsInClassVariable() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import java.time.Instant;
public | TestCase |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java | {
"start": 5720,
"end": 14589
} | class ____ {
private final List<String> rootLogDirs;
private final ContainerId containerId;
private final String user;
private final LogAggregationContext logAggregationContext;
private Set<File> uploadedFiles = new HashSet<File>();
private final Set<String> alreadyUploadedLogFiles;
private Set<String> allExistingFileMeta = new HashSet<String>();
private final boolean appFinished;
private final boolean containerFinished;
/**
* The retention context to determine if log files are older than
* the retention policy configured.
*/
private final LogRetentionContext logRetentionContext;
/**
* The set of log files that are older than retention policy that will
* not be uploaded but ready for deletion.
*/
private final Set<File> obsoleteRetentionLogFiles = new HashSet<File>();
// TODO Maybe add a version string here. Instead of changing the version of
// the entire k-v format
public LogValue(List<String> rootLogDirs, ContainerId containerId,
String user) {
this(rootLogDirs, containerId, user, null, new HashSet<String>(),
null, true, true);
}
public LogValue(List<String> rootLogDirs, ContainerId containerId,
String user, LogAggregationContext logAggregationContext,
Set<String> alreadyUploadedLogFiles,
LogRetentionContext retentionContext, boolean appFinished,
boolean containerFinished) {
this.rootLogDirs = new ArrayList<String>(rootLogDirs);
this.containerId = containerId;
this.user = user;
// Ensure logs are processed in lexical order
Collections.sort(this.rootLogDirs);
this.logAggregationContext = logAggregationContext;
this.alreadyUploadedLogFiles = alreadyUploadedLogFiles;
this.appFinished = appFinished;
this.containerFinished = containerFinished;
this.logRetentionContext = retentionContext;
}
@VisibleForTesting
public Set<File> getPendingLogFilesToUploadForThisContainer() {
Set<File> pendingUploadFiles = new HashSet<File>();
for (String rootLogDir : this.rootLogDirs) {
File appLogDir = new File(rootLogDir,
this.containerId.getApplicationAttemptId().
getApplicationId().toString());
File containerLogDir =
new File(appLogDir, this.containerId.toString());
if (!containerLogDir.isDirectory()) {
continue; // ContainerDir may have been deleted by the user.
}
pendingUploadFiles
.addAll(getPendingLogFilesToUpload(containerLogDir));
}
return pendingUploadFiles;
}
public void write(DataOutputStream out, Set<File> pendingUploadFiles)
throws IOException {
List<File> fileList = new ArrayList<File>(pendingUploadFiles);
Collections.sort(fileList);
for (File logFile : fileList) {
// We only aggregate top level files.
// Ignore anything inside sub-folders.
if (logFile.isDirectory()) {
LOG.warn(logFile.getAbsolutePath() + " is a directory. Ignore it.");
continue;
}
FileInputStream in = null;
try {
in = secureOpenFile(logFile);
} catch (IOException e) {
logErrorMessage(logFile, e);
IOUtils.cleanupWithLogger(LOG, in);
continue;
}
final long fileLength = logFile.length();
// Write the logFile Type
out.writeUTF(logFile.getName());
// Write the log length as UTF so that it is printable
out.writeUTF(String.valueOf(fileLength));
// Write the log itself
try {
byte[] buf = new byte[65535];
int len = 0;
long bytesLeft = fileLength;
while ((len = in.read(buf)) != -1) {
//If buffer contents within fileLength, write
if (len < bytesLeft) {
out.write(buf, 0, len);
bytesLeft-=len;
}
//else only write contents within fileLength, then exit early
else {
out.write(buf, 0, (int)bytesLeft);
break;
}
}
long newLength = logFile.length();
if(fileLength < newLength) {
LOG.warn("Aggregated logs truncated by approximately "+
(newLength-fileLength) +" bytes.");
}
this.uploadedFiles.add(logFile);
} catch (IOException e) {
String message = logErrorMessage(logFile, e);
out.write(message.getBytes(StandardCharsets.UTF_8));
} finally {
IOUtils.cleanupWithLogger(LOG, in);
}
}
}
@VisibleForTesting
public FileInputStream secureOpenFile(File logFile) throws IOException {
return SecureIOUtils.openForRead(logFile, getUser(), null);
}
private static String logErrorMessage(File logFile, Exception e) {
String message = "Error aggregating log file. Log file : "
+ logFile.getAbsolutePath() + ". " + e.getMessage();
LOG.error(message, e);
return message;
}
// Added for testing purpose.
public String getUser() {
return user;
}
private Set<File> getPendingLogFilesToUpload(File containerLogDir) {
if(containerLogDir == null) {
return new HashSet<>(0);
}
File[] filesList = containerLogDir.listFiles();
if (filesList == null) {
return new HashSet<>(0);
}
Set<File> candidates =
new HashSet<File>(Arrays.asList(filesList));
for (File logFile : candidates) {
this.allExistingFileMeta.add(getLogFileMetaData(logFile));
}
// if log files are older than retention policy, do not upload them.
// but schedule them for deletion.
if(logRetentionContext != null && !logRetentionContext.shouldRetainLog()){
obsoleteRetentionLogFiles.addAll(candidates);
candidates.clear();
return candidates;
}
Set<File> fileCandidates = new HashSet<File>(candidates);
if (this.logAggregationContext != null && candidates.size() > 0) {
fileCandidates = getFileCandidates(fileCandidates, this.appFinished);
if (!this.appFinished && this.containerFinished) {
Set<File> addition = new HashSet<File>(candidates);
addition = getFileCandidates(addition, true);
fileCandidates.addAll(addition);
}
}
return fileCandidates;
}
private Set<File> getFileCandidates(Set<File> candidates,
boolean useRegularPattern) {
filterFiles(
useRegularPattern ? this.logAggregationContext.getIncludePattern()
: this.logAggregationContext.getRolledLogsIncludePattern(),
candidates, false);
filterFiles(
useRegularPattern ? this.logAggregationContext.getExcludePattern()
: this.logAggregationContext.getRolledLogsExcludePattern(),
candidates, true);
Iterable<File> mask = Iterables.filter(candidates, (input) ->
!alreadyUploadedLogFiles
.contains(getLogFileMetaData(input)));
return Sets.newHashSet(mask);
}
private void filterFiles(String pattern, Set<File> candidates,
boolean exclusion) {
if (pattern != null && !pattern.isEmpty()) {
Pattern filterPattern = Pattern.compile(pattern);
for (Iterator<File> candidatesItr = candidates.iterator(); candidatesItr
.hasNext();) {
File candidate = candidatesItr.next();
boolean match = filterPattern.matcher(candidate.getName()).find();
if ((!match && !exclusion) || (match && exclusion)) {
candidatesItr.remove();
}
}
}
}
public Set<Path> getCurrentUpLoadedFilesPath() {
Set<Path> path = new HashSet<Path>();
for (File file : this.uploadedFiles) {
path.add(new Path(file.getAbsolutePath()));
}
return path;
}
public Set<String> getCurrentUpLoadedFileMeta() {
Set<String> info = new HashSet<String>();
for (File file : this.uploadedFiles) {
info.add(getLogFileMetaData(file));
}
return info;
}
public Set<Path> getObsoleteRetentionLogFiles() {
Set<Path> path = new HashSet<Path>();
for(File file: this.obsoleteRetentionLogFiles) {
path.add(new Path(file.getAbsolutePath()));
}
return path;
}
public Set<String> getAllExistingFilesMeta() {
return this.allExistingFileMeta;
}
private String getLogFileMetaData(File file) {
return containerId.toString() + "_" + file.getName() + "_"
+ file.lastModified();
}
}
/**
* A context for log retention to determine if files are older than
* the retention policy configured in YarnConfiguration.
*/
public static | LogValue |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/impl/UncloseableChunkedNioFile.java | {
"start": 782,
"end": 1039
} | class ____ extends ChunkedNioFile {
public UncloseableChunkedNioFile(FileChannel in, long offset, long length) throws IOException {
super(in, offset, length, 8192);
}
@Override
public void close() throws Exception {
}
}
| UncloseableChunkedNioFile |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/CharEncoding.java | {
"start": 1804,
"end": 1870
} | class ____ be removed in a future release.
*/
@Deprecated
public | will |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/grant/MySqlGrantTest_24.java | {
"start": 969,
"end": 2356
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "GRANT PROCESS ON mydb.* TO 'someuser'@'somehost';";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("GRANT PROCESS ON mydb.* TO 'someuser'@'somehost';", //
output);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("City")));
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("t2")));
// assertTrue(visitor.getColumns().contains(new Column("t2", "id")));
}
}
| MySqlGrantTest_24 |
java | quarkusio__quarkus | independent-projects/qute/core/src/test/java/io/quarkus/qute/NumberResolversTest.java | {
"start": 131,
"end": 2846
} | class ____ {
@Test
public void testSequence() {
Engine engine = Engine.builder().addDefaults().build();
int one = 1;
assertEquals("0", engine.parse("{one plus 1 - 2}").data("one", one).render());
}
@Test
public void testPlus() {
Engine engine = Engine.builder().addDefaults().build();
int one = 1;
long intMaxTwoTimes = 4294967294l;
int intMax = Integer.MAX_VALUE;
assertEquals("2", engine.parse("{one plus 1}").data("one", one).render());
assertEquals("2", engine.parse("{one.plus(1)}").data("one", one).render());
assertEquals("256", engine.parse("{one + 255}").data("one", one).render());
assertEquals("11", engine.parse("{one + 10l}").data("one", one).render());
assertEquals("-2", engine.parse("{intMax plus intMax}").data("intMax", intMax).render());
assertEquals("4294967295",
engine.parse("{one + intMaxTwoTimes}").data("one", one, "intMaxTwoTimes", intMaxTwoTimes).render());
assertEquals("4", engine.parse("{one plus 1 + 2}").data("one", one).render());
}
@Test
public void testMinus() {
Engine engine = Engine.builder().addDefaults().build();
int one = 1;
long intMaxTwoTimes = 4294967294l;
int intMax = Integer.MAX_VALUE;
assertEquals("0", engine.parse("{one minus 1}").data("one", one).render());
assertEquals("-9", engine.parse("{one - 10}").data("one", one).render());
assertEquals("-4", engine.parse("{one.minus(5)}").data("one", one).render());
assertEquals(Integer.MAX_VALUE + "", engine.parse("{intMaxTwoTimes.minus(intMax)}")
.data("intMaxTwoTimes", intMaxTwoTimes, "intMax", intMax).render());
}
@Test
public void testMod() {
Engine engine = Engine.builder().addDefaults().build();
assertEquals("1", engine.parse("{eleven.mod(5)}").data("eleven", 11).render());
assertEquals("1", engine.parse("{eleven mod 5}").data("eleven", 11).render());
}
@Test
public void testNumberValue() {
Engine engine = Engine.builder().addDefaults().build();
int one = 1;
int million = 1_000_000;
double foo = 1.234d;
assertEquals("1", engine.parse("{one.intValue}").data("one", one).render());
assertEquals("1", engine.parse("{one.longValue}").data("one", one).render());
assertEquals("1", engine.parse("{foo.intValue}").data("foo", foo).render());
assertEquals("1.234", engine.parse("{foo.floatValue}").data("foo", foo).render());
assertEquals("64", engine.parse("{million.byteValue}").data("million", million).render());
}
}
| NumberResolversTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/scenarios/FluxTests.java | {
"start": 34717,
"end": 34941
} | class ____ {
final Point point;
public Sample(Point point) {
this.point = point;
}
@Override
public String toString() {
return "Sample{" +
"point=" + point +
'}';
}
}
private static final | Sample |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/OnExceptionRouteScopedErrorHandlerRefIssueTest.java | {
"start": 1077,
"end": 2213
} | class ____ extends ContextTestSupport {
@Test
public void testOnExceptionErrorHandlerRef() throws Exception {
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:handled").expectedMessageCount(1);
getMockEndpoint("mock:dead").expectedMessageCount(0);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myDLC", new DeadLetterChannelBuilder("mock:dead"));
return jndi;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").errorHandler("myDLC")
.onException(IllegalArgumentException.class).handled(true).to("mock:handled").end()
.to("mock:a").throwException(new IllegalArgumentException("Damn"));
}
};
}
}
| OnExceptionRouteScopedErrorHandlerRefIssueTest |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderDocument.java | {
"start": 1783,
"end": 2245
} | class ____ implements ToXContentObject, Writeable {
public static final String SIGN_AUTHN = "authn";
public static final String SIGN_LOGOUT = "logout";
private static final Set<String> ALLOWED_SIGN_MESSAGES = Set.of(SIGN_AUTHN, SIGN_LOGOUT);
private static final TransportVersion IDP_CUSTOM_SAML_ATTRIBUTES_ALLOW_LIST = TransportVersion.fromName(
"idp_custom_saml_attributes_allow_list"
);
public static | SamlServiceProviderDocument |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableRemove_Test.java | {
"start": 1026,
"end": 1627
} | class ____ extends AbstractTest_StandardComparisonStrategy {
@Test
void should_pass() {
List<?> list = newArrayList("Sam", "Merry", null, "Frodo");
assertThat(list.contains("Frodo")).isTrue();
standardComparisonStrategy.iterableRemoves(list, "Frodo");
assertThat(list.contains("Frodo")).isFalse();
standardComparisonStrategy.iterableRemoves(list, null);
assertThat(list.contains(null)).isFalse();
}
@Test
void should_do_nothing_if_iterable_is_null() {
standardComparisonStrategy.iterableRemoves(null, "Sauron");
}
}
| StandardComparisonStrategy_iterableRemove_Test |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/RemoveExtensions.java | {
"start": 686,
"end": 1926
} | class ____ {
public static final String EXTENSIONS = "quarkus.remove-extensions.extensions";
public static final String OUTCOME_UPDATED = "quarkus.remove-extensions.outcome-updated";
public static final String EXTENSION_MANAGER = "quarkus.remove-extensions.extension-manager";
private final QuarkusCommandInvocation invocation;
public RemoveExtensions(final QuarkusProject quarkusProject) {
invocation = new QuarkusCommandInvocation(quarkusProject);
}
public RemoveExtensions(final QuarkusProject quarkusProject, final MessageWriter messageWriter) {
this.invocation = new QuarkusCommandInvocation(quarkusProject, new HashMap<>(), messageWriter);
}
public RemoveExtensions extensions(Set<String> extensions) {
invocation.setValue(EXTENSIONS, extensions);
return this;
}
public RemoveExtensions extensionManager(ExtensionManager extensionManager) {
invocation.setValue(EXTENSION_MANAGER, requireNonNull(extensionManager, "extensionManager is required"));
return this;
}
public QuarkusCommandOutcome execute() throws QuarkusCommandException {
return new RemoveExtensionsCommandHandler().execute(invocation);
}
}
| RemoveExtensions |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/steps/MainClassBuildStep.java | {
"start": 28219,
"end": 28534
} | class ____ reflection, needed when launching via the Quarkus launcher
*/
@BuildStep
ReflectiveClassBuildItem applicationReflection() {
return ReflectiveClassBuildItem.builder(Application.APP_CLASS_NAME).reason("The generated application class").build();
}
/**
* Transform the main | for |
java | apache__camel | components/camel-mina/src/test/java/org/apache/camel/component/mina/MinaClientServerTest.java | {
"start": 1065,
"end": 1967
} | class ____ extends BaseMinaTest {
@Test
public void testSendToServer() {
// START SNIPPET: e3
String out = (String) template.requestBody(String.format("mina:tcp://localhost:%1$s?textline=true", getPort()), "Chad");
assertEquals("Hello Chad", out);
// END SNIPPET: e3
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// START SNIPPET: e1
// lets setup a server on port %1$s
// and we let the request-reply be processed in the MyServerProcessor
from(String.format("mina:tcp://localhost:%1$s?textline=true", getPort())).process(new MyServerProcessor());
// END SNIPPET: e1
}
};
}
// START SNIPPET: e2
private static | MinaClientServerTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cfg/TransactionSettings.java | {
"start": 1642,
"end": 7457
} | class ____ implements {@code JtaPlatform}.
* <li>short name of a class (sans package name) that implements {@code JtaPlatform}.
* </ul>
*
* @see #JTA_PLATFORM_RESOLVER
*
* @since 4.0
*/
String JTA_PLATFORM = "hibernate.transaction.jta.platform";
/**
* Specifies a {@link org.hibernate.engine.transaction.jta.platform.spi.JtaPlatformResolver}
* implementation that should be used to obtain an instance of
* {@link org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform}.
*
* @since 4.3
*/
String JTA_PLATFORM_RESOLVER = "hibernate.transaction.jta.platform_resolver";
/**
* When enabled, specifies that the {@link jakarta.transaction.UserTransaction} should
* be used in preference to the {@link jakarta.transaction.TransactionManager} for JTA
* transaction management.
* <p>
* By default, the {@code TransactionManager} is preferred.
*
* @see org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform#retrieveUserTransaction
* @see org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform#retrieveTransactionManager
*
* @see org.hibernate.boot.SessionFactoryBuilder#applyPreferUserTransactions(boolean)
*
* @settingDefault {@code false} as {@code TransactionManager} is preferred.
*
* @since 5.0
*/
String PREFER_USER_TRANSACTION = "hibernate.jta.prefer_user_transaction";
/**
* When enabled, indicates that it is safe to cache {@link jakarta.transaction.TransactionManager}
* references in the {@link org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform}
*
* @settingDefault Generally {@code true}, though {@code JtaPlatform} implementations
* can do their own thing.
*
* @since 4.0
*/
String JTA_CACHE_TM = "hibernate.jta.cacheTransactionManager";
/**
* When enabled, indicates that it is safe to cache {@link jakarta.transaction.UserTransaction}
* references in the {@link org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform}
*
* @settingDefault Generally {@code true}, though {@code JtaPlatform} implementations
* can do their own thing.
*
* @since 4.0
*/
String JTA_CACHE_UT = "hibernate.jta.cacheUserTransaction";
/**
* A transaction can be rolled back by another thread ("tracking by thread")
* -- not the original application. Examples of this include a JTA
* transaction timeout handled by a background reaper thread. The ability
* to handle this situation requires checking the Thread ID every time
* Session is called. This can certainly have performance considerations.
*
* @settingDefault {@code true} (enabled).
*
* @see org.hibernate.boot.SessionFactoryBuilder#applyJtaTrackingByThread(boolean)
*/
String JTA_TRACK_BY_THREAD = "hibernate.jta.track_by_thread";
/**
* When enabled, allows access to the {@link org.hibernate.Transaction} even when
* using a JTA for transaction management.
* <p>
* Values are {@code true}, which grants access, and {@code false}, which does not.
* <p>
*
* @settingDefault {@code false} when bootstrapped via JPA; {@code true} otherwise.
*
* @see JpaComplianceSettings#JPA_TRANSACTION_COMPLIANCE
*/
String ALLOW_JTA_TRANSACTION_ACCESS = "hibernate.jta.allowTransactionAccess";
/**
* When enabled, specifies that the {@link org.hibernate.Session} should be
* closed automatically at the end of each transaction.
*
* @settingDefault {@code false}
*
* @see org.hibernate.boot.SessionFactoryBuilder#applyAutoClosing(boolean)
*/
String AUTO_CLOSE_SESSION = "hibernate.transaction.auto_close_session";
/**
* When enabled, specifies that automatic flushing should occur during the JTA
* {@link jakarta.transaction.Synchronization#beforeCompletion()} callback.
*
* @settingDefault {@code true} unless using JPA bootstrap
*
* @see org.hibernate.boot.SessionFactoryBuilder#applyAutoFlushing(boolean)
*/
String FLUSH_BEFORE_COMPLETION = "hibernate.transaction.flush_before_completion";
/**
* Allows a detached proxy or lazy collection to be fetched even when not
* associated with an open persistence context, by creating a temporary
* persistence context when the proxy or collection is accessed. This
* behavior is not recommended since it can easily break transaction
* isolation or lead to data aliasing; it is therefore disabled by default.
*
* @settingDefault {@code false} (disabled)
*
* @apiNote Generally speaking, all access to transactional data should be
* done in a transaction. Use of this setting is discouraged.
*
* @see org.hibernate.boot.spi.SessionFactoryOptions#isInitializeLazyStateOutsideTransactionsEnabled
* @see org.hibernate.boot.SessionFactoryBuilder#applyLazyInitializationOutsideTransaction(boolean)
*/
@Unsafe
String ENABLE_LAZY_LOAD_NO_TRANS = "hibernate.enable_lazy_load_no_trans";
/**
* When enabled, allows update operations outside a transaction.
* <p>
* Since version 5.2 Hibernate conforms with the JPA specification and disallows
* flushing any update outside a transaction.
* <p>
* Values are {@code true}, which allows flushing outside a transaction, and
* {@code false}, which does not.
* <p>
* The default behavior is to disallow update operations outside a transaction.
*
* @settingDefault {@code false} (disabled)
*
* @apiNote Generally speaking, all access to transactional data should be
* done in a transaction. Combining this with second-level caching
* is not safe. Use of this setting is discouraged.
*
* @see org.hibernate.boot.spi.SessionFactoryOptions#isAllowOutOfTransactionUpdateOperations
* @see org.hibernate.boot.SessionFactoryBuilder#allowOutOfTransactionUpdateOperations(boolean)
*
* @since 5.2
*/
@Unsafe
String ALLOW_UPDATE_OUTSIDE_TRANSACTION = "hibernate.allow_update_outside_transaction";
}
| that |
java | quarkusio__quarkus | integration-tests/logging-panache/src/test/java/io/quarkus/logging/LoggingBean.java | {
"start": 3998,
"end": 4095
} | interface ____<T, U, V, W> {
void accept(T t, U u, V v, W w);
}
static | TetraConsumer |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SubTypeResolutionTest.java | {
"start": 2582,
"end": 3038
} | class ____<M, V, B> extends AbstractMetaValue<M, V, B> {
public MetaAttribute() { }
}
// [databind#2632]: fail to specialize type-erased
@SuppressWarnings("rawtypes")
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME)
@JsonSubTypes(value = {
@JsonSubTypes.Type(value = Either.Left.class, name = "left"),
@JsonSubTypes.Type(value = Either.Right.class, name = "right")
})
static | MetaAttribute |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/Saml2RedirectAuthenticationRequest.java | {
"start": 2945,
"end": 3034
} | class ____ a {@link Saml2RedirectAuthenticationRequest} object.
*/
public static final | for |
java | quarkusio__quarkus | extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/IAMPrincipal.java | {
"start": 324,
"end": 753
} | class ____ implements Principal {
private String name;
private APIGatewayV2HTTPEvent.RequestContext.IAM iam;
public IAMPrincipal(APIGatewayV2HTTPEvent.RequestContext.IAM iam) {
this.iam = iam;
this.name = iam.getUserId();
}
@Override
public String getName() {
return name;
}
public APIGatewayV2HTTPEvent.RequestContext.IAM getIam() {
return iam;
}
}
| IAMPrincipal |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/text/StrSubstitutor.java | {
"start": 1146,
"end": 1582
} | class ____ a piece of text and substitutes all the variables within it.
* The default definition of a variable is {@code ${variableName}}.
* The prefix and suffix can be changed via constructors and set methods.
* </p>
* <p>
* Variable values are typically resolved from a map, but could also be resolved
* from system properties, or by supplying a custom variable resolver.
* </p>
* <p>
* The simplest example is to use this | takes |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/AnnotationMatcherTest.java | {
"start": 4238,
"end": 4894
} | class ____ {}
""");
assertCompiles(
nodeWithAnnotationMatches(
/* shouldMatch= */ true,
new AnnotationMatcher<Tree>(
AT_LEAST_ONE, isType("com.google.SampleNestedAnnotation.Annotation"))));
assertCompiles(
nodeWithAnnotationMatches(
/* shouldMatch= */ true,
new AnnotationMatcher<Tree>(
ALL, isType("com.google.SampleNestedAnnotation.Annotation"))));
}
@Test
public void shouldNotMatchNonmatchingSingleAnnotationOnClass() {
writeFile(
"A.java",
"""
package com.google;
@SampleAnnotation1
public | A |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/context/HttpSessionSecurityContextRepositoryTests.java | {
"start": 3351,
"end": 42132
} | class ____ {
private final TestingAuthenticationToken testToken = new TestingAuthenticationToken("someone", "passwd", "ROLE_A");
@AfterEach
public void tearDown() {
SecurityContextHolder.clearContext();
}
@Test
public void startAsyncDisablesSaveOnCommit() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
HttpServletRequest request = mock(HttpServletRequest.class);
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
repo.loadContext(holder);
reset(request);
holder.getRequest().startAsync();
holder.getResponse().sendError(HttpServletResponse.SC_BAD_REQUEST);
// ensure that sendError did cause interaction with the HttpSession
verify(request, never()).getSession(anyBoolean());
verify(request, never()).getSession();
}
@Test
public void startAsyncRequestResponseDisablesSaveOnCommit() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
HttpServletRequest request = mock(HttpServletRequest.class);
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
repo.loadContext(holder);
reset(request);
holder.getRequest().startAsync(request, response);
holder.getResponse().sendError(HttpServletResponse.SC_BAD_REQUEST);
// ensure that sendError did cause interaction with the HttpSession
verify(request, never()).getSession(anyBoolean());
verify(request, never()).getSession();
}
@Test
public void sessionIsntCreatedIfContextDoesntChange() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContext context = repo.loadContext(holder);
assertThat(request.getSession(false)).isNull();
repo.saveContext(context, holder.getRequest(), holder.getResponse());
assertThat(request.getSession(false)).isNull();
}
@Test
public void sessionIsntCreatedIfAllowSessionCreationIsFalse() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setAllowSessionCreation(false);
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContext context = repo.loadContext(holder);
// Change context
context.setAuthentication(this.testToken);
repo.saveContext(context, holder.getRequest(), holder.getResponse());
assertThat(request.getSession(false)).isNull();
}
@Test
public void loadContextWhenNullResponse() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, null);
assertThat(repo.loadContext(holder)).isEqualTo(SecurityContextHolder.createEmptyContext());
}
@Test
public void loadContextHttpServletRequestWhenNotSavedThenEmptyContextReturned() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
assertThat(repo.loadDeferredContext(request).get()).isEqualTo(SecurityContextHolder.createEmptyContext());
}
@Test
public void loadContextHttpServletRequestWhenSavedThenSavedContextReturned() {
SecurityContextImpl expectedContext = new SecurityContextImpl(this.testToken);
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.saveContext(expectedContext, request, response);
assertThat(repo.loadDeferredContext(request).get()).isEqualTo(expectedContext);
}
@Test
public void loadContextHttpServletRequestWhenNotAccessedThenHttpSessionNotAccessed() {
HttpSession session = mock(HttpSession.class);
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
request.setSession(session);
repo.loadDeferredContext(request);
verifyNoInteractions(session);
}
@Test
public void existingContextIsSuccessFullyLoadedFromSessionAndSavedBack() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setSpringSecurityContextKey("imTheContext");
MockHttpServletRequest request = new MockHttpServletRequest();
SecurityContextHolder.getContext().setAuthentication(this.testToken);
request.getSession().setAttribute("imTheContext", SecurityContextHolder.getContext());
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContext context = repo.loadContext(holder);
assertThat(context).isNotNull();
assertThat(context.getAuthentication()).isEqualTo(this.testToken);
// Won't actually be saved as it hasn't changed, but go through the use case
// anyway
repo.saveContext(context, holder.getRequest(), holder.getResponse());
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(context);
}
// SEC-1528
@Test
public void saveContextCallsSetAttributeIfContextIsModifiedDirectlyDuringRequest() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
// Set up an existing authenticated context, mocking that it is in the session
// already
SecurityContext ctx = SecurityContextHolder.getContext();
ctx.setAuthentication(this.testToken);
HttpSession session = mock(HttpSession.class);
given(session.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY)).willReturn(ctx);
request.setSession(session);
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, new MockHttpServletResponse());
assertThat(repo.loadContext(holder)).isSameAs(ctx);
// Modify context contents. Same user, different role
SecurityContextHolder.getContext()
.setAuthentication(new TestingAuthenticationToken("someone", "passwd", "ROLE_B"));
repo.saveContext(ctx, holder.getRequest(), holder.getResponse());
// Must be called even though the value in the local VM is already the same
verify(session).setAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, ctx);
}
@Test
public void saveContextWhenSaveNewContextThenOriginalContextThenOriginalContextSaved() throws Exception {
HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
SecurityContextPersistenceFilter securityContextPersistenceFilter = new SecurityContextPersistenceFilter(
repository);
UserDetails original = User.withUsername("user").password("password").roles("USER").build();
SecurityContext originalContext = createSecurityContext(original);
UserDetails impersonate = User.withUserDetails(original).username("impersonate").build();
SecurityContext impersonateContext = createSecurityContext(impersonate);
MockHttpServletRequest mockRequest = new MockHttpServletRequest();
MockHttpServletResponse mockResponse = new MockHttpServletResponse();
Filter saveImpersonateContext = (request, response, chain) -> {
SecurityContextHolder.setContext(impersonateContext);
// ensure the response is committed to trigger save
response.flushBuffer();
chain.doFilter(request, response);
};
Filter saveOriginalContext = (request, response, chain) -> {
SecurityContextHolder.setContext(originalContext);
chain.doFilter(request, response);
};
HttpServlet servlet = new HttpServlet() {
@Override
protected void service(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
resp.getWriter().write("Hi");
}
};
SecurityContextHolder.setContext(originalContext);
MockFilterChain chain = new MockFilterChain(servlet, saveImpersonateContext, saveOriginalContext);
securityContextPersistenceFilter.doFilter(mockRequest, mockResponse, chain);
assertThat(
mockRequest.getSession().getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY))
.isEqualTo(originalContext);
}
@Test
public void nonSecurityContextInSessionIsIgnored() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
SecurityContextHolder.getContext().setAuthentication(this.testToken);
request.getSession()
.setAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY,
"NotASecurityContextInstance");
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContext context = repo.loadContext(holder);
assertThat(context).isNotNull();
assertThat(context.getAuthentication()).isNull();
}
@Test
public void sessionIsCreatedAndContextStoredWhenContextChanges() {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContext context = repo.loadContext(holder);
assertThat(request.getSession(false)).isNull();
// Simulate authentication during the request
context.setAuthentication(this.testToken);
repo.saveContext(context, holder.getRequest(), holder.getResponse());
assertThat(request.getSession(false)).isNotNull();
assertThat(request.getSession().getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY))
.isEqualTo(context);
}
@Test
public void redirectCausesEarlySaveOfContext() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setSpringSecurityContextKey("imTheContext");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContextHolder.setContext(repo.loadContext(holder));
SecurityContextHolder.getContext().setAuthentication(this.testToken);
holder.getResponse().sendRedirect("/doesntmatter");
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) holder.getResponse()).isContextSaved()).isTrue();
repo.saveContext(SecurityContextHolder.getContext(), holder.getRequest(), holder.getResponse());
// Check it's still the same
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
}
@Test
public void sendErrorCausesEarlySaveOfContext() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setSpringSecurityContextKey("imTheContext");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContextHolder.setContext(repo.loadContext(holder));
SecurityContextHolder.getContext().setAuthentication(this.testToken);
holder.getResponse().sendError(404);
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) holder.getResponse()).isContextSaved()).isTrue();
repo.saveContext(SecurityContextHolder.getContext(), holder.getRequest(), holder.getResponse());
// Check it's still the same
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
}
// SEC-2005
@Test
public void flushBufferCausesEarlySaveOfContext() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setSpringSecurityContextKey("imTheContext");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContextHolder.setContext(repo.loadContext(holder));
SecurityContextHolder.getContext().setAuthentication(this.testToken);
holder.getResponse().flushBuffer();
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) holder.getResponse()).isContextSaved()).isTrue();
repo.saveContext(SecurityContextHolder.getContext(), holder.getRequest(), holder.getResponse());
// Check it's still the same
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
}
// SEC-2005
@Test
public void writerFlushCausesEarlySaveOfContext() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setSpringSecurityContextKey("imTheContext");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContextHolder.setContext(repo.loadContext(holder));
SecurityContextHolder.getContext().setAuthentication(this.testToken);
holder.getResponse().getWriter().flush();
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) holder.getResponse()).isContextSaved()).isTrue();
repo.saveContext(SecurityContextHolder.getContext(), holder.getRequest(), holder.getResponse());
// Check it's still the same
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
}
// SEC-2005
@Test
public void writerCloseCausesEarlySaveOfContext() throws Exception {
HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
repo.setSpringSecurityContextKey("imTheContext");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
SecurityContextHolder.setContext(repo.loadContext(holder));
SecurityContextHolder.getContext().setAuthentication(this.testToken);
holder.getResponse().getWriter().close();
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) holder.getResponse()).isContextSaved()).isTrue();
repo.saveContext(SecurityContextHolder.getContext(), holder.getRequest(), holder.getResponse());
// Check it's still the same
assertThat(request.getSession().getAttribute("imTheContext")).isEqualTo(SecurityContextHolder.getContext());
}
// SEC-2005
@Test
public void outputStreamFlushCausesEarlySaveOfContext() throws Exception {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	repository.setSpringSecurityContextKey("imTheContext");
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContextHolder.setContext(repository.loadContext(requestResponseHolder));
	SecurityContextHolder.getContext().setAuthentication(this.testToken);
	// Flushing the output stream should persist the context immediately
	requestResponseHolder.getResponse().getOutputStream().flush();
	assertThat(mockRequest.getSession().getAttribute("imTheContext"))
			.isEqualTo(SecurityContextHolder.getContext());
	assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) requestResponseHolder.getResponse()).isContextSaved())
			.isTrue();
	// A later explicit save must leave the stored context unchanged
	repository.saveContext(SecurityContextHolder.getContext(), requestResponseHolder.getRequest(),
			requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession().getAttribute("imTheContext"))
			.isEqualTo(SecurityContextHolder.getContext());
}
// SEC-2005
@Test
public void outputStreamCloseCausesEarlySaveOfContext() throws Exception {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	repository.setSpringSecurityContextKey("imTheContext");
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContextHolder.setContext(repository.loadContext(requestResponseHolder));
	SecurityContextHolder.getContext().setAuthentication(this.testToken);
	// Closing the output stream should persist the context immediately
	requestResponseHolder.getResponse().getOutputStream().close();
	assertThat(mockRequest.getSession().getAttribute("imTheContext"))
			.isEqualTo(SecurityContextHolder.getContext());
	assertThat(((SaveContextOnUpdateOrErrorResponseWrapper) requestResponseHolder.getResponse()).isContextSaved())
			.isTrue();
	// A later explicit save must leave the stored context unchanged
	repository.saveContext(SecurityContextHolder.getContext(), requestResponseHolder.getRequest(),
			requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession().getAttribute("imTheContext"))
			.isEqualTo(SecurityContextHolder.getContext());
}
// SEC-2055
@Test
public void outputStreamCloseDelegate() throws Exception {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	repo.setSpringSecurityContextKey("imTheContext");
	MockHttpServletRequest request = new MockHttpServletRequest();
	HttpServletResponse response = mock(HttpServletResponse.class);
	ServletOutputStream outputstream = mock(ServletOutputStream.class);
	given(response.getOutputStream()).willReturn(outputstream);
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	SecurityContextHolder.setContext(repo.loadContext(holder));
	SecurityContextHolder.getContext().setAuthentication(this.testToken);
	// Closing the wrapped stream must be delegated to the underlying response's stream
	holder.getResponse().getOutputStream().close();
	verify(outputstream).close();
}
// SEC-2055
@Test
public void outputStreamFlushesDelegate() throws Exception {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	repo.setSpringSecurityContextKey("imTheContext");
	MockHttpServletRequest request = new MockHttpServletRequest();
	HttpServletResponse response = mock(HttpServletResponse.class);
	ServletOutputStream outputstream = mock(ServletOutputStream.class);
	given(response.getOutputStream()).willReturn(outputstream);
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	SecurityContextHolder.setContext(repo.loadContext(holder));
	SecurityContextHolder.getContext().setAuthentication(this.testToken);
	// Flushing the wrapped stream must be delegated to the underlying response's stream
	holder.getResponse().getOutputStream().flush();
	verify(outputstream).flush();
}
@Test
public void noSessionIsCreatedIfSessionWasInvalidatedDuringTheRequest() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	// Make sure a session exists before the context is loaded
	mockRequest.getSession();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContextHolder.setContext(repository.loadContext(requestResponseHolder));
	SecurityContextHolder.getContext().setAuthentication(this.testToken);
	// Invalidate mid-request; the save must not resurrect the session
	mockRequest.getSession().invalidate();
	repository.saveContext(SecurityContextHolder.getContext(), requestResponseHolder.getRequest(),
			requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession(false)).isNull();
}
// SEC-1315
@Test
public void noSessionIsCreatedIfAnonymousTokenIsUsed() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContextHolder.setContext(repository.loadContext(requestResponseHolder));
	AnonymousAuthenticationToken anonymousToken = new AnonymousAuthenticationToken("key", "anon",
			AuthorityUtils.createAuthorityList("ANON"));
	SecurityContextHolder.getContext().setAuthentication(anonymousToken);
	// Saving a purely anonymous context must not allocate a session
	repository.saveContext(SecurityContextHolder.getContext(), requestResponseHolder.getRequest(),
			requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession(false)).isNull();
}
// SEC-1587
@Test
public void contextIsRemovedFromSessionIfCurrentContextIsAnonymous() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	SecurityContext sessionContext = SecurityContextHolder.createEmptyContext();
	sessionContext.setAuthentication(this.testToken);
	mockRequest.getSession()
			.setAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, sessionContext);
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest,
			new MockHttpServletResponse());
	repository.loadContext(requestResponseHolder);
	// Downgrade the current context to anonymous during the "request"
	SecurityContextHolder.getContext()
			.setAuthentication(new AnonymousAuthenticationToken("x", "x", this.testToken.getAuthorities()));
	repository.saveContext(SecurityContextHolder.getContext(), requestResponseHolder.getRequest(),
			requestResponseHolder.getResponse());
	// The previously stored authenticated context must have been purged
	assertThat(mockRequest.getSession()
			.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY)).isNull();
}
@Test
public void contextIsRemovedFromSessionIfCurrentContextIsEmpty() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	repository.setSpringSecurityContextKey("imTheContext");
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	SecurityContext sessionContext = SecurityContextHolder.createEmptyContext();
	sessionContext.setAuthentication(this.testToken);
	mockRequest.getSession().setAttribute("imTheContext", sessionContext);
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest,
			new MockHttpServletResponse());
	repository.loadContext(requestResponseHolder);
	// Saving the (empty) current context should clear the stored attribute
	repository.saveContext(SecurityContextHolder.getContext(), requestResponseHolder.getRequest(),
			requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession().getAttribute("imTheContext")).isNull();
}
// SEC-1735
@Test
public void contextIsNotRemovedFromSessionIfContextBeforeExecutionDefault() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, new MockHttpServletResponse());
	repo.loadContext(holder);
	// The session attribute is populated AFTER loadContext, so the repository saw an
	// empty context at the start of the simulated request
	SecurityContext ctxInSession = SecurityContextHolder.createEmptyContext();
	ctxInSession.setAuthentication(this.testToken);
	request.getSession()
			.setAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, ctxInSession);
	SecurityContextHolder.getContext()
			.setAuthentication(
					new AnonymousAuthenticationToken("x", "x", AuthorityUtils.createAuthorityList("ROLE_ANONYMOUS")));
	// Saving an anonymous context must not remove a context it did not load
	repo.saveContext(SecurityContextHolder.getContext(), holder.getRequest(), holder.getResponse());
	assertThat(ctxInSession).isSameAs(
			request.getSession().getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY));
}
// SEC-3070
@Test
public void logoutInvalidateSessionFalseFails() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	SecurityContext ctxInSession = SecurityContextHolder.createEmptyContext();
	ctxInSession.setAuthentication(this.testToken);
	request.getSession()
			.setAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, ctxInSession);
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, new MockHttpServletResponse());
	repo.loadContext(holder);
	// Simulate a logout that clears the authentication but keeps the session alive
	// (logout with invalidate-session=false)
	ctxInSession.setAuthentication(null);
	repo.saveContext(ctxInSession, holder.getRequest(), holder.getResponse());
	// The now-empty context must be removed from the surviving session
	assertThat(request.getSession().getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY))
			.isNull();
}
@Test
@SuppressWarnings("deprecation")
public void sessionDisableUrlRewritingPreventsSessionIdBeingWrittenToUrl() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	final String sessionId = ";jsessionid=id";
	// Response stub that always appends the session id when encoding URLs
	MockHttpServletResponse encodingResponse = new MockHttpServletResponse() {
		@Override
		public String encodeRedirectURL(String url) {
			return url + sessionId;
		}
		@Override
		public String encodeURL(String url) {
			return url + sessionId;
		}
	};
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, encodingResponse);
	repository.loadContext(requestResponseHolder);
	String url = "/aUrl";
	// By default URL encoding is delegated, so the session id shows up
	assertThat(requestResponseHolder.getResponse().encodeRedirectURL(url)).isEqualTo(url + sessionId);
	assertThat(requestResponseHolder.getResponse().encodeURL(url)).isEqualTo(url + sessionId);
	repository.setDisableUrlRewriting(true);
	requestResponseHolder = new HttpRequestResponseHolder(mockRequest, encodingResponse);
	repository.loadContext(requestResponseHolder);
	// With rewriting disabled the wrapper returns the URL untouched
	assertThat(requestResponseHolder.getResponse().encodeRedirectURL(url)).isEqualTo(url);
	assertThat(requestResponseHolder.getResponse().encodeURL(url)).isEqualTo(url);
}
@Test
public void saveContextCustomTrustResolver() {
	SecurityContext contextToSave = SecurityContextHolder.createEmptyContext();
	contextToSave.setAuthentication(this.testToken);
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest,
			new MockHttpServletResponse());
	repository.loadContext(requestResponseHolder);
	AuthenticationTrustResolver customTrustResolver = mock(AuthenticationTrustResolver.class);
	repository.setTrustResolver(customTrustResolver);
	repository.saveContext(contextToSave, requestResponseHolder.getRequest(), requestResponseHolder.getResponse());
	// The injected resolver must be consulted to decide whether the context is anonymous
	verify(customTrustResolver).isAnonymous(contextToSave.getAuthentication());
}
@Test
public void setTrustResolverNull() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	// A null trust resolver must be rejected eagerly
	assertThatIllegalArgumentException().isThrownBy(() -> repository.setTrustResolver(null));
}
// SEC-2578
@Test
public void traverseWrappedRequests() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContext loadedContext = repository.loadContext(requestResponseHolder);
	assertThat(mockRequest.getSession(false)).isNull();
	// Simulate authentication happening while the request is in flight
	loadedContext.setAuthentication(this.testToken);
	// Saving through additional wrapper layers must still locate the save-aware response
	repository.saveContext(loadedContext, new HttpServletRequestWrapper(requestResponseHolder.getRequest()),
			new HttpServletResponseWrapper(requestResponseHolder.getResponse()));
	assertThat(mockRequest.getSession(false)).isNotNull();
	assertThat(mockRequest.getSession()
			.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY))
			.isEqualTo(loadedContext);
}
@Test
public void standardResponseWorks() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	SecurityContext contextToSave = SecurityContextHolder.createEmptyContext();
	contextToSave.setAuthentication(this.testToken);
	// Saving directly (without loadContext wrapping) should still persist to the session
	repository.saveContext(contextToSave, mockRequest, mockResponse);
	assertThat(mockRequest.getSession(false)).isNotNull();
	assertThat(mockRequest.getSession()
			.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY))
			.isEqualTo(contextToSave);
}
@Test
public void saveContextWhenTransientSecurityContextThenSkipped() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	MockHttpServletResponse response = new MockHttpServletResponse();
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	// loadContext is invoked for its side effect of wrapping the request/response;
	// the returned context is not needed here (unused local removed)
	repo.loadContext(holder);
	SecurityContext transientSecurityContext = new TransientSecurityContext();
	Authentication authentication = TestAuthentication.authenticatedUser();
	transientSecurityContext.setAuthentication(authentication);
	repo.saveContext(transientSecurityContext, holder.getRequest(), holder.getResponse());
	MockHttpSession session = (MockHttpSession) request.getSession(false);
	// A transient context must never cause a session to be created
	assertThat(session).isNull();
}
@Test
public void saveContextWhenTransientSecurityContextSubclassThenSkipped() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	MockHttpServletResponse response = new MockHttpServletResponse();
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	// loadContext wraps the request/response; its return value is unused here
	// (unused local removed)
	repo.loadContext(holder);
	// Anonymous subclass: the transient marker must be honoured on subclasses too
	SecurityContext transientSecurityContext = new TransientSecurityContext() {
	};
	Authentication authentication = TestAuthentication.authenticatedUser();
	transientSecurityContext.setAuthentication(authentication);
	repo.saveContext(transientSecurityContext, holder.getRequest(), holder.getResponse());
	MockHttpSession session = (MockHttpSession) request.getSession(false);
	assertThat(session).isNull();
}
@Test
public void saveContextWhenTransientSecurityContextAndSessionExistsThenSkipped() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	request.getSession(); // ensure the session exists
	MockHttpServletResponse response = new MockHttpServletResponse();
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	// loadContext wraps the request/response; its return value is unused here
	// (unused local removed)
	repo.loadContext(holder);
	SecurityContext transientSecurityContext = new TransientSecurityContext();
	Authentication authentication = TestAuthentication.authenticatedUser();
	transientSecurityContext.setAuthentication(authentication);
	repo.saveContext(transientSecurityContext, holder.getRequest(), holder.getResponse());
	MockHttpSession session = (MockHttpSession) request.getSession(false);
	// The pre-existing session must not have gained any attributes
	assertThat(Collections.list(session.getAttributeNames())).isEmpty();
}
@Test
public void saveContextWhenTransientSecurityContextWithCustomAnnotationThenSkipped() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	MockHttpServletResponse response = new MockHttpServletResponse();
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	// loadContext wraps the request/response; its return value is unused here
	// (unused local removed)
	repo.loadContext(holder);
	SecurityContext transientSecurityContext = new TransientSecurityContext();
	Authentication authentication = TestAuthentication.authenticatedUser();
	transientSecurityContext.setAuthentication(authentication);
	repo.saveContext(transientSecurityContext, holder.getRequest(), holder.getResponse());
	MockHttpSession session = (MockHttpSession) request.getSession(false);
	// No session should be created for a transient context
	assertThat(session).isNull();
}
@Test
public void saveContextWhenTransientAuthenticationThenSkipped() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContext loadedContext = repository.loadContext(requestResponseHolder);
	// An authentication marked transient must not be written to the session
	loadedContext.setAuthentication(new SomeTransientAuthentication());
	repository.saveContext(loadedContext, requestResponseHolder.getRequest(), requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession(false)).isNull();
}
@Test
public void saveContextWhenTransientAuthenticationSubclassThenSkipped() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContext loadedContext = repository.loadContext(requestResponseHolder);
	// Subclasses of a transient authentication inherit the marker
	loadedContext.setAuthentication(new SomeTransientAuthenticationSubclass());
	repository.saveContext(loadedContext, requestResponseHolder.getRequest(), requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession(false)).isNull();
}
@Test
public void saveContextWhenTransientAuthenticationAndSessionExistsThenSkipped() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	// Pre-create the session so we can verify nothing gets written into it
	mockRequest.getSession();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContext loadedContext = repository.loadContext(requestResponseHolder);
	loadedContext.setAuthentication(new SomeTransientAuthentication());
	repository.saveContext(loadedContext, requestResponseHolder.getRequest(), requestResponseHolder.getResponse());
	MockHttpSession session = (MockHttpSession) mockRequest.getSession(false);
	// The existing session must remain attribute-free
	assertThat(Collections.list(session.getAttributeNames())).isEmpty();
}
@Test
public void saveContextWhenTransientAuthenticationWithCustomAnnotationThenSkipped() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	HttpRequestResponseHolder requestResponseHolder = new HttpRequestResponseHolder(mockRequest, mockResponse);
	SecurityContext loadedContext = repository.loadContext(requestResponseHolder);
	// Authentication carrying a custom meta-annotated transient marker must also be skipped
	loadedContext.setAuthentication(new SomeOtherTransientAuthentication());
	repository.saveContext(loadedContext, requestResponseHolder.getRequest(), requestResponseHolder.getResponse());
	assertThat(mockRequest.getSession(false)).isNull();
}
// gh-8947
@Test
public void saveContextWhenSecurityContextAuthenticationUpdatedToNullThenSkipped() {
	HttpSessionSecurityContextRepository repo = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest request = new MockHttpServletRequest();
	MockHttpServletResponse response = new MockHttpServletResponse();
	HttpRequestResponseHolder holder = new HttpRequestResponseHolder(request, response);
	SomeOtherTransientAuthentication authentication = new SomeOtherTransientAuthentication();
	repo.loadContext(holder);
	SecurityContext context = mock(SecurityContext.class);
	// First read returns a transient authentication, subsequent reads return null,
	// simulating the authentication being cleared between checks during saveContext
	given(context.getAuthentication()).willReturn(authentication).willReturn(null);
	repo.saveContext(context, holder.getRequest(), holder.getResponse());
	MockHttpSession session = (MockHttpSession) request.getSession(false);
	// No session should be created when the context ends up with no authentication
	assertThat(session).isNull();
}
@Test
public void saveContextWhenSecurityContextEmptyThenRemoveAttributeFromSession() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	SecurityContext emptyContext = SecurityContextHolder.createEmptyContext();
	MockHttpSession session = (MockHttpSession) mockRequest.getSession(true);
	session.setAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY, emptyContext);
	// Saving an empty context removes any previously stored context attribute
	repository.saveContext(emptyContext, mockRequest, mockResponse);
	assertThat(session.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY)).isNull();
}
@Test
public void saveContextWhenSecurityContextEmptyAndNoSessionThenDoesNotCreateSession() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	// An empty context with no pre-existing session should be a no-op
	repository.saveContext(SecurityContextHolder.createEmptyContext(), mockRequest, mockResponse);
	assertThat(mockRequest.getSession(false)).isNull();
}
@Test
public void saveContextWhenSecurityContextThenSaveInSession() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	SecurityContext contextToSave = createSecurityContext(PasswordEncodedUser.user());
	repository.saveContext(contextToSave, mockRequest, mockResponse);
	// A populated context must end up under the default session attribute key
	Object storedContext = mockRequest.getSession()
			.getAttribute(HttpSessionSecurityContextRepository.SPRING_SECURITY_CONTEXT_KEY);
	assertThat(storedContext).isEqualTo(contextToSave);
}
@Test
public void saveContextWhenTransientAuthenticationThenDoNotSave() {
	HttpSessionSecurityContextRepository repository = new HttpSessionSecurityContextRepository();
	MockHttpServletRequest mockRequest = new MockHttpServletRequest();
	MockHttpServletResponse mockResponse = new MockHttpServletResponse();
	SecurityContext transientContext = SecurityContextHolder.createEmptyContext();
	transientContext.setAuthentication(new SomeTransientAuthentication());
	// A transient authentication must not trigger session creation
	repository.saveContext(transientContext, mockRequest, mockResponse);
	assertThat(mockRequest.getSession(false)).isNull();
}
// Builds a SecurityContext holding an authenticated token for the given user details.
private SecurityContext createSecurityContext(UserDetails userDetails) {
	UsernamePasswordAuthenticationToken authenticatedToken = UsernamePasswordAuthenticationToken
			.authenticated(userDetails, userDetails.getPassword(), userDetails.getAuthorities());
	return new SecurityContextImpl(authenticatedToken);
}
@Transient
private static | HttpSessionSecurityContextRepositoryTests |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLIntegerExpr.java | {
"start": 1001,
"end": 6171
} | class ____ extends SQLNumericLiteralExpr implements Comparable<SQLIntegerExpr> {
public static final SQLDataType DATA_TYPE = new SQLDataTypeImpl("bigint");
private Number number;
private String type;
public SQLIntegerExpr(Number number) {
this();
this.number = number;
}
public SQLIntegerExpr(Number number, SQLObject parent) {
this();
this.number = number;
this.parent = parent;
}
public SQLIntegerExpr() {
super(new SQLDataTypeImpl(SQLDataType.Constants.INT));
}
public Number getNumber() {
return this.number;
}
public void setNumber(Number number) {
this.number = number;
}
public void output(StringBuilder buf) {
buf.append(this.number.toString());
}
protected void accept0(SQLASTVisitor visitor) {
visitor.visit(this);
visitor.endVisit(this);
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((number == null) ? 0 : number.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
SQLIntegerExpr other = (SQLIntegerExpr) obj;
if (number == null) {
if (other.number != null) {
return false;
}
} else if (!number.equals(other.number)) {
return false;
}
return true;
}
@Override
public Object getValue() {
return this.number;
}
public SQLIntegerExpr clone() {
return new SQLIntegerExpr(this.number);
}
public SQLDataType computeDataType() {
return DATA_TYPE;
}
public void decrement() {
if (number instanceof Integer) {
number = Integer.valueOf((Integer) number.intValue() - 1);
} else if (number instanceof Long) {
number = Long.valueOf((Long) number.longValue() - 1);
} else {
throw new FastsqlException("decrement not support.");
}
}
public static boolean isZero(SQLExpr expr) {
if (expr instanceof SQLIntegerExpr) {
Number number = ((SQLIntegerExpr) expr).getNumber();
return number != null && number.intValue() == 0;
}
return false;
}
public static SQLIntegerExpr substract(SQLIntegerExpr a, SQLIntegerExpr b) {
int val = a.number.intValue() - b.number.intValue();
return new SQLIntegerExpr(val);
}
public static SQLIntegerExpr least(SQLIntegerExpr a, SQLIntegerExpr b) {
if (a == null) {
return b;
}
if (a.number.intValue() <= b.number.intValue()) {
return a;
}
return b;
}
public static SQLIntegerExpr greatst(SQLIntegerExpr a, SQLIntegerExpr b) {
if (a.number.intValue() >= b.number.intValue()) {
return a;
}
return b;
}
public static SQLIntegerExpr ofIntOrLong(long value) {
if (value >= Integer.MIN_VALUE && value <= Integer.MAX_VALUE) {
return new SQLIntegerExpr((int) value);
}
return new SQLIntegerExpr(value);
}
public static SQLIntegerExpr add(long a, long b) {
long r = a + b;
if (a > 0 && b > 0 && r <= 0) {
return new SQLIntegerExpr(BigInteger.valueOf(a).add(BigInteger.valueOf(b)));
}
return new SQLIntegerExpr(r);
}
@Override
public int compareTo(SQLIntegerExpr o) {
if (this.number instanceof Integer && o.number instanceof Integer) {
return ((Integer) this.number).compareTo((Integer) o.number);
}
if (this.number instanceof Long && o.number instanceof Long) {
return ((Long) this.number).compareTo((Long) o.number);
}
if (this.number instanceof BigDecimal && o.number instanceof BigDecimal) {
return ((BigDecimal) this.number).compareTo((BigDecimal) o.number);
}
if (this.number instanceof Float && o.number instanceof Float) {
return ((Float) this.number).compareTo((Float) o.number);
}
if (this.number instanceof Double && o.number instanceof Double) {
return ((Double) this.number).compareTo((Double) o.number);
}
return -1;
}
public SQLIntegerExpr negative() {
Number number = this.number;
if (number instanceof Integer) {
number = -number.intValue();
} else if (number instanceof Long) {
number = -number.longValue();
} else if (number instanceof BigInteger) {
number = ((BigInteger) number).negate();
} else {
throw new UnsupportedOperationException();
}
return new SQLIntegerExpr(number);
}
}
| SQLIntegerExpr |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/servlet/context/AbstractServletWebServerMvcIntegrationTests.java | {
"start": 3979,
"end": 4299
} | class ____ {
@Bean
DispatcherServlet dispatcherServlet() {
return new DispatcherServlet();
}
@Bean
HelloWorldController helloWorldController() {
return new HelloWorldController();
}
}
@Configuration(proxyBeanMethods = false)
@EnableWebMvc
@PropertySource("classpath:conf.properties")
static | Config |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/nonctor/NonDefaultConstructorTest0.java | {
"start": 172,
"end": 558
} | class ____ extends TestCase {
public void test_non_default_constructor() throws Exception {
Model model = JSON.parseObject("{\"id\":1001,\"value\":{\"id\":2001}}", Model.class);
assertNotNull(model);
assertEquals(1001, model.id);
assertNotNull(model.value);
assertEquals(2001, model.value.id);
}
public static | NonDefaultConstructorTest0 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/notfound/OptionalLazyNotFoundTest.java | {
"start": 14222,
"end": 14802
} | class ____ extends Person {
@Id
private Long id;
@OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST)
@MapsId
@NotFound(action = NotFoundAction.IGNORE)
@JoinColumn(foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT))
private City city;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public City getCity() {
return city;
}
@Override
public void setCity(City city) {
this.city = city;
}
}
@Entity
@Table(name = "PersonMapsIdColumnJoinException")
public static | PersonMapsIdSelectIgnore |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.