language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java | {
"start": 1218,
"end": 5777
} | class ____ extends ESTestCase {
private ThreadPool threadPool;
private DeterministicTaskQueue taskQueue;
@Before
public void init() {
threadPool = createThreadPool(inferenceUtilityExecutors());
taskQueue = new DeterministicTaskQueue();
}
@After
public void shutdown() {
terminate(threadPool);
}
public void testExecute_LogsOnlyOnce() {
var mockedLogger = mockLogger();
try (var throttler = new ThrottlerManager(Settings.EMPTY, taskQueue.getThreadPool())) {
throttler.init(mockClusterServiceEmpty());
throttler.warn(mockedLogger.logger(), "test", new IllegalArgumentException("failed"));
mockedLogger.verify(1, "test");
mockedLogger.verifyThrowable(1);
mockedLogger.clearInvocations();
throttler.warn(mockedLogger.logger(), "test", new IllegalArgumentException("failed"));
mockedLogger.verifyNever();
mockedLogger.verifyNoMoreInteractions();
}
}
public void testExecute_AllowsDifferentMessagesToBeLogged() {
var mockedLogger = mockLogger();
try (var throttler = new ThrottlerManager(Settings.EMPTY, threadPool)) {
throttler.init(mockClusterServiceEmpty());
throttler.warn(mockedLogger.logger(), "test", new IllegalArgumentException("failed"));
mockedLogger.verify(1, "test");
mockedLogger.verifyThrowable(1);
mockedLogger.clearInvocations();
throttler.warn(mockedLogger.logger(), "a different message", new IllegalArgumentException("failed"));
mockedLogger.verify(1, "a different message");
mockedLogger.verifyThrowable(1);
mockedLogger.verifyNoMoreInteractions();
}
}
public void testStartsNewThrottler_WhenLoggingIntervalIsChanged() {
var mockThreadPool = mock(ThreadPool.class);
when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class));
try (var manager = new ThrottlerManager(Settings.EMPTY, mockThreadPool)) {
manager.init(mockClusterServiceEmpty());
verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(TimeValue.timeValueHours(1)), any());
clearInvocations(mockThreadPool);
var loggingInterval = TimeValue.timeValueSeconds(1);
var currentThrottler = manager.getThrottler();
manager.setLogInterval(loggingInterval);
verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(TimeValue.timeValueSeconds(1)), any());
assertNotSame(currentThrottler, manager.getThrottler());
}
}
public void testStartsNewThrottler_WhenLoggingIntervalIsChanged_ThreadEmitsPreviousObjectsMessages() {
var mockedLogger = mockLogger();
try (var manager = new ThrottlerManager(Settings.EMPTY, taskQueue.getThreadPool())) {
manager.init(mockClusterServiceEmpty());
// first log message should be automatically emitted
manager.warn(mockedLogger.logger(), "test", new IllegalArgumentException("failed"));
mockedLogger.verify(1, "test");
mockedLogger.verifyThrowable(1);
mockedLogger.clearInvocations();
// This should not be emitted but should increment the counter to 1
manager.warn(mockedLogger.logger(), "test", new IllegalArgumentException("failed"));
mockedLogger.verifyNever();
var loggingInterval = TimeValue.timeValueSeconds(1);
var currentThrottler = manager.getThrottler();
manager.setLogInterval(loggingInterval);
assertNotSame(currentThrottler, manager.getThrottler());
mockedLogger.clearInvocations();
// This should not be emitted but should increment the counter to 2
manager.warn(mockedLogger.logger(), "test", new IllegalArgumentException("failed"));
mockedLogger.verifyNever();
mockedLogger.clearInvocations();
taskQueue.advanceTime();
taskQueue.runAllRunnableTasks();
mockedLogger.verifyContains(1, "test, repeated 2 times");
}
}
public static ThrottlerManager mockThrottlerManager() {
var mockManager = mock(ThrottlerManager.class);
when(mockManager.getThrottler()).thenReturn(mock(Throttler.class));
return mockManager;
}
}
| ThrottlerManagerTests |
java | apache__camel | components/camel-platform-http/src/test/java/org/apache/camel/component/platform/http/PlatformHttpReturnHttpRequestHeadersTest.java | {
"start": 1097,
"end": 4677
} | class ____ extends AbstractPlatformHttpTest {
@Test
void testReturnHttpRequestHeadersFalse() {
given()
.header("Accept", "application/json")
.header("User-Agent", "User-Agent-Camel")
.port(port)
.expect()
.statusCode(200)
.header("Accept", (String) null)
.header("User-Agent", (String) null)
.when()
.get("/getWithoutRequestHeadersReturn");
}
@Test
void testReturnHttpRequestHeadersTrue() {
given()
.header("Accept", "application/json")
.header("User-Agent", "User-Agent-Camel")
.port(port)
.expect()
.statusCode(200)
.header("Accept", "application/json")
.header("User-Agent", "User-Agent-Camel")
.when()
.get("/getWithRequestHeadersReturn");
}
@Test
void testReturnHttpRequestHeadersDefault() {
given()
.header("Accept", "application/json")
.header("User-Agent", "User-Agent-Camel")
.port(port)
.expect()
.statusCode(200)
.header("Accept", (String) null)
.header("User-Agent", (String) null)
.when()
.get("/get");
}
@Test
void testReturnHttpRequestHeadersFalseWithCustomHeaderFilterStrategy() {
given()
.header("Accept", "application/json")
.header("User-Agent", "User-Agent-Camel")
.header("Custom_In_Header", "Custom_In_Header_Value")
.header("Custom_Out_Header", "Custom_Out_Header_Value")
.port(port)
.expect()
.statusCode(200)
.header("Accept", (String) null)
.header("User-Agent", (String) null)
.header("Custom_In_Header", (String) null)
.header("Custom_Out_Header", (String) null)
.body(is("Custom_In_Header=, Custom_Out_Header=Custom_Out_Header_Value"))
.when()
.get("/getWithCustomHeaderFilterStrategy");
}
@Override
protected RouteBuilder routes() {
return new RouteBuilder() {
@Override
public void configure() {
DefaultHeaderFilterStrategy testHeaderFilterStrategy = new DefaultHeaderFilterStrategy();
testHeaderFilterStrategy.getInFilter().add("Custom_In_Header");
testHeaderFilterStrategy.getOutFilter().add("Custom_Out_Header");
getContext().getRegistry().bind("testHeaderFilterStrategy", testHeaderFilterStrategy);
from("platform-http:/getWithoutRequestHeadersReturn?returnHttpRequestHeaders=false")
.setBody().constant("getWithoutRequestHeadersReturn");
from("platform-http:/getWithRequestHeadersReturn?returnHttpRequestHeaders=true")
.setBody().constant("getWithRequestHeadersReturn");
from("platform-http:/getWithCustomHeaderFilterStrategy?headerFilterStrategy=#testHeaderFilterStrategy")
.setBody()
.simple("Custom_In_Header=${header.Custom_In_Header}, Custom_Out_Header=${header.Custom_Out_Header}");
from("platform-http:/get")
.setBody().constant("get");
}
};
}
}
| PlatformHttpReturnHttpRequestHeadersTest |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/classic-inst/src/main/java/org/acme/HelloResource.java | {
"start": 215,
"end": 860
} | class ____ {
static final UUID uuid;
static {
uuid = UUID.randomUUID();
}
private final HelloService resource;
@Inject
public HelloResource(HelloService resource) {
this.resource = resource;
}
@GET
@Path("hello")
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
return "hello";
}
@GET
@Path("name")
@Produces(MediaType.TEXT_PLAIN)
public String name() {
return "hello " + resource.name();
}
@GET
@Path("uuid")
@Produces(MediaType.TEXT_PLAIN)
public String uuid() {
return uuid.toString();
}
}
| HelloResource |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java | {
"start": 719,
"end": 5021
} | class ____ extends ESTestCase {
public void testDocValueWithWholeMillisecondStringValue() {
long millis = randomNonNegativeLong();
Instant time = Instant.ofEpochMilli(millis);
DateFormatter formatter = DateFormatter.forPattern("epoch_millis");
String timeAsString = formatter.format(time);
SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", timeAsString).build();
ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE);
assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis }));
assertThat(timeField.getName(), equalTo("time"));
assertThat(timeField.getSearchField(), equalTo("time"));
assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos"));
assertThat(timeField.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE));
assertThat(timeField.getDocValueFormat(), equalTo("epoch_millis"));
assertThat(timeField.supportsFromSource(), is(false));
expectThrows(UnsupportedOperationException.class, timeField::newFromSource);
assertThat(timeField.isMultiField(), is(false));
expectThrows(UnsupportedOperationException.class, timeField::getParentField);
}
public void testDocValueWithFractionalMillisecondStringValue() {
long millis = randomNonNegativeLong();
int extraNanos = randomIntBetween(1, 999999);
Instant time = Instant.ofEpochMilli(millis).plusNanos(extraNanos);
DateFormatter formatter = DateFormatter.forPattern("epoch_millis");
String timeAsString = formatter.format(time);
SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", timeAsString).build();
ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE);
assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis }));
assertThat(timeField.getName(), equalTo("time"));
assertThat(timeField.getSearchField(), equalTo("time"));
assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos"));
assertThat(timeField.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE));
assertThat(timeField.getDocValueFormat(), equalTo("epoch_millis"));
assertThat(timeField.supportsFromSource(), is(false));
expectThrows(UnsupportedOperationException.class, timeField::newFromSource);
assertThat(timeField.isMultiField(), is(false));
expectThrows(UnsupportedOperationException.class, timeField::getParentField);
}
public void testScriptWithLongValue() {
long millis = randomLong();
SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build();
ExtractedField timeField = new TimeField("time", ExtractedField.Method.SCRIPT_FIELD);
assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis }));
assertThat(timeField.getName(), equalTo("time"));
assertThat(timeField.getSearchField(), equalTo("time"));
assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos"));
assertThat(timeField.getMethod(), equalTo(ExtractedField.Method.SCRIPT_FIELD));
expectThrows(UnsupportedOperationException.class, timeField::getDocValueFormat);
assertThat(timeField.supportsFromSource(), is(false));
expectThrows(UnsupportedOperationException.class, timeField::newFromSource);
assertThat(timeField.isMultiField(), is(false));
expectThrows(UnsupportedOperationException.class, timeField::getParentField);
}
public void testUnknownFormat() {
final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new Object()).build();
final ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE);
assertThat(
expectThrows(IllegalStateException.class, () -> timeField.value(hit, new SourceSupplier(hit))).getMessage(),
startsWith("Unexpected value for a time field")
);
}
public void testSourceNotSupported() {
expectThrows(IllegalArgumentException.class, () -> new TimeField("foo", ExtractedField.Method.SOURCE));
}
}
| TimeFieldTests |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformExtension.java | {
"start": 398,
"end": 1004
} | interface ____ {
boolean includeNodeInfo();
Settings getTransformInternalIndexAdditionalSettings();
/**
* Provides destination index settings, hardcoded at the moment. In future this might be customizable or generation could be based on
* source settings.
*/
Settings getTransformDestinationIndexSettings();
// TODO(jkuipers): remove this default implementation after the ServerlessTransformPlugin
// in the elasticsearch-serverless project is updated.
default TimeValue getMinFrequency() {
return TimeValue.timeValueSeconds(1);
}
}
| TransformExtension |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/KeyCertOptions.java | {
"start": 753,
"end": 2577
} | interface ____ {
/**
* @return a copy of these options
*/
KeyCertOptions copy();
/**
* Create and return the key manager factory for these options.
* <p>
* The returned key manager factory should be already initialized and ready to use.
*
* @param vertx the vertx instance
* @return the key manager factory
*/
KeyManagerFactory getKeyManagerFactory(Vertx vertx) throws Exception;
/**
* Returns a function that maps SNI server names to {@link KeyManagerFactory} instance.
*
* The returned {@code KeyManagerFactory} must satisfies these rules:
*
* <ul>
* <li>The store private key must match the indicated server name for a null alias.</li>
* <li>The store certificate chain must match the indicated server name for a null alias.</li>
* </ul>
*
* The mapper is only used when the server has SNI enabled and the client indicated a server name.
* <p>
* The returned function may return {@code null} in which case the default key manager provided by {@link #getKeyManagerFactory(Vertx)}
* will be used.
*/
Function<String, KeyManagerFactory> keyManagerFactoryMapper(Vertx vertx) throws Exception;
/**
* Returns a {@link KeyCertOptions} from the provided {@link X509KeyManager}
*
* @param keyManager the keyManager instance
* @return the {@link KeyCertOptions}
*/
static KeyCertOptions wrap(X509KeyManager keyManager) {
return new KeyManagerFactoryOptions(keyManager);
}
/**
* Returns a {@link KeyCertOptions} from the provided {@link KeyManagerFactory}
*
* @param keyManagerFactory the keyManagerFactory instance
* @return the {@link KeyCertOptions}
*/
static KeyCertOptions wrap(KeyManagerFactory keyManagerFactory) {
return new KeyManagerFactoryOptions(keyManagerFactory);
}
}
| KeyCertOptions |
java | elastic__elasticsearch | plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistedClass.java | {
"start": 767,
"end": 1565
} | class ____ {
public static final int CONSTANT = 42;
public int publicMember;
private int privateMember;
public ExampleWhitelistedClass(int publicMember, int privateMember) {
this.publicMember = publicMember;
this.privateMember = privateMember;
}
public int getPrivateMemberAccessor() {
return this.privateMember;
}
public void setPrivateMemberAccessor(int value) {
this.privateMember = value;
}
public static void staticMethod() {
// electricity
}
// example augmentation method
public static int toInt(String x) {
return Integer.parseInt(x);
}
// example method to attach annotations in whitelist
public void annotate() {
// some logic here
}
}
| ExampleWhitelistedClass |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/support/SimpleTransactionScope.java | {
"start": 3670,
"end": 4534
} | class ____ implements TransactionSynchronization {
private final ScopedObjectsHolder scopedObjects;
public CleanupSynchronization(ScopedObjectsHolder scopedObjects) {
this.scopedObjects = scopedObjects;
}
@Override
public void suspend() {
TransactionSynchronizationManager.unbindResource(SimpleTransactionScope.this);
}
@Override
public void resume() {
TransactionSynchronizationManager.bindResource(SimpleTransactionScope.this, this.scopedObjects);
}
@Override
public void afterCompletion(int status) {
TransactionSynchronizationManager.unbindResourceIfPossible(SimpleTransactionScope.this);
for (Runnable callback : this.scopedObjects.destructionCallbacks.values()) {
callback.run();
}
this.scopedObjects.destructionCallbacks.clear();
this.scopedObjects.scopedInstances.clear();
}
}
}
| CleanupSynchronization |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_startsWith_with_Integer_Arguments_Test.java | {
"start": 1015,
"end": 1489
} | class ____ extends ByteArrayAssertBaseTest {
@Override
protected ByteArrayAssert invoke_api_method() {
return assertions.startsWith(6, 8);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertStartsWith(getInfo(assertions), getActual(assertions), IntArrays.arrayOf(6, 8));
}
@Test
void invoke_api_like_user() {
assertThat(new byte[] { 1, 2, 3 }).startsWith(1, 2);
}
}
| ByteArrayAssert_startsWith_with_Integer_Arguments_Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java | {
"start": 3479,
"end": 4203
} | class ____ implements NewRpcService {
@Override
public EmptyResponseProto ping(RpcController unused,
EmptyRequestProto request) throws ServiceException {
// Ensure clientId is received
byte[] clientId = Server.getClientId();
assertNotNull(Server.getClientId());
assertEquals(16, clientId.length);
return EmptyResponseProto.newBuilder().build();
}
@Override
public OptResponseProto echo(RpcController unused, OptRequestProto request)
throws ServiceException {
return OptResponseProto.newBuilder().setMessage(request.getMessage())
.build();
}
}
@ProtocolInfo(protocolName = "testProto", protocolVersion = 2)
public static | NewServerImpl |
java | apache__camel | components/camel-ai/camel-langchain4j-tokenizer/src/main/java/org/apache/camel/component/langchain4j/tokenizer/config/LangChain4JQwenConfiguration.java | {
"start": 877,
"end": 1118
} | class ____ extends LangChain4JConfiguration {
private String apiKey;
public String getApiKey() {
return apiKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
}
| LangChain4JQwenConfiguration |
java | apache__camel | components/camel-caffeine/src/test/java/org/apache/camel/component/caffeine/cache/CaffeineCacheProducerMultiOperationSameCacheTest.java | {
"start": 1121,
"end": 2381
} | class ____ extends CaffeineCacheTestSupport {
@Test
void testSameCachePutAndGet() throws Exception {
fluentTemplate().withBody("1").to("direct://start").send();
MockEndpoint mock1 = getMockEndpoint("mock:result");
mock1.expectedMinimumMessageCount(1);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, true);
mock1.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
assertEquals("1", mock1.getExchanges().get(0).getIn().getBody());
MockEndpoint.assertIsSatisfied(context);
}
// ****************************
// Route
// ****************************
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct://start")
.to("caffeine-cache://cache?action=PUT&key=1")
.to("caffeine-cache://cache?key=1&action=GET")
.to("log:org.apache.camel.component.caffeine?level=INFO&showAll=true&multiline=true")
.log("Test! ${body}")
.to("mock:result");
}
};
}
}
| CaffeineCacheProducerMultiOperationSameCacheTest |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/query/TimestampedRangeQuery.java | {
"start": 1581,
"end": 5302
} | class ____<K, V> implements Query<KeyValueIterator<K, ValueAndTimestamp<V>>> {
private final Optional<K> lower;
private final Optional<K> upper;
private final ResultOrder order;
private TimestampedRangeQuery(final Optional<K> lower, final Optional<K> upper, final ResultOrder order) {
this.lower = lower;
this.upper = upper;
this.order = order;
}
/**
* Interactive range query using a lower and upper bound to filter the keys returned.
* @param lower The key that specifies the lower bound of the range
* @param upper The key that specifies the upper bound of the range
* @param <K> The key type
* @param <V> The value type
*/
public static <K, V> TimestampedRangeQuery<K, V> withRange(final K lower, final K upper) {
return new TimestampedRangeQuery<>(Optional.ofNullable(lower), Optional.ofNullable(upper), ResultOrder.ANY);
}
/**
* Interactive range query using an upper bound to filter the keys returned.
* If both {@code <K,V>} are null, RangQuery returns a full range scan.
* @param upper The key that specifies the upper bound of the range
* @param <K> The key type
* @param <V> The value type
*/
public static <K, V> TimestampedRangeQuery<K, V> withUpperBound(final K upper) {
return new TimestampedRangeQuery<>(Optional.empty(), Optional.of(upper), ResultOrder.ANY);
}
/**
* Interactive range query using a lower bound to filter the keys returned.
* @param lower The key that specifies the lower bound of the range
* @param <K> The key type
* @param <V> The value type
*/
public static <K, V> TimestampedRangeQuery<K, V> withLowerBound(final K lower) {
return new TimestampedRangeQuery<>(Optional.of(lower), Optional.empty(), ResultOrder.ANY);
}
/**
* Determines if the serialized byte[] of the keys in ascending or descending or unordered order.
* Order is based on the serialized byte[] of the keys, not the 'logical' key order.
* @return return the order of return records base on the serialized byte[] of the keys (can be unordered, or in ascending, or in descending order).
*/
public ResultOrder resultOrder() {
return order;
}
/**
* Set the query to return the serialized byte[] of the keys in descending order.
* Order is based on the serialized byte[] of the keys, not the 'logical' key order.
* @return a new RangeQuery instance with descending flag set.
*/
public TimestampedRangeQuery<K, V> withDescendingKeys() {
return new TimestampedRangeQuery<>(this.lower, this.upper, ResultOrder.DESCENDING);
}
/**
* Set the query to return the serialized byte[] of the keys in ascending order.
* Order is based on the serialized byte[] of the keys, not the 'logical' key order.
* @return a new RangeQuery instance with ascending flag set.
*/
public TimestampedRangeQuery<K, V> withAscendingKeys() {
return new TimestampedRangeQuery<>(this.lower, this.upper, ResultOrder.ASCENDING);
}
/**
* Interactive scan query that returns all records in the store.
* @param <K> The key type
* @param <V> The value type
*/
public static <K, V> TimestampedRangeQuery<K, V> withNoBounds() {
return new TimestampedRangeQuery<>(Optional.empty(), Optional.empty(), ResultOrder.ANY);
}
/**
* The lower bound of the query, if specified.
*/
public Optional<K> lowerBound() {
return lower;
}
/**
* The upper bound of the query, if specified
*/
public Optional<K> upperBound() {
return upper;
}
} | TimestampedRangeQuery |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OidcClientRegistrationTests.java | {
"start": 8331,
"end": 31088
} | class ____ {
private static final String ISSUER = "https://example.com:8443/issuer1";
private static final String DEFAULT_TOKEN_ENDPOINT_URI = "/oauth2/token";
private static final String DEFAULT_OIDC_CLIENT_REGISTRATION_ENDPOINT_URI = "/connect/register";
private static final HttpMessageConverter<OAuth2AccessTokenResponse> accessTokenHttpResponseConverter = new OAuth2AccessTokenResponseHttpMessageConverter();
private static final HttpMessageConverter<OidcClientRegistration> clientRegistrationHttpMessageConverter = new OidcClientRegistrationHttpMessageConverter();
private static EmbeddedDatabase db;
private static JWKSource<SecurityContext> jwkSource;
private static JWKSet clientJwkSet;
private static JwtEncoder jwtClientAssertionEncoder;
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
private MockMvc mvc;
@Autowired
private JdbcOperations jdbcOperations;
@Autowired
private RegisteredClientRepository registeredClientRepository;
@Autowired
private AuthorizationServerSettings authorizationServerSettings;
private static AuthenticationConverter authenticationConverter;
private static Consumer<List<AuthenticationConverter>> authenticationConvertersConsumer;
private static AuthenticationProvider authenticationProvider;
private static Consumer<List<AuthenticationProvider>> authenticationProvidersConsumer;
private static AuthenticationSuccessHandler authenticationSuccessHandler;
private static AuthenticationFailureHandler authenticationFailureHandler;
private MockWebServer server;
private String clientJwkSetUrl;
@BeforeAll
public static void init() {
JWKSet jwkSet = new JWKSet(TestJwks.DEFAULT_RSA_JWK);
jwkSource = (jwkSelector, securityContext) -> jwkSelector.select(jwkSet);
clientJwkSet = new JWKSet(TestJwks.generateRsa().build());
jwtClientAssertionEncoder = new NimbusJwtEncoder(
(jwkSelector, securityContext) -> jwkSelector.select(clientJwkSet));
db = new EmbeddedDatabaseBuilder().generateUniqueName(true)
.setType(EmbeddedDatabaseType.HSQL)
.setScriptEncoding("UTF-8")
.addScript("org/springframework/security/oauth2/server/authorization/oauth2-authorization-schema.sql")
.addScript(
"org/springframework/security/oauth2/server/authorization/client/oauth2-registered-client-schema.sql")
.build();
authenticationConverter = mock(AuthenticationConverter.class);
authenticationConvertersConsumer = mock(Consumer.class);
authenticationProvider = mock(AuthenticationProvider.class);
authenticationProvidersConsumer = mock(Consumer.class);
authenticationSuccessHandler = mock(AuthenticationSuccessHandler.class);
authenticationFailureHandler = mock(AuthenticationFailureHandler.class);
}
@BeforeEach
public void setup() throws Exception {
this.server = new MockWebServer();
this.server.start();
this.clientJwkSetUrl = this.server.url("/jwks").toString();
// @formatter:off
MockResponse response = new MockResponse()
.setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
.setBody(clientJwkSet.toString());
// @formatter:on
this.server.enqueue(response);
given(authenticationProvider.supports(OidcClientRegistrationAuthenticationToken.class)).willReturn(true);
}
@AfterEach
public void tearDown() throws Exception {
this.server.shutdown();
this.jdbcOperations.update("truncate table oauth2_authorization");
this.jdbcOperations.update("truncate table oauth2_registered_client");
reset(authenticationConverter);
reset(authenticationConvertersConsumer);
reset(authenticationProvider);
reset(authenticationProvidersConsumer);
reset(authenticationSuccessHandler);
reset(authenticationFailureHandler);
}
@AfterAll
public static void destroy() {
db.shutdown();
}
@Test
public void requestWhenClientRegistrationRequestAuthorizedThenClientRegistrationResponse() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.scope("scope1")
.scope("scope2")
.build();
// @formatter:on
OidcClientRegistration clientRegistrationResponse = registerClient(clientRegistration);
assertThat(clientRegistrationResponse.getClientId()).isNotNull();
assertThat(clientRegistrationResponse.getClientIdIssuedAt()).isNotNull();
assertThat(clientRegistrationResponse.getClientSecret()).isNotNull();
assertThat(clientRegistrationResponse.getClientSecretExpiresAt()).isNull();
assertThat(clientRegistrationResponse.getClientName()).isEqualTo(clientRegistration.getClientName());
assertThat(clientRegistrationResponse.getRedirectUris())
.containsExactlyInAnyOrderElementsOf(clientRegistration.getRedirectUris());
assertThat(clientRegistrationResponse.getGrantTypes())
.containsExactlyInAnyOrderElementsOf(clientRegistration.getGrantTypes());
assertThat(clientRegistrationResponse.getResponseTypes())
.containsExactly(OAuth2AuthorizationResponseType.CODE.getValue());
assertThat(clientRegistrationResponse.getScopes())
.containsExactlyInAnyOrderElementsOf(clientRegistration.getScopes());
assertThat(clientRegistrationResponse.getTokenEndpointAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue());
assertThat(clientRegistrationResponse.getIdTokenSignedResponseAlgorithm())
.isEqualTo(SignatureAlgorithm.RS256.getName());
assertThat(clientRegistrationResponse.getRegistrationClientUrl()).isNotNull();
assertThat(clientRegistrationResponse.getRegistrationAccessToken()).isNotEmpty();
}
@Test
public void requestWhenClientConfigurationRequestAuthorizedThenClientRegistrationResponse() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.scope("scope1")
.scope("scope2")
.build();
// @formatter:on
OidcClientRegistration clientRegistrationResponse = registerClient(clientRegistration);
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.setBearerAuth(clientRegistrationResponse.getRegistrationAccessToken());
MvcResult mvcResult = this.mvc
.perform(get(clientRegistrationResponse.getRegistrationClientUrl().toURI()).headers(httpHeaders))
.andExpect(status().isOk())
.andExpect(header().string(HttpHeaders.CACHE_CONTROL, containsString("no-store")))
.andExpect(header().string(HttpHeaders.PRAGMA, containsString("no-cache")))
.andReturn();
OidcClientRegistration clientConfigurationResponse = readClientRegistrationResponse(mvcResult.getResponse());
assertThat(clientConfigurationResponse.getClientId()).isEqualTo(clientRegistrationResponse.getClientId());
assertThat(clientConfigurationResponse.getClientIdIssuedAt())
.isEqualTo(clientRegistrationResponse.getClientIdIssuedAt());
assertThat(clientConfigurationResponse.getClientSecret()).isNotNull();
assertThat(clientConfigurationResponse.getClientSecretExpiresAt())
.isEqualTo(clientRegistrationResponse.getClientSecretExpiresAt());
assertThat(clientConfigurationResponse.getClientName()).isEqualTo(clientRegistrationResponse.getClientName());
assertThat(clientConfigurationResponse.getRedirectUris())
.containsExactlyInAnyOrderElementsOf(clientRegistrationResponse.getRedirectUris());
assertThat(clientConfigurationResponse.getGrantTypes())
.containsExactlyInAnyOrderElementsOf(clientRegistrationResponse.getGrantTypes());
assertThat(clientConfigurationResponse.getResponseTypes())
.containsExactlyInAnyOrderElementsOf(clientRegistrationResponse.getResponseTypes());
assertThat(clientConfigurationResponse.getScopes())
.containsExactlyInAnyOrderElementsOf(clientRegistrationResponse.getScopes());
assertThat(clientConfigurationResponse.getTokenEndpointAuthenticationMethod())
.isEqualTo(clientRegistrationResponse.getTokenEndpointAuthenticationMethod());
assertThat(clientConfigurationResponse.getIdTokenSignedResponseAlgorithm())
.isEqualTo(clientRegistrationResponse.getIdTokenSignedResponseAlgorithm());
assertThat(clientConfigurationResponse.getRegistrationClientUrl())
.isEqualTo(clientRegistrationResponse.getRegistrationClientUrl());
assertThat(clientConfigurationResponse.getRegistrationAccessToken()).isNull();
}
@Test
public void requestWhenClientRegistrationEndpointCustomizedThenUsed() throws Exception {
// Configuration that customizes the client registration endpoint with mock
// converters, providers and success/failure handlers.
this.spring.register(CustomClientRegistrationConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.scope("scope1")
.scope("scope2")
.build();
// @formatter:on
// Stub the mocked success handler to write the registration back as a 201 response,
// since a bare mock would otherwise leave the response body empty and registerClient()
// could not parse it.
willAnswer((invocation) -> {
HttpServletResponse response = invocation.getArgument(1, HttpServletResponse.class);
ServletServerHttpResponse httpResponse = new ServletServerHttpResponse(response);
httpResponse.setStatusCode(HttpStatus.CREATED);
new OidcClientRegistrationHttpMessageConverter().write(clientRegistration, null, httpResponse);
return null;
}).given(authenticationSuccessHandler).onAuthenticationSuccess(any(), any(), any());
registerClient(clientRegistration);
// The custom authentication converter must have been consulted for the request.
verify(authenticationConverter).convert(any());
// The converters consumer sees exactly two entries: the custom converter and the
// framework's OidcClientRegistrationAuthenticationConverter.
ArgumentCaptor<List<AuthenticationConverter>> authenticationConvertersCaptor = ArgumentCaptor
.forClass(List.class);
verify(authenticationConvertersConsumer).accept(authenticationConvertersCaptor.capture());
List<AuthenticationConverter> authenticationConverters = authenticationConvertersCaptor.getValue();
assertThat(authenticationConverters).hasSize(2)
.allMatch((converter) -> converter == authenticationConverter
|| converter instanceof OidcClientRegistrationAuthenticationConverter);
verify(authenticationProvider).authenticate(any());
// The providers consumer sees three entries: the custom provider plus the two
// framework providers (registration and configuration).
ArgumentCaptor<List<AuthenticationProvider>> authenticationProvidersCaptor = ArgumentCaptor
.forClass(List.class);
verify(authenticationProvidersConsumer).accept(authenticationProvidersCaptor.capture());
List<AuthenticationProvider> authenticationProviders = authenticationProvidersCaptor.getValue();
assertThat(authenticationProviders).hasSize(3)
.allMatch((provider) -> provider == authenticationProvider
|| provider instanceof OidcClientRegistrationAuthenticationProvider
|| provider instanceof OidcClientConfigurationAuthenticationProvider);
// Success path: only the success handler fires, never the failure handler.
verify(authenticationSuccessHandler).onAuthenticationSuccess(any(), any(), any());
verifyNoInteractions(authenticationFailureHandler);
}
@Test
public void requestWhenClientRegistrationEndpointCustomizedWithAuthenticationFailureHandlerThenUsed()
throws Exception {
this.spring.register(CustomClientRegistrationConfiguration.class).autowire();
// Force authentication to fail so the customized failure handler is exercised.
given(authenticationProvider.authenticate(any())).willThrow(new OAuth2AuthenticationException("error"));
this.mvc.perform(get(ISSUER.concat(DEFAULT_OIDC_CLIENT_REGISTRATION_ENDPOINT_URI))
.param(OAuth2ParameterNames.CLIENT_ID, "invalid")
.with(jwt()));
// Failure path: only the failure handler fires, never the success handler.
verify(authenticationFailureHandler).onAuthenticationFailure(any(), any(), any());
verifyNoInteractions(authenticationSuccessHandler);
}
// gh-1056
@Test
public void requestWhenClientRegistersWithSecretThenClientAuthenticationSuccess() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.scope("scope1")
.scope("scope2")
.build();
// @formatter:on
// Dynamically register the client, then prove the issued client id/secret work
// for HTTP Basic client authentication on the client_credentials grant.
OidcClientRegistration clientRegistrationResponse = registerClient(clientRegistration);
this.mvc
.perform(post(ISSUER.concat(DEFAULT_TOKEN_ENDPOINT_URI))
.param(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.param(OAuth2ParameterNames.SCOPE, "scope1")
.with(httpBasic(clientRegistrationResponse.getClientId(),
clientRegistrationResponse.getClientSecret())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.access_token").isNotEmpty())
.andExpect(jsonPath("$.scope").value("scope1"))
.andReturn();
}
// gh-1344
@Test
public void requestWhenClientRegistersWithClientSecretJwtThenClientAuthenticationSuccess() throws Exception {
this.spring.register(AuthorizationServerConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.tokenEndpointAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_JWT.getValue())
.scope("scope1")
.scope("scope2")
.build();
// @formatter:on
OidcClientRegistration clientRegistrationResponse = registerClient(clientRegistration);
// Build a client_secret_jwt client assertion: HS256-signed, iss/sub = client id,
// aud = token endpoint, valid for one hour.
JwsHeader jwsHeader = JwsHeader.with(MacAlgorithm.HS256).build();
Instant issuedAt = Instant.now();
Instant expiresAt = issuedAt.plus(1, ChronoUnit.HOURS);
JwtClaimsSet jwtClaimsSet = JwtClaimsSet.builder()
.issuer(clientRegistrationResponse.getClientId())
.subject(clientRegistrationResponse.getClientId())
.audience(Collections.singletonList(asUrl(ISSUER, this.authorizationServerSettings.getTokenEndpoint())))
.issuedAt(issuedAt)
.expiresAt(expiresAt)
.build();
// Derive the HMAC key from the issued client secret. Use an explicit UTF-8 charset
// so the key bytes do not depend on the platform-default encoding and match the
// server-side derivation.
JWKSet jwkSet = new JWKSet(
TestJwks.jwk(new SecretKeySpec(
clientRegistrationResponse.getClientSecret().getBytes(java.nio.charset.StandardCharsets.UTF_8),
"HS256"))
.build());
JwtEncoder jwtClientAssertionEncoder = new NimbusJwtEncoder(
(jwkSelector, securityContext) -> jwkSelector.select(jwkSet));
Jwt jwtAssertion = jwtClientAssertionEncoder.encode(JwtEncoderParameters.from(jwsHeader, jwtClaimsSet));
// Authenticate at the token endpoint with the JWT bearer client assertion.
this.mvc
.perform(post(ISSUER.concat(DEFAULT_TOKEN_ENDPOINT_URI))
.param(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.param(OAuth2ParameterNames.SCOPE, "scope1")
.param(OAuth2ParameterNames.CLIENT_ASSERTION_TYPE,
"urn:ietf:params:oauth:client-assertion-type:jwt-bearer")
.param(OAuth2ParameterNames.CLIENT_ASSERTION, jwtAssertion.getTokenValue())
.param(OAuth2ParameterNames.CLIENT_ID, clientRegistrationResponse.getClientId()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.access_token").isNotEmpty())
.andExpect(jsonPath("$.scope").value("scope1"));
}
@Test
public void requestWhenClientRegistersWithCustomMetadataThenSavedToRegisteredClient() throws Exception {
// CustomClientMetadataConfiguration presumably declares "custom-metadata-name-1"
// and "custom-metadata-name-2" as supported custom metadata — TODO confirm against
// that configuration class (outside this view).
this.spring.register(CustomClientMetadataConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.scope("scope1")
.scope("scope2")
.claim("custom-metadata-name-1", "value-1")
.claim("custom-metadata-name-2", "value-2")
.claim("non-registered-custom-metadata", "value-3")
.build();
// @formatter:on
OidcClientRegistration clientRegistrationResponse = registerClient(clientRegistration);
RegisteredClient registeredClient = this.registeredClientRepository
.findByClientId(clientRegistrationResponse.getClientId());
// Supported custom metadata is echoed in the registration response and persisted
// as client settings; the unsupported claim is dropped from both.
assertThat(clientRegistrationResponse.<String>getClaim("custom-metadata-name-1")).isEqualTo("value-1");
assertThat(clientRegistrationResponse.<String>getClaim("custom-metadata-name-2")).isEqualTo("value-2");
assertThat(clientRegistrationResponse.<String>getClaim("non-registered-custom-metadata")).isNull();
assertThat(registeredClient.getClientSettings().<String>getSetting("custom-metadata-name-1"))
.isEqualTo("value-1");
assertThat(registeredClient.getClientSettings().<String>getSetting("custom-metadata-name-2"))
.isEqualTo("value-2");
assertThat(registeredClient.getClientSettings().<String>getSetting("non-registered-custom-metadata")).isNull();
}
// gh-2111
@Test
public void requestWhenClientRegistersWithSecretExpirationThenClientRegistrationResponse() throws Exception {
this.spring.register(ClientSecretExpirationConfiguration.class).autowire();
// @formatter:off
OidcClientRegistration clientRegistration = OidcClientRegistration.builder()
.clientName("client-name")
.redirectUri("https://client.example.com")
.grantType(AuthorizationGrantType.AUTHORIZATION_CODE.getValue())
.grantType(AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.scope("scope1")
.scope("scope2")
.build();
// @formatter:on
OidcClientRegistration clientRegistrationResponse = registerClient(clientRegistration);
// ClientSecretExpirationConfiguration is assumed to issue secrets with a 24-hour
// lifetime (matching the expectation below) — confirm against that configuration.
// A one-minute tolerance absorbs test execution time between issuance and assertion.
Instant expectedSecretExpiryDate = Instant.now().plus(Duration.ofHours(24));
TemporalUnitWithinOffset allowedDelta = new TemporalUnitWithinOffset(1, ChronoUnit.MINUTES);
// Returned response contains expiration date
assertThat(clientRegistrationResponse.getClientSecretExpiresAt()).isNotNull()
.isCloseTo(expectedSecretExpiryDate, allowedDelta);
RegisteredClient registeredClient = this.registeredClientRepository
.findByClientId(clientRegistrationResponse.getClientId());
// Persisted RegisteredClient contains expiration date
assertThat(registeredClient).isNotNull();
assertThat(registeredClient.getClientSecretExpiresAt()).isNotNull()
.isCloseTo(expectedSecretExpiryDate, allowedDelta);
}
// Performs the full dynamic registration flow: obtains an "initial" access token
// with the "client.create" scope (authenticating the registrar via private_key_jwt),
// then POSTs the registration to the OIDC client registration endpoint and returns
// the parsed registration response.
private OidcClientRegistration registerClient(OidcClientRegistration clientRegistration) throws Exception {
// ***** (1) Obtain the "initial" access token used for registering the client
String clientRegistrationScope = "client.create";
// @formatter:off
RegisteredClient clientRegistrar = RegisteredClient.withId("client-registrar-1")
.clientId("client-registrar-1")
.clientAuthenticationMethod(ClientAuthenticationMethod.PRIVATE_KEY_JWT)
.authorizationGrantType(AuthorizationGrantType.CLIENT_CREDENTIALS)
.scope(clientRegistrationScope)
.clientSettings(
ClientSettings.builder()
.jwkSetUrl(this.clientJwkSetUrl)
.tokenEndpointAuthenticationSigningAlgorithm(SignatureAlgorithm.RS256)
.build()
)
.build();
// @formatter:on
this.registeredClientRepository.save(clientRegistrar);
// Build and sign the registrar's client assertion (RS256).
// @formatter:off
JwsHeader jwsHeader = JwsHeader.with(SignatureAlgorithm.RS256)
.build();
JwtClaimsSet jwtClaimsSet = jwtClientAssertionClaims(clientRegistrar)
.build();
// @formatter:on
Jwt jwtAssertion = jwtClientAssertionEncoder.encode(JwtEncoderParameters.from(jwsHeader, jwtClaimsSet));
MvcResult mvcResult = this.mvc
.perform(post(ISSUER.concat(DEFAULT_TOKEN_ENDPOINT_URI))
.param(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue())
.param(OAuth2ParameterNames.SCOPE, clientRegistrationScope)
.param(OAuth2ParameterNames.CLIENT_ASSERTION_TYPE,
"urn:ietf:params:oauth:client-assertion-type:jwt-bearer")
.param(OAuth2ParameterNames.CLIENT_ASSERTION, jwtAssertion.getTokenValue())
.param(OAuth2ParameterNames.CLIENT_ID, clientRegistrar.getClientId()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.access_token").isNotEmpty())
.andExpect(jsonPath("$.scope").value(clientRegistrationScope))
.andReturn();
OAuth2AccessTokenResponse accessToken = readAccessTokenResponse(mvcResult.getResponse()).getAccessToken();
// ***** (2) Register the client
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.setBearerAuth(accessToken.getTokenValue());
// Register the client; the endpoint must respond 201 and forbid caching of the
// response (it contains the client secret).
mvcResult = this.mvc
.perform(post(ISSUER.concat(DEFAULT_OIDC_CLIENT_REGISTRATION_ENDPOINT_URI)).headers(httpHeaders)
.contentType(MediaType.APPLICATION_JSON)
.content(getClientRegistrationRequestContent(clientRegistration)))
.andExpect(status().isCreated())
.andExpect(header().string(HttpHeaders.CACHE_CONTROL, containsString("no-store")))
.andExpect(header().string(HttpHeaders.PRAGMA, containsString("no-cache")))
.andReturn();
return readClientRegistrationResponse(mvcResult.getResponse());
}
/**
 * Builds the standard claim set for a JWT client assertion: iss/sub are the
 * client id, aud is the issuer's token endpoint, with a one-hour validity window.
 */
private JwtClaimsSet.Builder jwtClientAssertionClaims(RegisteredClient registeredClient) {
String clientId = registeredClient.getClientId();
String tokenEndpointUrl = asUrl(ISSUER, this.authorizationServerSettings.getTokenEndpoint());
Instant now = Instant.now();
return JwtClaimsSet.builder()
.issuer(clientId)
.subject(clientId)
.audience(Collections.singletonList(tokenEndpointUrl))
.issuedAt(now)
.expiresAt(now.plus(Duration.ofHours(1)));
}
// Joins a base URI and a path segment into a single URL string.
private static String asUrl(String uri, String path) {
UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(uri).path(path);
return builder.build().toUriString();
}
/**
 * Deserializes the body of a MockMvc token-endpoint response into an
 * {@link OAuth2AccessTokenResponse}.
 */
private static OAuth2AccessTokenResponse readAccessTokenResponse(MockHttpServletResponse response)
throws Exception {
byte[] body = response.getContentAsByteArray();
HttpStatus status = HttpStatus.valueOf(response.getStatus());
return accessTokenHttpResponseConverter.read(OAuth2AccessTokenResponse.class,
new MockClientHttpResponse(body, status));
}
/**
 * Serializes a client registration into the request body bytes expected by the
 * registration endpoint.
 */
private static byte[] getClientRegistrationRequestContent(OidcClientRegistration clientRegistration)
throws Exception {
MockHttpOutputMessage message = new MockHttpOutputMessage();
clientRegistrationHttpMessageConverter.write(clientRegistration, null, message);
return message.getBodyAsBytes();
}
/**
 * Deserializes the body of a MockMvc registration response into an
 * {@link OidcClientRegistration}.
 */
private static OidcClientRegistration readClientRegistrationResponse(MockHttpServletResponse response)
throws Exception {
byte[] body = response.getContentAsByteArray();
HttpStatus status = HttpStatus.valueOf(response.getStatus());
return clientRegistrationHttpMessageConverter.read(OidcClientRegistration.class,
new MockClientHttpResponse(body, status));
}
@EnableWebSecurity
@Configuration(proxyBeanMethods = false)
static | OidcClientRegistrationTests |
java | quarkusio__quarkus | extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/AllowParameterConstraintsOnParallelMethodsTest.java | {
"start": 1610,
"end": 1969
} | class ____
implements InterfaceWithNoConstraints, AnotherInterfaceWithMethodParameterConstraint {
/**
* Implement a method that is declared by two interfaces, one of which has a constraint
*/
@Override
public String foo(String s) {
return "Hello World";
}
}
}
| RealizationOfTwoInterface |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/client/DefaultRestClient.java | {
"start": 31053,
"end": 32532
} | class ____ implements RequestHeadersSpec.ConvertibleClientHttpResponse {
private final ClientHttpResponse delegate;
private final @Nullable Map<String, Object> hints;
public DefaultConvertibleClientHttpResponse(ClientHttpResponse delegate, @Nullable Map<String, Object> hints) {
this.delegate = delegate;
this.hints = hints;
}
@Override
public HttpStatusCode getStatusCode() throws IOException {
return this.delegate.getStatusCode();
}
@Override
public String getStatusText() throws IOException {
return this.delegate.getStatusText();
}
@Override
public HttpHeaders getHeaders() {
return this.delegate.getHeaders();
}
@Override
public InputStream getBody() throws IOException {
return this.delegate.getBody();
}
@Override
public void close() {
this.delegate.close();
}
@Override
public <T> @Nullable T bodyTo(Class<T> bodyType) {
return readWithMessageConverters(this.delegate, () -> {} , bodyType, bodyType, this.hints);
}
@Override
public <T> @Nullable T bodyTo(ParameterizedTypeReference<T> bodyType) {
Type type = bodyType.getType();
Class<T> bodyClass = bodyClass(type);
return readWithMessageConverters(this.delegate, () -> {}, type, bodyClass, this.hints);
}
@Override
public RestClientResponseException createException() throws IOException {
return StatusHandler.createException(this, DefaultRestClient.this.messageConverters);
}
}
}
| DefaultConvertibleClientHttpResponse |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/SQLOrderAnnotation.java | {
"start": 466,
"end": 1301
} | class ____ implements SQLOrder {
private String value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public SQLOrderAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public SQLOrderAnnotation(SQLOrder annotation, ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public SQLOrderAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (String) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return SQLOrder.class;
}
@Override
public String value() {
return value;
}
public void value(String value) {
this.value = value;
}
}
| SQLOrderAnnotation |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/callbacks/CallbackAndDirtyTest.java | {
"start": 555,
"end": 3237
} | class ____ {
@Test
public void testDirtyButNotDirty(EntityManagerFactoryScope scope) {
scope.inEntityManager( entityManager -> {
entityManager.getTransaction().begin();
Employee mark = new Employee();
mark.setName( "Mark" );
mark.setTitle( "internal sales" );
mark.setSex( 'M' );
mark.setAddress( "buckhead" );
mark.setZip( "30305" );
mark.setCountry( "USA" );
Customer joe = new Customer();
joe.setName( "Joe" );
joe.setSex( 'M' );
joe.setAddress( "San Francisco" );
joe.setZip( "XXXXX" );
joe.setCountry( "USA" );
joe.setComments( "Very demanding" );
joe.setSalesperson( mark );
Person yomomma = new Person();
yomomma.setName( "mum" );
yomomma.setSex( 'F' );
entityManager.persist( mark );
entityManager.persist( joe );
entityManager.persist( yomomma );
long[] ids = {mark.getId(), joe.getId(), yomomma.getId()};
entityManager.getTransaction().commit();
entityManager.getTransaction().begin();
assertEquals(
3,
entityManager.createQuery( "select p.address, p.name from Person p order by p.name" ).getResultList().size()
);
assertEquals( 1,
entityManager.createQuery( "select p from Person p where p.class = Customer" ).getResultList().size() );
entityManager.getTransaction().commit();
entityManager.getTransaction().begin();
List customers = entityManager.createQuery( "select c from Customer c left join fetch c.salesperson" ).getResultList();
for ( Object customer : customers ) {
Customer c = (Customer) customer;
assertEquals( "Mark", c.getSalesperson().getName() );
}
assertEquals( 1, customers.size() );
entityManager.getTransaction().commit();
entityManager.getTransaction().begin();
customers = entityManager.createQuery( "select c from Customer c" ).getResultList();
for ( Object customer : customers ) {
Customer c = (Customer) customer;
assertEquals( "Mark", c.getSalesperson().getName() );
}
assertEquals( 1, customers.size() );
entityManager.getTransaction().commit();
entityManager.getTransaction().begin();
mark = entityManager.find( Employee.class, ids[0] );
joe = entityManager.find( Customer.class, ids[1] );
yomomma = entityManager.find( Person.class, ids[2] );
mark.setZip( "30306" );
assertEquals( 1, entityManager.createQuery( "select p from Person p where p.zip = '30306'" ).getResultList().size() );
entityManager.remove( mark );
entityManager.remove( joe );
entityManager.remove( yomomma );
assertTrue( entityManager.createQuery( "select p from Person p" ).getResultList().isEmpty() );
entityManager.getTransaction().commit();
} );
}
}
| CallbackAndDirtyTest |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scheduling/annotation/ScheduledAnnotationReactiveSupport.java | {
"start": 11670,
"end": 13703
} | class ____ implements Subscriber<Object>, Runnable {
private final List<Runnable> subscriptionTrackerRegistry;
private final Observation observation;
private final @Nullable CountDownLatch blockingLatch;
// Implementation note: since this is created last-minute when subscribing,
// there shouldn't be a way to cancel the tracker externally from the
// ScheduledAnnotationBeanProcessor before the #setSubscription(Subscription)
// method is called.
private @Nullable Subscription subscription;
TrackingSubscriber(List<Runnable> subscriptionTrackerRegistry, Observation observation) {
this(subscriptionTrackerRegistry, observation, null);
}
TrackingSubscriber(List<Runnable> subscriptionTrackerRegistry, Observation observation, @Nullable CountDownLatch latch) {
this.subscriptionTrackerRegistry = subscriptionTrackerRegistry;
this.observation = observation;
this.blockingLatch = latch;
}
@Override
public void run() {
if (this.subscription != null) {
this.subscription.cancel();
this.observation.stop();
}
if (this.blockingLatch != null) {
this.blockingLatch.countDown();
}
}
@Override
public void onSubscribe(Subscription subscription) {
this.subscription = subscription;
subscription.request(Integer.MAX_VALUE);
}
@Override
public void onNext(Object obj) {
// no-op
}
@Override
public void onError(Throwable ex) {
this.subscriptionTrackerRegistry.remove(this);
logger.warn("Unexpected error occurred in scheduled reactive task", ex);
this.observation.error(ex);
this.observation.stop();
if (this.blockingLatch != null) {
this.blockingLatch.countDown();
}
}
@Override
public void onComplete() {
this.subscriptionTrackerRegistry.remove(this);
if (this.observation.getContext() instanceof ScheduledTaskObservationContext context) {
context.setComplete(true);
}
this.observation.stop();
if (this.blockingLatch != null) {
this.blockingLatch.countDown();
}
}
}
}
| TrackingSubscriber |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/main/java/org/apache/camel/test/main/junit5/CamelMainTest.java | {
"start": 1741,
"end": 2039
} | class ____ the Camel Main application to simulate is {@code SomeMainClass} and has one additional configuration class
* to consider which is {@code SomeConfiguration}.
*
* <pre>
* <code>
*
* @CamelMainTest(mainClass = SomeMainClass.class, configurationClasses = SomeConfiguration.class)
* | of |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/SavedRequestAwareAuthenticationSuccessHandler.java | {
"start": 1833,
"end": 3226
} | class ____ responsible for performing the redirect to the original URL if appropriate.
* <p>
* Following a successful authentication, it decides on the redirect destination, based on
* the following scenarios:
* <ul>
* <li>If the {@code alwaysUseDefaultTargetUrl} property is set to true, the
* {@code defaultTargetUrl} will be used for the destination. Any
* {@code DefaultSavedRequest} stored in the session will be removed.</li>
* <li>If the {@code targetUrlParameter} has been set on the request, the value will be
* used as the destination. Any {@code DefaultSavedRequest} will again be removed.</li>
* <li>If a {@link org.springframework.security.web.savedrequest.SavedRequest} is found in
* the {@code RequestCache} (as set by the {@link ExceptionTranslationFilter} to record
* the original destination before the authentication process commenced), a redirect will
* be performed to the Url of that original destination. The {@code SavedRequest} object
* will remain cached and be picked up when the redirected request is received (See
* <a href="
* {@docRoot}/org/springframework/security/web/savedrequest/SavedRequestAwareWrapper.html">SavedRequestAwareWrapper</a>).
* </li>
* <li>If no {@link org.springframework.security.web.savedrequest.SavedRequest} is found,
* it will delegate to the base class.</li>
* </ul>
*
* @author Luke Taylor
* @since 3.0
*/
public | is |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/xslt/XsltOutputFileDeleteTest.java | {
"start": 1076,
"end": 2069
} | class ____ extends ContextTestSupport {
@Test
public void testXsltOutputDeleteFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("<?xml version=\"1.0\" encoding=\"UTF-8\"?><goodbye>world!</goodbye>");
template.sendBodyAndHeader("direct:start", "<hello>world!</hello>", Exchange.XSLT_FILE_NAME,
testFile("xsltme.xml").toString());
assertMockEndpointsSatisfied();
oneExchangeDone.matchesWaitTime();
// assert file deleted
assertFileNotExists(testFile("xsltme.xml"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("xslt:org/apache/camel/component/xslt/example.xsl?output=file&deleteOutputFile=true")
.to("mock:result");
}
};
}
}
| XsltOutputFileDeleteTest |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/simplelistparameter/SimpleListParameterTest.java | {
"start": 1131,
"end": 2769
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
// create a SqlSessionFactory
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/simplelistparameter/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/simplelistparameter/CreateDB.sql");
}
@Test
void shouldGetACar() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
CarMapper carMapper = sqlSession.getMapper(CarMapper.class);
Car car = new Car();
car.setDoors(Arrays.asList("2", "4"));
List<Car> cars = carMapper.getCar(car);
Assertions.assertNotNull(cars);
}
}
@Test
void shouldResolveGenericFieldGetterType() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
CarMapper carMapper = sqlSession.getMapper(CarMapper.class);
Rv rv = new Rv();
rv.doors1 = Arrays.asList("2", "4");
List<Rv> rvs = carMapper.getRv1(rv);
Assertions.assertNotNull(rvs);
}
}
@Test
void shouldResolveGenericMethodGetterType() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
CarMapper carMapper = sqlSession.getMapper(CarMapper.class);
Rv rv = new Rv();
rv.setDoors2(Arrays.asList("2", "4"));
List<Rv> rvs = carMapper.getRv2(rv);
Assertions.assertNotNull(rvs);
}
}
}
| SimpleListParameterTest |
java | apache__camel | components/camel-barcode/src/test/java/org/apache/camel/dataformat/barcode/BarcodeDataFormatCamelTest.java | {
"start": 1226,
"end": 5754
} | class ____ extends BarcodeTestBase {
@TempDir
Path testDirectory;
/**
* tests barcode (QR-Code) generation and reading.
*/
@Test
void testDefaultQRCode() throws Exception {
out.reset();
out.expectedBodiesReceived(MSG);
image.expectedMessageCount(1);
template.sendBody("direct:code1", MSG);
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
this.checkImage(image, 100, 100, BarcodeImageType.PNG.toString(), BarcodeFormat.QR_CODE);
}
/**
* tests barcode (QR-Code) generation with modified size and reading.
*/
@Test
void testQRCodeWithModifiedSize() throws Exception {
out.reset();
out.expectedBodiesReceived(MSG);
image.expectedMessageCount(1);
template.sendBody("direct:code2", MSG);
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
this.checkImage(image, 200, 200, BarcodeImageType.PNG.toString(), BarcodeFormat.QR_CODE);
}
/**
* tests barcode (QR-Code) generation with modified image type and reading.
*/
@Test
void testQRCodeWithJPEGType() throws Exception {
out.reset();
out.expectedBodiesReceived(MSG);
image.expectedMessageCount(1);
template.sendBody("direct:code3", MSG);
MockEndpoint.assertIsSatisfied(context, 5, TimeUnit.SECONDS);
this.checkImage(image, 100, 100, "JPEG", BarcodeFormat.QR_CODE);
}
/**
* tests barcode (PDF-417) with modified size and image type generation and reading.
*/
@Test
void testPDF417CodeWidthModifiedSizeAndImageType() throws Exception {
out.reset();
out.expectedBodiesReceived(MSG);
image.expectedMessageCount(1);
template.sendBody("direct:code4", MSG);
MockEndpoint.assertIsSatisfied(context, 60, TimeUnit.SECONDS);
this.checkImage(image, "JPEG", BarcodeFormat.PDF_417);
}
/**
* tests barcode (AZTEC).
*/
@Test
void testAZTECWidthModifiedSizeAndImageType() throws Exception {
out.reset();
out.expectedBodiesReceived(MSG);
image.expectedMessageCount(1);
template.sendBody("direct:code5", MSG);
MockEndpoint.assertIsSatisfied(context, 60, TimeUnit.SECONDS);
this.checkImage(image, 200, 200, "PNG", BarcodeFormat.AZTEC);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// QR-Code default
DataFormat code1 = new BarcodeDataFormat();
from("direct:code1")
.marshal(code1)
.to(TestSupport.fileUri(testDirectory));
// QR-Code with modified size
DataFormat code2 = new BarcodeDataFormat(200, 200);
from("direct:code2")
.marshal(code2)
.to(TestSupport.fileUri(testDirectory));
// QR-Code with JPEG type
DataFormat code3 = new BarcodeDataFormat(BarcodeImageType.JPG);
from("direct:code3")
.marshal(code3)
.to(TestSupport.fileUri(testDirectory));
// PDF-417 code with modified size and image type
DataFormat code4 = new BarcodeDataFormat(200, 200, BarcodeImageType.JPG, BarcodeFormat.PDF_417);
from("direct:code4")
.marshal(code4)
.to(TestSupport.fileUri(testDirectory));
// AZTEC with modified size and PNG type
DataFormat code5 = new BarcodeDataFormat(200, 200, BarcodeImageType.PNG, BarcodeFormat.AZTEC);
from("direct:code5")
.marshal(code5)
.to(TestSupport.fileUri(testDirectory));
// generic file read --->
//
// read file and route it
from(TestSupport.fileUri(testDirectory, "?noop=true&initialDelay=0&delay=10"))
.multicast().to("direct:unmarshall", "mock:image");
// get the message from code
from("direct:unmarshall")
.unmarshal(code1) // for unmarshalling, the instance doesn't matter
.to("log:OUT")
.to("mock:out");
}
};
}
}
| BarcodeDataFormatCamelTest |
java | grpc__grpc-java | api/src/test/java/io/grpc/ManagedChannelRegistryTest.java | {
"start": 10990,
"end": 11378
} | class ____ extends SocketAddress {
}
nameResolverRegistry.register(new BaseNameResolverProvider(true, 5, "sc1") {
@Override
public Collection<Class<? extends SocketAddress>> getProducedSocketAddressTypes() {
return Collections.singleton(SocketAddress1.class);
}
});
ManagedChannelRegistry registry = new ManagedChannelRegistry();
| SocketAddress1 |
java | apache__camel | components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/jaxws/CxfConsumerPayloadTest.java | {
"start": 1378,
"end": 5538
} | class ____ extends CxfConsumerMessageTest {
protected static final String ECHO_RESPONSE
= "<ns1:echoResponse xmlns:ns1=\"http://jaxws.cxf.component.camel.apache.org/\">"
+ "<return xmlns=\"http://jaxws.cxf.component.camel.apache.org/\">echo Hello World!</return>"
+ "</ns1:echoResponse>";
protected static final String ECHO_BOOLEAN_RESPONSE
= "<ns1:echoBooleanResponse xmlns:ns1=\"http://jaxws.cxf.component.camel.apache.org/\">"
+ "<return xmlns=\"http://jaxws.cxf.component.camel.apache.org/\">true</return>"
+ "</ns1:echoBooleanResponse>";
protected static final String ECHO_REQUEST = "<ns1:echo xmlns:ns1=\"http://jaxws.cxf.component.camel.apache.org/\">"
+ "<arg0 xmlns=\"http://jaxws.cxf.component.camel.apache.org/\">Hello World!</arg0></ns1:echo>";
protected static final String ECHO_BOOLEAN_REQUEST
= "<ns1:echoBoolean xmlns:ns1=\"http://jaxws.cxf.component.camel.apache.org/\">"
+ "<arg0 xmlns=\"http://jaxws.cxf.component.camel.apache.org/\">true</arg0></ns1:echoBoolean>";
protected static final String ELEMENT_NAMESPACE = "http://jaxws.cxf.component.camel.apache.org/";
protected void checkRequest(String expect, String request) {
//REVIST use a more reliable comparison to tolerate some namespaces being added to the root element
if (expect.equals("ECHO_REQUEST")) {
assertTrue(request.startsWith(ECHO_REQUEST.substring(0, 66))
&& request.endsWith(ECHO_REQUEST.substring(67)), "Get a wrong request");
} else {
assertTrue(request.startsWith(ECHO_BOOLEAN_REQUEST.substring(0, 73))
&& request.endsWith(ECHO_BOOLEAN_REQUEST.substring(74)), "Get a wrong request");
}
}
// START SNIPPET: payload
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(simpleEndpointURI + "&dataFormat=PAYLOAD").to("log:info").process(new Processor() {
@SuppressWarnings("unchecked")
public void process(final Exchange exchange) throws Exception {
CxfPayload<SoapHeader> requestPayload = exchange.getIn().getBody(CxfPayload.class);
List<Source> inElements = requestPayload.getBodySources();
List<Source> outElements = new ArrayList<>();
// You can use a customer toStringConverter to turn a CxfPayLoad message into String as you want
String request = exchange.getIn().getBody(String.class);
XmlConverter converter = new XmlConverter();
String documentString = ECHO_RESPONSE;
Element in = new XmlConverter().toDOMElement(inElements.get(0));
// Just check the element namespace
if (!in.getNamespaceURI().equals(ELEMENT_NAMESPACE)) {
throw new IllegalArgumentException("Wrong element namespace");
}
if (in.getLocalName().equals("echoBoolean")) {
documentString = ECHO_BOOLEAN_RESPONSE;
checkRequest("ECHO_BOOLEAN_REQUEST", request);
} else {
documentString = ECHO_RESPONSE;
checkRequest("ECHO_REQUEST", request);
}
Document outDocument = converter.toDOMDocument(documentString, exchange);
outElements.add(new DOMSource(outDocument.getDocumentElement()));
// set the payload header with null
CxfPayload<SoapHeader> responsePayload = new CxfPayload<>(null, outElements, null);
exchange.getMessage().setBody(responsePayload);
}
});
}
};
}
// END SNIPPET: payload
}
| CxfConsumerPayloadTest |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/Synchronization.java | {
"start": 1265,
"end": 2073
} | interface ____ {
/**
* Called when the processing of the message exchange is complete
*
* @param exchange the exchange being processed
*/
void onComplete(Exchange exchange);
/**
* Called when the processing of the message exchange has failed for some reason. The exception which caused the
* problem is in {@link Exchange#getException()}.
*
* @param exchange the exchange being processed
*/
void onFailure(Exchange exchange);
/**
* Get an optional {@link SynchronizationRouteAware} for this synchronization
*
* @return An instance of {@link SynchronizationRouteAware} or null if unset for this synchronization
*/
default SynchronizationRouteAware getRouteSynchronization() {
return null;
}
}
| Synchronization |
java | dropwizard__dropwizard | dropwizard-e2e/src/test/java/com/example/request_log/AbstractRequestLogPatternIntegrationTest.java | {
"start": 1499,
"end": 2628
} | class ____ extends Application<Configuration> {
public static void main(String[] args) throws Exception {
new TestApplication().run(args);
}
@Override
public void run(Configuration configuration, Environment environment) {
environment.jersey().register(TestResource.class);
AuthFilter<?, ?> basicAuthFilter = new BasicCredentialAuthFilter.Builder<PrincipalImpl>()
.setAuthenticator(credentials -> Optional.of(new PrincipalImpl(credentials.getUsername())))
.setAuthorizer((principal, role, requestContext) -> true)
.buildAuthFilter();
environment.jersey().register(new AuthDynamicFeature(basicAuthFilter));
environment.jersey().register(new AuthValueFactoryProvider.Binder<>(PrincipalImpl.class));
environment.healthChecks().register("dummy", new HealthCheck() {
@Override
protected Result check() {
return Result.healthy();
}
});
}
}
@Path("/greet")
public static | TestApplication |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/parallel/ParallelFailureHandling.java | {
"start": 826,
"end": 1380
} | enum ____ implements BiFunction<Long, Throwable, ParallelFailureHandling> {
/**
* The current rail is stopped and the error is dropped.
*/
STOP,
/**
* The current rail is stopped and the error is signalled.
*/
ERROR,
/**
* The current value and error is ignored and the rail resumes with the next item.
*/
SKIP,
/**
* Retry the current value.
*/
RETRY;
@Override
public ParallelFailureHandling apply(Long t1, Throwable t2) {
return this;
}
}
| ParallelFailureHandling |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/access/MBeanClientInterceptorTests.java | {
"start": 3312,
"end": 7880
} | class ____ be different than the base class").isNotSameAs(IJmxTestBean.class);
}
@Test
void differentProxiesSameClass() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy1 = getProxy();
IJmxTestBean proxy2 = getProxy();
assertThat(proxy2).as("The proxies should NOT be the same").isNotSameAs(proxy1);
assertThat(proxy2.getClass()).as("The proxy classes should be the same").isSameAs(proxy1.getClass());
}
@Test
void getAttributeValue() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy1 = getProxy();
int age = proxy1.getAge();
assertThat(age).as("The age should be 100").isEqualTo(100);
}
@Test
void setAttributeValue() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
proxy.setName("Rob Harrop");
assertThat(target.getName()).as("The name of the bean should have been updated").isEqualTo("Rob Harrop");
}
@Test
void setAttributeValueWithRuntimeException() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
assertThatIllegalArgumentException().isThrownBy(() -> proxy.setName("Juergen"));
}
@Test
void setAttributeValueWithCheckedException() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
assertThatExceptionOfType(ClassNotFoundException.class).isThrownBy(() -> proxy.setName("Juergen Class"));
}
@Test
void setAttributeValueWithIOException() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
assertThatIOException().isThrownBy(() -> proxy.setName("Juergen IO"));
}
@Test
void setReadOnlyAttribute() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
assertThatExceptionOfType(InvalidInvocationException.class).isThrownBy(() -> proxy.setAge(900));
}
@Test
void invokeNoArgs() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
long result = proxy.myOperation();
assertThat(result).as("The operation should return 1").isEqualTo(1);
}
@Test
void invokeArgs() throws Exception {
assumeTrue(runTests);
IJmxTestBean proxy = getProxy();
int result = proxy.add(1, 2);
assertThat(result).as("The operation should return 3").isEqualTo(3);
}
@Test
void invokeUnexposedMethodWithException() throws Exception {
assumeTrue(runTests);
IJmxTestBean bean = getProxy();
assertThatExceptionOfType(InvalidInvocationException.class).isThrownBy(bean::dontExposeMe);
}
@Test
void lazyConnectionToRemote() throws Exception {
assumeTrue(runTests);
@SuppressWarnings("deprecation")
final int port = TestSocketUtils.findAvailableTcpPort();
JMXServiceURL url = new JMXServiceURL("service:jmx:jmxmp://localhost:" + port);
JMXConnectorServer connector = JMXConnectorServerFactory.newJMXConnectorServer(url, null, getServer());
MBeanProxyFactoryBean factory = new MBeanProxyFactoryBean();
factory.setServiceUrl(url.toString());
factory.setProxyInterface(IJmxTestBean.class);
factory.setObjectName(OBJECT_NAME);
factory.setConnectOnStartup(false);
factory.setRefreshOnConnectFailure(true);
// should skip connection to the server
factory.afterPropertiesSet();
IJmxTestBean bean = (IJmxTestBean) factory.getObject();
// now start the connector
try {
connector.start();
}
catch (BindException ex) {
Assumptions.abort("Skipping remainder of JMX LazyConnectionToRemote test because binding to local port [" +
port + "] failed: " + ex.getMessage());
}
// should now be able to access data via the lazy proxy
try {
assertThat(bean.getName()).isEqualTo("Rob Harrop");
assertThat(bean.getAge()).isEqualTo(100);
}
finally {
connector.stop();
}
}
@Test
void mxBeanAttributeAccess() throws Exception {
MBeanClientInterceptor interceptor = new MBeanClientInterceptor();
interceptor.setServer(ManagementFactory.getPlatformMBeanServer());
interceptor.setObjectName("java.lang:type=Memory");
interceptor.setManagementInterface(MemoryMXBean.class);
MemoryMXBean proxy = ProxyFactory.getProxy(MemoryMXBean.class, interceptor);
assertThat(proxy.getHeapMemoryUsage().getMax()).isGreaterThan(0);
}
@Test
void mxBeanOperationAccess() throws Exception {
MBeanClientInterceptor interceptor = new MBeanClientInterceptor();
interceptor.setServer(ManagementFactory.getPlatformMBeanServer());
interceptor.setObjectName("java.lang:type=Threading");
ThreadMXBean proxy = ProxyFactory.getProxy(ThreadMXBean.class, interceptor);
assertThat(proxy.getThreadInfo(Thread.currentThread().getId()).getStackTrace()).isNotNull();
}
private static | should |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/tests/sample/beans/ResourceTestBean.java | {
"start": 887,
"end": 2367
} | class ____ {
private Resource resource;
private ContextResource contextResource;
private InputStream inputStream;
private Resource[] resourceArray;
private Map<String, Resource> resourceMap;
private Map<String, Resource[]> resourceArrayMap;
public ResourceTestBean() {
}
public ResourceTestBean(Resource resource, InputStream inputStream) {
this.resource = resource;
this.inputStream = inputStream;
}
public Resource getResource() {
return resource;
}
public void setResource(Resource resource) {
this.resource = resource;
}
public ContextResource getContextResource() {
return contextResource;
}
public void setContextResource(ContextResource contextResource) {
this.contextResource = contextResource;
}
public InputStream getInputStream() {
return inputStream;
}
public void setInputStream(InputStream inputStream) {
this.inputStream = inputStream;
}
public Resource[] getResourceArray() {
return resourceArray;
}
public void setResourceArray(Resource[] resourceArray) {
this.resourceArray = resourceArray;
}
public Map<String, Resource> getResourceMap() {
return resourceMap;
}
public void setResourceMap(Map<String, Resource> resourceMap) {
this.resourceMap = resourceMap;
}
public Map<String, Resource[]> getResourceArrayMap() {
return resourceArrayMap;
}
public void setResourceArrayMap(Map<String, Resource[]> resourceArrayMap) {
this.resourceArrayMap = resourceArrayMap;
}
}
| ResourceTestBean |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolPB.java | {
"start": 1456,
"end": 1543
} | interface ____ extends
DatanodeProtocolService.BlockingInterface {
}
| DatanodeProtocolPB |
java | quarkusio__quarkus | integration-tests/grpc-proto-v2/src/test/java/io/quarkus/grpc/examples/hello/HelloWorldServiceTestBase.java | {
"start": 469,
"end": 1594
} | class ____ {
private Vertx _vertx;
private Channel channel;
protected Vertx vertx() {
return null;
}
protected void close(Vertx vertx) {
}
@BeforeEach
public void init() {
_vertx = vertx();
channel = GRPCTestUtils.channel(_vertx);
}
@AfterEach
public void cleanup() {
GRPCTestUtils.close(channel);
close(_vertx);
}
@Test
public void testHelloWorldServiceUsingBlockingStub() {
GreeterGrpc.GreeterBlockingStub client = GreeterGrpc.newBlockingStub(channel);
HelloReply reply = client
.sayHello(HelloRequest.newBuilder().setName("neo-blocking").build());
assertThat(reply.getMessage()).isEqualTo("Hello neo-blocking");
}
@Test
public void testHelloWorldServiceUsingMutinyStub() {
HelloReply reply = MutinyGreeterGrpc.newMutinyStub(channel)
.sayHello(HelloRequest.newBuilder().setName("neo-blocking").build()).await().atMost(Duration.ofSeconds(5));
assertThat(reply.getMessage()).isEqualTo("Hello neo-blocking");
}
}
| HelloWorldServiceTestBase |
java | google__guava | android/guava/src/com/google/common/util/concurrent/UncheckedExecutionException.java | {
"start": 3121,
"end": 4339
} | class ____ expect for instances to have a non-null
* cause. At the moment, you can <i>usually</i> still preserve behavior by passing an explicit
* {@code null} cause. Note, however, that passing an explicit {@code null} cause prevents
* anyone from calling {@link #initCause} later, so it is not quite equivalent to using a
* constructor that omits the cause.
*/
@SuppressWarnings("InlineMeSuggester") // b/387265535
@Deprecated
protected UncheckedExecutionException(@Nullable String message) {
super(message);
}
/**
* Creates a new instance with the given detail message and cause. Prefer to provide a
* non-nullable {@code cause}, as many users expect to find one.
*/
public UncheckedExecutionException(@Nullable String message, @Nullable Throwable cause) {
super(message, cause);
}
/**
* Creates a new instance with {@code null} as its detail message and the given cause. Prefer to
* provide a non-nullable {@code cause}, as many users expect to find one.
*/
public UncheckedExecutionException(@Nullable Throwable cause) {
super(cause);
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
| typically |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplierTests.java | {
"start": 498,
"end": 1256
} | class ____ extends LocalSupplierTests {
@Override
protected LocalSupplier createTestInstance() {
int blockSize = randomInt(1000);
Block[] blocks = randomList(1, 10, () -> LocalSupplierTests.randomBlock(blockSize)).toArray(Block[]::new);
return new ImmediateLocalSupplier(new Page(blocks));
}
protected void assertOnBWCObject(LocalSupplier testInstance, LocalSupplier bwcDeserializedObject, TransportVersion version) {
assertNotSame(version.toString(), bwcDeserializedObject, testInstance);
assertThat(version.toString(), testInstance, equalTo(bwcDeserializedObject));
assertEquals(version.toString(), testInstance.hashCode(), bwcDeserializedObject.hashCode());
}
}
| ImmediateLocalSupplierTests |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java | {
"start": 3979,
"end": 53322
} | class ____ extends InferenceServiceTestCase {
private final MockWebServer webServer = new MockWebServer();
private ThreadPool threadPool;
private HttpClientManager clientManager;
private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
@Before
public void init() throws Exception {
webServer.start();
threadPool = createThreadPool(inferenceUtilityExecutors());
clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class));
}
@After
public void shutdown() throws IOException {
clientManager.close();
terminate(threadPool);
webServer.close();
}
public void testParseRequestConfig_CreateGoogleVertexAiChatCompletionModel() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var provider = GoogleModelGardenProvider.ANTHROPIC.name();
var url = "https://non-streaming.url";
var streamingUrl = "https://streaming.url";
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(GoogleVertexAiChatCompletionModel.class));
var vertexAIModel = (GoogleVertexAiChatCompletionModel) model;
assertThat(vertexAIModel.getServiceSettings().modelId(), is(modelId));
assertThat(vertexAIModel.getServiceSettings().location(), is(location));
assertThat(vertexAIModel.getServiceSettings().projectId(), is(projectId));
assertThat(vertexAIModel.getServiceSettings().provider(), is(GoogleModelGardenProvider.ANTHROPIC));
assertThat(vertexAIModel.getServiceSettings().uri(), is(new URI(url)));
assertThat(vertexAIModel.getServiceSettings().streamingUri(), is(new URI(streamingUrl)));
assertThat(vertexAIModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
assertThat(vertexAIModel.getConfigurations().getTaskType(), equalTo(CHAT_COMPLETION));
assertThat(vertexAIModel.getServiceSettings().rateLimitSettings().requestsPerTimeUnit(), equalTo(1000L));
assertThat(vertexAIModel.getServiceSettings().rateLimitSettings().timeUnit(), equalTo(MINUTES));
}, e -> fail("Model parsing should succeeded, but failed: " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.CHAT_COMPLETION,
getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiServiceFields.PROVIDER_SETTING_NAME,
provider,
GoogleVertexAiServiceFields.URL_SETTING_NAME,
url,
GoogleVertexAiServiceFields.STREAMING_URL_SETTING_NAME,
streamingUrl
)
),
getTaskSettingsMapEmpty(),
getSecretSettingsMap(serviceAccountJson)
),
modelListener
);
}
}
public void testParseRequestConfig_CreatesGoogleVertexAiEmbeddingsModel() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}, e -> fail("Model parsing should succeeded, but failed: " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId
)
),
getTaskSettingsMap(true, InputType.INGEST),
getSecretSettingsMap(serviceAccountJson)
),
modelListener
);
}
}
public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}, e -> fail("Model parsing should succeeded, but failed: " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId
)
),
getTaskSettingsMap(true, InputType.INGEST),
createRandomChunkingSettingsMap(),
getSecretSettingsMap(serviceAccountJson)
),
modelListener
);
}
}
public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}, e -> fail("Model parsing should succeeded, but failed: " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId
)
),
getTaskSettingsMap(false, InputType.SEARCH),
getSecretSettingsMap(serviceAccountJson)
),
modelListener
);
}
}
public void testParseRequestConfig_CreatesGoogleVertexAiRerankModel() throws IOException {
var projectId = "project";
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(GoogleVertexAiRerankModel.class));
var rerankModel = (GoogleVertexAiRerankModel) model;
assertThat(rerankModel.getServiceSettings().projectId(), is(projectId));
assertThat(rerankModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}, e -> fail("Model parsing should succeeded, but failed: " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.RERANK,
getRequestConfigMap(
new HashMap<>(Map.of(GoogleVertexAiServiceFields.PROJECT_ID, projectId)),
new HashMap<>(Map.of()),
getSecretSettingsMap(serviceAccountJson)
),
modelListener
);
}
}
public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException {
try (var service = createGoogleVertexAiService()) {
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"The [googlevertexai] service does not support task type [sparse_embedding]"
);
service.parseRequestConfig(
"id",
TaskType.SPARSE_EMBEDDING,
getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project"
)
),
new HashMap<>(Map.of()),
getSecretSettingsMap("{}")
),
failureListener
);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException {
try (var service = createGoogleVertexAiService()) {
var config = getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project"
)
),
getTaskSettingsMap(true, InputType.SEARCH),
getSecretSettingsMap("{}")
);
config.put("extra_key", "value");
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [googlevertexai] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException {
try (var service = createGoogleVertexAiService()) {
Map<String, Object> serviceSettings = new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project"
)
);
serviceSettings.put("extra_key", "value");
var config = getRequestConfigMap(serviceSettings, getTaskSettingsMap(true, InputType.CLUSTERING), getSecretSettingsMap("{}"));
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [googlevertexai] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException {
try (var service = createGoogleVertexAiService()) {
Map<String, Object> taskSettingsMap = new HashMap<>();
taskSettingsMap.put("extra_key", "value");
var config = getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project"
)
),
taskSettingsMap,
getSecretSettingsMap("{}")
);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [googlevertexai] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException {
try (var service = createGoogleVertexAiService()) {
Map<String, Object> secretSettings = getSecretSettingsMap("{}");
secretSettings.put("extra_key", "value");
var config = getRequestConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project"
)
),
getTaskSettingsMap(true, null),
secretSettings
);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [googlevertexai] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParsePersistedConfigWithSecrets_CreatesGoogleVertexAiEmbeddingsModel() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, InputType.SEARCH),
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, InputType.SEARCH)));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_CreatesGoogleVertexAiChatCompletionModel() throws IOException, URISyntaxException {
var projectId = "project";
var location = "location";
var modelId = "model";
var provider = GoogleModelGardenProvider.ANTHROPIC.name();
var url = "https://non-streaming.url";
var streamingUrl = "https://streaming.url";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiServiceFields.PROVIDER_SETTING_NAME,
provider,
GoogleVertexAiServiceFields.URL_SETTING_NAME,
url,
GoogleVertexAiServiceFields.STREAMING_URL_SETTING_NAME,
streamingUrl
)
),
getTaskSettingsMap(autoTruncate, InputType.INGEST),
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.CHAT_COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiChatCompletionModel.class));
var chatCompletionModel = (GoogleVertexAiChatCompletionModel) model;
assertThat(chatCompletionModel.getServiceSettings().modelId(), is(modelId));
assertThat(chatCompletionModel.getServiceSettings().location(), is(location));
assertThat(chatCompletionModel.getServiceSettings().projectId(), is(projectId));
assertThat(chatCompletionModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
assertThat(chatCompletionModel.getConfigurations().getTaskType(), equalTo(CHAT_COMPLETION));
assertThat(chatCompletionModel.getServiceSettings().rateLimitSettings().requestsPerTimeUnit(), equalTo(1000L));
assertThat(chatCompletionModel.getServiceSettings().rateLimitSettings().timeUnit(), equalTo(MINUTES));
assertThat(chatCompletionModel.getServiceSettings().provider(), is(GoogleModelGardenProvider.ANTHROPIC));
assertThat(chatCompletionModel.getServiceSettings().uri(), is(new URI(url)));
assertThat(chatCompletionModel.getServiceSettings().streamingUri(), is(new URI(streamingUrl)));
}
}
public void testParsePersistedConfigWithSecrets_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, null),
createRandomChunkingSettingsMap(),
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, null)));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, null),
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, null)));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_CreatesGoogleVertexAiRerankModel() throws IOException {
var projectId = "project";
var topN = 1;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(GoogleVertexAiServiceFields.PROJECT_ID, projectId)),
getTaskSettingsMap(topN),
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets("id", TaskType.RERANK, persistedConfig.config(), persistedConfig.secrets());
assertThat(model, instanceOf(GoogleVertexAiRerankModel.class));
var rerankModel = (GoogleVertexAiRerankModel) model;
assertThat(rerankModel.getServiceSettings().projectId(), is(projectId));
assertThat(rerankModel.getTaskSettings(), is(new GoogleVertexAiRerankTaskSettings(topN)));
assertThat(rerankModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, InputType.INGEST),
getSecretSettingsMap(serviceAccountJson)
);
persistedConfig.config().put("extra_key", "value");
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, InputType.INGEST)));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var secretSettingsMap = getSecretSettingsMap(serviceAccountJson);
secretSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project",
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, null),
secretSettingsMap
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, null)));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var serviceSettingsMap = new HashMap<String, Object>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project",
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
);
serviceSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
serviceSettingsMap,
getTaskSettingsMap(autoTruncate, InputType.CLUSTERING),
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, InputType.CLUSTERING)));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
var serviceAccountJson = """
{
"some json"
}
""";
try (var service = createGoogleVertexAiService()) {
var taskSettings = getTaskSettingsMap(autoTruncate, InputType.SEARCH);
taskSettings.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
"model",
GoogleVertexAiServiceFields.LOCATION,
"location",
GoogleVertexAiServiceFields.PROJECT_ID,
"project",
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
taskSettings,
getSecretSettingsMap(serviceAccountJson)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, InputType.SEARCH)));
assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson));
}
}
public void testParsePersistedConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, null),
createRandomChunkingSettingsMap()
);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, null)));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
}
}
public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
var projectId = "project";
var location = "location";
var modelId = "model";
var autoTruncate = true;
try (var service = createGoogleVertexAiService()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(
Map.of(
ServiceFields.MODEL_ID,
modelId,
GoogleVertexAiServiceFields.LOCATION,
location,
GoogleVertexAiServiceFields.PROJECT_ID,
projectId,
GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER,
true
)
),
getTaskSettingsMap(autoTruncate, null)
);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class));
var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId));
assertThat(embeddingsModel.getServiceSettings().location(), is(location));
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId));
assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE));
assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, null)));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
}
}
public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
try (var service = createGoogleVertexAiService()) {
var model = GoogleVertexAiRerankModelTests.createModel(randomAlphaOfLength(10), randomNonNegativeInt());
assertThrows(
ElasticsearchStatusException.class,
() -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); }
);
}
}
public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException {
testUpdateModelWithEmbeddingDetails_Successful(null);
}
public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException {
testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values()));
}
private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException {
try (var service = createGoogleVertexAiService()) {
var embeddingSize = randomNonNegativeInt();
var model = GoogleVertexAiEmbeddingsModelTests.createModel(randomAlphaOfLength(10), randomBoolean(), similarityMeasure);
Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);
SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? SimilarityMeasure.DOT_PRODUCT : similarityMeasure;
assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity());
assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
}
}
@SuppressWarnings("checkstyle:LineLength")
public void testGetConfiguration() throws Exception {
try (var service = createGoogleVertexAiService()) {
String content = XContentHelper.stripWhitespace(
"""
{
"service": "googlevertexai",
"name": "Google Vertex AI",
"task_types": ["text_embedding", "rerank", "completion", "chat_completion"],
"configurations": {
"service_account_json": {
"description": "API Key for the provider you're connecting to.",
"label": "Credentials JSON",
"required": true,
"sensitive": true,
"updatable": true,
"type": "str",
"supported_task_types": ["text_embedding", "rerank", "completion", "chat_completion"]
},
"project_id": {
"description": "The GCP Project ID which has Vertex AI API(s) enabled. For more information on the URL, refer to the {geminiVertexAIDocs}.",
"label": "GCP Project",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "rerank", "completion", "chat_completion"]
},
"location": {
"description": "Please provide the GCP region where the Vertex AI API(s) is enabled. For more information, refer to the {geminiVertexAIDocs}.",
"label": "GCP Region",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "completion", "chat_completion"]
},
"rate_limit.requests_per_minute": {
"description": "Minimize the number of rate limit errors.",
"label": "Rate Limit",
"required": false,
"sensitive": false,
"updatable": false,
"type": "int",
"supported_task_types": ["text_embedding", "rerank", "completion", "chat_completion"]
},
"model_id": {
"description": "ID of the LLM you're using.",
"label": "Model ID",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "rerank", "completion", "chat_completion"]
}
}
}
"""
);
InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes(
new BytesArray(content),
XContentType.JSON
);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
InferenceServiceConfiguration serviceConfiguration = service.getConfiguration();
assertToXContentEquivalent(
originalBytes,
toXContent(serviceConfiguration, XContentType.JSON, humanReadable),
XContentType.JSON
);
}
}
private GoogleVertexAiService createGoogleVertexAiService() {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
return new GoogleVertexAiService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty());
}
@Override
public InferenceService createInferenceService() {
return createGoogleVertexAiService();
}
protected void assertRerankerWindowSize(RerankingInferenceService rerankingInferenceService) {
assertThat(
rerankingInferenceService.rerankerWindowSize("semantic-ranker-default-003"),
CoreMatchers.is(RerankingInferenceService.CONSERVATIVE_DEFAULT_WINDOW_SIZE)
);
assertThat(rerankingInferenceService.rerankerWindowSize("semantic-ranker-default-004"), CoreMatchers.is(600));
assertThat(
rerankingInferenceService.rerankerWindowSize("any other"),
CoreMatchers.is(RerankingInferenceService.CONSERVATIVE_DEFAULT_WINDOW_SIZE)
);
}
private Map<String, Object> getRequestConfigMap(
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
Map<String, Object> chunkingSettings,
Map<String, Object> secretSettings
) {
var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings);
requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings);
return requestConfigMap;
}
private Map<String, Object> getRequestConfigMap(
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
Map<String, Object> secretSettings
) {
var builtServiceSettings = new HashMap<>();
builtServiceSettings.putAll(serviceSettings);
builtServiceSettings.putAll(secretSettings);
return new HashMap<>(
Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)
);
}
private static Map<String, Object> getSecretSettingsMap(String serviceAccountJson) {
return new HashMap<>(Map.of(GoogleVertexAiSecretSettings.SERVICE_ACCOUNT_JSON, serviceAccountJson));
}
private static ActionListener<Model> getModelListenerForException(Class<?> exceptionClass, String expectedMessage) {
return ActionListener.<Model>wrap((model) -> fail("Model parsing should have failed"), e -> {
assertThat(e, Matchers.instanceOf(exceptionClass));
assertThat(e.getMessage(), CoreMatchers.is(expectedMessage));
});
}
private static Map<String, Object> getTaskSettingsMap(Boolean autoTruncate, @Nullable InputType inputType) {
var taskSettings = new HashMap<String, Object>();
taskSettings.put(GoogleVertexAiEmbeddingsTaskSettings.AUTO_TRUNCATE, autoTruncate);
if (inputType != null) {
taskSettings.put(GoogleVertexAiEmbeddingsTaskSettings.INPUT_TYPE, inputType.toString());
}
return taskSettings;
}
private static Map<String, Object> getTaskSettingsMap(Integer topN) {
var taskSettings = new HashMap<String, Object>();
taskSettings.put(GoogleVertexAiRerankTaskSettings.TOP_N, topN);
return taskSettings;
}
}
| GoogleVertexAiServiceTests |
java | apache__logging-log4j2 | log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/util/InstantFormatter.java | {
"start": 8550,
"end": 9563
} | class ____ implements Formatter {
private final FastDateFormat formatter;
private final Calendar calendar;
private Log4jFastFormatter(final String pattern, final Locale locale, final TimeZone timeZone) {
this.formatter = FastDateFormat.getInstance(pattern, timeZone, locale);
this.calendar = Calendar.getInstance(timeZone, locale);
}
@Override
public Class<?> getInternalImplementationClass() {
return FastDateFormat.class;
}
@Override
public void format(final Instant instant, final StringBuilder stringBuilder) {
calendar.setTimeInMillis(instant.getEpochMillisecond());
formatter.format(calendar, stringBuilder);
}
@Override
public boolean isInstantMatching(final Instant instant1, final Instant instant2) {
return instant1.getEpochMillisecond() == instant2.getEpochMillisecond();
}
}
private static final | Log4jFastFormatter |
java | apache__flink | flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/AfterMatchSkipITCase.java | {
"start": 22961,
"end": 36942
} | class ____ {
static Event a = new Event(1, "a", 0.0);
static Event c = new Event(4, "c", 0.0);
static List<List<Event>> compute(AfterMatchSkipStrategy skipStrategy) throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
streamEvents.add(new StreamRecord<>(a));
streamEvents.add(new StreamRecord<>(c));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a")
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")))
.oneOrMore()
.optional()
.consecutive()
.next("c")
.where(SimpleCondition.of(value -> value.getName().contains("c")));
NFATestHarness nfaTestHarness =
NFATestHarness.forPattern(pattern)
.withAfterMatchSkipStrategy(skipStrategy)
.build();
return nfaTestHarness.feedRecords(streamEvents);
}
}
@Test
public void testSkipToLastWithOneOrMore() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event b1 = new Event(2, "b1", 0.0);
Event a2 = new Event(3, "a2", 0.0);
Event b2 = new Event(4, "b2", 0.0);
Event b3 = new Event(5, "b3", 0.0);
Event a3 = new Event(3, "a3", 0.0);
Event b4 = new Event(4, "b4", 0.0);
streamEvents.add(new StreamRecord<Event>(a1));
streamEvents.add(new StreamRecord<Event>(b1));
streamEvents.add(new StreamRecord<Event>(a2));
streamEvents.add(new StreamRecord<Event>(b2));
streamEvents.add(new StreamRecord<Event>(b3));
streamEvents.add(new StreamRecord<Event>(a3));
streamEvents.add(new StreamRecord<Event>(b4));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToLast("b"))
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")))
.oneOrMore()
.consecutive();
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(a1, b1),
Lists.newArrayList(a2, b2),
Lists.newArrayList(a3, b4)));
}
/** Example from docs. */
@Test
public void testSkipPastLastWithOneOrMoreAtBeginning() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event a2 = new Event(2, "a2", 0.0);
Event a3 = new Event(3, "a3", 0.0);
Event b1 = new Event(4, "b1", 0.0);
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(a3));
streamEvents.add(new StreamRecord<>(b1));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipPastLastEvent())
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.oneOrMore()
.consecutive()
.greedy()
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns, Collections.singletonList(Lists.newArrayList(a1, a2, a3, b1)));
}
/** Example from docs. */
@Test
public void testSkipToLastWithOneOrMoreAtBeginning() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event a2 = new Event(2, "a2", 0.0);
Event a3 = new Event(3, "a3", 0.0);
Event b1 = new Event(4, "b1", 0.0);
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(a3));
streamEvents.add(new StreamRecord<>(b1));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToLast("a"))
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.oneOrMore()
.consecutive()
.greedy()
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(Lists.newArrayList(a1, a2, a3, b1), Lists.newArrayList(a3, b1)));
}
/** Example from docs. */
@Test
public void testSkipToFirstWithOneOrMoreAtBeginning() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event a2 = new Event(2, "a2", 0.0);
Event a3 = new Event(3, "a3", 0.0);
Event b1 = new Event(4, "b1", 0.0);
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(a3));
streamEvents.add(new StreamRecord<>(b1));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToFirst("a"))
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.oneOrMore()
.consecutive()
.greedy()
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(a1, a2, a3, b1),
Lists.newArrayList(a2, a3, b1),
Lists.newArrayList(a3, b1)));
}
/** Example from docs. */
@Test
public void testNoSkipWithOneOrMoreAtBeginning() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event a2 = new Event(2, "a2", 0.0);
Event a3 = new Event(3, "a3", 0.0);
Event b1 = new Event(4, "b1", 0.0);
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(a3));
streamEvents.add(new StreamRecord<>(b1));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.noSkip())
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.oneOrMore()
.consecutive()
.greedy()
.next("b")
.where(SimpleCondition.of(value -> value.getName().contains("b")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(a1, a2, a3, b1),
Lists.newArrayList(a2, a3, b1),
Lists.newArrayList(a3, b1)));
}
/** Example from docs. */
@Test
public void testSkipToFirstDiscarding() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a = new Event(1, "a", 0.0);
Event b = new Event(2, "b", 0.0);
Event c1 = new Event(3, "c1", 0.0);
Event c2 = new Event(4, "c2", 0.0);
Event c3 = new Event(5, "c3", 0.0);
Event d = new Event(6, "d", 0.0);
streamEvents.add(new StreamRecord<>(a));
streamEvents.add(new StreamRecord<>(b));
streamEvents.add(new StreamRecord<>(c1));
streamEvents.add(new StreamRecord<>(c2));
streamEvents.add(new StreamRecord<>(c3));
streamEvents.add(new StreamRecord<>(d));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a or c", AfterMatchSkipStrategy.skipToFirst("c*"))
.where(
SimpleCondition.of(
value ->
value.getName().contains("a")
|| value.getName().contains("c")))
.followedBy("b or c")
.where(
SimpleCondition.of(
value ->
value.getName().contains("b")
|| value.getName().contains("c")))
.followedBy("c*")
.where(SimpleCondition.of(value -> value.getName().contains("c")))
.oneOrMore()
.greedy()
.followedBy("d")
.where(SimpleCondition.of(value -> value.getName().contains("d")));
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(
Lists.newArrayList(a, b, c1, c2, c3, d),
Lists.newArrayList(c1, c2, c3, d)));
}
@Test
public void testSkipBeforeOtherAlreadyCompleted() throws Exception {
List<StreamRecord<Event>> streamEvents = new ArrayList<>();
Event a1 = new Event(1, "a1", 0.0);
Event c1 = new Event(2, "c1", 0.0);
Event a2 = new Event(3, "a2", 1.0);
Event c2 = new Event(4, "c2", 0.0);
Event b1 = new Event(5, "b1", 1.0);
Event b2 = new Event(6, "b2", 0.0);
streamEvents.add(new StreamRecord<>(a1));
streamEvents.add(new StreamRecord<>(c1));
streamEvents.add(new StreamRecord<>(a2));
streamEvents.add(new StreamRecord<>(c2));
streamEvents.add(new StreamRecord<>(b1));
streamEvents.add(new StreamRecord<>(b2));
Pattern<Event, ?> pattern =
Pattern.<Event>begin("a", AfterMatchSkipStrategy.skipToFirst("c"))
.where(SimpleCondition.of(value -> value.getName().contains("a")))
.followedBy("c")
.where(SimpleCondition.of(value -> value.getName().contains("c")))
.followedBy("b")
.where(
new IterativeCondition<Event>() {
@Override
public boolean filter(Event value, Context<Event> ctx)
throws Exception {
return value.getName().contains("b")
&& ctx.getEventsForPattern("a")
.iterator()
.next()
.getPrice()
== value.getPrice();
}
});
NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();
List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);
comparePatterns(
resultingPatterns,
Lists.newArrayList(Lists.newArrayList(a1, c1, b2), Lists.newArrayList(a2, c2, b1)));
}
@Test
public void testSharedBufferIsProperlyCleared() throws Exception {
List<StreamRecord<Event>> inputEvents = new ArrayList<>();
for (int i = 0; i < 4; i++) {
inputEvents.add(new StreamRecord<>(new Event(1, "a", 1.0), i));
}
SkipPastLastStrategy matchSkipStrategy = AfterMatchSkipStrategy.skipPastLastEvent();
Pattern<Event, ?> pattern =
Pattern.<Event>begin("start", matchSkipStrategy)
.where(SimpleCondition.of(value -> true))
.times(2);
SharedBuffer<Event> sharedBuffer =
TestSharedBuffer.createTestBuffer(Event.createTypeSerializer());
NFATestHarness nfaTestHarness =
NFATestHarness.forPattern(pattern).withSharedBuffer(sharedBuffer).build();
nfaTestHarness.feedRecords(inputEvents);
assertThat(sharedBuffer.isEmpty(), Matchers.is(true));
}
}
| MissedSkipTo |
java | apache__camel | components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/util/EntityUtils.java | {
"start": 2367,
"end": 11686
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(EntityUtils.class);
private static final AtomicLong partNumber = new AtomicLong();
private EntityUtils() {
}
/**
* Generated a unique value for a Multipart boundary string.
* <p>
* The boundary string is composed of the components:
* "----=_Part_<global_part_number>_<newly_created_object's_hashcode>.<current_time>"
* <p>
* The generated string contains only US-ASCII characters and hence is safe for use in RFC822 headers.
*
* @return The generated boundary string.
*/
public static String createBoundaryValue() {
// TODO: ensure boundary string is limited to 70 characters or less.
StringBuilder s = new StringBuilder();
s.append("----=_Part_").append(partNumber.incrementAndGet()).append("_").append(s.hashCode()).append(".")
.append(System.currentTimeMillis());
return s.toString();
}
public static String appendParameter(String headerString, String parameterName, String parameterValue) {
return headerString + "; " + parameterName + "=" + parameterValue;
}
public static String encode(String data, Charset charset, String encoding) throws CamelException {
byte[] encoded = encode(data.getBytes(charset), encoding);
return new String(encoded, charset);
}
public static byte[] encode(byte[] data, String encoding) throws CamelException {
ObjectHelper.notNull(data, "Data");
if (encoding == null) {
// Identity encoding
return data;
}
switch (encoding.toLowerCase()) {
case "base64":
return Base64.encode(data);
case "quoted-printable":
// TODO: implement QuotedPrintableOutputStream
return QuotedPrintableCodec.encodeQuotedPrintable(null, data);
case "binary":
case "7bit":
case "8bit":
// Identity encoding
return data;
default:
throw new CamelException("Unknown encoding: " + encoding);
}
}
public static OutputStream encode(OutputStream os, String encoding) throws CamelException {
ObjectHelper.notNull(os, "Output Stream");
if (encoding == null) {
// Identity encoding
return os;
}
switch (encoding.toLowerCase()) {
case "base64":
return new Base64OutputStream(os, true);
case "quoted-printable":
// TODO: implement QuotedPrintableOutputStream
return new Base64OutputStream(os, true);
case "binary":
case "7bit":
case "8bit":
// Identity encoding
return os;
default:
throw new CamelException("Unknown encoding: " + encoding);
}
}
public static String decode(String data, Charset charset, String encoding) throws CamelException, DecoderException {
byte[] decoded = decode(data.getBytes(charset), encoding);
return new String(decoded, charset);
}
public static byte[] decode(byte[] data, String encoding) throws CamelException, DecoderException {
ObjectHelper.notNull(data, "Data");
if (encoding == null) {
// Identity encoding
return data;
}
switch (encoding.toLowerCase()) {
case "base64":
return Base64.decode(data);
case "quoted-printable":
return QuotedPrintableCodec.decodeQuotedPrintable(data);
case "binary":
case "7bit":
case "8bit":
// Identity encoding
return data;
default:
throw new CamelException("Unknown encoding: " + encoding);
}
}
public static InputStream decode(InputStream is, String encoding) throws CamelException {
ObjectHelper.notNull(is, "Input Stream");
if (encoding == null) {
// Identity encoding
return is;
}
switch (encoding.toLowerCase()) {
case "base64":
return new Base64InputStream(is, false);
case "quoted-printable":
// TODO: implement QuotedPrintableInputStream
return new Base64InputStream(is, false);
case "binary":
case "7bit":
case "8bit":
// Identity encoding
return is;
default:
throw new CamelException("Unknown encoding: " + encoding);
}
}
public static ApplicationEntity createEDIEntity(
byte[] ediMessage, ContentType ediMessageContentType, String contentTransferEncoding, boolean isMainBody,
String filename)
throws CamelException {
ObjectHelper.notNull(ediMessage, "EDI Message");
ObjectHelper.notNull(ediMessageContentType, "EDI Message Content Type");
String charset = null;
if (ediMessageContentType.getCharset() != null) {
charset = ediMessageContentType.getCharset().toString();
}
switch (ediMessageContentType.getMimeType().toLowerCase()) {
case AS2MediaType.APPLICATION_EDIFACT:
return new ApplicationEDIFACTEntity(ediMessage, charset, contentTransferEncoding, isMainBody, filename);
case AS2MediaType.APPLICATION_EDI_X12:
return new ApplicationEDIX12Entity(ediMessage, charset, contentTransferEncoding, isMainBody, filename);
case AS2MediaType.APPLICATION_EDI_CONSENT:
return new ApplicationEDIConsentEntity(ediMessage, charset, contentTransferEncoding, isMainBody, filename);
case AS2MediaType.APPLICATION_XML:
return new ApplicationXMLEntity(ediMessage, charset, contentTransferEncoding, isMainBody, filename);
default:
throw new CamelException("Invalid EDI entity mime type: " + ediMessageContentType.getMimeType());
}
}
public static byte[] getContent(HttpEntity entity) {
try (final ByteArrayOutputStream os = new ByteArrayOutputStream()) {
entity.writeTo(os);
os.flush();
return os.toByteArray();
} catch (Exception e) {
LOG.debug("failed to get content", e);
return null;
}
}
public static boolean hasEntity(HttpMessage message) {
boolean hasEntity = false;
if (message instanceof ClassicHttpRequest httpEntityEnclosingRequest) {
hasEntity = httpEntityEnclosingRequest.getEntity() != null;
} else if (message instanceof ClassicHttpResponse httpResponse) {
hasEntity = httpResponse.getEntity() != null;
}
return hasEntity;
}
public static HttpEntity getMessageEntity(HttpMessage message) {
if (message instanceof ClassicHttpRequest httpEntityEnclosingRequest) {
return httpEntityEnclosingRequest.getEntity();
} else if (message instanceof ClassicHttpResponse httpResponse) {
return httpResponse.getEntity();
}
return null;
}
public static void setMessageEntity(HttpMessage message, HttpEntity entity) {
if (message instanceof ClassicHttpRequest httpEntityEnclosingRequest) {
httpEntityEnclosingRequest.setEntity(entity);
} else if (message instanceof ClassicHttpResponse httpResponse) {
httpResponse.setEntity(entity);
}
String contentType = entity.getContentType();
if (contentType != null) {
message.setHeader(AS2Header.CONTENT_TYPE, contentType);
}
if (entity instanceof MimeEntity mimeEntity) {
Header contentTransferEncodingHeader = mimeEntity.getContentTransferEncoding();
if (contentTransferEncodingHeader != null) {
message.setHeader(contentTransferEncodingHeader);
}
}
long contentLength = entity.getContentLength();
message.setHeader(AS2Header.CONTENT_LENGTH, Long.toString(contentLength));
}
public static byte[] decodeTransferEncodingOfBodyPartContent(
String bodyPartContent,
ContentType contentType,
String bodyPartTransferEncoding)
throws CamelException, DecoderException {
ObjectHelper.notNull(bodyPartContent, "bodyPartContent");
Charset contentCharset = contentType.getCharset();
if (contentCharset == null) {
contentCharset = StandardCharsets.US_ASCII;
}
return decode(bodyPartContent.getBytes(contentCharset), bodyPartTransferEncoding);
}
    /**
     * Writes the entity's content to the given print stream. The stream is not closed.
     *
     * @param  out         the stream to write to
     * @param  entity      the entity whose content is written
     * @throws IOException if writing the entity fails
     */
    public static void printEntity(PrintStream out, HttpEntity entity) throws IOException {
        entity.writeTo(out);
    }
public static String printEntity(HttpEntity entity) throws IOException {
try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos, true, StandardCharsets.UTF_8)) {
printEntity(ps, entity);
return baos.toString(StandardCharsets.UTF_8);
}
}
}
| EntityUtils |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/PojoTypeExtractionTest.java | {
"start": 6587,
"end": 24897
} | class ____<T> {
T packageProtected;
public T getPackageProtected() {
return packageProtected;
}
}
@Test
void testIncorrectPojos() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(IncorrectPojo.class);
assertThat(typeForClass).isInstanceOf(GenericTypeInfo.class);
typeForClass = TypeExtractor.createTypeInfo(WrongCtorPojo.class);
assertThat(typeForClass).isInstanceOf(GenericTypeInfo.class);
}
@Test
void testCorrectPojos() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(BeanStylePojo.class);
assertThat(typeForClass).isInstanceOf(PojoTypeInfo.class);
typeForClass = TypeExtractor.createTypeInfo(TypedPojoGetterSetterCheck.class);
assertThat(typeForClass).isInstanceOf(PojoTypeInfo.class);
}
@Test
void testPojoWC() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(WC.class);
checkWCPojoAsserts(typeForClass);
WC t = new WC();
t.complex = new ComplexNestedClass();
TypeInformation<?> typeForObject = TypeExtractor.getForObject(t);
checkWCPojoAsserts(typeForObject);
}
@SuppressWarnings({"unchecked", "rawtypes"})
private void checkWCPojoAsserts(TypeInformation<?> typeInfo) {
assertThat(typeInfo.isBasicType()).isFalse();
assertThat(typeInfo.isTupleType()).isFalse();
assertThat(typeInfo.getTotalFields()).isEqualTo(10);
assertThat(typeInfo).isInstanceOf(PojoTypeInfo.class);
PojoTypeInfo<?> pojoType = (PojoTypeInfo<?>) typeInfo;
List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
String[] fields = {
"count",
"complex.date",
"complex.collection",
"complex.nothing",
"complex.someFloat",
"complex.someNumberWithÜnicödeNäme",
"complex.valueType",
"complex.word.f0",
"complex.word.f1",
"complex.word.f2"
};
int[] positions = {9, 1, 0, 2, 3, 4, 5, 6, 7, 8};
assertThat(fields).hasSameSizeAs(positions);
for (int i = 0; i < fields.length; i++) {
pojoType.getFlatFields(fields[i], 0, ffd);
assertThat(ffd).as("Too many keys returned").hasSize(1);
assertThat(ffd.get(0).getPosition())
.as("position of field " + fields[i] + " wrong")
.isEqualTo(positions[i]);
ffd.clear();
}
pojoType.getFlatFields("complex.word.*", 0, ffd);
assertThat(ffd).hasSize(3);
// check if it returns 5,6,7
for (FlatFieldDescriptor ffdE : ffd) {
final int pos = ffdE.getPosition();
assertThat(pos).isGreaterThanOrEqualTo(6).isLessThanOrEqualTo(8);
if (pos == 6) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Long.class);
}
if (pos == 7) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Long.class);
}
if (pos == 8) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(String.class);
}
}
ffd.clear();
// scala style full tuple selection for pojos
pojoType.getFlatFields("complex.word._", 0, ffd);
assertThat(ffd).hasSize(3);
ffd.clear();
pojoType.getFlatFields("complex.*", 0, ffd);
assertThat(ffd).hasSize(9);
// check if it returns 0-7
for (FlatFieldDescriptor ffdE : ffd) {
final int pos = ffdE.getPosition();
assertThat(ffdE.getPosition()).isGreaterThanOrEqualTo(0).isLessThanOrEqualTo(8);
if (pos == 0) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(List.class);
}
if (pos == 1) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Date.class);
}
if (pos == 2) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Object.class);
}
if (pos == 3) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Float.class);
}
if (pos == 4) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Integer.class);
}
if (pos == 5) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(MyValue.class);
}
if (pos == 6) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Long.class);
}
if (pos == 7) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Long.class);
}
if (pos == 8) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(String.class);
}
if (pos == 9) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Integer.class);
}
}
ffd.clear();
pojoType.getFlatFields("*", 0, ffd);
assertThat(ffd).hasSize(10);
// check if it returns 0-8
for (FlatFieldDescriptor ffdE : ffd) {
assertThat(ffdE.getPosition() <= 9).isTrue();
assertThat(0 <= ffdE.getPosition()).isTrue();
if (ffdE.getPosition() == 9) {
assertThat(ffdE.getType().getTypeClass()).isEqualTo(Integer.class);
}
}
ffd.clear();
TypeInformation<?> typeComplexNested = pojoType.getTypeAt(0); // ComplexNestedClass complex
assertThat(typeComplexNested).isInstanceOf(PojoTypeInfo.class);
assertThat(typeComplexNested.getArity()).isEqualTo(7);
assertThat(typeComplexNested.getTotalFields()).isEqualTo(9);
PojoTypeInfo<?> pojoTypeComplexNested = (PojoTypeInfo<?>) typeComplexNested;
boolean dateSeen = false,
intSeen = false,
floatSeen = false,
tupleSeen = false,
objectSeen = false,
writableSeen = false,
collectionSeen = false;
for (int i = 0; i < pojoTypeComplexNested.getArity(); i++) {
PojoField field = pojoTypeComplexNested.getPojoFieldAt(i);
String name = field.getField().getName();
if (name.equals("date")) {
if (dateSeen) {
fail("already seen");
}
dateSeen = true;
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.DATE_TYPE_INFO);
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(Date.class);
} else if (name.equals("someNumberWithÜnicödeNäme")) {
if (intSeen) {
fail("already seen");
}
intSeen = true;
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(Integer.class);
} else if (name.equals("someFloat")) {
if (floatSeen) {
fail("already seen");
}
floatSeen = true;
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.FLOAT_TYPE_INFO);
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(Float.class);
} else if (name.equals("word")) {
if (tupleSeen) {
fail("already seen");
}
tupleSeen = true;
assertThat(field.getTypeInformation() instanceof TupleTypeInfo<?>).isTrue();
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(Tuple3.class);
// do some more advanced checks on the tuple
TupleTypeInfo<?> tupleTypeFromComplexNested =
(TupleTypeInfo<?>) field.getTypeInformation();
assertThat(tupleTypeFromComplexNested.getTypeAt(0))
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(tupleTypeFromComplexNested.getTypeAt(1))
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(tupleTypeFromComplexNested.getTypeAt(2))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
} else if (name.equals("nothing")) {
if (objectSeen) {
fail("already seen");
}
objectSeen = true;
assertThat(field.getTypeInformation())
.isEqualTo(new GenericTypeInfo<Object>(Object.class));
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(Object.class);
} else if (name.equals("valueType")) {
if (writableSeen) {
fail("already seen");
}
writableSeen = true;
assertThat(field.getTypeInformation())
.isEqualTo(new ValueTypeInfo<>(MyValue.class));
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(MyValue.class);
} else if (name.equals("collection")) {
if (collectionSeen) {
fail("already seen");
}
collectionSeen = true;
assertThat(field.getTypeInformation())
.isEqualTo(new NullableListTypeInfo<>(String.class));
} else {
fail("Unexpected field " + field);
}
}
assertThat(dateSeen).as("Field was not present").isTrue();
assertThat(intSeen).as("Field was not present").isTrue();
assertThat(floatSeen).as("Field was not present").isTrue();
assertThat(tupleSeen).as("Field was not present").isTrue();
assertThat(objectSeen).as("Field was not present").isTrue();
assertThat(writableSeen).as("Field was not present").isTrue();
assertThat(collectionSeen).as("Field was not present").isTrue();
TypeInformation<?> typeAtOne = pojoType.getTypeAt(1); // int count
assertThat(typeAtOne).isInstanceOf(BasicTypeInfo.class);
assertThat(typeInfo.getTypeClass()).isEqualTo(WC.class);
assertThat(typeInfo.getArity()).isEqualTo(2);
}
@Test
void testPojoAllPublic() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(AllPublic.class);
checkAllPublicAsserts(typeForClass);
TypeInformation<?> typeForObject = TypeExtractor.getForObject(new AllPublic());
checkAllPublicAsserts(typeForObject);
}
private void checkAllPublicAsserts(TypeInformation<?> typeInformation) {
assertThat(typeInformation).isInstanceOf(PojoTypeInfo.class);
assertThat(typeInformation.getArity()).isEqualTo(10);
assertThat(typeInformation.getTotalFields()).isEqualTo(12);
// check if the three additional fields are identified correctly
boolean arrayListSeen = false, multisetSeen = false, strArraySeen = false;
PojoTypeInfo<?> pojoTypeForClass = (PojoTypeInfo<?>) typeInformation;
for (int i = 0; i < pojoTypeForClass.getArity(); i++) {
PojoField field = pojoTypeForClass.getPojoFieldAt(i);
String name = field.getField().getName();
if (name.equals("somethingFancy")) {
if (arrayListSeen) {
fail("already seen");
}
arrayListSeen = true;
assertThat(field.getTypeInformation() instanceof GenericTypeInfo).isTrue();
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(ArrayList.class);
} else if (name.equals("fancyIds")) {
if (multisetSeen) {
fail("already seen");
}
multisetSeen = true;
assertThat(field.getTypeInformation() instanceof GenericTypeInfo).isTrue();
assertThat(field.getTypeInformation().getTypeClass())
.isEqualTo(FancyCollectionSubtype.class);
} else if (name.equals("fancyArray")) {
if (strArraySeen) {
fail("already seen");
}
strArraySeen = true;
assertThat(field.getTypeInformation())
.isEqualTo(BasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO);
assertThat(field.getTypeInformation().getTypeClass()).isEqualTo(String[].class);
} else if (Arrays.asList(
"date",
"someNumberWithÜnicödeNäme",
"someFloat",
"word",
"nothing",
"valueType",
"collection")
.contains(name)) {
// ignore these, they are inherited from the ComplexNestedClass
} else {
fail("Unexpected field " + field);
}
}
assertThat(arrayListSeen).as("Field was not present").isTrue();
assertThat(multisetSeen).as("Field was not present").isTrue();
assertThat(strArraySeen).as("Field was not present").isTrue();
}
@Test
void testPojoExtendingTuple() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(FromTuple.class);
checkFromTuplePojo(typeForClass);
FromTuple ft = new FromTuple();
ft.f0 = "";
ft.f1 = "";
ft.f2 = 0L;
TypeInformation<?> typeForObject = TypeExtractor.getForObject(ft);
checkFromTuplePojo(typeForObject);
}
private void checkFromTuplePojo(TypeInformation<?> typeInformation) {
assertThat(typeInformation).isInstanceOf(PojoTypeInfo.class);
assertThat(typeInformation.getTotalFields()).isEqualTo(4);
PojoTypeInfo<?> pojoTypeForClass = (PojoTypeInfo<?>) typeInformation;
for (int i = 0; i < pojoTypeForClass.getArity(); i++) {
PojoField field = pojoTypeForClass.getPojoFieldAt(i);
String name = field.getField().getName();
if (name.equals("special")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
} else if (name.equals("f0") || name.equals("f1")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
} else if (name.equals("f2")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
} else {
fail("unexpected field");
}
}
}
@Test
void testPojoWithGenerics() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(ParentSettingGenerics.class);
assertThat(typeForClass).isInstanceOf(PojoTypeInfo.class);
PojoTypeInfo<?> pojoTypeForClass = (PojoTypeInfo<?>) typeForClass;
for (int i = 0; i < pojoTypeForClass.getArity(); i++) {
PojoField field = pojoTypeForClass.getPojoFieldAt(i);
String name = field.getField().getName();
if (name.equals("field1")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
} else if (name.equals("field2")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
} else if (name.equals("field3")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
} else if (name.equals("key")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
} else {
fail("Unexpected field " + field);
}
}
}
/** Test if the TypeExtractor is accepting untyped generics, making them GenericTypes */
@Test
void testPojoWithGenericsSomeFieldsGeneric() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(PojoWithGenerics.class);
assertThat(typeForClass).isInstanceOf(PojoTypeInfo.class);
PojoTypeInfo<?> pojoTypeForClass = (PojoTypeInfo<?>) typeForClass;
for (int i = 0; i < pojoTypeForClass.getArity(); i++) {
PojoField field = pojoTypeForClass.getPojoFieldAt(i);
String name = field.getField().getName();
if (name.equals("field1")) {
assertThat(field.getTypeInformation())
.isEqualTo(new GenericTypeInfo<Object>(Object.class));
} else if (name.equals("field2")) {
assertThat(field.getTypeInformation())
.isEqualTo(new GenericTypeInfo<Object>(Object.class));
} else if (name.equals("key")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
} else {
fail("Unexpected field " + field);
}
}
}
@Test
void testPojoWithComplexHierarchy() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(ComplexHierarchyTop.class);
assertThat(typeForClass).isInstanceOf(PojoTypeInfo.class);
PojoTypeInfo<?> pojoTypeForClass = (PojoTypeInfo<?>) typeForClass;
for (int i = 0; i < pojoTypeForClass.getArity(); i++) {
PojoField field = pojoTypeForClass.getPojoFieldAt(i);
String name = field.getField().getName();
if (name.equals("field1")) {
assertThat(field.getTypeInformation() instanceof PojoTypeInfo<?>)
.isTrue(); // From tuple is pojo (not tuple type!)
} else if (name.equals("field2")) {
assertThat(field.getTypeInformation() instanceof TupleTypeInfo<?>).isTrue();
assertThat(((TupleTypeInfo<?>) field.getTypeInformation()).getTypeAt(0))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
} else if (name.equals("key")) {
assertThat(field.getTypeInformation()).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
} else {
fail("Unexpected field " + field);
}
}
}
public static | GenericPojoGetterSetterCheck |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/context/visitor/PackageConfigurationImportVisitor.java | {
"start": 1308,
"end": 2231
} | class ____ implements PackageElementVisitor<Configuration> {
@Override
public void visitPackage(PackageElement packageElement, VisitorContext context) throws ProcessingException {
var writer = new BeanConfigurationWriter(
packageElement.getName(),
packageElement,
packageElement.getAnnotationMetadata(),
context
);
try {
writer.accept(context);
} catch (IOException e) {
throw new ProcessingException(packageElement, "I/O error occurred writing Configuration for package [" + packageElement.getName() + "]: " + e.getMessage(), e);
}
}
@Override
public TypeElementVisitor.@NonNull VisitorKind getVisitorKind() {
return TypeElementVisitor.VisitorKind.ISOLATING;
}
@Override
public int getOrder() {
return HIGHEST_PRECEDENCE;
}
}
| PackageConfigurationImportVisitor |
java | netty__netty | codec-http2/src/test/java/io/netty/handler/codec/http2/Http2FrameInboundWriter.java | {
"start": 1418,
"end": 4437
} | class ____ {
private final ChannelHandlerContext ctx;
private final Http2FrameWriter writer;
Http2FrameInboundWriter(EmbeddedChannel channel) {
this(channel, new DefaultHttp2FrameWriter());
}
Http2FrameInboundWriter(EmbeddedChannel channel, Http2FrameWriter writer) {
ctx = new WriteInboundChannelHandlerContext(channel);
this.writer = writer;
}
void writeInboundData(int streamId, ByteBuf data, int padding, boolean endStream) {
writer.writeData(ctx, streamId, data, padding, endStream, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundHeaders(int streamId, Http2Headers headers,
int padding, boolean endStream) {
writer.writeHeaders(ctx, streamId, headers, padding, endStream, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundHeaders(int streamId, Http2Headers headers,
int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) {
writer.writeHeaders(ctx, streamId, headers, streamDependency,
weight, exclusive, padding, endStream, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundPriority(int streamId, int streamDependency,
short weight, boolean exclusive) {
writer.writePriority(ctx, streamId, streamDependency, weight,
exclusive, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundRstStream(int streamId, long errorCode) {
writer.writeRstStream(ctx, streamId, errorCode, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundSettings(Http2Settings settings) {
writer.writeSettings(ctx, settings, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundSettingsAck() {
writer.writeSettingsAck(ctx, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundPing(boolean ack, long data) {
writer.writePing(ctx, ack, data, ctx.newPromise()).syncUninterruptibly();
}
void writePushPromise(int streamId, int promisedStreamId,
Http2Headers headers, int padding) {
writer.writePushPromise(ctx, streamId, promisedStreamId,
headers, padding, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundGoAway(int lastStreamId, long errorCode, ByteBuf debugData) {
writer.writeGoAway(ctx, lastStreamId, errorCode, debugData, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundWindowUpdate(int streamId, int windowSizeIncrement) {
writer.writeWindowUpdate(ctx, streamId, windowSizeIncrement, ctx.newPromise()).syncUninterruptibly();
}
void writeInboundFrame(byte frameType, int streamId,
Http2Flags flags, ByteBuf payload) {
writer.writeFrame(ctx, frameType, streamId, flags, payload, ctx.newPromise()).syncUninterruptibly();
}
private static final | Http2FrameInboundWriter |
java | google__guava | android/guava/src/com/google/common/io/CharSource.java | {
"start": 3862,
"end": 20119
} | class ____ {
/** Constructor for use by subclasses. */
protected CharSource() {}
/**
* Returns a {@link ByteSource} view of this char source that encodes chars read from this source
* as bytes using the given {@link Charset}.
*
* <p>If {@link ByteSource#asCharSource} is called on the returned source with the same charset,
* the default implementation of this method will ensure that the original {@code CharSource} is
* returned, rather than round-trip encoding. Subclasses that override this method should behave
* the same way.
*
* @since 20.0
*/
public ByteSource asByteSource(Charset charset) {
return new AsByteSource(charset);
}
/**
* Opens a new {@link Reader} for reading from this source. This method returns a new, independent
* reader each time it is called.
*
* <p>The caller is responsible for ensuring that the returned reader is closed.
*
* @throws IOException if an I/O error occurs while opening the reader
*/
public abstract Reader openStream() throws IOException;
/**
* Opens a new {@link BufferedReader} for reading from this source. This method returns a new,
* independent reader each time it is called.
*
* <p>The caller is responsible for ensuring that the returned reader is closed.
*
* @throws IOException if an I/O error occurs while of opening the reader
*/
public BufferedReader openBufferedStream() throws IOException {
Reader reader = openStream();
return (reader instanceof BufferedReader)
? (BufferedReader) reader
: new BufferedReader(reader);
}
/**
* Opens a new {@link Stream} for reading text one line at a time from this source. This method
* returns a new, independent stream each time it is called.
*
* <p>The returned stream is lazy and only reads from the source in the terminal operation. If an
* I/O error occurs while the stream is reading from the source or when the stream is closed, an
* {@link UncheckedIOException} is thrown.
*
* <p>Like {@link BufferedReader#readLine()}, this method considers a line to be a sequence of
* text that is terminated by (but does not include) one of {@code \r\n}, {@code \r} or {@code
* \n}. If the source's content does not end in a line termination sequence, it is treated as if
* it does.
*
* <p>The caller is responsible for ensuring that the returned stream is closed. For example:
*
* {@snippet :
* try (Stream<String> lines = source.lines()) {
* lines.map(...)
* .filter(...)
* .forEach(...);
* }
* }
*
* @throws IOException if an I/O error occurs while opening the stream
* @since 33.4.0 (but since 22.0 in the JRE flavor)
*/
@MustBeClosed
// If users use this when they shouldn't, we hope that NewApi will catch subsequent Stream calls.
@IgnoreJRERequirement
public Stream<String> lines() throws IOException {
BufferedReader reader = openBufferedStream();
return reader.lines().onClose(() -> closeUnchecked(reader));
}
@IgnoreJRERequirement // helper for lines()
/*
* If we make these calls inline inside the lambda inside lines(), we get an Animal Sniffer error,
* despite the @IgnoreJRERequirement annotation there. For details, see ImmutableSortedMultiset.
*/
private static void closeUnchecked(Closeable closeable) {
try {
closeable.close();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Returns the size of this source in chars, if the size can be easily determined without actually
* opening the data stream.
*
* <p>The default implementation returns {@link Optional#absent}. Some sources, such as a {@code
* CharSequence}, may return a non-absent value. Note that in such cases, it is <i>possible</i>
* that this method will return a different number of chars than would be returned by reading all
* of the chars.
*
* <p>Additionally, for mutable sources such as {@code StringBuilder}s, a subsequent read may
* return a different number of chars if the contents are changed.
*
* @since 19.0
*/
public Optional<Long> lengthIfKnown() {
return Optional.absent();
}
/**
* Returns the length of this source in chars, even if doing so requires opening and traversing an
* entire stream. To avoid a potentially expensive operation, see {@link #lengthIfKnown}.
*
* <p>The default implementation calls {@link #lengthIfKnown} and returns the value if present. If
* absent, it will fall back to a heavyweight operation that will open a stream, {@link
* Reader#skip(long) skip} to the end of the stream, and return the total number of chars that
* were skipped.
*
* <p>Note that for sources that implement {@link #lengthIfKnown} to provide a more efficient
* implementation, it is <i>possible</i> that this method will return a different number of chars
* than would be returned by reading all of the chars.
*
* <p>In either case, for mutable sources such as files, a subsequent read may return a different
* number of chars if the contents are changed.
*
* @throws IOException if an I/O error occurs while reading the length of this source
* @since 19.0
*/
public long length() throws IOException {
Optional<Long> lengthIfKnown = lengthIfKnown();
if (lengthIfKnown.isPresent()) {
return lengthIfKnown.get();
}
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
return countBySkipping(reader);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
private static long countBySkipping(Reader reader) throws IOException {
long count = 0;
long read;
while ((read = reader.skip(Long.MAX_VALUE)) != 0) {
count += read;
}
return count;
}
/**
* Appends the contents of this source to the given {@link Appendable} (such as a {@link Writer}).
* Does not close {@code appendable} if it is {@code Closeable}.
*
* @return the number of characters copied
* @throws IOException if an I/O error occurs while reading from this source or writing to {@code
* appendable}
*/
@CanIgnoreReturnValue
public long copyTo(Appendable appendable) throws IOException {
checkNotNull(appendable);
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
return CharStreams.copy(reader, appendable);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Copies the contents of this source to the given sink.
*
* @return the number of characters copied
* @throws IOException if an I/O error occurs while reading from this source or writing to {@code
* sink}
*/
@CanIgnoreReturnValue
public long copyTo(CharSink sink) throws IOException {
checkNotNull(sink);
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
Writer writer = closer.register(sink.openStream());
return CharStreams.copy(reader, writer);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Reads the contents of this source as a string.
*
* @throws IOException if an I/O error occurs while reading from this source
*/
public String read() throws IOException {
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
return CharStreams.toString(reader);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Reads the first line of this source as a string. Returns {@code null} if this source is empty.
*
* <p>Like {@link BufferedReader#readLine()}, this method considers a line to be a sequence of
* text that is terminated by (but does not include) one of {@code \r\n}, {@code \r} or {@code
* \n}. If the source's content does not end in a line termination sequence, it is treated as if
* it does.
*
* @throws IOException if an I/O error occurs while reading from this source
*/
public @Nullable String readFirstLine() throws IOException {
Closer closer = Closer.create();
try {
BufferedReader reader = closer.register(openBufferedStream());
return reader.readLine();
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Reads all the lines of this source as a list of strings. The returned list will be empty if
* this source is empty.
*
* <p>Like {@link BufferedReader#readLine()}, this method considers a line to be a sequence of
* text that is terminated by (but does not include) one of {@code \r\n}, {@code \r} or {@code
* \n}. If the source's content does not end in a line termination sequence, it is treated as if
* it does.
*
* @throws IOException if an I/O error occurs while reading from this source
*/
public ImmutableList<String> readLines() throws IOException {
Closer closer = Closer.create();
try {
BufferedReader reader = closer.register(openBufferedStream());
List<String> result = new ArrayList<>();
String line;
while ((line = reader.readLine()) != null) {
result.add(line);
}
return ImmutableList.copyOf(result);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Reads lines of text from this source, processing each line as it is read using the given {@link
* LineProcessor processor}. Stops when all lines have been processed or the processor returns
* {@code false} and returns the result produced by the processor.
*
* <p>Like {@link BufferedReader#readLine()}, this method considers a line to be a sequence of
* text that is terminated by (but does not include) one of {@code \r\n}, {@code \r} or {@code
* \n}. If the source's content does not end in a line termination sequence, it is treated as if
* it does.
*
* @throws IOException if an I/O error occurs while reading from this source or if {@code
* processor} throws an {@code IOException}
* @since 16.0
*/
@CanIgnoreReturnValue // some processors won't return a useful result
@ParametricNullness
public <T extends @Nullable Object> T readLines(LineProcessor<T> processor) throws IOException {
checkNotNull(processor);
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
return CharStreams.readLines(reader, processor);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Reads all lines of text from this source, running the given {@code action} for each line as it
* is read.
*
* <p>Like {@link BufferedReader#readLine()}, this method considers a line to be a sequence of
* text that is terminated by (but does not include) one of {@code \r\n}, {@code \r} or {@code
* \n}. If the source's content does not end in a line termination sequence, it is treated as if
* it does.
*
* @throws IOException if an I/O error occurs while reading from this source or if {@code action}
* throws an {@code UncheckedIOException}
* @since 33.4.0 (but since 22.0 in the JRE flavor)
*/
/*
* We have to rely on users not to call this without library desugaring, as NewApi won't flag
* Consumer creation.
*/
@IgnoreJRERequirement
public void forEachLine(Consumer<? super String> action) throws IOException {
try (Stream<String> lines = lines()) {
// The lines should be ordered regardless in most cases, but use forEachOrdered to be sure
lines.forEachOrdered(action);
} catch (UncheckedIOException e) {
throw e.getCause();
}
}
/**
* Returns whether the source has zero chars. The default implementation first checks {@link
* #lengthIfKnown}, returning true if it's known to be zero and false if it's known to be
* non-zero. If the length is not known, it falls back to opening a stream and checking for EOF.
*
* <p>Note that, in cases where {@code lengthIfKnown} returns zero, it is <i>possible</i> that
* chars are actually available for reading. This means that a source may return {@code true} from
* {@code isEmpty()} despite having readable content.
*
* @throws IOException if an I/O error occurs
* @since 15.0
*/
public boolean isEmpty() throws IOException {
Optional<Long> lengthIfKnown = lengthIfKnown();
if (lengthIfKnown.isPresent()) {
return lengthIfKnown.get() == 0L;
}
Closer closer = Closer.create();
try {
Reader reader = closer.register(openStream());
return reader.read() == -1;
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
/**
* Concatenates multiple {@link CharSource} instances into a single source. Streams returned from
* the source will contain the concatenated data from the streams of the underlying sources.
*
* <p>Only one underlying stream will be open at a time. Closing the concatenated stream will
* close the open underlying stream.
*
* @param sources the sources to concatenate
* @return a {@code CharSource} containing the concatenated data
* @since 15.0
*/
public static CharSource concat(Iterable<? extends CharSource> sources) {
return new ConcatenatedCharSource(sources);
}
/**
* Concatenates multiple {@link CharSource} instances into a single source. Streams returned from
* the source will contain the concatenated data from the streams of the underlying sources.
*
* <p>Only one underlying stream will be open at a time. Closing the concatenated stream will
* close the open underlying stream.
*
* <p>Note: The input {@code Iterator} will be copied to an {@code ImmutableList} when this method
* is called. This will fail if the iterator is infinite and may cause problems if the iterator
* eagerly fetches data for each source when iterated (rather than producing sources that only
* load data through their streams). Prefer using the {@link #concat(Iterable)} overload if
* possible.
*
* @param sources the sources to concatenate
* @return a {@code CharSource} containing the concatenated data
* @throws NullPointerException if any of {@code sources} is {@code null}
* @since 15.0
*/
public static CharSource concat(Iterator<? extends CharSource> sources) {
return concat(ImmutableList.copyOf(sources));
}
/**
* Concatenates multiple {@link CharSource} instances into a single source. Streams returned from
* the source will contain the concatenated data from the streams of the underlying sources.
*
* <p>Only one underlying stream will be open at a time. Closing the concatenated stream will
* close the open underlying stream.
*
* @param sources the sources to concatenate
* @return a {@code CharSource} containing the concatenated data
* @throws NullPointerException if any of {@code sources} is {@code null}
* @since 15.0
*/
public static CharSource concat(CharSource... sources) {
return concat(ImmutableList.copyOf(sources));
}
/**
* Returns a view of the given character sequence as a {@link CharSource}. The behavior of the
* returned {@code CharSource} and any {@code Reader} instances created by it is unspecified if
* the {@code charSequence} is mutated while it is being read, so don't do that.
*
* @since 15.0 (since 14.0 as {@code CharStreams.asCharSource(String)})
*/
public static CharSource wrap(CharSequence charSequence) {
return charSequence instanceof String
? new StringCharSource((String) charSequence)
: new CharSequenceCharSource(charSequence);
}
/**
* Returns an immutable {@link CharSource} that contains no characters.
*
* @since 15.0
*/
public static CharSource empty() {
return EmptyCharSource.INSTANCE;
}
/** A byte source that reads chars from this source and encodes them as bytes using a charset. */
private final | CharSource |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/converter/StaticFallbackConverterTest.java | {
"start": 1198,
"end": 2498
} | class ____ extends ContextTestSupport {
@Test
public void testStaticFallbackConverter() {
Exchange exchange = new DefaultExchange(context);
TimeZone tz = TimeZone.getDefault();
String money = context.getTypeConverter().convertTo(String.class, exchange, tz);
assertEquals("Time talks", money);
}
@Test
public void testStaticFallbackMandatoryConverter() throws Exception {
Exchange exchange = new DefaultExchange(context);
TimeZone tz = TimeZone.getDefault();
String money = context.getTypeConverter().mandatoryConvertTo(String.class, exchange, tz);
assertEquals("Time talks", money);
}
@Test
public void testStaticFallbackMandatoryFailed() {
Exchange exchange = new DefaultExchange(context);
assertThrows(NoTypeConversionAvailableException.class,
() -> context.getTypeConverter().mandatoryConvertTo(Date.class, exchange, new Timestamp(0)),
"Should have thrown an exception");
}
@Test
public void testStaticFallbackFailed() {
Exchange exchange = new DefaultExchange(context);
Date out = context.getTypeConverter().convertTo(Date.class, exchange, new Timestamp(0));
assertNull(out);
}
}
| StaticFallbackConverterTest |
java | apache__camel | test-infra/camel-test-infra-artemis/src/main/java/org/apache/camel/test/infra/artemis/services/ArtemisVMInfraService.java | {
"start": 1792,
"end": 3216
} | class ____ extends ArtemisVMInfraService {
public ReusableArtemisVMService(int port) {
super(port);
}
@Override
protected int computeBrokerId() {
return 0;
}
}
@Override
protected Configuration configure(Configuration configuration, int port, int brokerId) {
brokerURL = "vm://" + brokerId;
LOG.info("Creating a new Artemis VM-based broker");
configuration.setPersistenceEnabled(false);
configuration.setJournalMinFiles(10);
configuration.setSecurityEnabled(false);
try {
configuration.addAcceptorConfiguration("in-vm", "vm://" + brokerId);
} catch (Exception e) {
LOG.warn(e.getMessage(), e);
throw new ArtemisRunException("vm acceptor cannot be configured", e);
}
configuration.addAddressSetting("#",
new AddressSettings()
.setAddressFullMessagePolicy(AddressFullMessagePolicy.FAIL)
.setAutoDeleteQueues(false)
.setDeadLetterAddress(SimpleString.of("DLQ"))
.setExpiryAddress(SimpleString.of("ExpiryQueue")));
return configuration;
}
@Override
public String serviceAddress() {
return brokerURL;
}
@Override
public int brokerPort() {
return 0;
}
}
| ReusableArtemisVMService |
java | quarkusio__quarkus | integration-tests/devtools/src/test/java/io/quarkus/devtools/codestarts/quarkus/GoogleCloudFunctionsCodestartTest.java | {
"start": 376,
"end": 1530
} | class ____ {
@RegisterExtension
public static QuarkusCodestartTest codestartTest = QuarkusCodestartTest.builder()
.codestarts("google-cloud-functions")
.languages(JAVA)
.build();
@Test
void testContent() throws Throwable {
codestartTest.checkGeneratedSource("org.acme.googlecloudfunctions.HelloWorldCloudEventsFunction");
codestartTest.checkGeneratedSource("org.acme.googlecloudfunctions.HelloWorldBackgroundFunction");
codestartTest.checkGeneratedSource("org.acme.googlecloudfunctions.HelloWorldHttpFunction");
codestartTest.checkGeneratedTestSource("org.acme.googlecloudfunctions.HelloWorldCloudEventsFunctionTest");
codestartTest.checkGeneratedTestSource("org.acme.googlecloudfunctions.HelloWorldBackgroundFunctionTest");
codestartTest.checkGeneratedTestSource("org.acme.googlecloudfunctions.HelloWorldHttpFunctionTest");
}
@Test
@EnabledIfSystemProperty(named = "build-projects", matches = "true")
void buildAllProjectsForLocalUse() throws Throwable {
codestartTest.buildAllProjects();
}
}
| GoogleCloudFunctionsCodestartTest |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ext/DOMDeserializer.java | {
"start": 3204,
"end": 3515
} | class ____ extends DOMDeserializer<Document> {
public DocumentDeserializer() { super(Document.class); }
@Override
public Document _deserialize(String value, DeserializationContext ctxt) throws IllegalArgumentException {
return parse(value);
}
}
}
| DocumentDeserializer |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/JavaBeanPropertyDescriptorTests.java | {
"start": 1426,
"end": 10973
} | class ____ extends PropertyDescriptorTests {
@Test
void javaBeanSimpleProperty() {
process(SimpleTypeProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleTypeProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "myString");
assertThat(property.getName()).isEqualTo("myString");
assertThat(property.getGetter().getSimpleName()).hasToString("getMyString");
assertThat(property.getSetter().getSimpleName()).hasToString("setMyString");
assertThat(property.isProperty(metadataEnv)).isTrue();
assertThat(property.isNested(metadataEnv)).isFalse();
});
}
@Test
void javaBeanCollectionProperty() {
process(SimpleCollectionProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleCollectionProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "doubles");
assertThat(property.getName()).isEqualTo("doubles");
assertThat(property.getGetter().getSimpleName()).hasToString("getDoubles");
assertThat(property.getSetter()).isNull();
assertThat(property.isProperty(metadataEnv)).isTrue();
assertThat(property.isNested(metadataEnv)).isFalse();
});
}
@Test
void javaBeanNestedPropertySameClass() {
process(InnerClassProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(InnerClassProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "first");
assertThat(property.getName()).isEqualTo("first");
assertThat(property.getGetter().getSimpleName()).hasToString("getFirst");
assertThat(property.getSetter()).isNull();
assertThat(property.isProperty(metadataEnv)).isFalse();
assertThat(property.isNested(metadataEnv)).isTrue();
});
}
@Test
void javaBeanNestedPropertyWithAnnotation() {
process(InnerClassProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(InnerClassProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "third");
assertThat(property.getName()).isEqualTo("third");
assertThat(property.getGetter().getSimpleName()).hasToString("getThird");
assertThat(property.getSetter()).isNull();
assertThat(property.isProperty(metadataEnv)).isFalse();
assertThat(property.isNested(metadataEnv)).isTrue();
});
}
@Test
void javaBeanSimplePropertyWithOnlyGetterShouldNotBeExposed() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
ExecutableElement getter = getMethod(ownerElement, "getSize");
VariableElement field = getField(ownerElement, "size");
JavaBeanPropertyDescriptor property = new JavaBeanPropertyDescriptor("size", field.asType(), ownerElement,
getter, null, field, getter);
assertThat(property.getName()).isEqualTo("size");
assertThat(property.getGetter().getSimpleName()).hasToString("getSize");
assertThat(property.getSetter()).isNull();
assertThat(property.isProperty(metadataEnv)).isFalse();
assertThat(property.isNested(metadataEnv)).isFalse();
});
}
@Test
void javaBeanSimplePropertyWithOnlySetterShouldNotBeExposed() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
VariableElement field = getField(ownerElement, "counter");
JavaBeanPropertyDescriptor property = new JavaBeanPropertyDescriptor("counter", field.asType(),
ownerElement, null, getMethod(ownerElement, "setCounter"), field, null);
assertThat(property.getName()).isEqualTo("counter");
assertThat(property.getGetter()).isNull();
assertThat(property.getSetter().getSimpleName()).hasToString("setCounter");
assertThat(property.isProperty(metadataEnv)).isFalse();
assertThat(property.isNested(metadataEnv)).isFalse();
});
}
@Test
void javaBeanMetadataSimpleProperty() {
process(SimpleTypeProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleTypeProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "myString");
assertItemMetadata(metadataEnv, property).isProperty()
.hasName("test.my-string")
.hasType(String.class)
.hasSourceType(SimpleTypeProperties.class)
.hasNoDescription()
.isNotDeprecated();
});
}
@Test
void javaBeanMetadataCollectionProperty() {
process(SimpleCollectionProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleCollectionProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "doubles");
assertItemMetadata(metadataEnv, property).isProperty()
.hasName("test.doubles")
.hasType("java.util.List<java.lang.Double>")
.hasSourceType(SimpleCollectionProperties.class)
.hasNoDescription()
.isNotDeprecated();
});
}
@Test
void javaBeanMetadataNestedGroup() {
process(InnerClassProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(InnerClassProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "first");
assertItemMetadata(metadataEnv, property).isGroup()
.hasName("test.first")
.hasType("org.springframework.boot.configurationsample.specific.InnerClassProperties$Foo")
.hasSourceType(InnerClassProperties.class)
.hasSourceMethod("getFirst()")
.hasNoDescription()
.isNotDeprecated();
});
}
@Test
void javaBeanMetadataNotACandidatePropertyShouldReturnNull() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
VariableElement field = getField(ownerElement, "counter");
JavaBeanPropertyDescriptor property = new JavaBeanPropertyDescriptor("counter", field.asType(),
ownerElement, null, getMethod(ownerElement, "setCounter"), field, null);
assertThat(property.resolveItemMetadata("test", metadataEnv)).isNull();
});
}
@Test
@SuppressWarnings("deprecation")
void javaBeanDeprecatedPropertyOnClass() {
process(org.springframework.boot.configurationsample.simple.DeprecatedProperties.class,
(roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv
.getRootElement(org.springframework.boot.configurationsample.simple.DeprecatedProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "name");
assertItemMetadata(metadataEnv, property).isProperty().isDeprecatedWithNoInformation();
});
}
@Test
void javaBeanMetadataDeprecatedPropertyWithAnnotation() {
process(DeprecatedSingleProperty.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(DeprecatedSingleProperty.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "name");
assertItemMetadata(metadataEnv, property).isProperty()
.isDeprecatedWithReason("renamed")
.isDeprecatedWithReplacement("singledeprecated.new-name");
});
}
@Test
void javaBeanDeprecatedPropertyOnGetter() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "flag", "isFlag", "setFlag");
assertItemMetadata(metadataEnv, property).isProperty().isDeprecatedWithNoInformation();
});
}
@Test
void javaBeanDeprecatedPropertyOnSetter() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "theName");
assertItemMetadata(metadataEnv, property).isProperty().isDeprecatedWithNoInformation();
});
}
@Test
void javaBeanPropertyWithDescription() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "theName");
assertItemMetadata(metadataEnv, property).isProperty()
.hasDescription("The name of this simple properties.");
});
}
@Test
void javaBeanPropertyWithDefaultValue() {
process(SimpleProperties.class, (roundEnv, metadataEnv) -> {
TypeElement ownerElement = roundEnv.getRootElement(SimpleProperties.class);
JavaBeanPropertyDescriptor property = createPropertyDescriptor(ownerElement, "theName");
assertItemMetadata(metadataEnv, property).isProperty().hasDefaultValue("boot");
});
}
protected JavaBeanPropertyDescriptor createPropertyDescriptor(TypeElement ownerElement, String name) {
return createPropertyDescriptor(ownerElement, name, createAccessorMethodName("get", name),
createAccessorMethodName("set", name));
}
protected JavaBeanPropertyDescriptor createPropertyDescriptor(TypeElement ownerElement, String name,
String getterName, String setterName) {
ExecutableElement getter = getMethod(ownerElement, getterName);
ExecutableElement setter = getMethod(ownerElement, setterName);
VariableElement field = getField(ownerElement, name);
return new JavaBeanPropertyDescriptor(name, getter.getReturnType(), ownerElement, getter, setter, field, null);
}
}
| JavaBeanPropertyDescriptorTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/impl/pb/service/ClientAMProtocolPBServiceImpl.java | {
"start": 2467,
"end": 5503
} | class ____ implements ClientAMProtocolPB {
private ClientAMProtocol real;
public ClientAMProtocolPBServiceImpl(ClientAMProtocol impl) {
this.real = impl;
}
@Override
public FlexComponentsResponseProto flexComponents(RpcController controller,
FlexComponentsRequestProto request) throws ServiceException {
try {
return real.flexComponents(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override public GetStatusResponseProto getStatus(RpcController controller,
GetStatusRequestProto request) throws ServiceException {
try {
return real.getStatus(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public org.apache.hadoop.yarn.proto.ClientAMProtocol.StopResponseProto stop(
RpcController controller,
org.apache.hadoop.yarn.proto.ClientAMProtocol.StopRequestProto request)
throws ServiceException {
try {
return real.stop(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public UpgradeServiceResponseProto upgradeService(RpcController controller,
UpgradeServiceRequestProto request) throws ServiceException {
try {
return real.upgrade(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public RestartServiceResponseProto restartService(RpcController controller,
RestartServiceRequestProto request) throws ServiceException {
try {
return real.restart(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public CompInstancesUpgradeResponseProto upgrade(RpcController controller,
CompInstancesUpgradeRequestProto request) throws ServiceException {
try {
return real.upgrade(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public GetCompInstancesResponseProto getCompInstances(
RpcController controller, GetCompInstancesRequestProto request)
throws ServiceException {
try {
return real.getCompInstances(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public CancelUpgradeResponseProto cancelUpgrade(
RpcController controller, CancelUpgradeRequestProto request)
throws ServiceException {
try {
return real.cancelUpgrade(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
@Override
public DecommissionCompInstancesResponseProto decommissionCompInstances(
RpcController controller, DecommissionCompInstancesRequestProto
request) throws ServiceException {
try {
return real.decommissionCompInstances(request);
} catch (IOException | YarnException e) {
throw new ServiceException(e);
}
}
}
| ClientAMProtocolPBServiceImpl |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/errors/UniFailureErrorTest.java | {
"start": 1832,
"end": 3017
} | class ____ {
@Inject
WebSocketConnection connection;
@Inject
RequestBean requestBean;
@OnBinaryMessage
Uni<Void> process(WebSocketConnection connection, Buffer message) {
requestBean.setState("ok");
return Uni.createFrom().failure(new IllegalStateException("Something went wrong"));
}
@OnError
String encodingError(BinaryEncodeException e) {
return "Problem encoding: " + e.getEncodedObject().toString();
}
@OnError
String decodingError(BinaryDecodeException e) {
return "Problem decoding: " + e.getBytes().toString();
}
@OnError
String runtimeProblem(RuntimeException e, WebSocketConnection connection) {
assertTrue(Context.isOnWorkerThread());
assertEquals(connection.id(), this.connection.id());
// A new request context is used
assertEquals("nok", requestBean.getState());
return e.getMessage();
}
@OnError
String catchAll(Throwable e) {
return "Ooops!";
}
}
@RequestScoped
public static | Echo |
java | google__guice | extensions/persist/src/com/google/inject/persist/PersistFilter.java | {
"start": 2445,
"end": 3283
} | class ____ implements Filter {
private final UnitOfWork unitOfWork;
private final PersistService persistService;
@Inject
public PersistFilter(UnitOfWork unitOfWork, PersistService persistService) {
this.unitOfWork = unitOfWork;
this.persistService = persistService;
}
@Override
public void init(FilterConfig filterConfig) throws ServletException {
persistService.start();
}
@Override
public void destroy() {
persistService.stop();
}
@Override
public void doFilter(
final ServletRequest servletRequest,
final ServletResponse servletResponse,
final FilterChain filterChain)
throws IOException, ServletException {
unitOfWork.begin();
try {
filterChain.doFilter(servletRequest, servletResponse);
} finally {
unitOfWork.end();
}
}
}
| PersistFilter |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/resource/WritableResource.java | {
"start": 1123,
"end": 2707
} | interface ____ extends Resource {
/**
* Indicate whether the contents of this resource can be written
* via {@link #getOutputStream()}.
*
* <p>Will be {@code true} for typical resource descriptors;
* note that actual content writing may still fail when attempted.
* However, a value of {@code false} is a definitive indication
* that the resource content cannot be modified.
*
* @see #getOutputStream()
* @see #isReadable()
*/
default boolean isWritable() {
return true;
}
/**
* Return an {@link OutputStream} for the underlying resource,
* allowing to (over-)write its content.
*
* @throws IOException if the stream could not be opened
* @see #getInputStream()
*/
OutputStream getOutputStream() throws IOException;
/**
* Return a {@link WritableByteChannel}.
*
* <p>It is expected that each call creates a <i>fresh</i> channel.
*
* <p>The default implementation returns {@link Channels#newChannel(OutputStream)}
* with the result of {@link #getOutputStream()}.
*
* @return the byte channel for the underlying resource (must not be {@code null})
* @throws java.io.FileNotFoundException if the underlying resource doesn't exist
* @throws IOException if the content channel could not be opened
* @see #getOutputStream()
* @since 5.0
*/
default WritableByteChannel writableChannel() throws IOException {
return Channels.newChannel(getOutputStream());
}
}
| WritableResource |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/utils/UtilsTest.java | {
"start": 4296,
"end": 30208
} | class ____ {
@Test
public void testMurmur2() {
Map<byte[], Integer> cases = new java.util.HashMap<>();
cases.put("21".getBytes(), -973932308);
cases.put("foobar".getBytes(), -790332482);
cases.put("a-little-bit-long-string".getBytes(), -985981536);
cases.put("a-little-bit-longer-string".getBytes(), -1486304829);
cases.put("lkjh234lh9fiuh90y23oiuhsafujhadof229phr9h19h89h8".getBytes(), -58897971);
cases.put(new byte[] {'a', 'b', 'c'}, 479470107);
for (Map.Entry<byte[], Integer> c : cases.entrySet()) {
assertEquals(c.getValue().intValue(), murmur2(c.getKey()));
}
}
private static String toHexString(byte[] buf) {
StringBuilder bld = new StringBuilder();
for (byte b : buf) {
bld.append(String.format("%02x", b));
}
return bld.toString();
}
@Test
public void testMurmur2Checksum() {
// calculates the checksum of hashes of many different random byte arrays of variable length
// this test detects any incompatible changes to the Murmur2 implementation with near certainty
int numTrials = 100;
int maxLen = 1000;
long seed = 0;
SplittableRandom random = new SplittableRandom(seed);
long checksum = 0;
for (int len = 0; len <= maxLen; ++len) {
byte[] data = new byte[len];
for (int i = 0; i < numTrials; ++i) {
random.nextBytes(data);
int hash = Utils.murmur2(data);
checksum += Integer.toUnsignedLong(hash);
}
}
assertEquals(0xc3b8cf7c99fcL, checksum);
}
@ParameterizedTest
@CsvSource(value = {"PLAINTEXT", "SASL_PLAINTEXT", "SSL", "SASL_SSL"})
public void testGetHostValid(String protocol) {
assertEquals("mydomain.com", getHost(protocol + "://mydomain.com:8080"));
assertEquals("MyDomain.com", getHost(protocol + "://MyDomain.com:8080"));
assertEquals("My_Domain.com", getHost(protocol + "://My_Domain.com:8080"));
assertEquals("::1", getHost(protocol + "://[::1]:1234"));
assertEquals("2001:db8:85a3:8d3:1319:8a2e:370:7348", getHost(protocol + "://[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678"));
assertEquals("2001:DB8:85A3:8D3:1319:8A2E:370:7348", getHost(protocol + "://[2001:DB8:85A3:8D3:1319:8A2E:370:7348]:5678"));
assertEquals("fe80::b1da:69ca:57f7:63d8%3", getHost(protocol + "://[fe80::b1da:69ca:57f7:63d8%3]:5678"));
assertEquals("127.0.0.1", getHost("127.0.0.1:8000"));
assertEquals("::1", getHost("[::1]:1234"));
}
@ParameterizedTest
@CsvSource(value = {"PLAINTEXT", "SASL_PLAINTEXT", "SSL", "SASL_SSL"})
public void testGetHostInvalid(String protocol) {
assertNull(getHost(protocol + "://mydo)main.com:8080"));
assertNull(getHost(protocol + "://mydo(main.com:8080"));
assertNull(getHost(protocol + "://mydo()main.com:8080"));
assertNull(getHost(protocol + "://mydo(main).com:8080"));
assertNull(getHost(protocol + "://[2001:db)8:85a3:8d3:1319:8a2e:370:7348]:5678"));
assertNull(getHost(protocol + "://[2001:db(8:85a3:8d3:1319:8a2e:370:7348]:5678"));
assertNull(getHost(protocol + "://[2001:db()8:85a3:8d3:1319:8a2e:370:7348]:5678"));
assertNull(getHost(protocol + "://[2001:db(8:85a3:)8d3:1319:8a2e:370:7348]:5678"));
assertNull(getHost("ho)st:9092"));
assertNull(getHost("ho(st:9092"));
assertNull(getHost("ho()st:9092"));
assertNull(getHost("ho(st):9092"));
}
@Test
public void testHostPattern() {
assertTrue(validHostPattern("127.0.0.1"));
assertTrue(validHostPattern("mydomain.com"));
assertTrue(validHostPattern("MyDomain.com"));
assertTrue(validHostPattern("My_Domain.com"));
assertTrue(validHostPattern("::1"));
assertTrue(validHostPattern("2001:db8:85a3:8d3:1319:8a2e:370"));
}
@Test
public void testGetPort() {
// valid
assertEquals(8000, getPort("127.0.0.1:8000").intValue());
assertEquals(8080, getPort("mydomain.com:8080").intValue());
assertEquals(8080, getPort("MyDomain.com:8080").intValue());
assertEquals(1234, getPort("[::1]:1234").intValue());
assertEquals(5678, getPort("[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678").intValue());
assertEquals(5678, getPort("[2001:DB8:85A3:8D3:1319:8A2E:370:7348]:5678").intValue());
assertEquals(5678, getPort("[fe80::b1da:69ca:57f7:63d8%3]:5678").intValue());
// invalid
assertNull(getPort("host:-92"));
assertNull(getPort("host:-9-2"));
assertNull(getPort("host:92-"));
assertNull(getPort("host:9-2"));
}
@Test
public void testFormatAddress() {
assertEquals("127.0.0.1:8000", formatAddress("127.0.0.1", 8000));
assertEquals("mydomain.com:8080", formatAddress("mydomain.com", 8080));
assertEquals("[::1]:1234", formatAddress("::1", 1234));
assertEquals("[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678", formatAddress("2001:db8:85a3:8d3:1319:8a2e:370:7348", 5678));
}
@Test
public void testFormatBytes() {
assertEquals("-1", formatBytes(-1));
assertEquals("1023 B", formatBytes(1023));
assertEquals("1 KB", formatBytes(1024));
assertEquals("1024 KB", formatBytes((1024 * 1024) - 1));
assertEquals("1 MB", formatBytes(1024 * 1024));
assertEquals("1.1 MB", formatBytes((long) (1.1 * 1024 * 1024)));
assertEquals("10 MB", formatBytes(10 * 1024 * 1024));
}
@Test
public void testAbs() {
assertEquals(0, Utils.abs(Integer.MIN_VALUE));
assertEquals(10, Utils.abs(-10));
assertEquals(10, Utils.abs(10));
assertEquals(0, Utils.abs(0));
assertEquals(1, Utils.abs(-1));
assertEquals(Integer.MAX_VALUE, Utils.abs(Integer.MAX_VALUE));
}
@Test
public void writeToBuffer() throws IOException {
byte[] input = {0, 1, 2, 3, 4, 5};
ByteBuffer source = ByteBuffer.wrap(input);
doTestWriteToByteBuffer(source, ByteBuffer.allocate(input.length));
doTestWriteToByteBuffer(source, ByteBuffer.allocateDirect(input.length));
assertEquals(0, source.position());
source.position(2);
doTestWriteToByteBuffer(source, ByteBuffer.allocate(input.length));
doTestWriteToByteBuffer(source, ByteBuffer.allocateDirect(input.length));
}
private void doTestWriteToByteBuffer(ByteBuffer source, ByteBuffer dest) throws IOException {
int numBytes = source.remaining();
int position = source.position();
DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(dest));
Utils.writeTo(out, source, source.remaining());
dest.flip();
assertEquals(numBytes, dest.remaining());
assertEquals(position, source.position());
assertEquals(source, dest);
}
@Test
public void toArray() {
byte[] input = {0, 1, 2, 3, 4};
ByteBuffer buffer = ByteBuffer.wrap(input);
assertArrayEquals(input, Utils.toArray(buffer));
assertEquals(0, buffer.position());
assertArrayEquals(new byte[] {1, 2}, Utils.toArray(buffer, 1, 2));
assertEquals(0, buffer.position());
buffer.position(2);
assertArrayEquals(new byte[] {2, 3, 4}, Utils.toArray(buffer));
assertEquals(2, buffer.position());
}
@Test
public void toArrayDirectByteBuffer() {
byte[] input = {0, 1, 2, 3, 4};
ByteBuffer buffer = ByteBuffer.allocateDirect(5);
buffer.put(input);
buffer.rewind();
assertArrayEquals(input, Utils.toArray(buffer));
assertEquals(0, buffer.position());
assertArrayEquals(new byte[] {1, 2}, Utils.toArray(buffer, 1, 2));
assertEquals(0, buffer.position());
buffer.position(2);
assertArrayEquals(new byte[] {2, 3, 4}, Utils.toArray(buffer));
assertEquals(2, buffer.position());
}
@Test
public void getNullableSizePrefixedArrayExact() {
byte[] input = {0, 0, 0, 2, 1, 0};
final ByteBuffer buffer = ByteBuffer.wrap(input);
final byte[] array = Utils.getNullableSizePrefixedArray(buffer);
assertArrayEquals(new byte[] {1, 0}, array);
assertEquals(6, buffer.position());
assertFalse(buffer.hasRemaining());
}
@Test
public void getNullableSizePrefixedArrayExactEmpty() {
byte[] input = {0, 0, 0, 0};
final ByteBuffer buffer = ByteBuffer.wrap(input);
final byte[] array = Utils.getNullableSizePrefixedArray(buffer);
assertArrayEquals(new byte[] {}, array);
assertEquals(4, buffer.position());
assertFalse(buffer.hasRemaining());
}
@Test
public void getNullableSizePrefixedArrayRemainder() {
byte[] input = {0, 0, 0, 2, 1, 0, 9};
final ByteBuffer buffer = ByteBuffer.wrap(input);
final byte[] array = Utils.getNullableSizePrefixedArray(buffer);
assertArrayEquals(new byte[] {1, 0}, array);
assertEquals(6, buffer.position());
assertTrue(buffer.hasRemaining());
}
@Test
public void getNullableSizePrefixedArrayNull() {
// -1
byte[] input = {-1, -1, -1, -1};
final ByteBuffer buffer = ByteBuffer.wrap(input);
final byte[] array = Utils.getNullableSizePrefixedArray(buffer);
assertNull(array);
assertEquals(4, buffer.position());
assertFalse(buffer.hasRemaining());
}
@Test
public void getNullableSizePrefixedArrayInvalid() {
// -2
byte[] input = {-1, -1, -1, -2};
final ByteBuffer buffer = ByteBuffer.wrap(input);
assertThrows(NegativeArraySizeException.class, () -> Utils.getNullableSizePrefixedArray(buffer));
}
@Test
public void getNullableSizePrefixedArrayUnderflow() {
// Integer.MAX_VALUE
byte[] input = {127, -1, -1, -1};
final ByteBuffer buffer = ByteBuffer.wrap(input);
// note, we get a buffer underflow exception instead of an OOME, even though the encoded size
// would be 2,147,483,647 aka 2.1 GB, probably larger than the available heap
assertThrows(BufferUnderflowException.class, () -> Utils.getNullableSizePrefixedArray(buffer));
}
@Test
public void utf8ByteArraySerde() {
String utf8String = "A\u00ea\u00f1\u00fcC";
byte[] utf8Bytes = utf8String.getBytes(StandardCharsets.UTF_8);
assertArrayEquals(utf8Bytes, Utils.utf8(utf8String));
assertEquals(utf8Bytes.length, Utils.utf8Length(utf8String));
assertEquals(utf8String, Utils.utf8(utf8Bytes));
}
@Test
public void utf8ByteBufferSerde() {
doTestUtf8ByteBuffer(ByteBuffer.allocate(20));
doTestUtf8ByteBuffer(ByteBuffer.allocateDirect(20));
}
private void doTestUtf8ByteBuffer(ByteBuffer utf8Buffer) {
String utf8String = "A\u00ea\u00f1\u00fcC";
byte[] utf8Bytes = utf8String.getBytes(StandardCharsets.UTF_8);
utf8Buffer.position(4);
utf8Buffer.put(utf8Bytes);
utf8Buffer.position(4);
assertEquals(utf8String, Utils.utf8(utf8Buffer, utf8Bytes.length));
assertEquals(4, utf8Buffer.position());
utf8Buffer.position(0);
assertEquals(utf8String, Utils.utf8(utf8Buffer, 4, utf8Bytes.length));
assertEquals(0, utf8Buffer.position());
}
private void subTest(ByteBuffer buffer) {
// The first byte should be 'A'
assertEquals('A', (Utils.readBytes(buffer, 0, 1))[0]);
// The offset is 2, so the first 2 bytes should be skipped.
byte[] results = Utils.readBytes(buffer, 2, 3);
assertEquals('y', results[0]);
assertEquals(' ', results[1]);
assertEquals('S', results[2]);
assertEquals(3, results.length);
// test readBytes without offset and length specified.
results = Utils.readBytes(buffer);
assertEquals('A', results[0]);
assertEquals('t', results[buffer.limit() - 1]);
assertEquals(buffer.limit(), results.length);
}
@Test
public void testReadBytes() {
byte[] myvar = "Any String you want".getBytes();
ByteBuffer buffer = ByteBuffer.allocate(myvar.length);
buffer.put(myvar);
buffer.rewind();
this.subTest(buffer);
// test readonly buffer, different path
buffer = ByteBuffer.wrap(myvar).asReadOnlyBuffer();
this.subTest(buffer);
}
@Test
public void testFileAsStringSimpleFile() throws IOException {
File tempFile = TestUtils.tempFile();
try {
String testContent = "Test Content";
Files.write(tempFile.toPath(), testContent.getBytes());
assertEquals(testContent, Utils.readFileAsString(tempFile.getPath()));
} finally {
Files.deleteIfExists(tempFile.toPath());
}
}
    /**
     * Test to read content of named pipe as string. As reading/writing to a pipe can block,
     * timeout test after a minute (test finishes within 100 ms normally).
     */
    @Timeout(60)
    @Test
    public void testFileAsStringNamedPipe() throws Exception {
        // Create a temporary name for named pipe
        Random random = new Random();
        long n = random.nextLong();
        // Math.abs(Long.MIN_VALUE) is still negative, so map that single value to 0 explicitly.
        n = n == Long.MIN_VALUE ? 0 : Math.abs(n);
        // Use the name to create a FIFO in tmp directory
        String tmpDir = System.getProperty("java.io.tmpdir");
        String fifoName = "fifo-" + n + ".tmp";
        File fifo = new File(tmpDir, fifoName);
        Thread producerThread = null;
        try {
            // NOTE(review): relies on the external `mkfifo` binary, i.e. a POSIX environment.
            Process mkFifoCommand = new ProcessBuilder("mkfifo", fifo.getCanonicalPath()).start();
            mkFifoCommand.waitFor();
            // Send some data to fifo and then read it back, but as FIFO blocks if the consumer isn't present,
            // we need to send data in a separate thread.
            final String testFileContent = "This is test";
            producerThread = new Thread(() -> {
                try {
                    Files.write(fifo.toPath(), testFileContent.getBytes());
                } catch (IOException e) {
                    fail("Error when producing to fifo : " + e.getMessage());
                }
            }, "FIFO-Producer");
            producerThread.start();
            // Consuming here unblocks the producer's write on the FIFO.
            assertEquals(testFileContent, Utils.readFileAsString(fifo.getCanonicalPath()));
        } finally {
            Files.deleteIfExists(fifo.toPath());
            if (producerThread != null) {
                producerThread.join(30 * 1000); // Wait for thread to terminate
                assertFalse(producerThread.isAlive());
            }
        }
    }
    @Test
    public void testMin() {
        // Single-argument case, then the minimum at every position of the varargs.
        assertEquals(1, Utils.min(1));
        assertEquals(1, Utils.min(1, 2, 3));
        assertEquals(1, Utils.min(2, 1, 3));
        assertEquals(1, Utils.min(2, 3, 1));
    }
    @Test
    public void testMax() {
        // Single argument, duplicates, zero, and all-negative inputs.
        assertEquals(1, Utils.max(1));
        assertEquals(3, Utils.max(1, 2, 3));
        assertEquals(3, Utils.max(2, 1, 3, 3));
        assertEquals(100, Utils.max(0, 2, 2, 100));
        assertEquals(-1, Utils.max(-1, -2, -2, -10, -100, -1000));
        assertEquals(0, Utils.max(-1, -2, -2, -10, -150, -1800, 0));
    }
@Test
public void mkStringTest() {
Map<String, String> map = new LinkedHashMap<>();
map.put("key1", "val1");
map.put("key2", "val2");
map.put("key3", "val3");
String result = Utils.mkString(map, "__begin__", "__end__", "=", ",");
assertEquals("__begin__key1=val1,key2=val2,key3=val3__end__", result);
String result2 = Utils.mkString(Collections.emptyMap(), "__begin__", "__end__", "=", ",");
assertEquals("__begin____end__", result2);
}
@Test
public void parseMapTest() {
Map<String, String> map1 = Utils.parseMap("k1=v1,k2=v2,k3=v3", "=", ",");
assertEquals(3, map1.size());
assertEquals("v1", map1.get("k1"));
assertEquals("v2", map1.get("k2"));
assertEquals("v3", map1.get("k3"));
Map<String, String> map3 = Utils.parseMap("k4=v4,k5=v5=vv5=vvv5", "=", ",");
assertEquals(2, map3.size());
assertEquals("v4", map3.get("k4"));
assertEquals("v5=vv5=vvv5", map3.get("k5"));
}
@Test
public void ensureCapacityTest() {
ByteBuffer byteBuffer = ByteBuffer.allocate(10);
ByteBuffer newByteBuffer = Utils.ensureCapacity(byteBuffer, 5);
assertEquals(10, newByteBuffer.capacity());
ByteBuffer byteBuffer2 = ByteBuffer.allocate(10);
ByteBuffer newByteBuffer2 = Utils.ensureCapacity(byteBuffer2, 15);
assertEquals(15, newByteBuffer2.capacity());
ByteBuffer byteBuffer3 = ByteBuffer.allocate(10);
for (int i = 1; i <= 10; i++) {
byteBuffer3.put((byte) i);
}
ByteBuffer newByteBuffer3 = Utils.ensureCapacity(byteBuffer3, 15);
newByteBuffer3.flip();
assertEquals(15, newByteBuffer3.capacity());
assertEquals(1, newByteBuffer3.get());
assertEquals(2, newByteBuffer3.get());
assertEquals(3, newByteBuffer3.get());
}
@Test
public void testCloseAll() {
TestCloseable[] closeablesWithoutException = TestCloseable.createCloseables(false, false, false);
try {
Utils.closeAll(closeablesWithoutException);
TestCloseable.checkClosed(closeablesWithoutException);
} catch (IOException e) {
fail("Unexpected exception: " + e);
}
TestCloseable[] closeablesWithException = TestCloseable.createCloseables(true, true, true);
try {
Utils.closeAll(closeablesWithException);
fail("Expected exception not thrown");
} catch (IOException e) {
TestCloseable.checkClosed(closeablesWithException);
TestCloseable.checkException(e, closeablesWithException);
}
TestCloseable[] singleExceptionCloseables = TestCloseable.createCloseables(false, true, false);
try {
Utils.closeAll(singleExceptionCloseables);
fail("Expected exception not thrown");
} catch (IOException e) {
TestCloseable.checkClosed(singleExceptionCloseables);
TestCloseable.checkException(e, singleExceptionCloseables[1]);
}
TestCloseable[] mixedCloseables = TestCloseable.createCloseables(false, true, false, true, true);
try {
Utils.closeAll(mixedCloseables);
fail("Expected exception not thrown");
} catch (IOException e) {
TestCloseable.checkClosed(mixedCloseables);
TestCloseable.checkException(e, mixedCloseables[1], mixedCloseables[3], mixedCloseables[4]);
}
}
@Test
public void testReadFullyOrFailWithRealFile() throws IOException {
try (FileChannel channel = FileChannel.open(TestUtils.tempFile().toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE)) {
// prepare channel
String msg = "hello, world";
channel.write(ByteBuffer.wrap(msg.getBytes()), 0);
channel.force(true);
assertEquals(channel.size(), msg.length(), "Message should be written to the file channel");
ByteBuffer perfectBuffer = ByteBuffer.allocate(msg.length());
ByteBuffer smallBuffer = ByteBuffer.allocate(5);
ByteBuffer largeBuffer = ByteBuffer.allocate(msg.length() + 1);
// Scenario 1: test reading into a perfectly-sized buffer
Utils.readFullyOrFail(channel, perfectBuffer, 0, "perfect");
assertFalse(perfectBuffer.hasRemaining(), "Buffer should be filled up");
assertEquals(msg, new String(perfectBuffer.array()), "Buffer should be populated correctly");
// Scenario 2: test reading into a smaller buffer
Utils.readFullyOrFail(channel, smallBuffer, 0, "small");
assertFalse(smallBuffer.hasRemaining(), "Buffer should be filled");
assertEquals("hello", new String(smallBuffer.array()), "Buffer should be populated correctly");
// Scenario 3: test reading starting from a non-zero position
smallBuffer.clear();
Utils.readFullyOrFail(channel, smallBuffer, 7, "small");
assertFalse(smallBuffer.hasRemaining(), "Buffer should be filled");
assertEquals("world", new String(smallBuffer.array()), "Buffer should be populated correctly");
// Scenario 4: test end of stream is reached before buffer is filled up
try {
Utils.readFullyOrFail(channel, largeBuffer, 0, "large");
fail("Expected EOFException to be raised");
} catch (EOFException e) {
// expected
}
}
}
    /**
     * Tests that `readFullyOrFail` behaves correctly if multiple `FileChannel.read` operations are required to fill
     * the destination buffer.
     */
    @Test
    public void testReadFullyOrFailWithPartialFileChannelReads() throws IOException {
        FileChannel channelMock = mock(FileChannel.class);
        final int bufferSize = 100;
        ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
        // Stub a chain of partial reads whose concatenation fills exactly bufferSize bytes.
        String expectedBufferContent = fileChannelMockExpectReadWithRandomBytes(channelMock, bufferSize);
        Utils.readFullyOrFail(channelMock, buffer, 0L, "test");
        assertEquals(expectedBufferContent, new String(buffer.array()), "The buffer should be populated correctly");
        assertFalse(buffer.hasRemaining(), "The buffer should be filled");
        // Partial reads imply read() had to be invoked repeatedly until the buffer filled.
        verify(channelMock, atLeastOnce()).read(any(), anyLong());
    }
    /**
     * Tests that `readFully` behaves correctly if multiple `FileChannel.read` operations are required to fill
     * the destination buffer.
     */
    @Test
    public void testReadFullyWithPartialFileChannelReads() throws IOException {
        FileChannel channelMock = mock(FileChannel.class);
        final int bufferSize = 100;
        // Stub a chain of partial reads whose concatenation fills exactly bufferSize bytes.
        String expectedBufferContent = fileChannelMockExpectReadWithRandomBytes(channelMock, bufferSize);
        ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
        Utils.readFully(channelMock, buffer, 0L);
        assertEquals(expectedBufferContent, new String(buffer.array()), "The buffer should be populated correctly.");
        assertFalse(buffer.hasRemaining(), "The buffer should be filled");
        verify(channelMock, atLeastOnce()).read(any(), anyLong());
    }
    @Test
    public void testReadFullyIfEofIsReached() throws IOException {
        final FileChannel channelMock = mock(FileChannel.class);
        final int bufferSize = 100;
        final String fileChannelContent = "abcdefghkl";
        ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
        // The stub writes its content into the destination buffer but reports EOF (-1),
        // mimicking a channel with fewer remaining bytes than the buffer can hold.
        when(channelMock.read(any(), anyLong())).then(invocation -> {
            ByteBuffer bufferArg = invocation.getArgument(0);
            bufferArg.put(fileChannelContent.getBytes());
            return -1;
        });
        Utils.readFully(channelMock, buffer, 0L);
        // readFully must stop quietly at EOF, leaving the buffer only partially filled.
        assertEquals("abcdefghkl", new String(buffer.array(), 0, buffer.position()));
        assertEquals(fileChannelContent.length(), buffer.position());
        assertTrue(buffer.hasRemaining());
        verify(channelMock, atLeastOnce()).read(any(), anyLong());
    }
@Test
public void testLoadProps() throws IOException {
File tempFile = TestUtils.tempFile();
try {
String testContent = "a=1\nb=2\n#a comment\n\nc=3\nd=";
Files.write(tempFile.toPath(), testContent.getBytes());
Properties props = Utils.loadProps(tempFile.getPath());
assertEquals(4, props.size());
assertEquals("1", props.get("a"));
assertEquals("2", props.get("b"));
assertEquals("3", props.get("c"));
assertEquals("", props.get("d"));
Properties restrictedProps = Utils.loadProps(tempFile.getPath(), Arrays.asList("b", "d", "e"));
assertEquals(2, restrictedProps.size());
assertEquals("2", restrictedProps.get("b"));
assertEquals("", restrictedProps.get("d"));
} finally {
Files.deleteIfExists(tempFile.toPath());
}
}
    /**
     * Expectation setter for multiple reads where each one reads random bytes to the buffer.
     *
     * @param channelMock The mocked FileChannel object
     * @param bufferSize The buffer size
     * @return Expected buffer string
     * @throws IOException If an I/O error occurs
     */
    private String fileChannelMockExpectReadWithRandomBytes(final FileChannel channelMock,
                                                            final int bufferSize) throws IOException {
        final int step = 20;
        final Random random = new Random();
        int remainingBytes = bufferSize;
        // Each loop iteration appends one stubbed read() result to the Mockito answer chain.
        OngoingStubbing<Integer> when = when(channelMock.read(any(), anyLong()));
        StringBuilder expectedBufferContent = new StringBuilder();
        while (remainingBytes > 0) {
            // The final read supplies exactly the remainder; earlier reads supply up to `step` bytes.
            // NOTE(review): random.nextInt(step) may be 0, inserting a zero-byte read into the
            // chain — harmless, since readFully/readFullyOrFail simply issue another read.
            final int bytesRead = remainingBytes < step ? remainingBytes : random.nextInt(step);
            final String stringRead = IntStream.range(0, bytesRead).mapToObj(i -> "a").collect(Collectors.joining());
            expectedBufferContent.append(stringRead);
            when = when.then(invocation -> {
                ByteBuffer buffer = invocation.getArgument(0);
                buffer.put(stringRead.getBytes());
                return bytesRead;
            });
            remainingBytes -= bytesRead;
        }
        return expectedBufferContent.toString();
    }
private static | UtilsTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxFilterTest.java | {
"start": 1631,
"end": 14540
} | class ____ extends FluxOperatorTest<String, String> {
@Override
protected Scenario<String, String> defaultScenarioOptions(Scenario<String, String> defaultOptions) {
return defaultOptions.fusionMode(Fuseable.ANY);
}
@Override
protected List<Scenario<String, String>> scenarios_operatorError() {
return Arrays.asList(
scenario(f -> f.filter(d -> {
throw exception();
}))
);
}
@Override
protected List<Scenario<String, String>> scenarios_operatorSuccess() {
return Arrays.asList(
scenario(f -> f.filter(d -> true)),
scenario(f -> f.filter(d -> false))
.receiverEmpty()
);
}
@Override
protected List<Scenario<String, String>> scenarios_errorFromUpstreamFailure() {
return Arrays.asList(
scenario(f -> f.filter(d -> true))
);
}
@Test
public void sourceNull() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
new FluxFilter<Integer>(null, e -> true);
});
}
@Test
public void predicateNull() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
Flux.never()
.filter(null);
});
}
@Test
public void normal() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 10)
.filter(v -> v % 2 == 0)
.subscribe(ts);
ts.assertValues(2, 4, 6, 8, 10)
.assertComplete()
.assertNoError();
}
@Test
public void normalBackpressuredRange() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(2);
Flux.range(1, 10)
.filter(v -> v % 2 == 0)
.subscribe(ts);
ts.assertValues(2, 4)
.assertNotComplete()
.assertNoError();
ts.request(10);
ts.assertValues(2, 4, 6, 8, 10)
.assertComplete()
.assertNoError();
}
@Test
public void normalBackpressuredArray() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(2);
Flux.just(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.filter(v -> v % 2 == 0)
.subscribe(ts);
ts.assertValues(2, 4)
.assertNotComplete()
.assertNoError();
ts.request(10);
ts.assertValues(2, 4, 6, 8, 10)
.assertComplete()
.assertNoError();
}
@Test
public void normalBackpressuredIterable() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(2);
Flux.fromIterable(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
.filter(v -> v % 2 == 0)
.subscribe(ts);
ts.assertValues(2, 4)
.assertNotComplete()
.assertNoError();
ts.request(10);
ts.assertValues(2, 4, 6, 8, 10)
.assertComplete()
.assertNoError();
}
@Test
public void predicateThrows() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(2);
Flux.range(1, 10)
.filter(v -> {
throw new RuntimeException("forced failure");
})
.subscribe(ts);
ts.assertNoValues()
.assertNotComplete()
.assertError(RuntimeException.class)
.assertErrorMessage("forced failure");
}
@Test
public void syncFusion() {
AssertSubscriber<Object> ts = AssertSubscriber.create();
Flux.range(1, 10)
.filter(v -> (v & 1) == 0)
.subscribe(ts);
ts.assertValues(2, 4, 6, 8, 10)
.assertNoError()
.assertComplete();
}
@Test
public void asyncFusion() {
AssertSubscriber<Object> ts = AssertSubscriber.create();
Sinks.Many<Integer> up =
Sinks.unsafe().many().unicast().onBackpressureBuffer(new ConcurrentLinkedQueue<>());
up.asFlux()
.filter(v -> (v & 1) == 0)
.subscribe(ts);
for (int i = 1; i < 11; i++) {
up.emitNext(i, FAIL_FAST);
}
up.emitComplete(FAIL_FAST);
ts.assertValues(2, 4, 6, 8, 10)
.assertNoError()
.assertComplete();
}
@Test
public void asyncFusionBackpressured() {
AssertSubscriber<Object> ts = AssertSubscriber.create(1);
Sinks.Many<Integer> up =
Sinks.unsafe().many().unicast().onBackpressureBuffer(new ConcurrentLinkedQueue<>());
Flux.just(1)
.hide()
.flatMap(w -> up.asFlux().filter(v -> (v & 1) == 0))
.subscribe(ts);
up.emitNext(1, FAIL_FAST);
up.emitNext(2, FAIL_FAST);
ts.assertValues(2)
.assertNoError()
.assertNotComplete();
up.emitComplete(FAIL_FAST);
ts.assertValues(2)
.assertNoError()
.assertComplete();
}
@Test
public void asyncFusionBackpressured2() {
AssertSubscriber<Object> ts = AssertSubscriber.create(1);
Sinks.Many<Integer> up =
Sinks.unsafe().many().unicast().onBackpressureBuffer(new ConcurrentLinkedQueue<>());
Flux.just(1)
.hide()
.flatMap(w -> up.asFlux().filter(v -> (v & 1) == 0), false, 1, 1)
.subscribe(ts);
up.emitNext(1, FAIL_FAST);
up.emitNext(2, FAIL_FAST);
ts.assertValues(2)
.assertNoError()
.assertNotComplete();
up.emitComplete(FAIL_FAST);
ts.assertValues(2)
.assertNoError()
.assertComplete();
}
@Test
public void scanOperator(){
Flux<Integer> parent = Flux.just(1);
FluxFilter<Integer> test = new FluxFilter<>(parent, e -> true);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanSubscriber() {
CoreSubscriber<String> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FilterSubscriber<String> test = new FilterSubscriber<>(actual, t -> true);
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
}
@Test
public void scanConditionalSubscriber() {
@SuppressWarnings("unchecked")
Fuseable.ConditionalSubscriber<String> actual = Mockito.mock(MockUtils.TestScannableConditionalSubscriber.class);
FilterConditionalSubscriber<String> test = new FilterConditionalSubscriber<>(actual, t -> true);
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
}
@Test
public void failureStrategyResume() {
Hooks.onNextError(OnNextFailureStrategy.RESUME_DROP);
try {
AtomicLong r = new AtomicLong();
StepVerifier.create(Flux.range(0, 2)
.doOnRequest(r::addAndGet)
.hide()
.filter(i -> 4 / i == 4), 1)
.expectNoFusionSupport()
.expectNext(1)
.expectComplete()
.verifyThenAssertThat()
.hasDroppedExactly(0)
.hasDroppedErrorWithMessage("/ by zero");
assertThat(r.get()).as("amount requested").isEqualTo(2L);
}
finally {
Hooks.resetOnNextError();
}
}
@Test
public void failureStrategyResumeTryOnNext() {
Hooks.onNextError(OnNextFailureStrategy.RESUME_DROP);
try {
StepVerifier.create(Flux.range(0, 2)
.distinctUntilChanged()
.filter(i -> 4 / i == 4))
.expectNoFusionSupport()
.expectNext(1)
.expectComplete()
.verifyThenAssertThat()
.hasDroppedExactly(0)
.hasDroppedErrorWithMessage("/ by zero");
}
finally {
Hooks.resetOnNextError();
}
}
@Test
public void failureStrategyResumeConditional() {
Hooks.onNextError(OnNextFailureStrategy.RESUME_DROP);
try {
AtomicLong r = new AtomicLong();
StepVerifier.create(Flux.range(0, 2)
.doOnRequest(r::addAndGet)
.hide()
.filter(i -> 4 / i == 4)
.filter(i -> true), 1)
.expectNoFusionSupport()
.expectNext(1)
.expectComplete()
.verifyThenAssertThat()
.hasDroppedExactly(0)
.hasDroppedErrorWithMessage("/ by zero");
assertThat(r.get()).as("amount requested").isEqualTo(2L);
}
finally {
Hooks.resetOnNextError();
}
}
@Test
public void failureStrategyResumeConditionalTryOnNext() {
Hooks.onNextError(OnNextFailureStrategy.RESUME_DROP);
try {
StepVerifier.create(Flux.range(0, 2)
.distinctUntilChanged()
.filter(i -> 4 / i == 4)
.filter(i -> true))
.expectNoFusionSupport()
.expectNext(1)
.expectComplete()
.verifyThenAssertThat()
.hasDroppedExactly(0)
.hasDroppedErrorWithMessage("/ by zero");
}
finally {
Hooks.resetOnNextError();
}
}
@Test
public void discardOnNextPredicateFail() {
StepVerifier.create(Flux.range(1, 10)
.hide() //hide both avoid the fuseable AND tryOnNext usage
.filter(i -> { throw new IllegalStateException("boom"); })
)
.expectErrorMessage("boom")
.verifyThenAssertThat()
.hasDiscardedExactly(1);
}
@Test
public void discardOnNextPredicateMiss() {
StepVerifier.create(Flux.range(1, 10)
.hide() //hide both avoid the fuseable AND tryOnNext usage
.filter(i -> i % 2 == 0)
)
.expectNextCount(5)
.expectComplete()
.verifyThenAssertThat()
.hasDiscardedExactly(1, 3, 5, 7, 9);
}
@Test
public void discardTryOnNextPredicateFail() {
List<Object> discarded = new ArrayList<>();
CoreSubscriber<Integer> actual = new AssertSubscriber<>(
Context.of(Hooks.KEY_ON_DISCARD, (Consumer<?>) discarded::add));
FilterSubscriber<Integer> subscriber =
new FilterSubscriber<>(actual, i -> { throw new IllegalStateException("boom"); });
subscriber.onSubscribe(Operators.emptySubscription());
subscriber.tryOnNext(1);
assertThat(discarded).containsExactly(1);
}
@Test
public void discardTryOnNextPredicateMiss() {
List<Object> discarded = new ArrayList<>();
CoreSubscriber<Integer> actual = new AssertSubscriber<>(
Context.of(Hooks.KEY_ON_DISCARD, (Consumer<?>) discarded::add));
FilterSubscriber<Integer> subscriber =
new FilterSubscriber<>(actual, i -> i % 2 == 0);
subscriber.onSubscribe(Operators.emptySubscription());
subscriber.tryOnNext(1);
subscriber.tryOnNext(2);
assertThat(discarded).containsExactly(1);
}
@Test
public void discardConditionalOnNextPredicateFail() {
StepVerifier.create(Flux.range(1, 10)
.hide()
.filter(i -> { throw new IllegalStateException("boom"); })
.filter(i -> true)
)
.expectErrorMessage("boom")
.verifyThenAssertThat()
.hasDiscardedExactly(1);
}
@Test
public void discardConditionalOnNextPredicateMiss() {
StepVerifier.create(Flux.range(1, 10)
.hide()
.filter(i -> i % 2 == 0)
.filter(i -> true)
)
.expectNextCount(5)
.expectComplete()
.verifyThenAssertThat()
.hasDiscardedExactly(1, 3, 5, 7, 9);
}
@Test
public void discardConditionalTryOnNextPredicateFail() {
List<Object> discarded = new ArrayList<>();
Fuseable.ConditionalSubscriber<Integer> actual = new FluxPeekFuseableTest.ConditionalAssertSubscriber<>(
Context.of(Hooks.KEY_ON_DISCARD, (Consumer<?>) discarded::add));
FilterConditionalSubscriber<Integer> subscriber =
new FilterConditionalSubscriber<>(actual, i -> {
throw new IllegalStateException("boom");
});
subscriber.onSubscribe(Operators.emptySubscription());
subscriber.tryOnNext(1);
assertThat(discarded).containsExactly(1);
}
@Test
public void discardConditionalTryOnNextPredicateMiss() {
List<Object> discarded = new ArrayList<>();
Fuseable.ConditionalSubscriber<Integer> actual = new FluxPeekFuseableTest.ConditionalAssertSubscriber<>(
Context.of(Hooks.KEY_ON_DISCARD, (Consumer<?>) discarded::add));
FilterConditionalSubscriber<Integer> subscriber =
new FilterConditionalSubscriber<>(actual, i -> i % 2 == 0);
subscriber.onSubscribe(Operators.emptySubscription());
subscriber.tryOnNext(1);
subscriber.tryOnNext(2);
assertThat(discarded).containsExactly(1);
}
}
| FluxFilterTest |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/env/Environment.java | {
"start": 2263,
"end": 2560
} | class ____ started the application Micronaut will inspect the stacktrace. If JUnit or Spock are
* featured in the stacktrace the {@link #TEST} environment is included. When running from Android the {@link #ANDROID} environment is included.</p>
*
* @author Graeme Rocher
* @since 1.0
*/
public | the |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/label/LabelIncludeTest.java | {
"start": 247,
"end": 632
} | class ____ extends TestCase {
public void test_includes() throws Exception {
VO vo = new VO();
vo.setId(123);
vo.setName("wenshao");
vo.setPassword("ooxxx");
String text = JSON.toJSONString(vo, Labels.includes("normal"));
Assert.assertEquals("{\"id\":123,\"name\":\"wenshao\"}", text);
}
public static | LabelIncludeTest |
java | netty__netty | handler/src/test/java/io/netty/handler/ssl/SniClientJava8TestUtil.java | {
"start": 8808,
"end": 11618
} | class ____ extends SimpleTrustManagerFactory {
private final SNIServerName name;
SniX509TrustmanagerFactory(SNIServerName name) {
this.name = name;
}
@Override
protected void engineInit(KeyStore keyStore) throws Exception {
// NOOP
}
@Override
protected void engineInit(ManagerFactoryParameters managerFactoryParameters) throws Exception {
// NOOP
}
@Override
protected TrustManager[] engineGetTrustManagers() {
return new TrustManager[] { new X509ExtendedTrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] x509Certificates, String s, Socket socket)
throws CertificateException {
fail();
}
@Override
public void checkServerTrusted(X509Certificate[] x509Certificates, String s, Socket socket)
throws CertificateException {
fail();
}
@Override
public void checkClientTrusted(X509Certificate[] x509Certificates, String s, SSLEngine sslEngine)
throws CertificateException {
fail();
}
@Override
public void checkServerTrusted(X509Certificate[] x509Certificates, String s, SSLEngine sslEngine)
throws CertificateException {
assertSSLSession(sslEngine.getUseClientMode(), sslEngine.getHandshakeSession(), name);
}
@Override
public void checkClientTrusted(X509Certificate[] x509Certificates, String s)
throws CertificateException {
fail();
}
@Override
public void checkServerTrusted(X509Certificate[] x509Certificates, String s)
throws CertificateException {
fail();
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return EmptyArrays.EMPTY_X509_CERTIFICATES;
}
} };
}
}
static KeyManagerFactory newSniX509KeyManagerFactory(SelfSignedCertificate cert, String hostname)
throws NoSuchAlgorithmException, KeyStoreException, UnrecoverableKeyException,
IOException, CertificateException {
return new SniX509KeyManagerFactory(
new SNIHostName(hostname), SslContext.buildKeyManagerFactory(
new X509Certificate[] { cert.cert() }, null, cert.key(), null, null, null));
}
private static final | SniX509TrustmanagerFactory |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/PrematureGarbageCollectionTest.java | {
"start": 256,
"end": 915
} | class ____ {
@Test
public void provoke_premature_garbage_collection() {
for (int i = 0; i < 500; i++) {
populateNodeList();
}
}
private static void populateNodeList() {
Node node = nodes();
while (node != null) {
Node next = node.next;
node.object.run();
node = next;
}
}
private static Node nodes() {
Node node = null;
for (int i = 0; i < 1_000; ++i) {
Node next = new Node();
next.next = node;
node = next;
}
return node;
}
private static | PrematureGarbageCollectionTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsActionTests.java | {
"start": 14612,
"end": 15363
} | class ____ extends ThreadPool {
private long currentTimeInMillis;
ControlledRelativeTimeThreadPool(String name, long startTimeMillis) {
super(
Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).build(),
MeterRegistry.NOOP,
new DefaultBuiltInExecutorBuilders()
);
this.currentTimeInMillis = startTimeMillis;
stopCachedTimeThread();
}
@Override
public long relativeTimeInMillis() {
return currentTimeInMillis;
}
void setCurrentTimeInMillis(long currentTimeInMillis) {
this.currentTimeInMillis = currentTimeInMillis;
}
}
}
| ControlledRelativeTimeThreadPool |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/naming/utils/GenericPoller.java | {
"start": 824,
"end": 1309
} | class ____<T> implements Poller<T> {
private final AtomicInteger index = new AtomicInteger(0);
private List<T> items = new ArrayList<>();
public GenericPoller(List<T> items) {
this.items = items;
}
@Override
public T next() {
return items.get(Math.abs(index.getAndIncrement() % items.size()));
}
@Override
public Poller<T> refresh(List<T> items) {
return new GenericPoller<>(items);
}
}
| GenericPoller |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java | {
"start": 112810,
"end": 121631
} | interface ____ {
/**
* Gets the current raw key.
* @return DataOutputBuffer
* @throws IOException raised on errors performing I/O.
*/
DataOutputBuffer getKey() throws IOException;
/**
* Gets the current raw value.
* @return ValueBytes
* @throws IOException raised on errors performing I/O.
*/
ValueBytes getValue() throws IOException;
/**
* Sets up the current key and value (for getKey and getValue).
* @return true if there exists a key/value, false otherwise
* @throws IOException raised on errors performing I/O.
*/
boolean next() throws IOException;
/**
* closes the iterator so that the underlying streams can be closed.
* @throws IOException raised on errors performing I/O.
*/
void close() throws IOException;
/**
* @return Gets the Progress object; this has a float (0.0 - 1.0)
* indicating the bytes processed by the iterator so far.
*/
Progress getProgress();
}
/**
* Merges the list of segments of type <code>SegmentDescriptor</code>
* @param segments the list of SegmentDescriptors
* @param tmpDir the directory to write temporary files into
* @return RawKeyValueIterator
* @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(List <SegmentDescriptor> segments,
Path tmpDir)
throws IOException {
// pass in object to report progress, if present
MergeQueue mQueue = new MergeQueue(segments, tmpDir, progressable);
return mQueue.merge();
}
/**
* Merges the contents of files passed in Path[] using a max factor value
* that is already set
* @param inNames the array of path names
* @param deleteInputs true if the input files should be deleted when
* unnecessary
* @param tmpDir the directory to write temporary files into
* @return RawKeyValueIteratorMergeQueue
* @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs,
Path tmpDir)
throws IOException {
return merge(inNames, deleteInputs,
(inNames.length < factor) ? inNames.length : factor,
tmpDir);
}
/**
* Merges the contents of files passed in Path[]
* @param inNames the array of path names
* @param deleteInputs true if the input files should be deleted when
* unnecessary
* @param factor the factor that will be used as the maximum merge fan-in
* @param tmpDir the directory to write temporary files into
* @return RawKeyValueIteratorMergeQueue
* @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs,
int factor, Path tmpDir)
throws IOException {
//get the segments from inNames
ArrayList <SegmentDescriptor> a = new ArrayList <SegmentDescriptor>();
for (int i = 0; i < inNames.length; i++) {
SegmentDescriptor s = new SegmentDescriptor(0,
fs.getFileStatus(inNames[i]).getLen(), inNames[i]);
s.preserveInput(!deleteInputs);
s.doSync();
a.add(s);
}
this.factor = factor;
MergeQueue mQueue = new MergeQueue(a, tmpDir, progressable);
return mQueue.merge();
}
/**
 * Merges the contents of the files named in {@code inNames}, writing any
 * intermediate merge output under {@code tempDir}.
 * @param inNames the array of path names
 * @param tempDir the directory for creating temp files during merge
 * @param deleteInputs true if the input files should be deleted when
 *        unnecessary
 * @return RawKeyValueIteratorMergeQueue
 * @throws IOException raised on errors performing I/O.
 */
public RawKeyValueIterator merge(Path [] inNames, Path tempDir,
                                 boolean deleteInputs)
  throws IOException {
  // outFile is only used as a prefix for the temp files that hold
  // intermediate merge outputs.
  this.outFile = new Path(tempDir + Path.SEPARATOR + "merged");
  // Wrap each input file in a segment descriptor covering the whole file.
  ArrayList<SegmentDescriptor> segments = new ArrayList<SegmentDescriptor>();
  for (Path inName : inNames) {
    SegmentDescriptor segment = new SegmentDescriptor(0,
      fs.getFileStatus(inName).getLen(), inName);
    segment.preserveInput(!deleteInputs);
    segment.doSync();
    segments.add(segment);
  }
  // Never merge with a wider fan-in than there are inputs.
  if (inNames.length < factor) {
    factor = inNames.length;
  }
  // Pass in the progressable so status can be reported, if present.
  MergeQueue mQueue = new MergeQueue(segments, tempDir, progressable);
  return mQueue.merge();
}
/**
 * Clones the attributes (like compression) of the input file and creates a
 * corresponding Writer
 * @param inputFile the path of the input file whose attributes should be
 *        cloned
 * @param outputFile the path of the output file
 * @param prog the Progressable to report status during the file write
 * @return Writer
 * @throws IOException raised on errors performing I/O.
 */
public Writer cloneFileAttributes(Path inputFile, Path outputFile,
                                  Progressable prog) throws IOException {
  // Open the input only far enough to read its header attributes.
  Reader reader = new Reader(conf,
                             Reader.file(inputFile),
                             new Reader.OnlyHeaderOption());
  CompressionType compress;
  CompressionCodec codec;
  try {
    compress = reader.getCompressionType();
    codec = reader.getCompressionCodec();
  } finally {
    // Close the reader even if reading the header attributes throws,
    // so the underlying stream is never leaked.
    reader.close();
  }
  // Create the output writer with the same compression settings.
  Writer writer = createWriter(conf,
                               Writer.file(outputFile),
                               Writer.keyClass(keyClass),
                               Writer.valueClass(valClass),
                               Writer.compression(compress, codec),
                               Writer.progressable(prog));
  return writer;
}
/**
 * Writes records from RawKeyValueIterator into a file represented by the
 * passed writer.
 * @param records the RawKeyValueIterator
 * @param writer the Writer created earlier
 * @throws IOException raised on errors performing I/O.
 */
public void writeFile(RawKeyValueIterator records, Writer writer)
  throws IOException {
  // Append every remaining record in iteration order.
  for (boolean more = records.next(); more; more = records.next()) {
    writer.appendRaw(records.getKey().getData(), 0,
                     records.getKey().getLength(), records.getValue());
  }
  // Sync the writer once all records have been appended.
  writer.sync();
}
/** Merge the provided files.
 * @param inFiles the array of input path names
 * @param outFile the final output file
 * @throws IOException raised on errors performing I/O, or if
 *         {@code outFile} already exists.
 */
public void merge(Path[] inFiles, Path outFile) throws IOException {
  // Refuse to clobber an existing output file.
  if (fs.exists(outFile)) {
    throw new IOException("already exists: " + outFile);
  }
  RawKeyValueIterator r = merge(inFiles, false, outFile.getParent());
  // Give the output the same attributes (e.g. compression) as the first input.
  Writer writer = cloneFileAttributes(inFiles[0], outFile, null);
  try {
    writeFile(r, writer);
  } finally {
    // Close the writer even when writeFile fails, so the output
    // stream is never leaked.
    writer.close();
  }
}
/** sort calls this to generate the final merged output
 * @param tmpDir the directory to use for intermediate merge results
 * @return always 0
 * @throws IOException raised on errors performing I/O.
 */
private int mergePass(Path tmpDir) throws IOException {
  if(LOG.isDebugEnabled()) {
    LOG.debug("running merge pass");
  }
  // The ".0" suffix names the sort pass output; clone its attributes
  // (e.g. compression) for the final merged file.
  Writer writer = cloneFileAttributes(
    outFile.suffix(".0"), outFile, null);
  try {
    RawKeyValueIterator r = merge(outFile.suffix(".0"),
                                  outFile.suffix(".0.index"), tmpDir);
    writeFile(r, writer);
  } finally {
    // Close the writer even if the merge or the write fails part way
    // through, so the output stream is never leaked.
    writer.close();
  }
  return 0;
}
/** Used by mergePass to merge the output of the sort
 * @param inName the name of the input file containing sorted segments
 * @param indexIn the offsets of the sorted segments
 * @param tmpDir the relative directory to store intermediate results in
 * @return RawKeyValueIterator
 * @throws IOException
 */
private RawKeyValueIterator merge(Path inName, Path indexIn, Path tmpDir)
  throws IOException {
  // Read the segment offsets from indexIn. A SegmentContainer is used so
  // that the segments belonging to inName can be tracked, and inName can
  // be deleted as soon as every contained segment has been consumed by
  // the merge and is no longer needed.
  SegmentContainer container = new SegmentContainer(inName, indexIn);
  return new MergeQueue(container.getSegmentList(), tmpDir, progressable)
    .merge();
}
/** This | RawKeyValueIterator |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletInvocableHandlerMethodTests.java | {
"start": 20819,
"end": 21083
} | class ____ {
public DeferredResult<ResponseEntity<String>> handleDeferred() { return null; }
public ResponseEntity<Void> handleRawType() { return null; }
public ResponseEntity<Flux<Bar>> handleFlux() { return null; }
}
private static | ResponseEntityHandler |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticBeanCollisionTest.java | {
"start": 1009,
"end": 1495
} | class ____ implements BeanRegistrar {
@Override
public void register(RegistrationContext context) {
context.configure(String.class).unremovable().types(String.class).param("name", "Frantisek")
.creator(StringCreator.class).done();
context.configure(String.class).unremovable().types(String.class).param("name", "Martin")
.creator(StringCreator.class).done();
}
}
public static | TestRegistrar |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/AnnotationsTransformer.java | {
"start": 7392,
"end": 7944
} | class ____ meet the given condition.
*
* @param condition
* @return self
*/
public ClassTransformerBuilder whenClass(Predicate<ClassInfo> condition) {
return when(wrap(condition, ClassTransformerBuilder::extract));
}
@Override
public boolean test(Kind kind) {
return kind == Kind.CLASS;
}
private static ClassInfo extract(TransformationContext ctx) {
return ctx.getTarget().asClass();
}
}
public abstract static | must |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestLineRecordReaderJobs.java | {
"start": 1405,
"end": 4510
} | class ____ {
private static Path workDir = new Path(new Path(System.getProperty(
"test.build.data", "."), "data"), "TestTextInputFormat");
private static Path inputDir = new Path(workDir, "input");
private static Path outputDir = new Path(workDir, "output");
/**
* Writes the input test file
*
* @param conf
* @throws IOException
*/
public void createInputFile(Configuration conf) throws IOException {
FileSystem localFs = FileSystem.getLocal(conf);
Path file = new Path(inputDir, "test.txt");
Writer writer = new OutputStreamWriter(localFs.create(file));
writer.write("abc\ndef\t\nghi\njkl");
writer.close();
}
/**
* Reads the output file into a string
*
* @param conf
* @return
* @throws IOException
*/
public String readOutputFile(Configuration conf) throws IOException {
FileSystem localFs = FileSystem.getLocal(conf);
Path file = new Path(outputDir, "part-r-00000");
return UtilsForTests.slurpHadoop(file, localFs);
}
/**
* Creates and runs an MR job
*
* @param conf
* @throws IOException
* @throws InterruptedException
* @throws ClassNotFoundException
*/
public void createAndRunJob(Configuration conf) throws IOException,
InterruptedException, ClassNotFoundException {
Job job = Job.getInstance(conf);
job.setJarByClass(TestLineRecordReaderJobs.class);
job.setMapperClass(Mapper.class);
job.setReducerClass(Reducer.class);
FileInputFormat.addInputPath(job, inputDir);
FileOutputFormat.setOutputPath(job, outputDir);
job.waitForCompletion(true);
}
/**
* Test the case when a custom record delimiter is specified using the
* textinputformat.record.delimiter configuration property
*
* @throws IOException
* @throws InterruptedException
* @throws ClassNotFoundException
*/
@Test
public void testCustomRecordDelimiters() throws IOException,
InterruptedException, ClassNotFoundException {
Configuration conf = new Configuration();
conf.set("textinputformat.record.delimiter", "\t\n");
FileSystem localFs = FileSystem.getLocal(conf);
// cleanup
localFs.delete(workDir, true);
// creating input test file
createInputFile(conf);
createAndRunJob(conf);
String expected = "0\tabc\ndef\n9\tghi\njkl\n";
assertEquals(expected, readOutputFile(conf));
}
/**
* Test the default behavior when the textinputformat.record.delimiter
* configuration property is not specified
*
* @throws IOException
* @throws InterruptedException
* @throws ClassNotFoundException
*/
@Test
public void testDefaultRecordDelimiters() throws IOException,
InterruptedException, ClassNotFoundException {
Configuration conf = new Configuration();
FileSystem localFs = FileSystem.getLocal(conf);
// cleanup
localFs.delete(workDir, true);
// creating input test file
createInputFile(conf);
createAndRunJob(conf);
String expected = "0\tabc\n4\tdef\t\n9\tghi\n13\tjkl\n";
assertEquals(expected, readOutputFile(conf));
}
}
| TestLineRecordReaderJobs |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/security/SecurityConfigurationTest.java | {
"start": 1354,
"end": 3131
} | class ____ {
@Test
public void keytabWithoutPrincipalShouldThrowException() {
Configuration configuration = new Configuration();
configuration.set(KERBEROS_LOGIN_KEYTAB, "keytab.file");
IllegalConfigurationException e =
assertThrows(
IllegalConfigurationException.class,
() -> new SecurityConfiguration(configuration));
assertTrue(
e.getMessage()
.contains("either both keytab and principal must be defined, or neither"));
}
@Test
public void principalWithoutKeytabShouldThrowException() {
Configuration configuration = new Configuration();
configuration.set(KERBEROS_LOGIN_PRINCIPAL, "principal");
IllegalConfigurationException e =
assertThrows(
IllegalConfigurationException.class,
() -> new SecurityConfiguration(configuration));
assertTrue(
e.getMessage()
.contains("either both keytab and principal must be defined, or neither"));
}
@Test
public void notExistingKeytabShouldThrowException() {
Configuration configuration = new Configuration();
configuration.set(KERBEROS_LOGIN_KEYTAB, "nonexistingkeytab.file");
configuration.set(KERBEROS_LOGIN_PRINCIPAL, "principal");
IllegalConfigurationException e =
assertThrows(
IllegalConfigurationException.class,
() -> new SecurityConfiguration(configuration));
assertTrue(e.getMessage().contains("nonexistingkeytab.file"));
assertTrue(e.getMessage().contains("doesn't exist"));
}
}
| SecurityConfigurationTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JMTMRegistrationRejection.java | {
"start": 1079,
"end": 1555
} | class ____ extends RegistrationResponse.Rejection {
private static final long serialVersionUID = -5763721635090700901L;
private final String reason;
public JMTMRegistrationRejection(String reason) {
this.reason = reason;
}
public String getReason() {
return reason;
}
@Override
public String toString() {
return "The JobManager has rejected the registration attempt because: " + reason;
}
}
| JMTMRegistrationRejection |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ResponseBuffer.java | {
"start": 1133,
"end": 2104
} | class ____ extends DataOutputStream {
public ResponseBuffer() {
this(1024);
}
public ResponseBuffer(int capacity) {
super(new FramedBuffer(capacity));
}
// update framing bytes based on bytes written to stream.
private FramedBuffer getFramedBuffer() {
FramedBuffer buf = (FramedBuffer)out;
buf.setSize(written);
return buf;
}
public void writeTo(OutputStream out) throws IOException {
getFramedBuffer().writeTo(out);
}
byte[] toByteArray() {
return getFramedBuffer().toByteArray();
}
int capacity() {
return ((FramedBuffer)out).capacity();
}
void setCapacity(int capacity) {
((FramedBuffer)out).setCapacity(capacity);
}
void ensureCapacity(int capacity) {
if (((FramedBuffer)out).capacity() < capacity) {
((FramedBuffer)out).setCapacity(capacity);
}
}
ResponseBuffer reset() {
written = 0;
((FramedBuffer)out).reset();
return this;
}
private static | ResponseBuffer |
java | elastic__elasticsearch | x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java | {
"start": 2524,
"end": 12656
} | class ____ extends MlNativeAutodetectIntegTestCase {
private static final long MS_IN_DAY = TimeValue.timeValueDays(1).millis();
/**
* In production the only way to create a model snapshot is to open a job, and
* opening a job ensures that the state index exists. This suite does not open jobs
* but instead inserts snapshot and state documents directly to the results and
* state indices. This means it needs to create the state index explicitly. This
* method should not be copied to test suites that run jobs in the way they are
* run in production.
*/
@Before
public void addMlState() {
PlainActionFuture<Boolean> future = new PlainActionFuture<>();
createStateIndexAndAliasIfNecessary(
client(),
ClusterState.EMPTY_STATE,
TestIndexNameExpressionResolver.newInstance(),
TEST_REQUEST_TIMEOUT,
future
);
future.actionGet();
}
@After
public void cleanUpTest() {
cleanUp();
}
public void testModelSnapshotRetentionNoDailyThinning() throws Exception {
String jobId = "no-daily-thinning";
int numDocsPerSnapshot = randomIntBetween(1, 4);
int numSnapshotsPerDay = randomIntBetween(1, 4);
int modelSnapshotRetentionDays = randomIntBetween(1, 10);
int numPriorDays = randomIntBetween(1, 5);
createJob(jobId, modelSnapshotRetentionDays, modelSnapshotRetentionDays);
List<String> expectedModelSnapshotDocIds = new ArrayList<>();
List<String> expectedModelStateDocIds = new ArrayList<>();
long now = System.currentTimeMillis();
long timeMs = now;
// We add 1 to make the maths easier, because the retention period includes
// the cutoff time, yet is measured from the timestamp of the latest snapshot
int numSnapshotsTotal = numSnapshotsPerDay * (modelSnapshotRetentionDays + numPriorDays) + 1;
for (int i = numSnapshotsTotal; i > 0; --i) {
String snapshotId = String.valueOf(i);
createModelSnapshot(jobId, snapshotId, new Date(timeMs), numDocsPerSnapshot, i == numSnapshotsTotal);
if (timeMs >= now - MS_IN_DAY * modelSnapshotRetentionDays) {
expectedModelSnapshotDocIds.add(ModelSnapshot.documentId(jobId, snapshotId));
for (int j = 1; j <= numDocsPerSnapshot; ++j) {
expectedModelStateDocIds.add(ModelState.documentId(jobId, snapshotId, j));
}
}
timeMs -= (MS_IN_DAY / numSnapshotsPerDay);
}
refresh(".ml*");
deleteExpiredData();
Collections.sort(expectedModelSnapshotDocIds);
Collections.sort(expectedModelStateDocIds);
assertThat(getAvailableModelSnapshotDocIds(jobId), is(expectedModelSnapshotDocIds));
assertThat(getAvailableModelStateDocIds(), is(expectedModelStateDocIds));
}
public void testModelSnapshotRetentionWithDailyThinning() throws Exception {
String jobId = "with-daily-thinning";
int numDocsPerSnapshot = randomIntBetween(1, 4);
int numSnapshotsPerDay = randomIntBetween(1, 4);
int modelSnapshotRetentionDays = randomIntBetween(2, 10);
int numPriorDays = randomIntBetween(1, 5);
int dailyModelSnapshotRetentionAfterDays = randomIntBetween(0, modelSnapshotRetentionDays - 1);
createJob(jobId, modelSnapshotRetentionDays, dailyModelSnapshotRetentionAfterDays);
List<String> expectedModelSnapshotDocIds = new ArrayList<>();
List<String> expectedModelStateDocIds = new ArrayList<>();
long now = System.currentTimeMillis();
long timeMs = now;
// We add 1 to make the maths easier, because the retention period includes
// the cutoff time, yet is measured from the timestamp of the latest snapshot
int numSnapshotsTotal = numSnapshotsPerDay * (modelSnapshotRetentionDays + numPriorDays) + 1;
for (int i = numSnapshotsTotal; i > 0; --i) {
String snapshotId = String.valueOf(i);
createModelSnapshot(jobId, snapshotId, new Date(timeMs), numDocsPerSnapshot, i == numSnapshotsTotal);
// We should retain:
// - Nothing older than modelSnapshotRetentionDays
// - Everything newer than dailyModelSnapshotRetentionAfterDays
// - The first snapshot of each day in between
if (timeMs >= now - MS_IN_DAY * modelSnapshotRetentionDays
&& (timeMs >= now - MS_IN_DAY * dailyModelSnapshotRetentionAfterDays
|| (now - timeMs) % MS_IN_DAY < MS_IN_DAY / numSnapshotsPerDay)) {
expectedModelSnapshotDocIds.add(ModelSnapshot.documentId(jobId, snapshotId));
for (int j = 1; j <= numDocsPerSnapshot; ++j) {
expectedModelStateDocIds.add(ModelState.documentId(jobId, snapshotId, j));
}
}
timeMs -= (MS_IN_DAY / numSnapshotsPerDay);
}
refresh(".ml*");
deleteExpiredData();
Collections.sort(expectedModelSnapshotDocIds);
Collections.sort(expectedModelStateDocIds);
assertThat(getAvailableModelSnapshotDocIds(jobId), is(expectedModelSnapshotDocIds));
assertThat(getAvailableModelStateDocIds(), is(expectedModelStateDocIds));
}
private List<String> getAvailableModelSnapshotDocIds(String jobId) throws Exception {
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices(AnomalyDetectorsIndex.jobResultsAliasedName(jobId));
QueryBuilder query = QueryBuilders.boolQuery()
.filter(QueryBuilders.existsQuery(ModelSnapshot.SNAPSHOT_DOC_COUNT.getPreferredName()));
searchRequest.source(new SearchSourceBuilder().query(query).size(10000));
return getDocIdsFromSearch(searchRequest);
}
private List<String> getAvailableModelStateDocIds() throws Exception {
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices(AnomalyDetectorsIndex.jobStateIndexPattern());
searchRequest.source(new SearchSourceBuilder().size(10000));
return getDocIdsFromSearch(searchRequest);
}
private List<String> getDocIdsFromSearch(SearchRequest searchRequest) throws Exception {
List<String> docIds = new ArrayList<>();
assertResponse(client().execute(TransportSearchAction.TYPE, searchRequest), searchResponse -> {
for (SearchHit searchHit : searchResponse.getHits()) {
docIds.add(searchHit.getId());
}
});
Collections.sort(docIds);
return docIds;
}
private void createJob(String jobId, long modelSnapshotRetentionDays, long dailyModelSnapshotRetentionAfterDays) {
Detector detector = new Detector.Builder("count", null).build();
Job.Builder builder = new Job.Builder();
builder.setId(jobId);
builder.setAnalysisConfig(new AnalysisConfig.Builder(Collections.singletonList(detector)));
builder.setDataDescription(new DataDescription.Builder());
builder.setModelSnapshotRetentionDays(modelSnapshotRetentionDays);
builder.setDailyModelSnapshotRetentionAfterDays(dailyModelSnapshotRetentionAfterDays);
PutJobAction.Request putJobRequest = new PutJobAction.Request(builder);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
}
private void createModelSnapshot(String jobId, String snapshotId, Date timestamp, int numDocs, boolean isActive) throws IOException {
persistModelSnapshotDoc(jobId, snapshotId, timestamp, numDocs, isActive);
persistModelStateDocs(jobId, snapshotId, numDocs);
if (isActive) {
JobUpdate jobUpdate = new JobUpdate.Builder(jobId).setModelSnapshotId(snapshotId).build();
UpdateJobAction.Request updateJobRequest = UpdateJobAction.Request.internal(jobId, jobUpdate);
client().execute(UpdateJobAction.INSTANCE, updateJobRequest).actionGet();
}
}
private void persistModelSnapshotDoc(String jobId, String snapshotId, Date timestamp, int numDocs, boolean immediateRefresh)
throws IOException {
ModelSnapshot.Builder modelSnapshotBuilder = new ModelSnapshot.Builder();
modelSnapshotBuilder.setJobId(jobId).setSnapshotId(snapshotId).setTimestamp(timestamp).setSnapshotDocCount(numDocs);
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.resultsWriteAlias(jobId)).id(
ModelSnapshot.documentId(jobId, snapshotId)
).setRequireAlias(true);
if (immediateRefresh) {
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
}
XContentBuilder xContentBuilder = JsonXContent.contentBuilder();
modelSnapshotBuilder.build().toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
indexRequest.source(xContentBuilder);
DocWriteResponse indexResponse = client().execute(TransportIndexAction.TYPE, indexRequest).actionGet();
assertThat(indexResponse.getResult(), is(DocWriteResponse.Result.CREATED));
}
private void persistModelStateDocs(String jobId, String snapshotId, int numDocs) {
assertThat(numDocs, greaterThan(0));
BulkRequest bulkRequest = new BulkRequest();
for (int i = 1; i <= numDocs; ++i) {
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias()).id(
ModelState.documentId(jobId, snapshotId, i)
)
// The exact contents of the model state doesn't matter - we are not going to try and restore it
.source(Collections.singletonMap("compressed", Collections.singletonList("foo")))
.setRequireAlias(true);
bulkRequest.add(indexRequest);
}
BulkResponse bulkResponse = client().execute(TransportBulkAction.TYPE, bulkRequest).actionGet();
assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures());
}
}
| ModelSnapshotRetentionIT |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java | {
"start": 823,
"end": 2010
} | class ____ implements AggregatorFunctionSupplier {
public SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return SpatialCentroidGeoPointSourceValuesAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public SpatialCentroidGeoPointSourceValuesAggregatorFunction aggregator(
DriverContext driverContext, List<Integer> channels) {
return SpatialCentroidGeoPointSourceValuesAggregatorFunction.create(driverContext, channels);
}
@Override
public SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator(
DriverContext driverContext, List<Integer> channels) {
return SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "spatial_centroid_geo_point_source of valuess";
}
}
| SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeEnabledRestTestIT.java | {
"start": 990,
"end": 12541
} | class ____ extends LogsIndexModeRestTestIT {
private static final String USER = "test_admin";
private static final String PASS = "x-pack-test-password";
@ClassRule()
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.distribution(DistributionType.DEFAULT)
.module("constant-keyword")
.module("data-streams")
.module("mapper-extras")
.module("x-pack-aggregate-metric")
.module("x-pack-stack")
.setting("xpack.security.autoconfiguration.enabled", "false")
.user(USER, PASS)
.setting("xpack.license.self_generated.type", "trial")
.setting("cluster.logsdb.enabled", "true")
.build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@Before
public void setup() throws Exception {
client = client();
waitForLogs(client);
}
private RestClient client;
private static final String MAPPINGS = """
{
"template": {
"mappings": {
"properties": {
"method": {
"type": "keyword"
},
"message": {
"type": "text"
}
}
}
}
}""";
private static final String ALTERNATE_HOST_MAPPING = """
{
"template": {
"mappings": {
"properties": {
"method": {
"type": "keyword"
},
"message": {
"type": "text"
},
"host.cloud_region": {
"type": "keyword"
},
"host.availability_zone": {
"type": "keyword"
}
}
}
}
}""";
private static final String HOST_MAPPING_AS_OBJECT_DEFAULT_SUBOBJECTS = """
{
"template": {
"mappings": {
"properties": {
"method": {
"type": "keyword"
},
"message": {
"type": "text"
},
"host": {
"type": "object",
"properties": {
"cloud_region": {
"type": "keyword"
},
"availability_zone": {
"type": "keyword"
},
"name": {
"type": "keyword"
}
}
}
}
}
}
}""";
private static final String HOST_MAPPING_AS_OBJECT_NON_DEFAULT_SUBOBJECTS = """
{
"template": {
"mappings": {
"dynamic": "strict",
"properties": {
"method": {
"type": "keyword"
},
"message": {
"type": "text"
},
"host": {
"type": "object",
"subobjects": false,
"properties": {
"cloud_region": {
"type": "keyword"
},
"availability_zone": {
"type": "keyword"
},
"name": {
"type": "keyword"
}
}
}
}
}
}
}""";
private static String BULK_INDEX_REQUEST = """
{ "create": {}}
{ "@timestamp": "2023-01-01T05:11:00Z", "host.name": "foo", "method" : "PUT", "message": "foo put message" }
{ "create": {}}
{ "@timestamp": "2023-01-01T05:12:00Z", "host.name": "bar", "method" : "POST", "message": "bar post message" }
{ "create": {}}
{ "@timestamp": "2023-01-01T05:12:00Z", "host.name": "baz", "method" : "PUT", "message": "baz put message" }
{ "create": {}}
{ "@timestamp": "2023-01-01T05:13:00Z", "host.name": "baz", "method" : "PUT", "message": "baz put message" }
""";
private static String BULK_INDEX_REQUEST_WITH_HOST = """
{ "create": {}}
{ "@timestamp": "2023-01-01T05:11:00Z", "method" : "PUT", "message": "foo put message", \
"host": { "cloud_region" : "us-west", "availability_zone" : "us-west-4a", "name" : "ahdta-876584" } }
{ "create": {}}
{ "@timestamp": "2023-01-01T05:12:00Z", "method" : "POST", "message": "bar post message", \
"host": { "cloud_region" : "us-west", "availability_zone" : "us-west-4b", "name" : "tyrou-447898" } }
{ "create": {}}
{ "@timestamp": "2023-01-01T05:12:00Z", "method" : "PUT", "message": "baz put message", \
"host": { "cloud_region" : "us-west", "availability_zone" : "us-west-4a", "name" : "uuopl-162899" } }
{ "create": {}}
{ "@timestamp": "2023-01-01T05:13:00Z", "method" : "PUT", "message": "baz put message", \
"host": { "cloud_region" : "us-west", "availability_zone" : "us-west-4b", "name" : "fdfgf-881197" } }
""";
protected Settings restClientSettings() {
String token = basicAuthHeaderValue(USER, new SecureString(PASS.toCharArray()));
return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", token).build();
}
public void testCreateDataStream() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", MAPPINGS));
assertOK(createDataStream(client, "logs-custom-dev"));
final String indexMode = (String) getSetting(
client,
getDataStreamBackingIndex(client, "logs-custom-dev", 0),
IndexSettings.MODE.getKey()
);
assertThat(indexMode, equalTo(IndexMode.LOGSDB.getName()));
}
public void testBulkIndexing() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", MAPPINGS));
assertOK(createDataStream(client, "logs-custom-dev"));
final Response response = bulkIndex(client, "logs-custom-dev", () -> BULK_INDEX_REQUEST);
assertOK(response);
assertThat(entityAsMap(response).get("errors"), Matchers.equalTo(false));
}
public void testBulkIndexingWithFlatHostProperties() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", ALTERNATE_HOST_MAPPING));
assertOK(createDataStream(client, "logs-custom-dev"));
final Response response = bulkIndex(client, "logs-custom-dev", () -> BULK_INDEX_REQUEST_WITH_HOST);
assertOK(response);
assertThat(entityAsMap(response).get("errors"), Matchers.equalTo(false));
}
public void testBulkIndexingWithObjectHostDefaultSubobjectsProperties() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", HOST_MAPPING_AS_OBJECT_DEFAULT_SUBOBJECTS));
assertOK(createDataStream(client, "logs-custom-dev"));
final Response response = bulkIndex(client, "logs-custom-dev", () -> BULK_INDEX_REQUEST_WITH_HOST);
assertOK(response);
assertThat(entityAsMap(response).get("errors"), Matchers.equalTo(false));
}
public void testBulkIndexingWithObjectHostSubobjectsFalseProperties() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", HOST_MAPPING_AS_OBJECT_NON_DEFAULT_SUBOBJECTS));
assertOK(createDataStream(client, "logs-custom-dev"));
final Response response = bulkIndex(client, "logs-custom-dev", () -> BULK_INDEX_REQUEST_WITH_HOST);
assertOK(response);
assertThat(entityAsMap(response).get("errors"), Matchers.equalTo(false));
}
public void testRolloverDataStream() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", MAPPINGS));
assertOK(createDataStream(client, "logs-custom-dev"));
final String firstBackingIndex = getDataStreamBackingIndex(client, "logs-custom-dev", 0);
assertOK(rolloverDataStream(client, "logs-custom-dev"));
final String secondBackingIndex = getDataStreamBackingIndex(client, "logs-custom-dev", 1);
assertThat(firstBackingIndex, Matchers.not(equalTo(secondBackingIndex)));
assertThat(getDataStreamBackingIndices(client, "logs-custom-dev").size(), equalTo(2));
}
public void testLogsAtSettingWithStandardOverride() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", """
{
"template": {
"settings": {
"index": {
"mode": "standard"
}
}
}
}
"""));
assertOK(createDataStream(client, "logs-custom-dev"));
final String indexMode = (String) getSetting(
client,
getDataStreamBackingIndex(client, "logs-custom-dev", 0),
IndexSettings.MODE.getKey()
);
assertThat(indexMode, equalTo(IndexMode.STANDARD.getName()));
}
public void testLogsAtSettingWithTimeSeriesOverride() throws IOException {
assertOK(putComponentTemplate(client, "logs@custom", """
{
"template": {
"settings": {
"index": {
"routing_path": [ "hostname" ],
"mode": "time_series",
"sort.field": ["_tsid", "@timestamp"],
"sort.order": ["asc", "desc"]
}
},
"mappings": {
"properties": {
"hostname": {
"type": "keyword",
"time_series_dimension": true
}
}
}
}
}
"""));
assertOK(createDataStream(client, "logs-custom-dev"));
final String indexMode = (String) getSetting(
client,
getDataStreamBackingIndex(client, "logs-custom-dev", 0),
IndexSettings.MODE.getKey()
);
assertThat(indexMode, equalTo(IndexMode.TIME_SERIES.getName()));
}
public void testLogsAtSettingWithTimeSeriesOverrideFailure() {
// NOTE: apm@settings defines sorting on @timestamp and template composition results in index.mode "time_series"
// with a non-allowed index.sort.field '@timestamp'. This fails at template composition stage before the index is even created.
final ResponseException ex = assertThrows(ResponseException.class, () -> putComponentTemplate(client, "logs@custom", """
{
"template": {
"settings": {
"index": {
"routing_path": [ "hostname" ],
"mode": "time_series"
}
},
"mappings": {
"properties": {
"hostname": {
"type": "keyword",
"time_series_dimension": true
}
}
}
}
}
"""));
assertTrue(ex.getMessage().contains("[index.mode=time_series] is incompatible with [index.sort.field]"));
}
}
| LogsIndexModeEnabledRestTestIT |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/FastDatePrinter.java | {
"start": 10533,
"end": 11124
} | interface ____ {
/**
* Appends the value of the specified calendar to the output buffer based on the rule implementation.
*
* @param buf the output buffer.
* @param calendar calendar to be appended.
* @throws IOException if an I/O error occurs.
*/
void appendTo(Appendable buf, Calendar calendar) throws IOException;
/**
* Returns the estimated length of the result.
*
* @return the estimated length of the result.
*/
int estimateLength();
}
/**
* Inner | Rule |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/list/ListCommands.java | {
"start": 685,
"end": 17453
} | interface ____<K, V> extends RedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/blmove">BLMOVE</a>.
* Summary: Pop an element from a list, push it to another list and return it; or block until one is available
* Group: list
* Requires Redis 6.2.0
*
* @param source the key
* @param destination the key
* @param positionInSource the position of the element in the source, {@code LEFT} means the first element, {@code RIGHT}
* means the last element.
* @param positionInDest the position of the element in the destination, {@code LEFT} means the first element, {@code RIGHT}
* means the last element.
* @param timeout the operation timeout (in seconds)
* @return the element being popped from source and pushed to destination. If timeout is reached, a Null reply is returned.
**/
V blmove(K source, K destination, Position positionInSource, Position positionInDest, Duration timeout);
/**
* Execute the command <a href="https://redis.io/commands/blmpop">BLMPOP</a>.
* Summary: Pop elements from a list, or block until one is available
* Group: list
* Requires Redis 7.0.0
*
* @param timeout the operation timeout (in seconds)
* @param position whether if the element must be popped from the beginning of the list ({@code LEFT}) or from the end
* ({@code RIGHT})
* @param keys the keys from which the element must be popped
* @return {@code null} when no element could be popped, and timeout is reached, otherwise the key/value structure
**/
KeyValue<K, V> blmpop(Duration timeout, Position position, K... keys);
/**
* Execute the command <a href="https://redis.io/commands/blmpop">BLMPOP</a>.
* Summary: Pop elements from a list, or block until one is available
* Group: list
* Requires Redis 7.0.0
*
* @param timeout the operation timeout (in seconds)
* @param position whether if the element must be popped from the beginning of the list ({@code LEFT}) or from the end
* ({@code RIGHT})
* @param count the number of element to pop
* @param keys the keys from which the element must be popped
* @return {@code null} when no element could be popped, and timeout is reached, otherwise the list of key/value structures
**/
List<KeyValue<K, V>> blmpop(Duration timeout, Position position, int count, K... keys);
/**
* Execute the command <a href="https://redis.io/commands/blpop">BLPOP</a>.
* Summary: Remove and get the first element in a list, or block until one is available
* Group: list
* Requires Redis 2.0.0
*
* @param timeout the operation timeout (in seconds)
* @param keys the keys from which the element must be popped
* @return A {@code null} multi-bulk when no element could be popped and the timeout expired, otherwise the key/value
* structure.
**/
KeyValue<K, V> blpop(Duration timeout, K... keys);
/**
* Execute the command <a href="https://redis.io/commands/brpop">BRPOP</a>.
* Summary: Remove and get the last element in a list, or block until one is available
* Group: list
* Requires Redis 2.0.0
*
* @param timeout the operation timeout (in seconds)
* @param keys the keys from which the element must be popped
* @return A {@code null} multi-bulk when no element could be popped and the timeout expired, otherwise the key/value
* structure.
**/
KeyValue<K, V> brpop(Duration timeout, K... keys);
/**
* Execute the command <a href="https://redis.io/commands/brpoplpush">BRPOPLPUSH</a>.
* Summary: Pop an element from a list, push it to another list and return it; or block until one is available
* Group: list
* Requires Redis 2.2.0
*
* @param timeout the timeout, in seconds
* @param source the source key
* @param destination the detination key
* @return the element being popped from source and pushed to destination. If timeout is reached, a Null reply is returned.
* @deprecated See https://redis.io/commands/brpoplpush
**/
@Deprecated
V brpoplpush(Duration timeout, K source, K destination);
/**
* Execute the command <a href="https://redis.io/commands/lindex">LINDEX</a>.
* Summary: Get an element from a list by its index
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param index the index
* @return the requested element, or {@code null} when index is out of range.
**/
V lindex(K key, long index);
/**
* Execute the command <a href="https://redis.io/commands/linsert">LINSERT</a>.
* Summary: Insert an element before another element in a list
* Group: list
* Requires Redis 2.2.0
*
* @param key the key
* @param pivot the pivot, i.e. the position reference
* @param element the element to insert
* @return the length of the list after the insert operation, or -1 when the value {@code pivot} was not found.
**/
long linsertBeforePivot(K key, V pivot, V element);
/**
* Execute the command <a href="https://redis.io/commands/linsert">LINSERT</a>.
* Summary: Insert an element after another element in a list
* Group: list
* Requires Redis 2.2.0
*
* @param key the key
* @param pivot the pivot, i.e. the position reference
* @param element the element to insert
* @return the length of the list after the insert operation, or -1 when the value {@code pivot} was not found.
**/
long linsertAfterPivot(K key, V pivot, V element);
/**
* Execute the command <a href="https://redis.io/commands/llen">LLEN</a>.
* Summary: Get the length of a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @return the length of the list at key, if the list is empty, 0 is returned.
**/
long llen(K key);
/**
* Execute the command <a href="https://redis.io/commands/lmove">LMOVE</a>.
* Summary: Pop an element from a list, push it to another list and return it
* Group: list
* Requires Redis 6.2.0
*
* @param source the key
* @param destination the key
* @param positionInSource the position of the element to pop in the source (LEFT: first element, RIGHT: last element)
* @param positionInDestination the position of the element to insert in the destination (LEFT: first element, RIGHT: last
* element)
* @return the element being popped and pushed.
**/
V lmove(K source, K destination, Position positionInSource, Position positionInDestination);
/**
* Execute the command <a href="https://redis.io/commands/lmpop">LMPOP</a>.
* Summary: Pop one element from the first non-empty list
* Group: list
* Requires Redis 7.0.0
*
* @param position the position of the item to pop (LEFT: beginning ot the list, RIGHT: end of the list)
* @param keys the keys from which the item will be popped, must not be empty
* @return A {@code null} when no element could be popped. A {@link KeyValue} with the key and popped value.
**/
KeyValue<K, V> lmpop(Position position, K... keys);
/**
* Execute the command <a href="https://redis.io/commands/lmpop">LMPOP</a>.
* Summary: Pop {@code count} elements from the first non-empty list
* Group: list
* Requires Redis 7.0.0
*
* @param position the position of the item to pop (LEFT: beginning ot the list, RIGHT: end of the list)
* @param count the number of items to pop
* @param keys the keys from which the item will be popped, must not be empty
* @return {@code empty} when no element could be popped. A list of {@link KeyValue} with at most count items.
**/
List<KeyValue<K, V>> lmpop(Position position, int count, K... keys);
/**
* Execute the command <a href="https://redis.io/commands/lpop">LPOP</a>.
* Summary: Remove and get the first elements in a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @return the value of the first element, or {@code null} when key does not exist.
**/
V lpop(K key);
/**
* Execute the command <a href="https://redis.io/commands/lpop">LPOP</a>.
* Summary: Remove and get the first elements in a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param count the number of element to pop
* @return the popped elements (at most {@code count}), or {@code empty} when key does not exist.
**/
List<V> lpop(K key, int count);
/**
* Execute the command <a href="https://redis.io/commands/lpos">LPOS</a>.
* Summary: Return the index of matching elements on a list
* Group: list
* Requires Redis 6.0.6
*
* @param key the key
* @param element the element to find
* @return The command returns the integer representing the matching element, or {@code empty} if there is no match.
**/
OptionalLong lpos(K key, V element);
/**
* Execute the command <a href="https://redis.io/commands/lpos">LPOS</a>.
* Summary: Return the index of matching elements on a list
* Group: list
* Requires Redis 6.0.6
*
* @param key the key
* @param element the element to find
* @param args the extra command parameter
* @return The command returns the integer representing the matching element, or {@code empty} if there is no match.
**/
OptionalLong lpos(K key, V element, LPosArgs args);
/**
* Execute the command <a href="https://redis.io/commands/lpos">LPOS</a>.
* Summary: Return the index of matching elements on a list
* Group: list
* Requires Redis 6.0.6
*
* @param key the key
* @param element the element to find
* @param count the number of occurrence to find
* @return the list of positions (empty if there are no matches).
**/
List<Long> lpos(K key, V element, int count);
/**
* Execute the command <a href="https://redis.io/commands/lpos">LPOS</a>.
* Summary: Return the index of matching elements on a list
* Group: list
* Requires Redis 6.0.6
*
* @param key the key
* @param element the element to find
* @param count the number of occurrence to find
* @return the list of positions (empty if there are no matches).
**/
List<Long> lpos(K key, V element, int count, LPosArgs args);
/**
* Execute the command <a href="https://redis.io/commands/lpush">LPUSH</a>.
* Summary: Prepend one or multiple elements to a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param elements the elements to add
* @return the length of the list after the push operations.
**/
long lpush(K key, V... elements);
/**
* Execute the command <a href="https://redis.io/commands/lpushx">LPUSHX</a>.
* Summary: Prepend an element to a list, only if the list exists
* Group: list
* Requires Redis 2.2.0
*
* @param key the key
* @param elements the elements to add
* @return the length of the list after the push operation.
**/
long lpushx(K key, V... elements);
/**
* Execute the command <a href="https://redis.io/commands/lrange">LRANGE</a>.
* Summary: Get a range of elements from a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param start the starting position
* @param stop the last position
* @return list of elements in the specified range.
**/
List<V> lrange(K key, long start, long stop);
/**
* Execute the command <a href="https://redis.io/commands/lrem">LREM</a>.
* Summary: Remove elements from a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param count the number of occurence to remove, following the given rules:
* if count > 0: Remove elements equal to element moving from head to tail.
* if count < 0: Remove elements equal to element moving from tail to head.
* if count = 0: Remove all elements equal to element.
* @param element the element to remove
* @return the number of removed elements.
**/
long lrem(K key, long count, V element);
/**
* Execute the command <a href="https://redis.io/commands/lset">LSET</a>.
* Summary: Set the value of an element in a list by its index
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param index the index
* @param element the element to insert
**/
void lset(K key, long index, V element);
/**
* Execute the command <a href="https://redis.io/commands/ltrim">LTRIM</a>.
* Summary: Trim a list to the specified range
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param start the starting index
* @param stop the last index
**/
void ltrim(K key, long start, long stop);
/**
* Execute the command <a href="https://redis.io/commands/rpop">RPOP</a>.
* Summary: Remove and get the last elements in a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @return the value of the last element, or {@code null} when key does not exist.
**/
V rpop(K key);
/**
* Execute the command <a href="https://redis.io/commands/rpop">RPOP</a>.
* Summary: Remove and get the last elements in a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param count the number of element to pop
* @return the list of popped elements, or {@code null} when key does not exist.
**/
List<V> rpop(K key, int count);
/**
* Execute the command <a href="https://redis.io/commands/rpoplpush">RPOPLPUSH</a>.
* Summary: Remove the last element in a list, prepend it to another list and return it
* Group: list
* Requires Redis 1.2.0
*
* @param source the key
* @param destination the key
* @return the element being popped and pushed, or {@code null} if the source does not exist
* @deprecated See https://redis.io/commands/rpoplpush
**/
@Deprecated
V rpoplpush(K source, K destination);
/**
* Execute the command <a href="https://redis.io/commands/rpush">RPUSH</a>.
* Summary: Append one or multiple elements to a list
* Group: list
* Requires Redis 1.0.0
*
* @param key the key
* @param values the values to add to the list
* @return the length of the list after the push operation.
**/
long rpush(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/rpushx">RPUSHX</a>.
* Summary: Append an element to a list, only if the list exists
* Group: list
* Requires Redis 2.2.0
*
* @param key the key
* @param values the values to add to the list
* @return the length of the list after the push operation.
**/
long rpushx(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/sort">SORT</a>.
* Summary: Sort the elements in a list, set or sorted set
* Group: generic
* Requires Redis 1.0.0
*
* @return the list of sorted elements.
**/
List<V> sort(K key);
/**
* Execute the command <a href="https://redis.io/commands/sort">SORT</a>.
* Summary: Sort the elements in a list, set or sorted set
* Group: generic
* Requires Redis 1.0.0
*
* @param key the key
* @param sortArguments the {@code SORT} command extra-arguments
* @return the list of sorted elements.
**/
List<V> sort(K key, SortArgs sortArguments);
/**
* Execute the command <a href="https://redis.io/commands/sort">SORT</a> with the {@code STORE} option.
* Summary: Sort the elements in a list, set or sorted set
* Group: generic
* Requires Redis 1.0.0
*
* @param sortArguments the SORT command extra-arguments
* @return the number of sorted elements in the destination list.
**/
long sortAndStore(K key, K destination, SortArgs sortArguments);
/**
* Execute the command <a href="https://redis.io/commands/sort">SORT</a> with the {@code STORE} option.
* Summary: Sort the elements in a list, set or sorted set
* Group: generic
* Requires Redis 1.0.0
*
* @return the number of sorted elements in the destination list.
**/
long sortAndStore(K key, K destination);
}
| ListCommands |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/DnsCnameCache.java | {
"start": 750,
"end": 1768
} | interface ____ {
/**
* Returns the cached cname for the given hostname.
*
* @param hostname the hostname
* @return the cached entries or an {@code null} if none.
*/
String get(String hostname);
/**
* Caches a cname entry that should be used for the given hostname.
*
* @param hostname the hostname
* @param cname the cname mapping.
* @param originalTtl the TTL as returned by the DNS server
* @param loop the {@link EventLoop} used to register the TTL timeout
*/
void cache(String hostname, String cname, long originalTtl, EventLoop loop);
/**
* Clears all cached nameservers.
*
* @see #clear(String)
*/
void clear();
/**
* Clears the cached nameservers for the specified hostname.
*
* @return {@code true} if and only if there was an entry for the specified host name in the cache and
* it has been removed by this method
*/
boolean clear(String hostname);
}
| DnsCnameCache |
java | apache__camel | components/camel-tarfile/src/test/java/org/apache/camel/dataformat/tarfile/SpringTarSplitterRouteTest.java | {
"start": 1080,
"end": 1669
} | class ____ extends CamelSpringTestSupport {
@Test
void testSplitter() throws InterruptedException {
MockEndpoint processTarEntry = getMockEndpoint("mock:processTarEntry");
processTarEntry.expectedBodiesReceivedInAnyOrder("chau", "hi", "hola", "hello", "greetings");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/dataformat/tarfile/SpringTarSplitterRouteTest.xml");
}
}
| SpringTarSplitterRouteTest |
java | quarkusio__quarkus | extensions/scheduler/deployment/src/main/java/io/quarkus/scheduler/deployment/SchedulerMethodsProcessor.java | {
"start": 265,
"end": 758
} | class ____ {
@BuildStep
ExecutionModelAnnotationsAllowedBuildItem schedulerMethods() {
return new ExecutionModelAnnotationsAllowedBuildItem(new Predicate<MethodInfo>() {
@Override
public boolean test(MethodInfo method) {
return method.hasDeclaredAnnotation(SchedulerDotNames.SCHEDULED_NAME)
|| method.hasDeclaredAnnotation(SchedulerDotNames.SCHEDULES_NAME);
}
});
}
}
| SchedulerMethodsProcessor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/results/internal/implicit/ImplicitFetchBuilderEmbeddable.java | {
"start": 1312,
"end": 5631
} | class ____ implements ImplicitFetchBuilder {
private final NavigablePath fetchPath;
private final EmbeddableValuedFetchable fetchable;
private final Map<Fetchable, FetchBuilder> fetchBuilders;
public ImplicitFetchBuilderEmbeddable(
NavigablePath fetchPath,
EmbeddableValuedFetchable fetchable,
DomainResultCreationState creationState) {
this.fetchPath = fetchPath;
this.fetchable = fetchable;
this.fetchBuilders = fetchBuilderMap( fetchPath, fetchable, impl( creationState ) );
}
private static Map<Fetchable, FetchBuilder> fetchBuilderMap(
NavigablePath fetchPath,
EmbeddableValuedFetchable fetchable,
DomainResultCreationStateImpl creationStateImpl) {
final Function<Fetchable, FetchBuilder> fetchBuilderResolver =
creationStateImpl.getCurrentExplicitFetchMementoResolver();
final int size = fetchable.getNumberOfFetchables();
final Map<Fetchable, FetchBuilder> fetchBuilders = linkedMapOfSize( size );
for ( int i = 0; i < size; i++ ) {
final Fetchable subFetchable = fetchable.getFetchable( i );
final FetchBuilder explicitFetchBuilder = fetchBuilderResolver.apply( subFetchable );
fetchBuilders.put( subFetchable,
explicitFetchBuilder == null
? implicitFetchBuilder( fetchPath, subFetchable, creationStateImpl )
: explicitFetchBuilder );
}
return fetchBuilders;
}
private ImplicitFetchBuilderEmbeddable(ImplicitFetchBuilderEmbeddable original) {
this.fetchPath = original.fetchPath;
this.fetchable = original.fetchable;
if ( original.fetchBuilders.isEmpty() ) {
fetchBuilders = emptyMap();
}
else {
fetchBuilders = new HashMap<>( original.fetchBuilders.size() );
for ( Map.Entry<Fetchable, FetchBuilder> entry : original.fetchBuilders.entrySet() ) {
fetchBuilders.put( entry.getKey(), entry.getValue().cacheKeyInstance() );
}
}
}
@Override
public FetchBuilder cacheKeyInstance() {
return new ImplicitFetchBuilderEmbeddable( this );
}
@Override
public Fetch buildFetch(
FetchParent parent,
NavigablePath fetchPath,
JdbcValuesMetadata jdbcResultsMetadata,
DomainResultCreationState creationState) {
final DomainResultCreationStateImpl creationStateImpl = impl( creationState );
// make sure the TableGroup is available
tableGroup( parent, fetchPath, creationStateImpl );
// final FetchParent fetchParent = (FetchParent) fetch;
// fetchBuilders.forEach(
// (subFetchPath, fetchBuilder) -> fetchBuilder.buildFetch(
// fetchParent,
// subFetchPath,
// jdbcResultsMetadata,
// legacyFetchResolver,
// creationState
// )
// );
return parent.generateFetchableFetch(
fetchable,
fetchPath,
FetchTiming.IMMEDIATE,
true,
null,
creationState
);
}
private void tableGroup(FetchParent parent, NavigablePath fetchPath, DomainResultCreationStateImpl creationStateImpl) {
creationStateImpl.getFromClauseAccess().resolveTableGroup(
fetchPath,
navigablePath -> {
final TableGroup parentTableGroup =
creationStateImpl.getFromClauseAccess()
.getTableGroup( parent.getNavigablePath() );
final TableGroupJoin tableGroupJoin = fetchable.createTableGroupJoin(
fetchPath,
parentTableGroup,
null,
null,
SqlAstJoinType.INNER,
true,
false,
creationStateImpl
);
parentTableGroup.addTableGroupJoin( tableGroupJoin );
return tableGroupJoin.getJoinedGroup();
}
);
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
final ImplicitFetchBuilderEmbeddable that = (ImplicitFetchBuilderEmbeddable) o;
return fetchPath.equals( that.fetchPath )
&& fetchable.equals( that.fetchable )
&& fetchBuilders.equals( that.fetchBuilders );
}
@Override
public int hashCode() {
int result = fetchPath.hashCode();
result = 31 * result + fetchable.hashCode();
result = 31 * result + fetchBuilders.hashCode();
return result;
}
@Override
public String toString() {
return "ImplicitFetchBuilderEmbeddable(" + fetchPath + ")";
}
@Override
public void visitFetchBuilders(BiConsumer<Fetchable, FetchBuilder> consumer) {
fetchBuilders.forEach( (k, v) -> consumer.accept( k, v ) );
}
}
| ImplicitFetchBuilderEmbeddable |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/basicType/LongNullTest.java | {
"start": 2170,
"end": 2240
} | class ____ {
public Long v1;
public Long v2;
}
}
| Model |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java | {
"start": 1937,
"end": 25678
} | class ____ extends MapperTestCase {
@Override
protected Object getSampleValueForDocument() {
return Map.of("values", new double[] { 2, 3 }, "counts", new int[] { 0, 4 });
}
@Override
protected Object getSampleObjectForDocument() {
return getSampleValueForDocument();
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return List.of(new AnalyticsPlugin());
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "histogram");
}
@Override
protected void registerParameters(ParameterChecker checker) throws IOException {
checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> assertTrue(((HistogramFieldMapper) m).ignoreMalformed()));
if (ExponentialHistogramParser.EXPONENTIAL_HISTOGRAM_FEATURE.isEnabled()) {
checker.registerUpdateCheck(b -> b.field("coerce", false), m -> assertFalse(((HistogramFieldMapper) m).coerce()));
}
}
@Override
protected boolean supportsSearchLookup() {
return false;
}
@Override
protected boolean supportsStoredFields() {
return false;
}
public void testParseValue() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field("values", new double[] { 2, 3 }).field("counts", new int[] { 0, 4 }).endObject())
);
assertThat(doc.rootDoc().getField("field"), notNullValue());
}
public void testParseArrayValue() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source(b -> {
b.startArray("field");
{
b.startObject().field("counts", new int[] { 2, 2, 3 }).field("values", new double[] { 2, 2, 3 }).endObject();
b.startObject().field("counts", new int[] { 2, 2, 3 }).field("values", new double[] { 2, 2, 3 }).endObject();
}
b.endArray();
})));
assertThat(
e.getCause().getMessage(),
containsString("doesn't support indexing multiple values for the same field in the same document")
);
}
public void testEmptyArrays() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field("values", new double[] {}).field("counts", new int[] {}).endObject())
);
assertThat(doc.rootDoc().getField("field"), notNullValue());
}
public void testNullValue() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(source(b -> b.nullField("pre_aggregated")));
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
}
public void testMissingFieldCounts() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
Exception e = expectThrows(
DocumentParsingException.class,
() -> mapper.parse(source(b -> b.startObject("field").field("values", new double[] { 2, 2 }).endObject()))
);
assertThat(e.getCause().getMessage(), containsString("expected field called [counts]"));
}
public void testCoerce() throws IOException {
ExponentialHistogram input = ExponentialHistogramTestUtils.randomHistogram();
XContentBuilder inputJson = XContentFactory.jsonBuilder();
inputJson.startObject().field("field");
ExponentialHistogramXContent.serialize(inputJson, input);
inputJson.endObject();
BytesReference inputDocBytes = BytesReference.bytes(inputJson);
XContentParser docParser = XContentType.JSON.xContent()
.createParser(XContentParserConfiguration.EMPTY, inputDocBytes.streamInput());
docParser.nextToken(); // start object
docParser.nextToken(); // field name
docParser.nextToken(); // start object
docParser.nextToken(); // point at first sub-field
HistogramParser.ParsedHistogram expectedCoerced = ParsedHistogramConverter.exponentialToTDigest(
ExponentialHistogramParser.parse("field", docParser)
);
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
if (ExponentialHistogramParser.EXPONENTIAL_HISTOGRAM_FEATURE.isEnabled() == false) {
// feature flag is disabled, so coerce should not work
ThrowingRunnable runnable = () -> defaultMapper.parse(new SourceToParse("1", inputDocBytes, XContentType.JSON));
DocumentParsingException e = expectThrows(DocumentParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString("unknown parameter [scale]"));
} else {
ParsedDocument doc = defaultMapper.parse(new SourceToParse("1", inputDocBytes, XContentType.JSON));
List<IndexableField> fields = doc.rootDoc().getFields("field");
assertThat(fields.size(), equalTo(1));
assertThat(docValueToParsedHistogram(fields.getFirst()), equalTo(expectedCoerced));
DocumentMapper coerceDisabledMapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "histogram").field("coerce", false))
);
ThrowingRunnable runnable = () -> coerceDisabledMapper.parse(new SourceToParse("1", inputDocBytes, XContentType.JSON));
DocumentParsingException e = expectThrows(DocumentParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString("unknown parameter [scale]"));
}
}
private static HistogramParser.ParsedHistogram docValueToParsedHistogram(IndexableField indexableField) {
HistogramFieldMapper.InternalHistogramValue histogramValue = new HistogramFieldMapper.InternalHistogramValue();
histogramValue.reset(indexableField.binaryValue());
List<Long> counts = new ArrayList<>();
List<Double> values = new ArrayList<>();
try {
while (histogramValue.next()) {
counts.add(histogramValue.count());
values.add(histogramValue.value());
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return new HistogramParser.ParsedHistogram(values, counts);
}
@Override
protected boolean supportsIgnoreMalformed() {
return true;
}
@Override
protected List<ExampleMalformedValue> exampleMalformedValues() {
var randomString = randomAlphaOfLengthBetween(1, 10);
var randomLong = randomLong();
var randomDouble = randomDouble();
var randomBoolean = randomBoolean();
return List.of(
exampleMalformedValue(b -> b.value(randomString)).errorMatches(
"Failed to parse object: expecting token of type [START_OBJECT]"
),
exampleMalformedValue(b -> b.value(randomLong)).errorMatches("Failed to parse object: expecting token of type [START_OBJECT]"),
exampleMalformedValue(b -> b.value(randomDouble)).errorMatches(
"Failed to parse object: expecting token of type [START_OBJECT]"
),
exampleMalformedValue(b -> b.value(randomBoolean)).errorMatches(
"Failed to parse object: expecting token of type [START_OBJECT]"
),
exampleMalformedValue(b -> b.startObject().endObject()).errorMatches("expected field called [values]"),
exampleMalformedValue(b -> b.startObject().startArray("values").value(2).value(2).endArray().endObject()).errorMatches(
"expected field called [counts]"
),
exampleMalformedValue(b -> b.startObject().startArray("counts").value(2).value(2).endArray().endObject()).errorMatches(
"expected field called [values]"
),
// Make sure that entire sub-object is preserved in synthetic source
exampleMalformedValue(
b -> b.startObject()
.startArray("values")
.value(2)
.endArray()
.field("somefield", randomString)
.array("somearray", randomLong, randomLong)
.startObject("someobject")
.field("nestedfield", randomDouble)
.endObject()
.endObject()
).errorMatches("unknown parameter [somefield]"),
exampleMalformedValue(b -> b.startArray().value(randomLong).value(randomLong).endArray()).errorMatches(
"expecting token of type [START_OBJECT] but found [VALUE_NUMBER]"
)
);
}
public void testIgnoreMalformedSkipsKeyword() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("pre_aggregated").field("type", "histogram").field("ignore_malformed", true).endObject();
b.startObject("otherField").field("type", "keyword").endObject();
}));
ParsedDocument doc = mapper.parse(source(b -> b.field("pre_aggregated", "value").field("otherField", "value")));
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
assertThat(doc.rootDoc().getField("otherField"), notNullValue());
}
public void testIgnoreMalformedSkipsArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("pre_aggregated").field("type", "histogram").field("ignore_malformed", true).endObject();
b.startObject("otherField").field("type", "keyword").endObject();
}));
ParsedDocument doc = mapper.parse(source(b -> b.field("pre_aggregated", new int[] { 2, 2, 2 }).field("otherField", "value")));
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
assertThat(doc.rootDoc().getField("otherField"), notNullValue());
}
public void testIgnoreMalformedSkipsField() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("pre_aggregated").field("type", "histogram").field("ignore_malformed", true).endObject();
b.startObject("otherField").field("type", "keyword").endObject();
}));
ParsedDocument doc = mapper.parse(source(b -> {
b.startObject("pre_aggregated").field("values", new double[] { 2, 2 }).field("typo", new double[] { 2, 2 }).endObject();
b.field("otherField", "value");
}));
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
assertThat(doc.rootDoc().getField("otherField"), notNullValue());
}
public void testIgnoreMalformedSkipsObjects() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("pre_aggregated").field("type", "histogram").field("ignore_malformed", true).endObject();
b.startObject("otherField").field("type", "keyword").endObject();
}));
ParsedDocument doc = mapper.parse(source(b -> {
b.startObject("pre_aggregated");
{
b.startObject("values");
{
b.field("values", new double[] { 2, 2 });
b.startObject("otherData");
{
b.startObject("more").field("toto", 1).endObject();
}
b.endObject();
}
b.endObject();
b.field("counts", new double[] { 2, 2 });
}
b.endObject();
b.field("otherField", "value");
}));
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
assertThat(doc.rootDoc().getField("otherField"), notNullValue());
}
public void testIgnoreMalformedSkipsEmpty() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("pre_aggregated").field("type", "histogram").field("ignore_malformed", true).endObject();
b.startObject("otherField").field("type", "keyword").endObject();
}));
ParsedDocument doc = mapper.parse(source(b -> b.startObject("pre_aggregated").endObject().field("otherField", "value")));
assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue());
assertThat(doc.rootDoc().getField("otherField"), notNullValue());
}
public void testMissingFieldValues() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
Exception e = expectThrows(
DocumentParsingException.class,
() -> mapper.parse(source(b -> b.startObject("field").field("counts", new int[] { 2, 2 }).endObject()))
);
assertThat(e.getCause().getMessage(), containsString("expected field called [values]"));
}
public void testUnknownField() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field")
.field("counts", new int[] { 2, 2 })
.field("values", new double[] { 2, 2 })
.field("unknown", new double[] { 2, 2 })
.endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("with unknown parameter [unknown]"));
}
public void testFieldArraysDifferentSize() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field").field("counts", new int[] { 2, 2 }).field("values", new double[] { 2, 2, 3 }).endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("expected same length from [values] and [counts] but got [3 != 2]"));
}
public void testFieldCountsNotArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field").field("counts", "bah").field("values", new double[] { 2, 2, 3 }).endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("expecting token of type [START_ARRAY] but found [VALUE_STRING]"));
}
public void testFieldCountsStringArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field")
.field("counts", new String[] { "4", "5", "6" })
.field("values", new double[] { 2, 2, 3 })
.endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]"));
}
public void testFieldValuesStringArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.field("field")
.startObject()
.field("counts", new int[] { 4, 5, 6 })
.field("values", new String[] { "2", "2", "3" })
.endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]"));
}
public void testFieldValuesNotArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field").field("counts", new int[] { 2, 2, 3 }).field("values", "bah").endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("expecting token of type [START_ARRAY] but found [VALUE_STRING]"));
}
public void testCountIsLong() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field")
.field("counts", new long[] { 2, 2, Long.MAX_VALUE })
.field("values", new double[] { 2, 2, 3 })
.endObject()
);
ParsedDocument doc = mapper.parse(source);
assertThat(doc.rootDoc().getField("field"), notNullValue());
}
public void testValuesNotInOrder() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.field("field").startObject().field("counts", new int[] { 2, 8, 4 }).field("values", new double[] { 2, 3, 2 }).endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(
e.getCause().getMessage(),
containsString(" values must be in increasing order, " + "got [2.0] but previous value was [3.0]")
);
}
public void testFieldNotObject() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(b -> b.field("field", "bah"));
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("expecting token of type [START_OBJECT] " + "but found [VALUE_STRING]"));
}
public void testNegativeCount() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SourceToParse source = source(
b -> b.startObject("field").field("counts", new int[] { 2, 2, -3 }).field("values", new double[] { 2, 2, 3 }).endObject()
);
Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source));
assertThat(e.getCause().getMessage(), containsString("[counts] elements must be >= 0 but got -3"));
}
@Override
protected Object generateRandomInputValue(MappedFieldType ft) {
assumeFalse("Test implemented in a follow up", true);
return null;
}
public void testCannotBeUsedInMultifields() {
Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
b.field("type", "keyword");
b.startObject("fields");
b.startObject("hist");
b.field("type", "histogram");
b.endObject();
b.endObject();
})));
assertThat(e.getMessage(), containsString("Field [hist] of type [histogram] can't be used in multifields"));
}
@Override
protected IngestScriptSupport ingestScriptSupport() {
throw new AssumptionViolatedException("not supported");
}
public void testArrayValueSyntheticSource() throws Exception {
DocumentMapper mapper = createSytheticSourceMapperService(
fieldMapping(b -> b.field("type", "histogram").field("ignore_malformed", "true"))
).documentMapper();
var randomString = randomAlphaOfLength(10);
CheckedConsumer<XContentBuilder, IOException> arrayValue = b -> {
b.startArray("field");
{
b.startObject().field("counts", new int[] { 1, 2, 3 }).field("values", new double[] { 1, 2, 3 }).endObject();
b.startObject().field("counts", new int[] { 4, 5, 6 }).field("values", new double[] { 4, 5, 6 }).endObject();
b.value(randomString);
}
b.endArray();
};
var expected = JsonXContent.contentBuilder().startObject();
// First value comes from synthetic field loader and so is formatted in a specific format (e.g. values always come first).
// Other values are stored as is as part of ignore_malformed logic for synthetic source.
{
expected.startArray("field");
expected.startObject().field("values", new double[] { 1, 2, 3 }).field("counts", new int[] { 1, 2, 3 }).endObject();
expected.startObject().field("counts", new int[] { 4, 5, 6 }).field("values", new double[] { 4, 5, 6 }).endObject();
expected.value(randomString);
expected.endArray();
}
expected.endObject();
var syntheticSource = syntheticSource(mapper, arrayValue);
assertEquals(Strings.toString(expected), syntheticSource);
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) {
return new HistogramFieldSyntheticSourceSupport(ignoreMalformed);
}
private record HistogramFieldSyntheticSourceSupport(boolean ignoreMalformed) implements SyntheticSourceSupport {
@Override
public SyntheticSourceExample example(int maxVals) {
if (randomBoolean()) {
Map<String, Object> value = new LinkedHashMap<>();
value.put("values", List.of(randomDouble()));
value.put("counts", List.of(randomCount()));
return new SyntheticSourceExample(value, value, this::mapping);
}
int size = between(1, maxVals);
List<Double> values = new ArrayList<>(size);
double prev = randomDouble();
values.add(prev);
while (values.size() < size && prev != Double.MAX_VALUE) {
prev = randomDoubleBetween(prev, Double.MAX_VALUE, false);
values.add(prev);
}
Map<String, Object> value = new LinkedHashMap<>();
value.put("values", values);
value.put("counts", randomList(values.size(), values.size(), this::randomCount));
return new SyntheticSourceExample(value, value, this::mapping);
}
private int randomCount() {
return between(1, Integer.MAX_VALUE);
}
private void mapping(XContentBuilder b) throws IOException {
b.field("type", "histogram");
if (ignoreMalformed) {
b.field("ignore_malformed", true);
}
}
@Override
public List<SyntheticSourceInvalidExample> invalidExample() throws IOException {
return List.of();
}
}
@Override
public void testSyntheticSourceKeepArrays() {
// The mapper expects to parse an array of values by default, it's not compatible with array of arrays.
}
@Override
protected List<SortShortcutSupport> getSortShortcutSupport() {
return List.of();
}
@Override
protected boolean supportsDocValuesSkippers() {
return false;
}
}
| HistogramFieldMapperTests |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/handler/DrinkAuthValidationHandler.java | {
"start": 1222,
"end": 1949
} | class ____ extends DrinkValidationHandler {
public DrinkAuthValidationHandler(String expectedMethod, String expectedQuery, Object expectedContent, String header) {
super(expectedMethod, expectedQuery, expectedContent, header);
}
@Override
public void handle(ClassicHttpRequest request, ClassicHttpResponse response, HttpContext context)
throws HttpException, IOException {
Header authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION);
Assertions.assertNotNull(authorization);
String auth = authorization.getValue();
Assertions.assertTrue(auth.startsWith("Basic"));
super.handle(request, response, context);
}
}
| DrinkAuthValidationHandler |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/test/java/org/springframework/boot/devtools/test/MockClientHttpRequestFactory.java | {
"start": 3220,
"end": 4161
} | class ____ {
private final int delay;
private final byte[] payload;
private final HttpStatus status;
Response(int delay, byte[] payload, HttpStatus status) {
this.delay = delay;
this.payload = payload;
this.status = status;
}
ClientHttpResponse asHttpResponse(AtomicLong seq) {
MockClientHttpResponse httpResponse = new MockClientHttpResponse(
(this.payload != null) ? this.payload : NO_DATA, this.status);
waitForDelay();
if (this.payload != null) {
httpResponse.getHeaders().setContentLength(this.payload.length);
httpResponse.getHeaders().setContentType(MediaType.APPLICATION_OCTET_STREAM);
httpResponse.getHeaders().add("x-seq", Long.toString(seq.incrementAndGet()));
}
return httpResponse;
}
private void waitForDelay() {
if (this.delay > 0) {
try {
Thread.sleep(this.delay);
}
catch (InterruptedException ex) {
// Ignore
}
}
}
}
}
| Response |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java | {
"start": 1190,
"end": 12028
} | class ____ extends AbstractQueryBuilder<MatchPhrasePrefixQueryBuilder> {
public static final String NAME = "match_phrase_prefix";
public static final ParseField MAX_EXPANSIONS_FIELD = new ParseField("max_expansions");
public static final ParseField ZERO_TERMS_QUERY_FIELD = new ParseField("zero_terms_query");
private final String fieldName;
private final Object value;
private String analyzer;
private int slop = MatchQueryParser.DEFAULT_PHRASE_SLOP;
private int maxExpansions = FuzzyQuery.defaultMaxExpansions;
private ZeroTermsQueryOption zeroTermsQuery = MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY;
public MatchPhrasePrefixQueryBuilder(String fieldName, Object value) {
if (fieldName == null) {
throw new IllegalArgumentException("[" + NAME + "] requires fieldName");
}
if (value == null) {
throw new IllegalArgumentException("[" + NAME + "] requires query value");
}
this.fieldName = fieldName;
this.value = value;
}
/**
* Read from a stream.
*/
public MatchPhrasePrefixQueryBuilder(StreamInput in) throws IOException {
super(in);
fieldName = in.readString();
value = in.readGenericValue();
slop = in.readVInt();
maxExpansions = in.readVInt();
analyzer = in.readOptionalString();
zeroTermsQuery = ZeroTermsQueryOption.readFromStream(in);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeGenericValue(value);
out.writeVInt(slop);
out.writeVInt(maxExpansions);
out.writeOptionalString(analyzer);
zeroTermsQuery.writeTo(out);
}
/** Returns the field name used in this query. */
public String fieldName() {
return this.fieldName;
}
/** Returns the value used in this query. */
public Object value() {
return this.value;
}
/**
* Explicitly set the analyzer to use. Defaults to use explicit mapping
* config for the field, or, if not set, the default search analyzer.
*/
public MatchPhrasePrefixQueryBuilder analyzer(String analyzer) {
this.analyzer = analyzer;
return this;
}
/** Sets a slop factor for phrase queries */
public MatchPhrasePrefixQueryBuilder slop(int slop) {
if (slop < 0) {
throw new IllegalArgumentException("No negative slop allowed.");
}
this.slop = slop;
return this;
}
/** Get the slop factor for phrase queries. */
public int slop() {
return this.slop;
}
/**
* The number of term expansions to use.
*/
public MatchPhrasePrefixQueryBuilder maxExpansions(int maxExpansions) {
if (maxExpansions < 0) {
throw new IllegalArgumentException("No negative maxExpansions allowed.");
}
this.maxExpansions = maxExpansions;
return this;
}
/**
* Get the (optional) number of term expansions when using fuzzy or prefix
* type query.
*/
public int maxExpansions() {
return this.maxExpansions;
}
/**
* Sets query to use in case no query terms are available, e.g. after analysis removed them.
* Defaults to {@link ZeroTermsQueryOption#NONE}, but can be set to
* {@link ZeroTermsQueryOption#ALL} instead.
*/
public MatchPhrasePrefixQueryBuilder zeroTermsQuery(ZeroTermsQueryOption zeroTermsQuery) {
if (zeroTermsQuery == null) {
throw new IllegalArgumentException("[" + NAME + "] requires zeroTermsQuery to be non-null");
}
this.zeroTermsQuery = zeroTermsQuery;
return this;
}
public ZeroTermsQueryOption zeroTermsQuery() {
return this.zeroTermsQuery;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startObject(fieldName);
builder.field(MatchQueryBuilder.QUERY_FIELD.getPreferredName(), value);
if (analyzer != null) {
builder.field(MatchQueryBuilder.ANALYZER_FIELD.getPreferredName(), analyzer);
}
if (slop != MatchQueryParser.DEFAULT_PHRASE_SLOP) {
builder.field(MatchPhraseQueryBuilder.SLOP_FIELD.getPreferredName(), slop);
}
if (maxExpansions != FuzzyQuery.defaultMaxExpansions) {
builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions);
}
if (zeroTermsQuery != MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY) {
builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString());
}
boostAndQueryNameToXContent(builder);
builder.endObject();
builder.endObject();
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
// validate context specific fields
if (analyzer != null && context.getIndexAnalyzers().get(analyzer) == null) {
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
}
MatchQueryParser queryParser = new MatchQueryParser(context);
if (analyzer != null) {
queryParser.setAnalyzer(analyzer);
}
queryParser.setPhraseSlop(slop);
queryParser.setMaxExpansions(maxExpansions);
queryParser.setZeroTermsQuery(zeroTermsQuery);
return queryParser.parse(MatchQueryParser.Type.PHRASE_PREFIX, fieldName, value);
}
@Override
protected boolean doEquals(MatchPhrasePrefixQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(value, other.value)
&& Objects.equals(analyzer, other.analyzer)
&& Objects.equals(slop, other.slop)
&& Objects.equals(maxExpansions, other.maxExpansions)
&& Objects.equals(zeroTermsQuery, other.zeroTermsQuery);
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, value, analyzer, slop, maxExpansions, zeroTermsQuery);
}
public static MatchPhrasePrefixQueryBuilder fromXContent(XContentParser parser) throws IOException {
String fieldName = null;
Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String analyzer = null;
int slop = MatchQueryParser.DEFAULT_PHRASE_SLOP;
int maxExpansion = FuzzyQuery.defaultMaxExpansions;
String queryName = null;
XContentParser.Token token;
String currentFieldName = null;
ZeroTermsQueryOption zeroTermsQuery = MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName);
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (MatchQueryBuilder.QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
value = parser.objectText();
} else if (MatchQueryBuilder.ANALYZER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
analyzer = parser.text();
} else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
boost = parser.floatValue();
} else if (MatchPhraseQueryBuilder.SLOP_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
slop = parser.intValue();
} else if (MAX_EXPANSIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
maxExpansion = parser.intValue();
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
queryName = parser.text();
} else if (ZERO_TERMS_QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
String zeroTermsValue = parser.text();
if ("none".equalsIgnoreCase(zeroTermsValue)) {
zeroTermsQuery = ZeroTermsQueryOption.NONE;
} else if ("all".equalsIgnoreCase(zeroTermsValue)) {
zeroTermsQuery = ZeroTermsQueryOption.ALL;
} else {
throw new ParsingException(
parser.getTokenLocation(),
"Unsupported zero_terms_query value [" + zeroTermsValue + "]"
);
}
} else {
throw new ParsingException(
parser.getTokenLocation(),
"[" + NAME + "] query does not support [" + currentFieldName + "]"
);
}
} else {
throw new ParsingException(
parser.getTokenLocation(),
"[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"
);
}
}
} else {
throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, parser.currentName());
fieldName = parser.currentName();
value = parser.objectText();
}
}
MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder(fieldName, value);
matchQuery.analyzer(analyzer);
matchQuery.slop(slop);
matchQuery.maxExpansions(maxExpansion);
matchQuery.queryName(queryName);
matchQuery.boost(boost);
matchQuery.zeroTermsQuery(zeroTermsQuery);
return matchQuery;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
| MatchPhrasePrefixQueryBuilder |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanDefinitionMethodGeneratorTests.java | {
"start": 14730,
"end": 15927
} | class ____")
.contains("Bean definitions for {@link InnerBeanConfiguration.Simple}")
.doesNotContain("Another__BeanDefinitions");
});
}
@Test
void generateBeanDefinitionMethodWhenHasNestedInnerClassTargetMethodGeneratesMethod() {
this.beanFactory.registerBeanDefinition("testBeanConfiguration", new RootBeanDefinition(
InnerBeanConfiguration.Simple.Another.class));
RootBeanDefinition beanDefinition = new RootBeanDefinition(SimpleBean.class);
beanDefinition.setFactoryBeanName("testBeanConfiguration");
beanDefinition.setFactoryMethodName("anotherBean");
RegisteredBean registeredBean = registerBean(beanDefinition);
BeanDefinitionMethodGenerator generator = new BeanDefinitionMethodGenerator(
this.methodGeneratorFactory, registeredBean, null,
Collections.emptyList());
MethodReference method = generator.generateBeanDefinitionMethod(
this.generationContext, this.beanRegistrationsCode);
compile(method, (actual, compiled) -> {
SourceFile sourceFile = compiled.getSourceFile(".*BeanDefinitions");
assertThat(sourceFile.getClassName()).endsWith("InnerBeanConfiguration__BeanDefinitions");
assertThat(sourceFile).contains("public static | Simple |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java | {
"start": 765,
"end": 1374
} | class ____ extends UnaryStringFunction {
public OctetLength(Source source, Expression field) {
super(source, field);
}
@Override
protected NodeInfo<OctetLength> info() {
return NodeInfo.create(this, OctetLength::new, field());
}
@Override
protected OctetLength replaceChild(Expression newChild) {
return new OctetLength(source(), newChild);
}
@Override
protected StringOperation operation() {
return StringOperation.OCTET_LENGTH;
}
@Override
public DataType dataType() {
return DataTypes.INTEGER;
}
}
| OctetLength |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedTypeParameterTest.java | {
"start": 2623,
"end": 2930
} | class ____ {
// BUG: Diagnostic contains:
private <T> void test() {}
}
""")
.doTest();
}
@Test
public void methodCouldBeOverridden_negativeFinding() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java | {
"start": 30724,
"end": 30823
} | class ____ extends FSImage {
private int mergeErrorCount;
private static | CheckpointStorage |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest88.java | {
"start": 893,
"end": 1260
} | class ____ extends TestCase {
public void test_false() throws Exception {
WallProvider provider = new MySqlWallProvider();
assertTrue(provider.checkValid(//
"DROP INDEX `PRIMARY` ON t;"));
assertEquals(1, provider.getTableStats().size());
assertTrue(provider.getTableStats().containsKey("t"));
}
}
| MySqlWallTest88 |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/IfNull.java | {
"start": 679,
"end": 1416
} | class ____ extends Coalesce {
public IfNull(Source source, Expression first, Expression second) {
this(source, Arrays.asList(first, second));
}
private IfNull(Source source, List<Expression> expressions) {
super(source, expressions);
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new IfNull(source(), newChildren);
}
@Override
protected NodeInfo<IfNull> info() {
List<Expression> children = children();
Expression first = children.size() > 0 ? children.get(0) : NULL;
Expression second = children.size() > 0 ? children.get(1) : NULL;
return NodeInfo.create(this, IfNull::new, first, second);
}
}
| IfNull |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java | {
"start": 1964,
"end": 2060
} | class ____ and monitors the file defining the mappings of DNs to internal ES Roles.
*/
public | loads |
java | google__auto | factory/src/main/java/com/google/auto/factory/processor/AutoFactoryDeclaration.java | {
"start": 3778,
"end": 9409
} | class ____ {
private final Elements elements;
private final Messager messager;
Factory(Elements elements, Messager messager) {
this.elements = elements;
this.messager = messager;
}
Optional<AutoFactoryDeclaration> createIfValid(Element element) {
checkNotNull(element);
AnnotationMirror mirror = Mirrors.getAnnotationMirror(element, AutoFactory.class).get();
checkArgument(
Mirrors.getQualifiedName(mirror.getAnnotationType())
.contentEquals(AutoFactory.class.getName()));
Map<String, AnnotationValue> values =
Mirrors.simplifyAnnotationValueMap(elements.getElementValuesWithDefaults(mirror));
// className value is a string, so we can just call toString. We know values.get("className")
// is non-null because @AutoFactory has an annotation element of that name.
AnnotationValue classNameValue = requireNonNull(values.get("className"));
String className = classNameValue.getValue().toString();
if (!className.isEmpty() && !isValidIdentifier(className)) {
messager.printMessage(
ERROR,
String.format("\"%s\" is not a valid Java identifier", className),
element,
mirror,
classNameValue);
return Optional.empty();
}
ImmutableSet<AnnotationMirror> annotations = annotationsToAdd(element);
AnnotationValue extendingValue = checkNotNull(values.get("extending"));
TypeElement extendingType = AnnotationValues.asType(extendingValue);
if (extendingType == null) {
messager.printMessage(
ERROR,
"Unable to find the type: " + extendingValue.getValue(),
element,
mirror,
extendingValue);
return Optional.empty();
} else if (!isValidSupertypeForClass(extendingType)) {
messager.printMessage(
ERROR,
String.format(
"%s is not a valid supertype for a factory. "
+ "Supertypes must be non-final classes.",
extendingType.getQualifiedName()),
element,
mirror,
extendingValue);
return Optional.empty();
}
ImmutableList<ExecutableElement> noParameterConstructors =
FluentIterable.from(ElementFilter.constructorsIn(extendingType.getEnclosedElements()))
.filter(
new Predicate<ExecutableElement>() {
@Override
public boolean apply(ExecutableElement constructor) {
return constructor.getParameters().isEmpty();
}
})
.toList();
if (noParameterConstructors.isEmpty()) {
messager.printMessage(
ERROR,
String.format(
"%s is not a valid supertype for a factory. "
+ "Factory supertypes must have a no-arg constructor.",
extendingType.getQualifiedName()),
element,
mirror,
extendingValue);
return Optional.empty();
} else if (noParameterConstructors.size() > 1) {
throw new IllegalStateException("Multiple constructors with no parameters??");
}
AnnotationValue implementingValue = checkNotNull(values.get("implementing"));
ImmutableSet.Builder<TypeElement> builder = ImmutableSet.builder();
for (AnnotationValue implementingTypeValue : AnnotationValues.asList(implementingValue)) {
builder.add(AnnotationValues.asType(implementingTypeValue));
}
ImmutableSet<TypeElement> implementingTypes = builder.build();
AnnotationValue allowSubclassesValue = checkNotNull(values.get("allowSubclasses"));
boolean allowSubclasses = AnnotationValues.asBoolean(allowSubclassesValue);
return Optional.<AutoFactoryDeclaration>of(
new AutoValue_AutoFactoryDeclaration(
getAnnotatedType(element),
element,
className.isEmpty() ? Optional.empty() : Optional.of(className),
annotations,
extendingType,
implementingTypes,
allowSubclasses,
mirror,
ImmutableMap.copyOf(values)));
}
private static TypeElement getAnnotatedType(Element element) {
List<TypeElement> types = ImmutableList.of();
while (types.isEmpty()) {
types = typesIn(Arrays.asList(element));
element = element.getEnclosingElement();
}
return getOnlyElement(types);
}
static boolean isValidIdentifier(String identifier) {
return SourceVersion.isIdentifier(identifier) && !SourceVersion.isKeyword(identifier);
}
private ImmutableSet<AnnotationMirror> annotationsToAdd(Element element) {
ImmutableSet<? extends AnnotationMirror> containers =
AnnotationMirrors.getAnnotatedAnnotations(element, AnnotationsToApply.class);
if (containers.size() > 1) {
messager.printMessage(
ERROR, "Multiple @AnnotationsToApply annotations are not supported", element);
}
return containers.stream()
.limit(1)
.map(elements::getElementValuesWithDefaults)
.map(Map::values)
.flatMap(Collection::stream)
.map(AnnotationValue::getValue)
.filter(AnnotationMirror.class::isInstance)
// Any non-annotation element should already have been flagged when processing
// @AnnotationsToApply
.map(AnnotationMirror.class::cast)
.collect(toImmutableSet());
}
}
}
| Factory |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/impl/KeyStoreHelper.java | {
"start": 2106,
"end": 17520
} | class ____ {
// Dummy password for encrypting pem based stores in memory
public static final String DUMMY_PASSWORD = "dummdummydummydummydummydummydummy"; // at least 32 characters for compat with FIPS mode
private static final String DUMMY_CERT_ALIAS = "cert-";
private static final Pattern BEGIN_PATTERN = Pattern.compile("-----BEGIN ([A-Z ]+)-----");
private static final Pattern END_PATTERN = Pattern.compile("-----END ([A-Z ]+)-----");
private final String password;
private final KeyStore store;
private final String aliasPassword;
private final Map<String, KeyManagerFactory> wildcardMgrFactoryMap = new HashMap<>();
private final Map<String, KeyManagerFactory> mgrFactoryMap = new HashMap<>();
private final Map<String, TrustManagerFactory> trustMgrMap = new HashMap<>();
public KeyStoreHelper(KeyStore ks, String password, String aliasPassword) throws Exception {
Enumeration<String> en = ks.aliases();
while (en.hasMoreElements()) {
String alias = en.nextElement();
Certificate cert = ks.getCertificate(alias);
if (ks.isCertificateEntry(alias) && !alias.startsWith(DUMMY_CERT_ALIAS)) {
final KeyStore keyStore = createEmptyKeyStore();
keyStore.setCertificateEntry("cert-1", cert);
TrustManagerFactory fact = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
fact.init(keyStore);
trustMgrMap.put(alias, fact);
}
if (ks.isKeyEntry(alias) && cert instanceof X509Certificate) {
X509Certificate x509Cert = (X509Certificate) cert;
Collection<List<?>> ans = x509Cert.getSubjectAlternativeNames();
List<String> domains = new ArrayList<>();
if (ans != null) {
for (List<?> l : ans) {
if (l.size() == 2 && l.get(0) instanceof Number && ((Number) l.get(0)).intValue() == 2) {
String dns = l.get(1).toString();
domains.add(dns);
}
}
}
String dn = x509Cert.getSubjectX500Principal().getName();
domains.addAll(getX509CertificateCommonNames(dn));
if (!domains.isEmpty()) {
char[] keyPassword = keyPassword(aliasPassword, password);
PrivateKey key = (PrivateKey) ks.getKey(alias, keyPassword);
Certificate[] tmp = ks.getCertificateChain(alias);
if (tmp == null) {
// It's a private key
continue;
}
X509KeyManager mgr = new X509KeyManager() {
@Override
public String[] getClientAliases(String s, Principal[] principals) {
throw new UnsupportedOperationException();
}
@Override
public String chooseClientAlias(String[] strings, Principal[] principals, Socket socket) {
throw new UnsupportedOperationException();
}
@Override
public String[] getServerAliases(String s, Principal[] principals) {
throw new UnsupportedOperationException();
}
@Override
public String chooseServerAlias(String s, Principal[] principals, Socket socket) {
throw new UnsupportedOperationException();
}
@Override
public X509Certificate[] getCertificateChain(String s) {
return Arrays.stream(tmp).map(X509Certificate.class::cast).toArray(X509Certificate[]::new);
}
@Override
public PrivateKey getPrivateKey(String s) {
return key;
}
};
KeyManagerFactory kmf = toKeyManagerFactory(mgr);
for (String domain : domains) {
if (domain.startsWith("*.")) {
wildcardMgrFactoryMap.put(domain.substring(2), kmf);
} else {
mgrFactoryMap.put(domain, kmf);
}
}
}
}
}
this.store = ks;
this.password = password;
this.aliasPassword = aliasPassword;
}
public static KeyManagerFactory toKeyManagerFactory(X509KeyManager mgr) throws Exception {
String keyStoreType = KeyStore.getDefaultType();
KeyStore ks = KeyStore.getInstance(keyStoreType);
ks.load(null, null);
ks.setKeyEntry("key", mgr.getPrivateKey(null), DUMMY_PASSWORD.toCharArray(), mgr.getCertificateChain(null));
String keyAlgorithm = KeyManagerFactory.getDefaultAlgorithm();
KeyManagerFactory kmf = KeyManagerFactory.getInstance(keyAlgorithm);
kmf.init(ks, DUMMY_PASSWORD.toCharArray());
return kmf;
}
public KeyManagerFactory getKeyMgrFactory() throws Exception {
KeyManagerFactory fact = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
char[] keyPassword = keyPassword(aliasPassword, password);
fact.init(store, keyPassword);
return fact;
}
private char[] keyPassword(String aliasPassword, String password) {
if (aliasPassword != null) return aliasPassword.toCharArray();
return (password != null) ? password.toCharArray() : null;
}
public KeyManagerFactory getKeyMgrFactory(String serverName) {
KeyManagerFactory mgr = mgrFactoryMap.get(serverName);
if (mgr == null && !wildcardMgrFactoryMap.isEmpty()) {
int index = serverName.indexOf('.') + 1;
if (index > 0) {
String s = serverName.substring(index);
mgr = wildcardMgrFactoryMap.get(s);
}
}
return mgr;
}
public KeyManager[] getKeyMgr() throws Exception {
return getKeyMgrFactory().getKeyManagers();
}
public TrustManager[] getTrustMgr(String serverName) {
TrustManagerFactory fact = trustMgrMap.get(serverName);
return fact != null ? fact.getTrustManagers() : null;
}
public TrustManagerFactory getTrustMgrFactory(VertxInternal vertx) throws Exception {
TrustManagerFactory fact = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
fact.init(store);
return fact;
}
public TrustManager[] getTrustMgrs(VertxInternal vertx) throws Exception {
return getTrustMgrFactory(vertx).getTrustManagers();
}
/**
* @return the store
*/
public KeyStore store() {
return store;
}
public static List<String> getX509CertificateCommonNames(String dn) throws Exception {
List<String> names = new ArrayList<>();
if (!PlatformDependent.isAndroid()) {
LdapName ldapDN = new LdapName(dn);
for (Rdn rdn : ldapDN.getRdns()) {
if (rdn.getType().equalsIgnoreCase("cn")) {
String name = rdn.getValue().toString();
names.add(name);
}
}
} else {
String [] rdns = dn.trim().split("[,;]");
for(String rdn : rdns) {
String [] nvp = rdn.trim().split("=");
if(nvp.length == 2 && "cn".equalsIgnoreCase(nvp[0])) {
names.add(nvp[1]);
}
}
}
return names;
}
public static KeyStore loadKeyStore(String type, String provider, String password, Supplier<Buffer> value, String alias) throws Exception {
Objects.requireNonNull(type);
KeyStore ks = provider == null ? KeyStore.getInstance(type) : KeyStore.getInstance(type, provider);
Buffer keystoreBuffer = value.get();
if (keystoreBuffer == null) {
ks.load(null, password != null ? password.toCharArray() : null);
} else {
try (InputStream in = new ByteArrayInputStream(value.get().getBytes())) {
ks.load(in, password != null ? password.toCharArray() : null);
}
}
if (alias != null) {
if (!ks.containsAlias(alias)) {
throw new IllegalArgumentException("alias does not exist in the keystore: " + alias);
}
List<String> ksAliases = Collections.list(ks.aliases());
for (String ksAlias : ksAliases) {
if (!alias.equals(ksAlias)) {
ks.deleteEntry(ksAlias);
}
}
}
return ks;
}
public static KeyStore loadKeyCert(List<Buffer> keyValue, List<Buffer> certValue) throws Exception {
if (keyValue.size() < certValue.size()) {
throw new VertxException("Missing private key");
} else if (keyValue.size() > certValue.size()) {
throw new VertxException("Missing X.509 certificate");
}
final KeyStore keyStore = createEmptyKeyStore();
Iterator<Buffer> keyValueIt = keyValue.iterator();
Iterator<Buffer> certValueIt = certValue.iterator();
int index = 0;
while (keyValueIt.hasNext() && certValueIt.hasNext()) {
PrivateKey key = loadPrivateKey(keyValueIt.next());
Certificate[] chain = loadCerts(certValueIt.next());
keyStore.setEntry("dummy-entry-" + index++, new KeyStore.PrivateKeyEntry(key, chain), new KeyStore.PasswordProtection(DUMMY_PASSWORD.toCharArray()));
}
return keyStore;
}
private static PrivateKey loadPrivateKey(Buffer keyValue) throws Exception {
if (keyValue == null) {
throw new RuntimeException("Missing private key path");
}
KeyFactory rsaKeyFactory = KeyFactory.getInstance("RSA");
KeyFactory ecKeyFactory = getECKeyFactory();
List<PrivateKey> pems = loadPems(keyValue, (delimiter, content) -> {
try {
switch (delimiter) {
case "EC PRIVATE KEY":
if (ecKeyFactory == null) {
// ECC is not supported by JVM
return Collections.emptyList();
} else {
// read PEM file as described in https://datatracker.ietf.org/doc/html/rfc5915#section-4
return Collections.singletonList(ecKeyFactory.generatePrivate(PrivateKeyParser.getECKeySpec(content)));
}
case "RSA PRIVATE KEY":
return Collections.singletonList(rsaKeyFactory.generatePrivate(PrivateKeyParser.getRSAKeySpec(content)));
case "PRIVATE KEY":
// in PKCS#8 the key algorithm is indicated at the beginning of the ASN.1 structure
// so we can use the corresponding key factory once we know the algorithm name
String algorithm = PrivateKeyParser.getPKCS8EncodedKeyAlgorithm(content);
if (rsaKeyFactory.getAlgorithm().equals(algorithm)) {
return Collections.singletonList(rsaKeyFactory.generatePrivate(new PKCS8EncodedKeySpec(content)));
} else if (ecKeyFactory != null && ecKeyFactory.getAlgorithm().equals(algorithm)) {
return Collections.singletonList(ecKeyFactory.generatePrivate(new PKCS8EncodedKeySpec(content)));
}
// fall through if ECC is not supported by JVM
default:
return Collections.emptyList();
}
} catch (InvalidKeySpecException e) {
throw new VertxException(e);
}
});
if (pems.isEmpty()) {
throw new RuntimeException("Missing -----BEGIN PRIVATE KEY----- or -----BEGIN RSA PRIVATE KEY----- or -----BEGIN EC PRIVATE KEY----- delimiter");
}
return pems.get(0);
}
private static KeyFactory getECKeyFactory() {
try {
return KeyFactory.getInstance("EC");
} catch (NoSuchAlgorithmException e) {
// support for ECC is not mandatory in JVM
return null;
}
}
public static KeyStore loadCA(Stream<Buffer> certValues) throws Exception {
final KeyStore keyStore = createEmptyKeyStore();
keyStore.load(null, null);
int count = 0;
Iterable<Buffer> iterable = certValues::iterator;
for (Buffer certValue : iterable) {
for (Certificate cert : loadCerts(certValue)) {
keyStore.setCertificateEntry(DUMMY_CERT_ALIAS + count++, cert);
}
}
return keyStore;
}
private static <P> List<P> loadPems(Buffer data, BiFunction<String, byte[], Collection<P>> pemFact) throws IOException {
String pem = data.toString();
List<P> pems = new ArrayList<>();
Matcher beginMatcher = BEGIN_PATTERN.matcher(pem);
Matcher endMatcher = END_PATTERN.matcher(pem);
while (true) {
boolean begin = beginMatcher.find();
if (!begin) {
break;
}
String beginDelimiter = beginMatcher.group(1);
boolean end = endMatcher.find();
if (!end) {
throw new RuntimeException("Missing -----END " + beginDelimiter + "----- delimiter");
} else {
String endDelimiter = endMatcher.group(1);
if (!beginDelimiter.equals(endDelimiter)) {
throw new RuntimeException("Missing -----END " + beginDelimiter + "----- delimiter");
} else {
String content = pem.substring(beginMatcher.end(), endMatcher.start());
content = content.replaceAll("\\s", "");
if (content.length() == 0) {
throw new RuntimeException("Empty pem file");
}
Collection<P> pemItems = pemFact.apply(endDelimiter, Base64.getDecoder().decode(content));
pems.addAll(pemItems);
}
}
}
return pems;
}
private static X509Certificate[] loadCerts(Buffer buffer) throws Exception {
if (buffer == null) {
throw new RuntimeException("Missing X.509 certificate path");
}
CertificateFactory certFactory = CertificateFactory.getInstance("X.509");
List<X509Certificate> certs = loadPems(buffer, (delimiter, content) -> {
try {
switch (delimiter) {
case "CERTIFICATE":
return (Collection<X509Certificate>) certFactory.generateCertificates(new ByteArrayInputStream(content));
default:
return Collections.emptyList();
}
} catch (CertificateException e) {
throw new VertxException(e);
}
});
if (certs.isEmpty()) {
throw new RuntimeException("Missing -----BEGIN CERTIFICATE----- delimiter");
}
return certs.toArray(new X509Certificate[0]);
}
/**
* Creates an empty keystore. The keystore uses the default keystore type set in
* the file 'lib/security/security.java' (located in the JRE) by the 'keystore.type' property.
* However, if the default is set to the 'JKS' format, the this function will instead attempt to
* use the newer 'PKCS12' format, if it exists.
*
* The PKCS12 format is the default format for keystores for Java >=9 and available on GraalVM.
*
* PKCS12 is an extensible, standard, and widely-supported format for storing cryptographic keys.
* As of JDK 8, PKCS12 keystores can store private keys, trusted public key certificates, and
* secret keys.
*
* The "old" default "JKS" (available since Java 1.2) can only store private keys and trusted
* public-key certificates, and they are based on a proprietary format that is not easily
* extensible to new cryptographic algorithms.
* @return keystore instance
*
* @throws KeyStoreException if the underlying engine cannot create an instance
*/
private static KeyStore createEmptyKeyStore() throws KeyStoreException {
final KeyStore keyStore;
String defaultKeyStoreType = KeyStore.getDefaultType();
if (defaultKeyStoreType.equalsIgnoreCase("jks") && Security.getAlgorithms("KeyStore").contains("PKCS12")) {
keyStore = KeyStore.getInstance("PKCS12");
} else {
keyStore = KeyStore.getInstance(defaultKeyStoreType);
}
try {
keyStore.load(null, null);
} catch (CertificateException | NoSuchAlgorithmException | IOException e) {
// these exceptions should never be thrown as there is no initial data
// provided to the initialization of the keystore
throw new KeyStoreException("Failed to initialize the keystore", e);
}
return keyStore;
}
}
| KeyStoreHelper |
java | apache__camel | test-infra/camel-test-infra-chatscript/src/main/java/org/apache/camel/test/infra/chatscript/services/ChatScriptRemoteInfraService.java | {
"start": 944,
"end": 1377
} | class ____ implements ChatScriptInfraService {
@Override
public void registerProperties() {
// NO-OP
}
@Override
public void initialize() {
registerProperties();
}
@Override
public void shutdown() {
// NO-OP
}
@Override
public String serviceAddress() {
return System.getProperty(ChatScriptProperties.CHATSCRIPT_ADDRESS);
}
}
| ChatScriptRemoteInfraService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/function/AbstractSqmSetReturningFunctionDescriptor.java | {
"start": 792,
"end": 3426
} | class ____ implements SqmSetReturningFunctionDescriptor {
private final ArgumentsValidator argumentsValidator;
private final SetReturningFunctionTypeResolver setReturningTypeResolver;
private final FunctionArgumentTypeResolver functionArgumentTypeResolver;
private final String name;
public AbstractSqmSetReturningFunctionDescriptor(String name, SetReturningFunctionTypeResolver typeResolver) {
this( name, null, typeResolver, null );
}
public AbstractSqmSetReturningFunctionDescriptor(
String name,
@Nullable ArgumentsValidator argumentsValidator,
SetReturningFunctionTypeResolver typeResolver) {
this( name, argumentsValidator, typeResolver, null );
}
public AbstractSqmSetReturningFunctionDescriptor(
String name,
@Nullable ArgumentsValidator argumentsValidator,
SetReturningFunctionTypeResolver typeResolver,
@Nullable FunctionArgumentTypeResolver argumentTypeResolver) {
this.name = name;
this.argumentsValidator = argumentsValidator == null
? StandardArgumentsValidators.NONE
: argumentsValidator;
this.setReturningTypeResolver = typeResolver;
this.functionArgumentTypeResolver = argumentTypeResolver == null
? StandardFunctionArgumentTypeResolvers.NULL
: argumentTypeResolver;
}
public String getName() {
return name;
}
public String getSignature(String name) {
return name + getArgumentListSignature();
}
@Override
public ArgumentsValidator getArgumentsValidator() {
return argumentsValidator;
}
public SetReturningFunctionTypeResolver getSetReturningTypeResolver() {
return setReturningTypeResolver;
}
public FunctionArgumentTypeResolver getArgumentTypeResolver() {
return functionArgumentTypeResolver;
}
public String getArgumentListSignature() {
return argumentsValidator.getSignature();
}
@Override
public final <T> SelfRenderingSqmSetReturningFunction<T> generateSqmExpression(
List<? extends SqmTypedNode<?>> arguments,
QueryEngine queryEngine) {
argumentsValidator.validate( arguments, getName(), queryEngine );
return generateSqmSetReturningFunctionExpression( arguments, queryEngine );
}
/**
* Return an SQM node or subtree representing an invocation of this function
* with the given arguments. This method may be overridden in the case of
* function descriptors that wish to customize creation of the node.
*
* @param arguments the arguments of the function invocation
*/
protected abstract <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
QueryEngine queryEngine);
}
| AbstractSqmSetReturningFunctionDescriptor |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AllocationReducerTests.java | {
"start": 1079,
"end": 9481
} | class ____ extends ESTestCase {
public void testReduceTo_ValueEqualToCurrentAllocations() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(List.of("z"), List.of(buildNode("n")));
TrainedModelAssignment assignment = createAssignment("d", "m", 2, Map.of("n", 2));
expectThrows(IllegalArgumentException.class, () -> new AllocationReducer(assignment, nodesByZone).reduceTo(2));
}
public void testReduceTo_ValueLargerThanCurrentAllocations() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(List.of("z"), List.of(buildNode("n")));
TrainedModelAssignment assignment = createAssignment("d", "m", 2, Map.of("n", 2));
expectThrows(IllegalArgumentException.class, () -> new AllocationReducer(assignment, nodesByZone).reduceTo(3));
}
public void testReduceTo_GivenOneZone_OneAssignment_ReductionByOne() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(List.of("z"), List.of(buildNode("n")));
TrainedModelAssignment assignment = createAssignment("d", "m", 2, Map.of("n", 2));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(1).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(1));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(1));
assertThat(routingTable, hasKey("n"));
assertThat(routingTable.get("n").getTargetAllocations(), equalTo(1));
}
public void testReduceTo_GivenOneZone_OneAssignment_ReductionByMany() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(List.of("z"), List.of(buildNode("n")));
TrainedModelAssignment assignment = createAssignment("d", "m", 5, Map.of("n", 5));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(2).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(2));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(1));
assertThat(routingTable, hasKey("n"));
assertThat(routingTable.get("n").getTargetAllocations(), equalTo(2));
}
public void testReduceTo_GivenOneZone_MultipleAssignments_RemovableAssignments() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(
List.of("z"),
List.of(buildNode("n_1"), buildNode("n_2"), buildNode("n_3"))
);
TrainedModelAssignment assignment = createAssignment("d", "m", 6, Map.of("n_1", 3, "n_2", 2, "n_3", 1));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(3).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(3));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(1));
assertThat(routingTable, hasKey("n_1"));
assertThat(routingTable.get("n_1").getTargetAllocations(), equalTo(3));
}
public void testReduceTo_GivenOneZone_MultipleAssignments_NonRemovableAssignments() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(
List.of("z"),
List.of(buildNode("n_1"), buildNode("n_2"), buildNode("n_3"))
);
TrainedModelAssignment assignment = createAssignment("d", "m", 6, Map.of("n_1", 2, "n_2", 2, "n_3", 2));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(5).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(5));
assertThat(updatedAssignment.totalTargetAllocations(), equalTo(5));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(3));
assertThat(routingTable, hasKey("n_1"));
assertThat(routingTable, hasKey("n_2"));
assertThat(routingTable, hasKey("n_3"));
}
public void testReduceTo_GivenTwoZones_RemovableAssignments() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(
List.of("z_1"),
List.of(buildNode("n_1"), buildNode("n_2")),
List.of("z_2"),
List.of(buildNode("n_3"))
);
TrainedModelAssignment assignment = createAssignment("d", "m", 5, Map.of("n_1", 3, "n_2", 1, "n_3", 1));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(4).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(4));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(2));
assertThat(routingTable, hasKey("n_1"));
assertThat(routingTable.get("n_1").getTargetAllocations(), equalTo(3));
assertThat(routingTable, hasKey("n_3"));
assertThat(routingTable.get("n_3").getTargetAllocations(), equalTo(1));
}
public void testReduceTo_GivenTwoZones_NonRemovableAssignments() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(
List.of("z_1"),
List.of(buildNode("n_1")),
List.of("z_2"),
List.of(buildNode("n_2"))
);
TrainedModelAssignment assignment = createAssignment("d", "m", 6, Map.of("n_1", 3, "n_2", 3));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(4).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(4));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(2));
assertThat(routingTable, hasKey("n_1"));
assertThat(routingTable.get("n_1").getTargetAllocations(), equalTo(2));
assertThat(routingTable, hasKey("n_2"));
assertThat(routingTable.get("n_2").getTargetAllocations(), equalTo(2));
}
public void testReduceTo_GivenTwoZones_WithSameAssignmentsOfOneAllocationEach() {
Map<List<String>, Collection<DiscoveryNode>> nodesByZone = Map.of(
List.of("z_1"),
List.of(buildNode("n_1")),
List.of("z_2"),
List.of(buildNode("n_2"))
);
TrainedModelAssignment assignment = createAssignment("d", "m", 2, Map.of("n_1", 1, "n_2", 1));
TrainedModelAssignment updatedAssignment = new AllocationReducer(assignment, nodesByZone).reduceTo(1).build();
assertThat(updatedAssignment.getTaskParams().getNumberOfAllocations(), equalTo(1));
Map<String, RoutingInfo> routingTable = updatedAssignment.getNodeRoutingTable();
assertThat(routingTable, aMapWithSize(1));
assertThat(routingTable, hasKey("n_1"));
assertThat(routingTable.get(routingTable.keySet().iterator().next()).getTargetAllocations(), equalTo(1));
}
private static TrainedModelAssignment createAssignment(
String deploymentId,
String modelId,
int numberOfAllocations,
Map<String, Integer> allocationsByNode
) {
TrainedModelAssignment.Builder builder = TrainedModelAssignment.Builder.empty(
new StartTrainedModelDeploymentAction.TaskParams(
modelId,
deploymentId,
randomNonNegativeLong(),
numberOfAllocations,
randomIntBetween(1, 16),
1024,
null,
Priority.NORMAL,
randomNonNegativeLong(),
randomNonNegativeLong()
),
null
);
allocationsByNode.entrySet()
.stream()
.forEach(
e -> builder.addRoutingEntry(
e.getKey(),
new RoutingInfo(randomIntBetween(1, e.getValue()), e.getValue(), RoutingState.STARTED, "")
)
);
return builder.build();
}
private static DiscoveryNode buildNode(String nodeId) {
return DiscoveryNodeUtils.create(nodeId, nodeId);
}
}
| AllocationReducerTests |
java | spring-projects__spring-framework | spring-aspects/src/test/java/org/springframework/transaction/aspectj/TransactionAspectTests.java | {
"start": 6132,
"end": 6191
} | class ____ {
public void noop() {
}
}
}
| NotTransactional |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java | {
"start": 110854,
"end": 119275
} | class ____ {
/**
* Executed once the give node name has been stopped.
*/
public Settings onNodeStopped(String nodeName) throws Exception {
return Settings.EMPTY;
}
public void onAllNodesStopped() throws Exception {}
/**
* Executed for each node before the {@code n + 1} node is restarted. The given client is
* an active client to the node that will be restarted next.
*/
public void doAfterNodes(int n, Client client) throws Exception {}
/**
* If this returns <code>true</code> all data for the node with the given node name will be cleared including
* gateways and all index data. Returns <code>false</code> by default.
*/
public boolean clearData(String nodeName) {
return false;
}
/** returns true if the restart should also validate the cluster has reformed */
public boolean validateClusterForming() {
return true;
}
}
public Settings getDefaultSettings() {
return defaultSettings;
}
@Override
public void ensureEstimatedStats() {
if (size() > 0) {
awaitIndexShardCloseAsyncTasks();
// Checks that the breakers have been reset without incurring a
// network request, because a network request can increment one
// of the breakers
for (NodeAndClient nodeAndClient : nodes.values()) {
final IndicesFieldDataCache fdCache = getInstanceFromNode(IndicesService.class, nodeAndClient.node)
.getIndicesFieldDataCache();
// Clean up the cache, ensuring that entries' listeners have been called
fdCache.getCache().refresh();
final String name = nodeAndClient.name;
final CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class, nodeAndClient.node);
CircuitBreaker fdBreaker = breakerService.getBreaker(CircuitBreaker.FIELDDATA);
assertThat("Fielddata breaker not reset to 0 on node: " + name, fdBreaker.getUsed(), equalTo(0L));
// Anything that uses transport or HTTP can increase the
// request breaker (because they use bigarrays), because of
// that the breaker can sometimes be incremented from ping
// requests from other clusters because Jenkins is running
// multiple ES testing jobs in parallel on the same machine.
// To combat this we check whether the breaker has reached 0
// in an assertBusy loop, so it will try for 10 seconds and
// fail if it never reached 0
try {
assertBusy(() -> {
CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.REQUEST);
assertThat("Request breaker not reset to 0 on node: " + name, reqBreaker.getUsed(), equalTo(0L));
});
} catch (Exception e) {
throw new AssertionError("Exception during check for request breaker reset to 0", e);
}
NodeService nodeService = getInstanceFromNode(NodeService.class, nodeAndClient.node);
CommonStatsFlags flags = new CommonStatsFlags(Flag.FieldData, Flag.QueryCache, Flag.Segments);
NodeStats stats = nodeService.stats(
flags,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
);
assertThat(
"Fielddata size must be 0 on node: " + stats.getNode(),
stats.getIndices().getFieldData().getMemorySizeInBytes(),
equalTo(0L)
);
assertThat(
"Query cache size must be 0 on node: " + stats.getNode(),
stats.getIndices().getQueryCache().getMemorySizeInBytes(),
equalTo(0L)
);
assertThat(
"FixedBitSet cache size must be 0 on node: " + stats.getNode(),
stats.getIndices().getSegments().getBitsetMemoryInBytes(),
equalTo(0L)
);
}
}
}
@Override
public synchronized void assertAfterTest() throws Exception {
super.assertAfterTest();
assertRequestsFinished();
assertSearchContextsReleased();
assertNoInFlightDocsInEngine();
assertMergeExecutorIsDone();
awaitIndexShardCloseAsyncTasks();
for (NodeAndClient nodeAndClient : nodes.values()) {
NodeEnvironment env = nodeAndClient.node().getNodeEnvironment();
Set<ShardId> shardIds = env.lockedShards();
for (ShardId id : shardIds) {
try {
env.shardLock(id, "InternalTestCluster assert after test", TimeUnit.SECONDS.toMillis(5)).close();
} catch (ShardLockObtainFailedException ex) {
throw new AssertionError("Shard " + id + " is still locked after 5 sec waiting", ex);
}
}
}
}
public void assertRequestsFinished() {
assert Thread.holdsLock(this);
if (size() > 0) {
for (NodeAndClient nodeAndClient : nodes.values()) {
CircuitBreaker inFlightRequestsBreaker = getInstance(CircuitBreakerService.class, nodeAndClient.name).getBreaker(
CircuitBreaker.IN_FLIGHT_REQUESTS
);
TaskManager taskManager = getInstance(TransportService.class, nodeAndClient.name).getTaskManager();
try {
// see #ensureEstimatedStats()
assertBusy(() -> {
// ensure that our size accounting on transport level is reset properly
long bytesUsed = inFlightRequestsBreaker.getUsed();
if (bytesUsed != 0) {
String pendingTasks = taskManager.getTasks()
.values()
.stream()
.map(t -> t.taskInfo(nodeAndClient.name, true).toString())
.collect(Collectors.joining(",", "[", "]"));
throw new AssertionError(
"All incoming requests on node ["
+ nodeAndClient.name
+ "] should have finished. "
+ "Expected 0 bytes for requests in-flight but got "
+ bytesUsed
+ " bytes; pending tasks ["
+ pendingTasks
+ "]"
);
}
}, 1, TimeUnit.MINUTES);
} catch (Exception e) {
logger.error("Could not assert finished requests within timeout", e);
fail("Could not assert finished requests within timeout on node [" + nodeAndClient.name + "]");
}
}
}
}
private void assertSearchContextsReleased() {
for (NodeAndClient nodeAndClient : nodes.values()) {
ESTestCase.ensureAllContextsReleased(getInstance(SearchService.class, nodeAndClient.name));
}
}
public void awaitIndexShardCloseAsyncTasks() {
final var latch = new CountDownLatch(1);
try (var refs = new RefCountingRunnable(latch::countDown)) {
for (final var nodeAndClient : nodes.values()) {
final var ref = refs.acquire();
getInstanceFromNode(IndicesClusterStateService.class, nodeAndClient.node()).onClusterStateShardsClosed(ref::close);
}
}
safeAwait(latch);
}
}
| RestartCallback |
java | processing__processing4 | app/src/processing/app/syntax/SyntaxDocument.java | {
"start": 585,
"end": 3412
} | class ____ extends PlainDocument
{
/**
* Returns the token marker that is to be used to split lines
* of this document up into tokens. May return null if this
* document is not to be colorized.
*/
public TokenMarkerState getTokenMarker()
{
return tokenMarker;
}
/**
* Sets the token marker that is to be used to split lines of
* this document up into tokens. May throw an exception if
* this is not supported for this type of document.
* @param tm The new token marker
*/
public void setTokenMarker(TokenMarker tm)
{
if (tm == null) {
tokenMarker = null;
return;
}
tokenMarker = tm.createStateInstance();
tokenMarker.insertLines(0,getDefaultRootElement()
.getElementCount());
tokenizeLines();
}
/**
* Reparses the document, by passing all lines to the token
* marker. This should be called after the document is first
* loaded.
*/
public void tokenizeLines()
{
tokenizeLines(0,getDefaultRootElement().getElementCount());
}
/**
* Reparses the document, by passing the specified lines to the
* token marker. This should be called after a large quantity of
* text is first inserted.
* @param start The first line to parse
* @param len The number of lines, after the first one to parse
*/
public void tokenizeLines(int start, int len)
{
if(tokenMarker == null || !tokenMarker.marker.supportsMultilineTokens())
return;
Segment lineSegment = new Segment();
Element map = getDefaultRootElement();
len += start;
try
{
for(int i = start; i < len; i++)
{
Element lineElement = map.getElement(i);
int lineStart = lineElement.getStartOffset();
getText(lineStart,lineElement.getEndOffset()
- lineStart - 1,lineSegment);
tokenMarker.markTokens(lineSegment,i);
}
}
catch(BadLocationException bl)
{
bl.printStackTrace();
}
}
/**
* Starts a compound edit that can be undone in one operation.
* Subclasses that implement undo should override this method;
* this | SyntaxDocument |
java | apache__dubbo | dubbo-plugin/dubbo-mcp/src/main/java/org/apache/dubbo/mcp/core/McpStreamableServiceImpl.java | {
"start": 1092,
"end": 1861
} | class ____ implements McpStreamableService, Disposable {
private volatile DubboMcpStreamableTransportProvider transportProvider = null;
@Override
public void streamable(StreamObserver<ServerSentEvent<byte[]>> responseObserver) {
if (transportProvider == null) {
synchronized (this) {
if (transportProvider == null) {
transportProvider = getTransportProvider();
}
}
}
transportProvider.handleRequest(responseObserver);
}
public DubboMcpStreamableTransportProvider getTransportProvider() {
return McpApplicationDeployListener.getDubboMcpStreamableTransportProvider();
}
@Override
public void destroy() {}
}
| McpStreamableServiceImpl |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/humio/HumioPropertiesConfigAdapterTests.java | {
"start": 1040,
"end": 2089
} | class ____
extends AbstractPropertiesConfigAdapterTests<HumioProperties, HumioPropertiesConfigAdapter> {
HumioPropertiesConfigAdapterTests() {
super(HumioPropertiesConfigAdapter.class);
}
@Test
void whenApiTokenIsSetAdapterApiTokenReturnsIt() {
HumioProperties properties = new HumioProperties();
properties.setApiToken("ABC123");
assertThat(new HumioPropertiesConfigAdapter(properties).apiToken()).isEqualTo("ABC123");
}
@Test
void whenPropertiesTagsIsSetAdapterTagsReturnsIt() {
HumioProperties properties = new HumioProperties();
properties.setTags(Collections.singletonMap("name", "test"));
assertThat(new HumioPropertiesConfigAdapter(properties).tags())
.isEqualTo(Collections.singletonMap("name", "test"));
}
@Test
void whenPropertiesUriIsSetAdapterUriReturnsIt() {
HumioProperties properties = new HumioProperties();
properties.setUri("https://humio.example.com");
assertThat(new HumioPropertiesConfigAdapter(properties).uri()).isEqualTo("https://humio.example.com");
}
}
| HumioPropertiesConfigAdapterTests |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/BootstrapTestUtils.java | {
"start": 978,
"end": 1473
} | class ____ {
private BootstrapTestUtils() {
/* no-op */
}
public static BootstrapContext buildBootstrapContext(Class<?> testClass,
CacheAwareContextLoaderDelegate cacheAwareContextLoaderDelegate) {
return new DefaultBootstrapContext(testClass, cacheAwareContextLoaderDelegate);
}
public static TestContextBootstrapper resolveTestContextBootstrapper(BootstrapContext bootstrapContext) {
return BootstrapUtils.resolveTestContextBootstrapper(bootstrapContext);
}
}
| BootstrapTestUtils |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.