language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/abstract_/AbstractAssert_extracting_with_String_and_AssertFactory_Test.java
|
{
"start": 1750,
"end": 4713
}
|
class ____ implements NavigationMethodBaseTest<TestAssert> {
private TestAssert underTest;
@BeforeEach
void setup() {
Employee luke = new Employee(2L, new Name("Luke", "Skywalker"), 26);
underTest = new TestAssert(luke);
}
@Test
void should_throw_npe_if_the_given_propertyOrField_is_null() {
// GIVEN
String propertyOrField = null;
// WHEN
Throwable thrown = catchThrowable(() -> underTest.extracting(propertyOrField, Assertions::assertThat));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("propertyOrField").create());
}
@Test
void should_throw_npe_if_the_given_assert_factory_is_null() {
// WHEN
Throwable thrown = catchThrowable(() -> underTest.extracting("age", null));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("assertFactory").create());
}
@Test
void should_throw_IntrospectionError_if_given_field_name_cannot_be_read() {
// WHEN
Throwable thrown = catchThrowable(() -> underTest.extracting("foo", Assertions::assertThat));
// THEN
then(thrown).isInstanceOf(IntrospectionError.class)
.hasMessageContaining("Can't find any field or property with name 'foo'.");
}
@Test
void should_pass_allowing_assertions_on_property_value() {
// WHEN
AbstractAssert<?, ?> result = underTest.extracting("age", Assertions::assertThat);
// THEN
result.isEqualTo(26);
}
@Test
void should_pass_allowing_assertions_on_inner_property_value() {
// WHEN
AbstractAssert<?, ?> result = underTest.extracting("name.first", Assertions::assertThat);
// THEN
result.isEqualTo("Luke");
}
@Test
void should_pass_allowing_narrowed_assertions_on_property_value_extracted_with_instanceOfAssertFactory() {
// WHEN
AbstractIntegerAssert<?> result = underTest.extracting("age", INTEGER);
// THEN
result.isNotZero();
}
@Test
void should_use_property_field_name_as_description_when_extracting_single_property() {
// WHEN
var error = expectAssertionError(() -> underTest.extracting("name.first", Assertions::assertThat)
.isNull());
// THEN
then(error).hasMessageContaining("[Extracted: name.first]");
}
@Test
void should_throw_assertion_error_if_actual_is_null() {
// GIVEN
TestAssert underTest = new TestAssert(null);
// WHEN
var assertionError = expectAssertionError(() -> underTest.extracting("age", Assertions::assertThat));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Override
public TestAssert getAssertion() {
return underTest;
}
@Override
public AbstractAssert<?, ?> invoke_navigation_method(TestAssert assertion) {
return assertion.extracting("age", Assertions::assertThat);
}
static
|
AbstractAssert_extracting_with_String_and_AssertFactory_Test
|
java
|
spring-projects__spring-boot
|
module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/server/autoconfigure/servlet/AbstractServletWebServerAutoConfigurationTests.java
|
{
"start": 8810,
"end": 9155
}
|
class ____ {
@Bean
CookieSameSiteSupplier cookieSameSiteSupplier1() {
return CookieSameSiteSupplier.ofLax().whenHasName("test1");
}
@Bean
CookieSameSiteSupplier cookieSameSiteSupplier2() {
return CookieSameSiteSupplier.ofNone().whenHasName("test2");
}
}
@ServerEndpoint("/")
public static
|
CookieSameSiteSupplierConfiguration
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java
|
{
"start": 1137,
"end": 3393
}
|
class ____ {
@Test
public void putUpload() throws Exception {
test("PUT", HttpFSFileSystem.Operation.CREATE.toString(), "application/octet-stream", true, false);
}
@Test
public void postUpload() throws Exception {
test("POST", HttpFSFileSystem.Operation.APPEND.toString(), "APPLICATION/OCTET-STREAM", true, false);
}
@Test
public void putUploadWrong() throws Exception {
test("PUT", HttpFSFileSystem.Operation.CREATE.toString(), "plain/text", false, false);
test("PUT", HttpFSFileSystem.Operation.CREATE.toString(), "plain/text", true, true);
}
@Test
public void postUploadWrong() throws Exception {
test("POST", HttpFSFileSystem.Operation.APPEND.toString(), "plain/text", false, false);
test("POST", HttpFSFileSystem.Operation.APPEND.toString(), "plain/text", true, true);
}
@Test
public void getOther() throws Exception {
test("GET", HttpFSFileSystem.Operation.GETHOMEDIRECTORY.toString(), "plain/text", false, false);
}
@Test
public void putOther() throws Exception {
test("PUT", HttpFSFileSystem.Operation.MKDIRS.toString(), "plain/text", false, false);
}
private void test(String method, String operation, String contentType,
boolean upload, boolean error) throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.reset(request);
Mockito.when(request.getMethod()).thenReturn(method);
Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).thenReturn(operation);
Mockito.when(request.getParameter(HttpFSParametersProvider.DataParam.NAME)).
thenReturn(Boolean.toString(upload));
Mockito.when(request.getContentType()).thenReturn(contentType);
FilterChain chain = Mockito.mock(FilterChain.class);
Filter filter = new CheckUploadContentTypeFilter();
filter.doFilter(request, response, chain);
if (error) {
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
Mockito.contains("Data upload"));
}
else {
Mockito.verify(chain).doFilter(request, response);
}
}
}
|
TestCheckUploadContentTypeFilter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollableIndexCaps.java
|
{
"start": 1041,
"end": 2782
}
|
class ____ implements Writeable, ToXContentObject {
private static final ParseField ROLLUP_JOBS = new ParseField("rollup_jobs");
private final String indexName;
private final List<RollupJobCaps> jobCaps;
public RollableIndexCaps(String indexName, List<RollupJobCaps> caps) {
this.indexName = indexName;
this.jobCaps = caps.stream().sorted(Comparator.comparing(RollupJobCaps::getJobID)).toList();
}
public RollableIndexCaps(StreamInput in) throws IOException {
this.indexName = in.readString();
this.jobCaps = in.readCollectionAsList(RollupJobCaps::new);
}
public String getIndexName() {
return indexName;
}
public List<RollupJobCaps> getJobCaps() {
return jobCaps;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(indexName);
out.writeCollection(jobCaps);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(indexName);
{
builder.xContentList(ROLLUP_JOBS.getPreferredName(), jobCaps);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
RollableIndexCaps that = (RollableIndexCaps) other;
return Objects.equals(this.jobCaps, that.jobCaps) && Objects.equals(this.indexName, that.indexName);
}
@Override
public int hashCode() {
return Objects.hash(jobCaps, indexName);
}
}
|
RollableIndexCaps
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/MultiSelectPicklistSerializer.java
|
{
"start": 1683,
"end": 2640
}
|
class
____<?> arrayClass = value.getClass();
final Class<?> aClass = arrayClass.getComponentType();
try {
Method getterMethod = aClass.getMethod("value");
final int length = Array.getLength(value);
// construct a string of form value1;value2;...
final StringBuilder buffer = new StringBuilder();
for (int i = 0; i < length; i++) {
buffer.append((String) getterMethod.invoke(Array.get(value, i)));
if (i < (length - 1)) {
buffer.append(';');
}
}
jgen.writeString(buffer.toString());
} catch (Exception e) {
throw new JsonGenerationException(
String.format("Exception writing pick list value %s of type %s: %s", value, value.getClass().getName(),
e.getMessage()),
jgen);
}
}
}
|
Class
|
java
|
quarkusio__quarkus
|
extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/publicfields/PublicFieldAccessFieldTypesTest.java
|
{
"start": 682,
"end": 3359
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(MyEntity.class)
.addClass(FieldAccessEnhancedDelegate.class))
.withConfigurationResource("application.properties");
@Inject
Mutiny.SessionFactory sessionFactory;
@Test
@RunOnVertxContext
public void testFieldAccess(UniAsserter asserter) {
// Ideally we'd write a @ParameterizedTest and pass the delegates as parameters,
// but we cannot do that due to JUnit using a different classloader than the test.
for (FieldAccessEnhancedDelegate delegate : FieldAccessEnhancedDelegate.values()) {
doTestFieldAccess(delegate, asserter);
}
}
private void doTestFieldAccess(final FieldAccessEnhancedDelegate delegate, final UniAsserter asserter) {
//First verify we don't pass the assertion when not modifying the entity:
asserter.assertThat(() -> sessionFactory.withTransaction((session, tx) -> {
MyEntity entity = new MyEntity();
return session.persist(entity).replaceWith(() -> entity.id);
})
.chain(id -> sessionFactory.withTransaction((session, tx) -> session.find(MyEntity.class, id))),
loadedEntity -> notPassingAssertion(loadedEntity, delegate));
// Now again, but modify the entity and assert dirtiness was detected:
asserter.assertThat(() -> sessionFactory.withTransaction((session, tx) -> {
MyEntity entity = new MyEntity();
return session.persist(entity).replaceWith(() -> entity.id);
})
.chain(id -> sessionFactory
.withTransaction((session, tx) -> session.find(MyEntity.class, id).invoke(delegate::setValue))
.replaceWith(id))
.chain(id -> sessionFactory.withTransaction((session, tx) -> session.find(MyEntity.class, id))),
delegate::assertValue);
}
// Self-test: initially the assertion doesn't pass: the value was not set yet.
// Verify that we would fail the test in such case.
private void notPassingAssertion(MyEntity entity, FieldAccessEnhancedDelegate delegate) {
AssertionError expected = null;
try {
delegate.assertValue(entity);
} catch (AssertionError e) {
expected = e;
}
if (expected == null) {
throw new IllegalStateException("This test is buggy: assertions should not pass at this point.");
}
}
@Entity
public static
|
PublicFieldAccessFieldTypesTest
|
java
|
junit-team__junit5
|
platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/ModularCompilationTests.java
|
{
"start": 833,
"end": 2699
}
|
class ____ {
@Test
void compileAllJUnitModules(@TempDir Path workspace, @FilePrefix("javac") OutputFiles javacOutputFiles)
throws Exception {
var lib = Files.createDirectories(workspace.resolve("lib"));
ThirdPartyJars.copyAll(lib);
var moduleNames = Arrays.asList(System.getProperty("junit.modules").split(","));
var outputDir = workspace.resolve("classes").toAbsolutePath();
var processStarter = ProcessStarters.javaCommand("javac") //
.workingDir(workspace) //
.addArguments("-d", outputDir.toString()) //
.addArguments("-Xlint:all", "-Werror") //
.addArguments("-Xlint:-requires-automatic,-requires-transitive-automatic") // JUnit 4
// external modules
.addArguments("--module-path", lib.toAbsolutePath().toString());
// source locations in module-specific form
moduleNames.forEach(
moduleName -> processStarter.addArguments("--module-source-path", moduleSourcePath(moduleName)));
var result = processStarter
// un-shadow
.addArguments("--add-modules", "info.picocli") //
.addArguments("--add-reads", "org.junit.platform.console=info.picocli") //
.addArguments("--add-modules", "org.opentest4j.reporting.events") //
.addArguments("--add-reads", "org.junit.platform.reporting=org.opentest4j.reporting.events") //
.addArguments("--add-modules", "de.siegmar.fastcsv") //
.addArguments("--add-reads", "org.junit.jupiter.params=de.siegmar.fastcsv")
// modules to compile
.addArguments("--module", String.join(",", moduleNames)) //
.redirectOutput(javacOutputFiles) //
.startAndWait();
assertEquals(0, result.exitCode());
assertThat(outputDir).isNotEmptyDirectory();
}
static String moduleSourcePath(String moduleName) {
return "%s=%s".formatted(moduleName,
requireNonNull(System.getProperty("junit.moduleSourcePath." + moduleName)));
}
}
|
ModularCompilationTests
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/test/java/io/github/resilience4j/core/JavaClockWrapper.java
|
{
"start": 838,
"end": 1219
}
|
class ____ implements Clock {
private final java.time.Clock clock;
public JavaClockWrapper(java.time.Clock clock) {
this.clock = clock;
}
@Override
public long wallTime() {
return this.clock.millis();
}
@Override
public long monotonicTime() {
return TimeUnit.MILLISECONDS.toNanos(this.clock.millis());
}
}
|
JavaClockWrapper
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/PrettyFormatTest.java
|
{
"start": 256,
"end": 577
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
Assert.assertEquals(0, new JSONSerializer().getIndentCount());
Assert.assertEquals("[\n\t{},\n\t{}\n]", JSON.toJSONString(new Object[] { new Object(), new Object() }, SerializerFeature.PrettyFormat));
}
}
|
PrettyFormatTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/EqualsIncompatibleTypeTest.java
|
{
"start": 22322,
"end": 22796
}
|
class ____ {
boolean test(Stream<Long> xs) {
return xs.allMatch(isEqual(1L));
}
}
""")
.doTest();
}
@Test
public void predicateIsEqual_methodRef() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
|
Test
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/RestServer.java
|
{
"start": 3192,
"end": 23539
}
|
class ____ {
// TODO: This should not be so long. However, due to potentially long rebalances that may have to wait a full
// session timeout to complete, during which we cannot serve some requests. Ideally we could reduce this, but
// we need to consider all possible scenarios this could fail. It might be ok to fail with a timeout in rare cases,
// but currently a worker simply leaving the group can take this long as well.
public static final long DEFAULT_REST_REQUEST_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(90);
public static final long DEFAULT_HEALTH_CHECK_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(10);
private static final Logger log = LoggerFactory.getLogger(RestServer.class);
// Used to distinguish between Admin connectors and regular REST API connectors when binding admin handlers
private static final String ADMIN_SERVER_CONNECTOR_NAME = "Admin";
private static final Pattern LISTENER_PATTERN = Pattern.compile("^(.*)://\\[?([0-9a-zA-Z\\-%._:]*)\\]?:(-?[0-9]+)");
private static final long GRACEFUL_SHUTDOWN_TIMEOUT_MS = 60 * 1000;
private static final String PROTOCOL_HTTP = "http";
private static final String PROTOCOL_HTTPS = "https";
protected final RestServerConfig config;
private final ContextHandlerCollection handlers;
private final Server jettyServer;
private final RequestTimeout requestTimeout;
private List<Plugin<ConnectRestExtension>> connectRestExtensionPlugins = List.of();
/**
* Create a REST server for this herder using the specified configs.
*/
protected RestServer(RestServerConfig config) {
this.config = config;
List<String> listeners = config.listeners();
List<String> adminListeners = config.adminListeners();
jettyServer = new Server();
handlers = new ContextHandlerCollection();
requestTimeout = new RequestTimeout(DEFAULT_REST_REQUEST_TIMEOUT_MS, DEFAULT_HEALTH_CHECK_TIMEOUT_MS);
createConnectors(listeners, adminListeners);
}
/**
* Adds Jetty connector for each configured listener
*/
public final void createConnectors(List<String> listeners, List<String> adminListeners) {
List<Connector> connectors = new ArrayList<>();
for (String listener : listeners) {
Connector connector = createConnector(listener);
connectors.add(connector);
log.info("Added connector for {}", listener);
}
jettyServer.setConnectors(connectors.toArray(new Connector[0]));
if (adminListeners != null && !adminListeners.isEmpty()) {
for (String adminListener : adminListeners) {
Connector conn = createConnector(adminListener, true);
jettyServer.addConnector(conn);
log.info("Added admin connector for {}", adminListener);
}
}
}
/**
* Creates regular (non-admin) Jetty connector according to configuration
*/
public final Connector createConnector(String listener) {
return createConnector(listener, false);
}
/**
* Creates Jetty connector according to configuration
*/
public final Connector createConnector(String listener, boolean isAdmin) {
Matcher listenerMatcher = LISTENER_PATTERN.matcher(listener);
if (!listenerMatcher.matches())
throw new ConfigException("Listener doesn't have the right format (protocol://hostname:port).");
String protocol = listenerMatcher.group(1).toLowerCase(Locale.ENGLISH);
if (!PROTOCOL_HTTP.equals(protocol) && !PROTOCOL_HTTPS.equals(protocol))
throw new ConfigException(String.format("Listener protocol must be either \"%s\" or \"%s\".", PROTOCOL_HTTP, PROTOCOL_HTTPS));
String hostname = listenerMatcher.group(2);
int port = Integer.parseInt(listenerMatcher.group(3));
ServerConnector connector;
if (PROTOCOL_HTTPS.equals(protocol)) {
SslContextFactory.Server ssl;
if (isAdmin) {
ssl = SSLUtils.createServerSideSslContextFactory(config, RestServerConfig.ADMIN_LISTENERS_HTTPS_CONFIGS_PREFIX);
} else {
ssl = SSLUtils.createServerSideSslContextFactory(config);
}
connector = new ServerConnector(jettyServer, ssl);
if (!isAdmin) {
connector.setName(String.format("%s_%s%d", PROTOCOL_HTTPS, hostname, port));
}
} else {
connector = new ServerConnector(jettyServer);
if (!isAdmin) {
connector.setName(String.format("%s_%s%d", PROTOCOL_HTTP, hostname, port));
}
}
if (isAdmin) {
connector.setName(ADMIN_SERVER_CONNECTOR_NAME);
}
if (!hostname.isEmpty())
connector.setHost(hostname);
connector.setPort(port);
// TODO: do we need this?
connector.setIdleTimeout(requestTimeout.timeoutMs());
return connector;
}
public void initializeServer() {
log.info("Initializing REST server");
Slf4jRequestLogWriter slf4jRequestLogWriter = new Slf4jRequestLogWriter();
slf4jRequestLogWriter.setLoggerName(RestServer.class.getCanonicalName());
CustomRequestLog requestLog = new CustomRequestLog(slf4jRequestLogWriter, CustomRequestLog.EXTENDED_NCSA_FORMAT + " %{ms}T");
jettyServer.setRequestLog(requestLog);
/* Needed for graceful shutdown as per `setStopTimeout` documentation */
StatisticsHandler statsHandler = new StatisticsHandler();
statsHandler.setHandler(handlers);
jettyServer.setHandler(statsHandler);
jettyServer.setStopTimeout(GRACEFUL_SHUTDOWN_TIMEOUT_MS);
jettyServer.setStopAtShutdown(true);
try {
jettyServer.start();
} catch (Exception e) {
throw new ConnectException("Unable to initialize REST server", e);
}
log.info("REST server listening at {}, advertising URL {}", jettyServer.getURI(), advertisedUrl());
URI adminUrl = adminUrl();
if (adminUrl != null)
log.info("REST admin endpoints at {}", adminUrl);
}
protected final void initializeResources() {
log.info("Initializing REST resources");
ResourceConfig resourceConfig = newResourceConfig();
Collection<Class<?>> regularResources = regularResources();
regularResources.forEach(resourceConfig::register);
configureRegularResources(resourceConfig);
List<String> adminListeners = config.adminListeners();
ResourceConfig adminResourceConfig;
if (adminListeners != null && adminListeners.isEmpty()) {
log.info("Skipping adding admin resources");
// set up adminResource but add no handlers to it
adminResourceConfig = resourceConfig;
} else {
if (adminListeners == null) {
log.info("Adding admin resources to main listener");
adminResourceConfig = resourceConfig;
} else {
// TODO: we need to check if these listeners are same as 'listeners'
// TODO: the following code assumes that they are different
log.info("Adding admin resources to admin listener");
adminResourceConfig = newResourceConfig();
}
Collection<Class<?>> adminResources = adminResources();
adminResources.forEach(adminResourceConfig::register);
configureAdminResources(adminResourceConfig);
}
ServletContainer servletContainer = new ServletContainer(resourceConfig);
ServletHolder servletHolder = new ServletHolder(servletContainer);
List<Handler> contextHandlers = new ArrayList<>();
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
context.addServlet(servletHolder, "/*");
contextHandlers.add(context);
ServletContextHandler adminContext = null;
if (adminResourceConfig != resourceConfig) {
adminContext = new ServletContextHandler(ServletContextHandler.SESSIONS);
ServletHolder adminServletHolder = new ServletHolder(new ServletContainer(adminResourceConfig));
adminContext.setContextPath("/");
adminContext.addServlet(adminServletHolder, "/*");
adminContext.setVirtualHosts(List.of("@" + ADMIN_SERVER_CONNECTOR_NAME));
contextHandlers.add(adminContext);
}
String allowedOrigins = config.allowedOrigins();
if (!Utils.isBlank(allowedOrigins)) {
CrossOriginHandler crossOriginHandler = new CrossOriginHandler();
crossOriginHandler.setAllowedOriginPatterns(Set.of(allowedOrigins.split(",")));
String allowedMethods = config.allowedMethods();
if (!Utils.isBlank(allowedMethods)) {
crossOriginHandler.setAllowedMethods(Set.of(allowedMethods.split(",")));
}
// Setting to true matches the previously used CrossOriginFilter
crossOriginHandler.setDeliverPreflightRequests(true);
context.insertHandler(crossOriginHandler);
}
String headerConfig = config.responseHeaders();
if (!Utils.isBlank(headerConfig)) {
configureHttpResponseHeaderFilter(context, headerConfig);
}
handlers.setHandlers(contextHandlers.toArray(new Handler[0]));
try {
context.start();
} catch (Exception e) {
throw new ConnectException("Unable to initialize REST resources", e);
}
if (adminResourceConfig != resourceConfig) {
try {
log.debug("Starting admin context");
adminContext.start();
} catch (Exception e) {
throw new ConnectException("Unable to initialize Admin REST resources", e);
}
}
log.info("REST resources initialized; server is started and ready to handle requests");
}
private ResourceConfig newResourceConfig() {
ResourceConfig result = new ResourceConfig();
result.register(new JacksonJsonProvider());
result.register(requestTimeout.binder());
result.register(ConnectExceptionMapper.class);
result.property(ServerProperties.WADL_FEATURE_DISABLE, true);
return result;
}
/**
* @return the resources that should be registered with the
* standard (i.e., non-admin) listener for this server; may be empty, but not null
*/
protected abstract Collection<Class<?>> regularResources();
/**
* @return the resources that should be registered with the
* admin listener for this server; may be empty, but not null
*/
protected abstract Collection<Class<?>> adminResources();
/**
* Pluggable hook to customize the regular (i.e., non-admin) resources on this server
* after they have been instantiated and registered with the given {@link ResourceConfig}.
* This may be used to, for example, add REST extensions via {@link #registerRestExtensions(Herder, ResourceConfig)}.
* <p>
* <em>N.B.: Classes do <b>not</b> need to register the resources provided in {@link #regularResources()} with
* the {@link ResourceConfig} parameter in this method; they are automatically registered by the parent class.</em>
* @param resourceConfig the {@link ResourceConfig} that the server's regular listeners are registered with; never null
*/
protected void configureRegularResources(ResourceConfig resourceConfig) {
// No-op by default
}
/**
* Pluggable hook to customize the admin resources on this server after they have been instantiated and registered
* with the given {@link ResourceConfig}. This may be used to, for example, add REST extensions via
* {@link #registerRestExtensions(Herder, ResourceConfig)}.
* <p>
* <em>N.B.: Classes do <b>not</b> need to register the resources provided in {@link #adminResources()} with
* the {@link ResourceConfig} parameter in this method; they are automatically registered by the parent class.</em>
* @param adminResourceConfig the {@link ResourceConfig} that the server's admin listeners are registered with; never null
*/
protected void configureAdminResources(ResourceConfig adminResourceConfig) {
// No-op by default
}
public URI serverUrl() {
return jettyServer.getURI();
}
public void stop() {
log.info("Stopping REST server");
try {
if (handlers.isRunning()) {
for (Handler handler : handlers.getHandlers()) {
if (handler != null) {
Utils.closeQuietly(handler::stop, handler.toString());
}
}
}
for (Plugin<ConnectRestExtension> connectRestExtensionPlugin : connectRestExtensionPlugins) {
try {
connectRestExtensionPlugin.close();
} catch (IOException e) {
log.warn("Error while invoking close on {}", connectRestExtensionPlugin.get().getClass(), e);
}
}
jettyServer.stop();
jettyServer.join();
} catch (Exception e) {
throw new ConnectException("Unable to stop REST server", e);
} finally {
try {
jettyServer.destroy();
} catch (Exception e) {
log.error("Unable to destroy REST server", e);
}
}
log.info("REST server stopped");
}
/**
* Get the URL to advertise to other workers and clients. This uses the default connector from the embedded Jetty
* server, unless overrides for advertised hostname and/or port are provided via configs. {@link #initializeServer()}
* must be invoked successfully before calling this method.
*/
public URI advertisedUrl() {
UriBuilder builder = UriBuilder.fromUri(jettyServer.getURI());
String advertisedSecurityProtocol = determineAdvertisedProtocol();
ServerConnector serverConnector = findConnector(advertisedSecurityProtocol);
builder.scheme(advertisedSecurityProtocol);
String advertisedHostname = config.advertisedHostName();
if (advertisedHostname != null && !advertisedHostname.isEmpty())
builder.host(advertisedHostname);
else if (serverConnector != null && serverConnector.getHost() != null && !serverConnector.getHost().isEmpty())
builder.host(serverConnector.getHost());
Integer advertisedPort = config.advertisedPort();
if (advertisedPort != null)
builder.port(advertisedPort);
else if (serverConnector != null && serverConnector.getPort() > 0)
builder.port(serverConnector.getPort());
else if (serverConnector != null && serverConnector.getLocalPort() > 0)
builder.port(serverConnector.getLocalPort());
log.info("Advertised URI: {}", builder.build());
return builder.build();
}
/**
* @return the admin url for this worker. Can be null if admin endpoints are disabled.
*/
public URI adminUrl() {
ServerConnector adminConnector = null;
for (Connector connector : jettyServer.getConnectors()) {
if (ADMIN_SERVER_CONNECTOR_NAME.equals(connector.getName()))
adminConnector = (ServerConnector) connector;
}
if (adminConnector == null) {
List<String> adminListeners = config.adminListeners();
if (adminListeners == null) {
return advertisedUrl();
} else if (adminListeners.isEmpty()) {
return null;
} else {
log.error("No admin connector found for listeners {}", adminListeners);
return null;
}
}
UriBuilder builder = UriBuilder.fromUri(jettyServer.getURI());
builder.port(adminConnector.getLocalPort());
return builder.build();
}
// For testing only
public void requestTimeout(long requestTimeoutMs) {
this.requestTimeout.timeoutMs(requestTimeoutMs);
}
// For testing only
public void healthCheckTimeout(long healthCheckTimeoutMs) {
this.requestTimeout.healthCheckTimeoutMs(healthCheckTimeoutMs);
}
String determineAdvertisedProtocol() {
String advertisedSecurityProtocol = config.advertisedListener();
if (advertisedSecurityProtocol == null) {
String listeners = config.rawListeners();
if (listeners == null)
return PROTOCOL_HTTP;
else
listeners = listeners.toLowerCase(Locale.ENGLISH);
if (listeners.contains(String.format("%s://", PROTOCOL_HTTP)))
return PROTOCOL_HTTP;
else if (listeners.contains(String.format("%s://", PROTOCOL_HTTPS)))
return PROTOCOL_HTTPS;
else
return PROTOCOL_HTTP;
} else {
return advertisedSecurityProtocol.toLowerCase(Locale.ENGLISH);
}
}
/**
* Locate a Jetty connector for the standard (non-admin) REST API that uses the given protocol.
* @param protocol the protocol for the connector (e.g., "http" or "https").
* @return a {@link ServerConnector} for the server that uses the requested protocol, or
* {@code null} if none exist.
*/
ServerConnector findConnector(String protocol) {
for (Connector connector : jettyServer.getConnectors()) {
String connectorName = connector.getName();
// We set the names for these connectors when instantiating them, beginning with the
// protocol for the connector and then an underscore ("_"). We rely on that format here
// when trying to locate a connector with the requested protocol; if the naming format
// for the connectors we create is ever changed, we'll need to adjust the logic here
// accordingly.
if (connectorName.startsWith(protocol + "_") && !ADMIN_SERVER_CONNECTOR_NAME.equals(connectorName))
return (ServerConnector) connector;
}
return null;
}
protected final void registerRestExtensions(Herder herder, ResourceConfig resourceConfig) {
connectRestExtensionPlugins = Plugin.wrapInstances(
herder.plugins().newPlugins(
config.restExtensions(),
config,
ConnectRestExtension.class
),
herder.connectMetrics().metrics(),
RestServerConfig.REST_EXTENSION_CLASSES_CONFIG);
long herderRequestTimeoutMs = DEFAULT_REST_REQUEST_TIMEOUT_MS;
Integer rebalanceTimeoutMs = config.rebalanceTimeoutMs();
if (rebalanceTimeoutMs != null) {
herderRequestTimeoutMs = Math.min(herderRequestTimeoutMs, rebalanceTimeoutMs.longValue());
}
ConnectClusterDetails connectClusterDetails = new ConnectClusterDetailsImpl(
herder.kafkaClusterId()
);
ConnectRestExtensionContext connectRestExtensionContext =
new ConnectRestExtensionContextImpl(
new ConnectRestConfigurable(resourceConfig),
new ConnectClusterStateImpl(herderRequestTimeoutMs, connectClusterDetails, herder)
);
for (Plugin<ConnectRestExtension> connectRestExtensionPlugin : connectRestExtensionPlugins) {
connectRestExtensionPlugin.get().register(connectRestExtensionContext);
}
}
/**
* Register header filter to ServletContextHandler.
* @param context The servlet context handler
*/
protected void configureHttpResponseHeaderFilter(ServletContextHandler context, String headerConfig) {
FilterHolder headerFilterHolder = new FilterHolder(HeaderFilter.class);
headerFilterHolder.setInitParameter("headerConfig", headerConfig);
context.addFilter(headerFilterHolder, "/*", EnumSet.of(DispatcherType.REQUEST));
}
private static
|
RestServer
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/plugable/NonReusingDeserializationDelegate.java
|
{
"start": 1146,
"end": 1893
}
|
class ____<T> implements DeserializationDelegate<T> {
private T instance;
private final TypeSerializer<T> serializer;
public NonReusingDeserializationDelegate(TypeSerializer<T> serializer) {
this.serializer = serializer;
}
public void setInstance(T instance) {
this.instance = instance;
}
public T getInstance() {
return instance;
}
@Override
public void write(DataOutputView out) throws IOException {
throw new IllegalStateException("Serialization method called on DeserializationDelegate.");
}
@Override
public void read(DataInputView in) throws IOException {
this.instance = this.serializer.deserialize(in);
}
}
|
NonReusingDeserializationDelegate
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/core/v2/index/ClientServiceIndexesManager.java
|
{
"start": 1813,
"end": 7618
}
|
class ____ extends SmartSubscriber {
private final ConcurrentMap<Service, Set<String>> publisherIndexes = new ConcurrentHashMap<>();
private final ConcurrentMap<Service, Set<String>> subscriberIndexes = new ConcurrentHashMap<>();
public ClientServiceIndexesManager() {
NotifyCenter.registerSubscriber(this, NamingEventPublisherFactory.getInstance());
}
public Collection<String> getAllClientsRegisteredService(Service service) {
return publisherIndexes.containsKey(service) ? publisherIndexes.get(service) : new ConcurrentHashSet<>();
}
public Collection<String> getAllClientsSubscribeService(Service service) {
return subscriberIndexes.containsKey(service) ? subscriberIndexes.get(service) : new ConcurrentHashSet<>();
}
public Collection<Service> getSubscribedService() {
return subscriberIndexes.keySet();
}
/**
* Clear the service index without instances.
*
* @param service The service of the Nacos.
*/
public void removePublisherIndexesByEmptyService(Service service) {
if (publisherIndexes.containsKey(service) && publisherIndexes.get(service).isEmpty()) {
publisherIndexes.remove(service);
}
}
@Override
public List<Class<? extends Event>> subscribeTypes() {
List<Class<? extends Event>> result = new LinkedList<>();
result.add(ClientOperationEvent.ClientRegisterServiceEvent.class);
result.add(ClientOperationEvent.ClientDeregisterServiceEvent.class);
result.add(ClientOperationEvent.ClientSubscribeServiceEvent.class);
result.add(ClientOperationEvent.ClientUnsubscribeServiceEvent.class);
result.add(ClientOperationEvent.ClientReleaseEvent.class);
return result;
}
@Override
public void onEvent(Event event) {
if (event instanceof ClientOperationEvent.ClientReleaseEvent) {
handleClientDisconnect((ClientOperationEvent.ClientReleaseEvent) event);
} else if (event instanceof ClientOperationEvent) {
handleClientOperation((ClientOperationEvent) event);
}
}
private void handleClientDisconnect(ClientOperationEvent.ClientReleaseEvent event) {
Client client = event.getClient();
for (Service each : client.getAllSubscribeService()) {
removeSubscriberIndexes(each, client.getClientId());
}
DeregisterInstanceReason reason = event.isNative() ? DeregisterInstanceReason.NATIVE_DISCONNECTED
: DeregisterInstanceReason.SYNCED_DISCONNECTED;
long currentTimeMillis = System.currentTimeMillis();
for (Service each : client.getAllPublishedService()) {
removePublisherIndexes(each, client.getClientId());
InstancePublishInfo instance = client.getInstancePublishInfo(each);
NotifyCenter.publishEvent(
new DeregisterInstanceTraceEvent(currentTimeMillis, "", false, reason, each.getNamespace(),
each.getGroup(), each.getName(), instance.getIp(), instance.getPort()));
}
}
private void handleClientOperation(ClientOperationEvent event) {
Service service = event.getService();
String clientId = event.getClientId();
if (event instanceof ClientOperationEvent.ClientRegisterServiceEvent) {
addPublisherIndexes(service, clientId);
} else if (event instanceof ClientOperationEvent.ClientDeregisterServiceEvent) {
removePublisherIndexes(service, clientId);
} else if (event instanceof ClientOperationEvent.ClientSubscribeServiceEvent) {
addSubscriberIndexes(service, clientId);
} else if (event instanceof ClientOperationEvent.ClientUnsubscribeServiceEvent) {
removeSubscriberIndexes(service, clientId);
}
}
private void addPublisherIndexes(Service service, String clientId) {
String serviceChangedType = Constants.ServiceChangedType.INSTANCE_CHANGED;
if (!publisherIndexes.containsKey(service)) {
// The only time the index needs to be updated is when the service is first created
serviceChangedType = Constants.ServiceChangedType.ADD_SERVICE;
}
NotifyCenter.publishEvent(new ServiceEvent.ServiceChangedEvent(service, serviceChangedType, true));
publisherIndexes.computeIfAbsent(service, key -> new ConcurrentHashSet<>()).add(clientId);
}
private void removePublisherIndexes(Service service, String clientId) {
publisherIndexes.computeIfPresent(service, (s, ids) -> {
ids.remove(clientId);
String serviceChangedType = ids.isEmpty() ? Constants.ServiceChangedType.DELETE_SERVICE
: Constants.ServiceChangedType.INSTANCE_CHANGED;
NotifyCenter.publishEvent(new ServiceEvent.ServiceChangedEvent(service, serviceChangedType, true));
return ids.isEmpty() ? null : ids;
});
}
private void addSubscriberIndexes(Service service, String clientId) {
Set<String> clientIds = subscriberIndexes.computeIfAbsent(service, key -> new ConcurrentHashSet<>());
// Fix #5404, Only first time add need notify event.
if (clientIds.add(clientId)) {
NotifyCenter.publishEvent(new ServiceEvent.ServiceSubscribedEvent(service, clientId));
}
}
private void removeSubscriberIndexes(Service service, String clientId) {
Set<String> clientIds = subscriberIndexes.get(service);
if (clientIds == null) {
return;
}
clientIds.remove(clientId);
if (clientIds.isEmpty()) {
subscriberIndexes.remove(service);
}
}
}
|
ClientServiceIndexesManager
|
java
|
apache__spark
|
streaming/src/test/java/org/apache/spark/streaming/JavaStreamingListenerAPISuite.java
|
{
"start": 894,
"end": 3181
}
|
class ____ extends JavaStreamingListener {
@Override
public void onStreamingStarted(JavaStreamingListenerStreamingStarted streamingStarted) {
super.onStreamingStarted(streamingStarted);
}
@Override
public void onReceiverStarted(JavaStreamingListenerReceiverStarted receiverStarted) {
JavaReceiverInfo receiverInfo = receiverStarted.receiverInfo();
receiverInfo.streamId();
receiverInfo.name();
receiverInfo.active();
receiverInfo.location();
receiverInfo.executorId();
receiverInfo.lastErrorMessage();
receiverInfo.lastError();
receiverInfo.lastErrorTime();
}
@Override
public void onReceiverError(JavaStreamingListenerReceiverError receiverError) {
JavaReceiverInfo receiverInfo = receiverError.receiverInfo();
receiverInfo.streamId();
receiverInfo.name();
receiverInfo.active();
receiverInfo.location();
receiverInfo.executorId();
receiverInfo.lastErrorMessage();
receiverInfo.lastError();
receiverInfo.lastErrorTime();
}
@Override
public void onReceiverStopped(JavaStreamingListenerReceiverStopped receiverStopped) {
JavaReceiverInfo receiverInfo = receiverStopped.receiverInfo();
receiverInfo.streamId();
receiverInfo.name();
receiverInfo.active();
receiverInfo.location();
receiverInfo.executorId();
receiverInfo.lastErrorMessage();
receiverInfo.lastError();
receiverInfo.lastErrorTime();
}
@Override
public void onBatchSubmitted(JavaStreamingListenerBatchSubmitted batchSubmitted) {
super.onBatchSubmitted(batchSubmitted);
}
@Override
public void onBatchStarted(JavaStreamingListenerBatchStarted batchStarted) {
super.onBatchStarted(batchStarted);
}
@Override
public void onBatchCompleted(JavaStreamingListenerBatchCompleted batchCompleted) {
super.onBatchCompleted(batchCompleted);
}
@Override
public void onOutputOperationStarted(
JavaStreamingListenerOutputOperationStarted outputOperationStarted) {
super.onOutputOperationStarted(outputOperationStarted);
}
@Override
public void onOutputOperationCompleted(
JavaStreamingListenerOutputOperationCompleted outputOperationCompleted) {
super.onOutputOperationCompleted(outputOperationCompleted);
}
}
|
JavaStreamingListenerAPISuite
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/datageneration/DocumentGenerator.java
|
{
"start": 882,
"end": 4582
}
|
class ____ {
private final DataGeneratorSpecification specification;
private final DataSourceResponse.ObjectArrayGenerator objectArrayGenerator;
public DocumentGenerator(DataGeneratorSpecification specification) {
this.specification = specification;
this.objectArrayGenerator = specification.dataSource().get(new DataSourceRequest.ObjectArrayGenerator());
}
/**
* Generates a valid random document following the provided template.
* @param template template for the document
* @param mapping generated mapping that will be applied to the destination index of this document
* @return document as a map where subobjects are represented as nested maps
*/
public Map<String, Object> generate(Template template, Mapping mapping) {
var documentMap = new TreeMap<String, Object>();
for (var predefinedField : specification.predefinedFields()) {
documentMap.put(
predefinedField.name(),
predefinedField.generator(specification.dataSource()).generateValue(predefinedField.mapping())
);
}
generateFields(documentMap, template.template(), new Context("", mapping.lookup()));
return documentMap;
}
private void generateFields(Map<String, Object> document, Map<String, Template.Entry> template, Context context) {
for (var entry : template.entrySet()) {
String fieldName = entry.getKey();
Template.Entry templateEntry = entry.getValue();
if (templateEntry instanceof Template.Leaf leaf) {
var fieldMapping = context.mappingLookup().get(context.pathTo(fieldName));
// Unsigned long does not play well when dynamically mapped because
// it gets mapped as just long and large values fail to index.
// Just skip it.
// TODO we can actually handle this in UnsignedLongFieldDataGenerator
if (leaf.type().equals(FieldType.UNSIGNED_LONG.toString()) && fieldMapping == null) {
continue;
}
var generator = specification.dataSource()
.get(new DataSourceRequest.FieldDataGenerator(fieldName, leaf.type(), specification.dataSource()))
.generator();
document.put(fieldName, generator.generateValue(fieldMapping));
} else if (templateEntry instanceof Template.Object object) {
Optional<Integer> arrayLength = objectArrayGenerator.lengthGenerator().get();
if (arrayLength.isPresent()) {
var children = new ArrayList<>(arrayLength.get());
document.put(object.name(), children);
for (int i = 0; i < arrayLength.get(); i++) {
children.add(generateObject(object, context));
}
} else {
document.put(object.name(), generateObject(object, context));
}
}
}
}
private Map<String, Object> generateObject(Template.Object object, Context context) {
var children = new TreeMap<String, Object>();
generateFields(children, object.children(), context.stepIntoObject(object.name()));
return children;
}
record Context(String path, Map<String, Map<String, Object>> mappingLookup) {
Context stepIntoObject(String name) {
return new Context(pathTo(name), mappingLookup);
}
String pathTo(String leafFieldName) {
return path.isEmpty() ? leafFieldName : path + "." + leafFieldName;
}
}
}
|
DocumentGenerator
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/NoRedirectParam.java
|
{
"start": 889,
"end": 1531
}
|
class ____ extends BooleanParam {
/** Parameter name. */
public static final String NAME = "noredirect";
/** Default parameter value. */
public static final String DEFAULT = FALSE;
private static final Domain DOMAIN = new Domain(NAME);
/**
* Constructor.
* @param value the parameter value.
*/
public NoRedirectParam(final Boolean value) {
super(DOMAIN, value);
}
/**
* Constructor.
* @param str a string representation of the parameter value.
*/
public NoRedirectParam(final String str) {
this(DOMAIN.parse(str));
}
@Override
public String getName() {
return NAME;
}
}
|
NoRedirectParam
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4320AggregatorAndDependenciesTest.java
|
{
"start": 1183,
"end": 2763
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that for aggregator mojos invoked from the CLI that require dependency resolution the dependencies
* of all projects in the reactor are resolved and not only the dependencies of the top-level project.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4320");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifacts("org.apache.maven.its.mng4320");
verifier.addCliArgument("-s");
verifier.addCliArgument("settings.xml");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-dependency-resolution:aggregate-test");
verifier.execute();
verifier.verifyErrorFreeLog();
List<String> classpath;
classpath = verifier.loadLines("target/sub-1.txt");
assertTrue(classpath.contains("a-0.1.jar"), classpath.toString());
classpath = verifier.loadLines("target/sub-2.txt");
assertTrue(classpath.contains("b-0.2.jar"), classpath.toString());
classpath = verifier.loadLines("target/aggregator.txt");
assertFalse(classpath.contains("a-0.1.jar"), classpath.toString());
assertFalse(classpath.contains("b-0.2.jar"), classpath.toString());
}
}
|
MavenITmng4320AggregatorAndDependenciesTest
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/postprocessor/PlainSpringCustomPostProcessorOnRouteBuilderTest.java
|
{
"start": 1148,
"end": 1874
}
|
class ____ {
@Test
public void testShouldProcessAnnotatedFields() {
ApplicationContext context = new ClassPathXmlApplicationContext(
"classpath:/org/apache/camel/spring/postprocessor/plainSpringCustomPostProcessorOnRouteBuilderTest.xml");
assertNotNull(context, "Context not created");
assertNotNull(context.getBeansOfType(MagicAnnotationPostProcessor.class), "Post processor not registered");
TestPojo pojo = context.getBean("testPojo", TestPojo.class);
assertNotNull(pojo, "Test pojo not registered");
assertEquals("Changed Value", pojo.getTestValue(), "Processor has not changed field value");
}
}
|
PlainSpringCustomPostProcessorOnRouteBuilderTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/factory/annotation/subpkg/PackagePrivateMethodInjectionFromParentSample.java
|
{
"start": 821,
"end": 954
}
|
class ____ extends PackagePrivateMethodInjectionSample {
// see setTestBean from parent
}
|
PackagePrivateMethodInjectionFromParentSample
|
java
|
apache__camel
|
components/camel-ai/camel-langchain4j-tokenizer/src/test/java/org/apache/camel/component/langchain4j/tokenizer/LangChain4JWordTokenizerTest.java
|
{
"start": 1116,
"end": 2107
}
|
class ____ extends LangChain4JTokenizerTestSupport {
@Test
public void testTokenizer() throws InterruptedException {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(144);
template.sendBody("direct:start", TEXT);
mock.assertIsSatisfied();
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.tokenize(tokenizer()
.byWord()
.maxTokens(1024)
.maxOverlap(10)
.using(LangChain4jTokenizerDefinition.TokenizerType.OPEN_AI)
.end())
.split().body()
.to("mock:result");
}
};
}
}
|
LangChain4JWordTokenizerTest
|
java
|
reactor__reactor-core
|
reactor-test/src/test/java/reactor/test/StepVerifierOptionsTest.java
|
{
"start": 1096,
"end": 8554
}
|
class ____ {
@Test
void valueFormatterDefaultNull() {
StepVerifierOptions options = StepVerifierOptions.create();
assertThat(options.getValueFormatter()).isNull();
}
@Test
void valueFormatterCanSetNull() {
ValueFormatters.ToStringConverter formatter = ValueFormatters.forClass(Object.class, o -> o.getClass().getSimpleName());
final StepVerifierOptions options = StepVerifierOptions.create().valueFormatter(formatter);
assertThat(options.getValueFormatter()).as("before remove").isSameAs(formatter);
options.valueFormatter(null);
assertThat(options.getValueFormatter()).as("after remove").isNull();
}
@Test
void valueFormatterSetterReplaces() {
ValueFormatters.ToStringConverter formatter1 = ValueFormatters.forClass(Object.class, o -> o.getClass().getSimpleName());
ValueFormatters.ToStringConverter formatter2 = ValueFormatters.forClass(Object.class, o -> o.getClass().getSimpleName());
final StepVerifierOptions options = StepVerifierOptions.create().valueFormatter(formatter1);
assertThat(options.getValueFormatter()).as("before replace").isSameAs(formatter1);
options.valueFormatter(formatter2);
assertThat(options.getValueFormatter()).as("after replace").isSameAs(formatter2);
}
@Test
void extractorsDefaultAtEnd() {
StepVerifierOptions options = StepVerifierOptions.create();
options.extractor(new Extractor<String>() {
@Override
public Class<String> getTargetClass() {
return String.class;
}
@Override
public Stream<Object> explode(String original) {
return Arrays.stream(original.split(" "));
}
});
assertThat(options.getExtractors()
.stream()
.map(e -> e.getTargetClass().getSimpleName()))
.containsExactly("String", "Signal", "Iterable", "Object[]");
}
@Test
void extractorReplacingDefaultMovesUp() {
StepVerifierOptions options = StepVerifierOptions.create();
options.extractor(new Extractor<Signal>() {
@Override
public Class<Signal> getTargetClass() {
return Signal.class;
}
@Override
public boolean matches(Signal value) {
return false;
}
@Override
public String prefix(Signal original) {
return "signal(";
}
@Override
public String suffix(Signal original) {
return ")";
}
@Override
public Stream<Object> explode(Signal original) {
return Stream.of(original.getType());
}
});
options.extractor(new Extractor<String>() {
@Override
public Class<String> getTargetClass() {
return String.class;
}
@Override
public Stream<Object> explode(String original) {
return Arrays.stream(original.split(" "));
}
});
assertThat(options.getExtractors()
.stream()
.map(e -> e.getTargetClass().getSimpleName()))
.containsExactly("Signal", "String", "Iterable", "Object[]");
}
@Test
void extractorReplacingCustomInPlace() {
StepVerifierOptions options = StepVerifierOptions.create();
Extractor<String> extractorV1 = new Extractor<String>() {
@Override
public Class<String> getTargetClass() {
return String.class;
}
@Override
public Stream<Object> explode(String original) {
return Arrays.stream(original.split(" "));
}
};
Extractor<String> extractorV2 = new Extractor<String>() {
@Override
public Class<String> getTargetClass() {
return String.class;
}
@Override
public Stream<Object> explode(String original) {
return Arrays.stream(original.split(""));
}
};
options.extractor(extractorV1)
.extractor(extractorV2);
assertThat(options.getExtractors()
.stream()
.map(e -> e.getTargetClass().getSimpleName()))
.containsExactly("String", "Signal", "Iterable", "Object[]");
assertThat(options.getExtractors())
.first()
.isSameAs(extractorV2);
}
@Test
void getExtractorsIsCopy() {
StepVerifierOptions options = StepVerifierOptions.create();
options.extractor(new Extractor<String>() {
@Override
public Class<String> getTargetClass() {
return String.class;
}
@Override
public Stream<Object> explode(String original) {
return Arrays.stream(original.split(" "));
}
});
Collection<Extractor<?>> extractors1 = options.getExtractors();
Collection<Extractor<?>> extractors2 = options.getExtractors();
assertThat(extractors1).isNotSameAs(extractors2);
extractors1.clear();
assertThat(extractors1).isEmpty();
assertThat(extractors2).isNotEmpty();
}
@Test
@SuppressWarnings("rawtypes")
void copy() {
Extractor<Signal> customExtractor1 = new Extractor<Signal>() {
@Override
public Class<Signal> getTargetClass() {
return Signal.class;
}
@Override
public boolean matches(Signal value) {
return value.isOnNext() && value.hasValue();
}
@Override
public Stream<Object> explode(Signal original) {
return Stream.of("CUSTOM1", original.get());
}
};
Extractor<Signal> customExtractor2 = new Extractor<Signal>() {
@Override
public Class<Signal> getTargetClass() {
return Signal.class;
}
@Override
public boolean matches(Signal value) {
return value.isOnNext() && value.hasValue();
}
@Override
public Stream<Object> explode(Signal original) {
return Stream.of("CUSTOM2", original.get());
}
};
StepVerifierOptions options = StepVerifierOptions.create()
.initialRequest(123L)
.withInitialContext(Context.of("example", true))
.scenarioName("scenarioName")
.extractor(customExtractor1)
.checkUnderRequesting(false)
.valueFormatter(ValueFormatters.forClass(Signal.class, s -> "SIGNAL"))
.virtualTimeSchedulerSupplier(VirtualTimeScheduler::create);
StepVerifierOptions copy = options.copy();
assertThat(copy)
.as("deep copy")
.isNotSameAs(options)
.usingRecursiveComparison()
.isEqualTo(options);
assertThat(copy.extractorMap)
.as("extractorMap not shared")
.isNotSameAs(options.extractorMap)
.containsOnlyKeys(Signal.class)
.containsEntry(Signal.class, customExtractor1);
copy.initialRequest(234L)
.withInitialContext(Context.of("exampleSame", false))
.scenarioName("scenarioName2")
.checkUnderRequesting(true)
.extractor(customExtractor2)
.valueFormatter(ValueFormatters.forClass(Signal.class, s -> "SIGNAL2"))
.virtualTimeSchedulerSupplier(() -> VirtualTimeScheduler.create(false));
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(copy)
.usingRecursiveComparison()
.isEqualTo(options)
)
.as("post mutation")
.withMessageContainingAll(
"when recursively comparing field by field, but found the following 5 differences:",
"field/property 'checkUnderRequesting' differ:",
"field/property 'initialContext.key' differ:",
"field/property 'initialContext.value' differ:",
"field/property 'initialRequest' differ:",
"field/property 'scenarioName' differ:"
);
assertThat(copy.extractorMap)
.as("post mutation extractorMap")
.containsOnlyKeys(Signal.class)
.doesNotContainEntry(Signal.class, customExtractor1)
.containsEntry(Signal.class, customExtractor2);
assertThat(copy.getValueFormatter()).as("valueFormatter").isNotSameAs(options.getValueFormatter());
assertThat(copy.getVirtualTimeSchedulerSupplier()).as("vts supplier").isNotSameAs(options.getVirtualTimeSchedulerSupplier());
}
}
|
StepVerifierOptionsTest
|
java
|
apache__flink
|
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/service/SqlGatewayServiceStatementITCase.java
|
{
"start": 7008,
"end": 12094
}
|
class ____ implements Iterator<RowData> {
private final SessionHandle sessionHandle;
private final OperationHandle operationHandle;
private Long token = 0L;
private Iterator<RowData> fetchedRows = Collections.emptyIterator();
public RowDataIterator(SessionHandle sessionHandle, OperationHandle operationHandle) {
this.sessionHandle = sessionHandle;
this.operationHandle = operationHandle;
fetch();
}
@Override
public boolean hasNext() {
while (token != null && !fetchedRows.hasNext()) {
fetch();
}
return token != null;
}
@Override
public RowData next() {
return fetchedRows.next();
}
private void fetch() {
ResultSet resultSet =
service.fetchResults(sessionHandle, operationHandle, token, Integer.MAX_VALUE);
token = resultSet.getNextToken();
fetchedRows = resultSet.getData().iterator();
}
}
// --------------------------------------------------------------------------------------------
// Validate ResultSet fields
// --------------------------------------------------------------------------------------------
@Test
void testIsQueryResult() throws Exception {
SessionHandle sessionHandle = createInitializedSession(service);
BiFunction<SessionHandle, OperationHandle, Boolean> isQueryResultGetter =
(sessionHandle1, operationHandle) ->
fetchResults(service, sessionHandle1, operationHandle).isQueryResult();
// trivial query syntax
validateResultSetField(
sessionHandle, "SELECT * FROM cat1.db1.tbl1;", isQueryResultGetter, true);
// query with CTE
validateResultSetField(
sessionHandle,
"WITH hub AS (SELECT * FROM cat1.db1.tbl1)\nSELECT * FROM hub;",
isQueryResultGetter,
true);
// non-query
validateResultSetField(
sessionHandle,
"INSERT INTO cat1.db1.tbl1 SELECT * FROM cat1.db1.tbl2;",
isQueryResultGetter,
false);
}
@Test
void testHasJobID() throws Exception {
SessionHandle sessionHandle = createInitializedSession(service);
BiFunction<SessionHandle, OperationHandle, Boolean> hasJobIDGetter =
(sessionHandle1, operationHandle) ->
fetchResults(service, sessionHandle1, operationHandle).getJobID() != null;
// query
validateResultSetField(sessionHandle, "SELECT * FROM cat1.db1.tbl1;", hasJobIDGetter, true);
// insert
validateResultSetField(
sessionHandle,
"INSERT INTO cat1.db1.tbl1 SELECT * FROM cat1.db1.tbl2;",
hasJobIDGetter,
true);
// ddl
validateResultSetField(
sessionHandle,
"CREATE TABLE test (f0 INT) WITH ('connector' = 'values');",
hasJobIDGetter,
false);
}
@Test
void testResultKind() throws Exception {
SessionHandle sessionHandle = createInitializedSession(service);
BiFunction<SessionHandle, OperationHandle, ResultKind> resultKindGetter =
(sessionHandle1, operationHandle) ->
fetchResults(service, sessionHandle1, operationHandle).getResultKind();
// query
validateResultSetField(
sessionHandle,
"SELECT * FROM cat1.db1.tbl1;",
resultKindGetter,
ResultKind.SUCCESS_WITH_CONTENT);
// insert
validateResultSetField(
sessionHandle,
"INSERT INTO cat1.db1.tbl1 SELECT * FROM cat1.db1.tbl2;",
resultKindGetter,
ResultKind.SUCCESS_WITH_CONTENT);
// ddl
validateResultSetField(
sessionHandle,
"CREATE TABLE test (f0 INT) WITH ('connector' = 'values');",
resultKindGetter,
ResultKind.SUCCESS);
// set
validateResultSetField(
sessionHandle, "SET 'key' = 'value';", resultKindGetter, ResultKind.SUCCESS);
validateResultSetField(
sessionHandle, "SET;", resultKindGetter, ResultKind.SUCCESS_WITH_CONTENT);
}
private <T> void validateResultSetField(
SessionHandle sessionHandle,
String statement,
BiFunction<SessionHandle, OperationHandle, T> resultGetter,
T expected)
throws Exception {
OperationHandle operationHandle =
service.executeStatement(sessionHandle, statement, -1, new Configuration());
awaitOperationTermination(service, sessionHandle, operationHandle);
assertThat(resultGetter.apply(sessionHandle, operationHandle)).isEqualTo(expected);
}
private static
|
RowDataIterator
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jms/src/main/java/org/springframework/boot/jms/autoconfigure/health/JmsHealthContributorAutoConfiguration.java
|
{
"start": 2012,
"end": 2479
}
|
class ____
extends CompositeHealthContributorConfiguration<JmsHealthIndicator, ConnectionFactory> {
JmsHealthContributorAutoConfiguration() {
super(JmsHealthIndicator::new);
}
@Bean
@ConditionalOnMissingBean(name = { "jmsHealthIndicator", "jmsHealthContributor" })
HealthContributor jmsHealthContributor(ConfigurableListableBeanFactory beanFactory) {
return createContributor(beanFactory, ConnectionFactory.class);
}
}
|
JmsHealthContributorAutoConfiguration
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotationBackCompatibilityTests.java
|
{
"start": 2279,
"end": 2381
}
|
interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@MetaTestAnnotation
@
|
TestAndMetaTestAnnotation
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/SavepointITCase.java
|
{
"start": 15637,
"end": 30545
}
|
class ____
extends RichMapFunction<Integer, Integer> implements CheckpointListener {
private final long savepointId;
private FailingOnCompletedSavepointMapFunction(long savepointId) {
this.savepointId = savepointId;
}
@Override
public Integer map(Integer value) throws Exception {
return value;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
if (checkpointId == savepointId) {
throw new ExpectedTestException();
}
}
}
/**
* Triggers a savepoint for a job that uses the FsStateBackend. We expect that all checkpoint
* files are written to a new savepoint directory.
*
* <ol>
* <li>Submit job, wait for some progress
* <li>Trigger savepoint and verify that savepoint has been created
* <li>Shut down the cluster, re-submit the job from the savepoint, verify that the initial
* state has been reset, and all tasks are running again
* <li>Cancel job, dispose the savepoint, and verify that everything has been cleaned up
* </ol>
*/
@Test
public void testTriggerSavepointAndResumeWithFileBasedCheckpoints() throws Exception {
final int numTaskManagers = 2;
final int numSlotsPerTaskManager = 2;
final int parallelism = numTaskManagers * numSlotsPerTaskManager;
final MiniClusterResourceFactory clusterFactory =
new MiniClusterResourceFactory(
numTaskManagers, numSlotsPerTaskManager, getFileBasedCheckpointsConfig());
final String savepointPath = submitJobAndTakeSavepoint(clusterFactory, parallelism);
verifySavepoint(parallelism, savepointPath);
restoreJobAndVerifyState(savepointPath, clusterFactory, parallelism);
}
@Test
public void testTriggerSavepointAndResumeWithClaim() throws Exception {
final int numTaskManagers = 2;
final int numSlotsPerTaskManager = 2;
final int parallelism = numTaskManagers * numSlotsPerTaskManager;
final MiniClusterResourceFactory clusterFactory =
new MiniClusterResourceFactory(
numTaskManagers, numSlotsPerTaskManager, getFileBasedCheckpointsConfig());
final String savepointPath = submitJobAndTakeSavepoint(clusterFactory, parallelism);
verifySavepoint(parallelism, savepointPath);
restoreJobAndVerifyState(
clusterFactory,
parallelism,
SavepointRestoreSettings.forPath(savepointPath, false, RecoveryClaimMode.CLAIM),
cluster -> {
cluster.after();
assertFalse(
"Savepoint not properly cleaned up.",
new File(new URI(savepointPath)).exists());
});
}
@Test
public void testTriggerSavepointAndResumeWithLegacyMode() throws Exception {
final int numTaskManagers = 2;
final int numSlotsPerTaskManager = 2;
final int parallelism = numTaskManagers * numSlotsPerTaskManager;
final MiniClusterResourceFactory clusterFactory =
new MiniClusterResourceFactory(
numTaskManagers, numSlotsPerTaskManager, getFileBasedCheckpointsConfig());
final String savepointPath = submitJobAndTakeSavepoint(clusterFactory, parallelism);
verifySavepoint(parallelism, savepointPath);
restoreJobAndVerifyState(
clusterFactory,
parallelism,
SavepointRestoreSettings.forPath(savepointPath, false, RecoveryClaimMode.LEGACY),
cluster -> {
cluster.after();
assertTrue(
"Savepoint unexpectedly cleaned up.",
new File(new URI(savepointPath)).exists());
});
}
@Rule public SharedObjects sharedObjects = SharedObjects.create();
@Test
@Ignore("Disabling this test because it regularly fails on AZP. See FLINK-25427.")
    /**
     * Takes an externalized, retained checkpoint, resumes from it in
     * {@link RecoveryClaimMode#NO_CLAIM} mode, then deletes the first checkpoint and verifies that
     * a later checkpoint taken on top of it is still self-contained and restorable.
     */
    public void testTriggerSavepointAndResumeWithNoClaim() throws Exception {
        final int numTaskManagers = 2;
        final int numSlotsPerTaskManager = 2;
        final int parallelism = numTaskManagers * numSlotsPerTaskManager;
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Incremental RocksDB backend: later checkpoints may reference files of earlier ones,
        // which is exactly what NO_CLAIM must cope with once the earlier checkpoint is deleted.
        StateBackendUtils.configureRocksDBStateBackend(env, true);
        env.getCheckpointConfig()
                .setExternalizedCheckpointRetention(
                        ExternalizedCheckpointRetention.RETAIN_ON_CANCELLATION);
        CheckpointStorageUtils.configureFileSystemCheckpointStorage(
                env, folder.newFolder().toURI());
        env.setParallelism(parallelism);
        // Counted down once per sunk record; used below to wait until enough state accumulated.
        final SharedReference<CountDownLatch> counter =
                sharedObjects.add(new CountDownLatch(10_000));
        env.fromSequence(1, Long.MAX_VALUE)
                .keyBy(i -> i % parallelism)
                .process(
                        new KeyedProcessFunction<Long, Long, Long>() {
                            private ListState<Long> last;
                            @Override
                            public void open(OpenContext openContext) {
                                // we use list state here to create sst files of a significant size
                                // if sst files do not reach certain thresholds they are not stored
                                // in files, but as a byte stream in checkpoints metadata
                                last =
                                        getRuntimeContext()
                                                .getListState(
                                                        new ListStateDescriptor<>(
                                                                "last",
                                                                BasicTypeInfo.LONG_TYPE_INFO));
                            }
                            @Override
                            public void processElement(
                                    Long value,
                                    KeyedProcessFunction<Long, Long, Long>.Context ctx,
                                    Collector<Long> out)
                                    throws Exception {
                                last.add(value);
                                out.collect(value);
                            }
                        })
                .addSink(
                        new SinkFunction<Long>() {
                            @Override
                            public void invoke(Long value) {
                                counter.consumeSync(CountDownLatch::countDown);
                            }
                        })
                .setParallelism(1);
        final JobGraph jobGraph = env.getStreamGraph().getJobGraph();
        MiniClusterWithClientResource cluster =
                new MiniClusterWithClientResource(
                        new MiniClusterResourceConfiguration.Builder()
                                .setNumberTaskManagers(numTaskManagers)
                                .setNumberSlotsPerTaskManager(numSlotsPerTaskManager)
                                .build());
        cluster.before();
        try {
            final JobID jobID1 = new JobID();
            jobGraph.setJobID(jobID1);
            cluster.getClusterClient().submitJob(jobGraph).get();
            CommonTestUtils.waitForAllTaskRunning(cluster.getMiniCluster(), jobID1, false);
            // wait for some records to be processed before taking the checkpoint
            counter.get().await();
            final String firstCheckpoint = cluster.getMiniCluster().triggerCheckpoint(jobID1).get();
            cluster.getClusterClient().cancel(jobID1).get();
            // NO_CLAIM: the restored job must not assume ownership of the checkpoint's files.
            jobGraph.setSavepointRestoreSettings(
                    SavepointRestoreSettings.forPath(
                            firstCheckpoint, false, RecoveryClaimMode.NO_CLAIM));
            final JobID jobID2 = new JobID();
            jobGraph.setJobID(jobID2);
            cluster.getClusterClient().submitJob(jobGraph).get();
            CommonTestUtils.waitForAllTaskRunning(cluster.getMiniCluster(), jobID2, false);
            String secondCheckpoint = cluster.getMiniCluster().triggerCheckpoint(jobID2).get();
            cluster.getClusterClient().cancel(jobID2).get();
            // delete the checkpoint we restored from
            FileUtils.deleteDirectory(Paths.get(new URI(firstCheckpoint)).getParent().toFile());
            // we should be able to restore from the second checkpoint even though it has been built
            // on top of the first checkpoint
            jobGraph.setSavepointRestoreSettings(
                    SavepointRestoreSettings.forPath(
                            secondCheckpoint, false, RecoveryClaimMode.NO_CLAIM));
            final JobID jobID3 = new JobID();
            jobGraph.setJobID(jobID3);
            cluster.getClusterClient().submitJob(jobGraph).get();
            CommonTestUtils.waitForAllTaskRunning(cluster.getMiniCluster(), jobID3, false);
        } finally {
            cluster.after();
        }
    }
@Test
public void testTriggerSavepointAndResumeWithFileBasedCheckpointsAndRelocateBasePath()
throws Exception {
final int numTaskManagers = 2;
final int numSlotsPerTaskManager = 2;
final int parallelism = numTaskManagers * numSlotsPerTaskManager;
final MiniClusterResourceFactory clusterFactory =
new MiniClusterResourceFactory(
numTaskManagers, numSlotsPerTaskManager, getFileBasedCheckpointsConfig());
final String savepointPath = submitJobAndTakeSavepoint(clusterFactory, parallelism);
final org.apache.flink.core.fs.Path oldPath =
new org.apache.flink.core.fs.Path(savepointPath);
final org.apache.flink.core.fs.Path newPath =
new org.apache.flink.core.fs.Path(folder.newFolder().toURI().toString());
(new org.apache.flink.core.fs.Path(savepointPath).getFileSystem()).rename(oldPath, newPath);
verifySavepoint(parallelism, newPath.toUri().toString());
restoreJobAndVerifyState(newPath.toUri().toString(), clusterFactory, parallelism);
}
    /**
     * Verifies that entropy injection is applied to savepoint file state handle paths, that the
     * savepoint can still be restored from, and that disposing it removes the files on disk.
     */
    @Test
    public void testShouldAddEntropyToSavepointPath() throws Exception {
        final int numTaskManagers = 2;
        final int numSlotsPerTaskManager = 2;
        final int parallelism = numTaskManagers * numSlotsPerTaskManager;
        final MiniClusterResourceFactory clusterFactory =
                new MiniClusterResourceFactory(
                        numTaskManagers,
                        numSlotsPerTaskManager,
                        getCheckpointingWithEntropyConfig());
        final String savepointPath = submitJobAndTakeSavepoint(clusterFactory, parallelism);
        // The file state handles written to disk must contain the injected entropy segment.
        assertThat(savepointDir, hasEntropyInFileStateHandlePaths());
        restoreJobAndVerifyState(
                clusterFactory,
                parallelism,
                SavepointRestoreSettings.forPath(savepointPath),
                cluster -> {
                    // Map the entropy-injecting test scheme back to a plain file URI so the
                    // savepoint files can be inspected directly on the local disk.
                    final URI localURI = new URI(savepointPath.replace("test-entropy:/", "file:/"));
                    assertTrue("Savepoint has not been created", new File(localURI).exists());
                    cluster.getClusterClient().disposeSavepoint(savepointPath).get();
                    assertFalse("Savepoint not properly cleaned up.", new File(localURI).exists());
                });
    }
private Configuration getCheckpointingWithEntropyConfig() {
final String savepointPathWithEntropyPlaceholder =
new File(savepointDir, EntropyInjectingTestFileSystem.ENTROPY_INJECTION_KEY)
.getPath();
final Configuration config =
getFileBasedCheckpointsConfig(
"test-entropy://" + savepointPathWithEntropyPlaceholder);
config.setString("s3.entropy.key", EntropyInjectingTestFileSystem.ENTROPY_INJECTION_KEY);
return config;
}
    /**
     * Submits a stateful job on a fresh mini cluster, waits until every subtask has made progress,
     * then cancels the job with a canonical savepoint.
     *
     * @return the path of the created savepoint
     */
    private String submitJobAndTakeSavepoint(
            MiniClusterResourceFactory clusterFactory, int parallelism) throws Exception {
        final JobGraph jobGraph = createJobGraph(parallelism, 0, 1000);
        final JobID jobId = jobGraph.getJobID();
        StatefulCounter.resetForTest(parallelism);
        MiniClusterWithClientResource cluster = clusterFactory.get();
        cluster.before();
        ClusterClient<?> client = cluster.getClusterClient();
        try {
            client.submitJob(jobGraph).get();
            waitForAllTaskRunning(cluster.getMiniCluster(), jobId, false);
            // Only take the savepoint once the counters have demonstrably processed data.
            StatefulCounter.getProgressLatch().await();
            return client.cancelWithSavepoint(jobId, null, SavepointFormatType.CANONICAL).get();
        } finally {
            // Always tear down the cluster and reset the shared static test state.
            cluster.after();
            StatefulCounter.resetForTest(parallelism);
        }
    }
private void verifySavepoint(final int parallelism, final String savepointPath)
throws URISyntaxException {
// Only one savepoint should exist
File savepointDir = new File(new URI(savepointPath));
assertTrue("Savepoint directory does not exist.", savepointDir.exists());
assertTrue(
"Savepoint did not create self-contained directory.", savepointDir.isDirectory());
File[] savepointFiles = savepointDir.listFiles();
if (savepointFiles != null) {
// Expect one metadata file and one checkpoint file per stateful
// parallel subtask
String errMsg =
"Did not write expected number of savepoint/checkpoint files to directory: "
+ Arrays.toString(savepointFiles);
assertEquals(errMsg, 1 + parallelism, savepointFiles.length);
} else {
fail(String.format("Returned savepoint path (%s) is not valid.", savepointPath));
}
}
    /**
     * Restores a job from {@code savepointPath}, verifies its state, then disposes the savepoint
     * and checks that its files are actually removed from disk.
     */
    private void restoreJobAndVerifyState(
            String savepointPath, MiniClusterResourceFactory clusterFactory, int parallelism)
            throws Exception {
        restoreJobAndVerifyState(
                clusterFactory,
                parallelism,
                SavepointRestoreSettings.forPath(savepointPath, false),
                cluster -> {
                    cluster.getClusterClient().disposeSavepoint(savepointPath).get();
                    assertFalse(
                            "Savepoint not properly cleaned up.",
                            new File(new URI(savepointPath)).exists());
                });
    }
@FunctionalInterface
|
FailingOnCompletedSavepointMapFunction
|
java
|
apache__spark
|
sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceException.java
|
{
"start": 875,
"end": 1159
}
|
/**
 * Generic unchecked exception thrown by Hive service components.
 *
 * <p>The class name was corrupted to {@code ____} in this copy while the constructors were still
 * named {@code ServiceException}, which made the class uncompilable; the name is restored here.
 */
class ServiceException extends RuntimeException {

  /** Wraps an underlying cause; the message defaults to {@code cause.toString()}. */
  public ServiceException(Throwable cause) {
    super(cause);
  }

  /** Creates an exception with a descriptive message and no cause. */
  public ServiceException(String message) {
    super(message);
  }

  /** Creates an exception with a descriptive message and an underlying cause. */
  public ServiceException(String message, Throwable cause) {
    super(message, cause);
  }
}
|
ServiceException
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableChecker.java
|
{
"start": 8472,
"end": 21493
}
|
class ____
// invocations
checkClassTreeInstantiation(classBody, state, createImmutableAnalysis(state));
}
return NO_MATCH;
}
  /**
   * Checks generic method declarations: the method's declared type arguments must be compatible
   * with immutability constraints on its own type parameters. Any finding is reported through
   * {@code checkInstantiation}, so this matcher itself always returns {@code NO_MATCH}.
   */
  @Override
  public Description matchMethod(MethodTree tree, VisitorState state) {
    checkInstantiation(
        tree,
        state,
        getSymbol(tree).getTypeParameters(),
        ASTHelpers.getType(tree).getTypeArguments());
    return NO_MATCH;
  }
  /** Creates a fresh {@link ImmutableAnalysis} bound to the current visitor state. */
  private ImmutableAnalysis createImmutableAnalysis(VisitorState state) {
    return immutableAnalysisFactory.create(this::isSuppressed, state, immutableAnnotations);
  }
private void checkInvocation(
Tree tree, MethodSymbol symbol, Type methodType, VisitorState state) {
ImmutableAnalysis analysis = createImmutableAnalysis(state);
Violation info = analysis.checkInvocation(methodType, symbol);
if (info.isPresent()) {
state.reportMatch(buildDescription(tree).setMessage(info.message()).build());
}
}
private void checkInstantiation(
Tree tree,
VisitorState state,
ImmutableAnalysis analysis,
Collection<TypeVariableSymbol> typeParameters,
Collection<Type> typeArguments) {
Violation info = analysis.checkInstantiation(typeParameters, typeArguments);
if (info.isPresent()) {
state.reportMatch(buildDescription(tree).setMessage(info.message()).build());
}
}
  /** Convenience overload that creates a fresh {@link ImmutableAnalysis} before checking. */
  private void checkInstantiation(
      Tree tree,
      VisitorState state,
      Collection<TypeVariableSymbol> typeParameters,
      Collection<Type> typeArguments) {
    checkInstantiation(tree, state, createImmutableAnalysis(state), typeParameters, typeArguments);
  }
  /**
   * Main entry point for class declarations: validates supertype instantiations, handles anonymous
   * classes separately, checks {@code containerOf} references, and finally verifies field
   * immutability for directly or transitively {@code @Immutable}-annotated types.
   */
  @Override
  public Description matchClass(ClassTree tree, VisitorState state) {
    ImmutableAnalysis analysis = createImmutableAnalysis(state);
    checkClassTreeInstantiation(tree, state, analysis);
    if (tree.getSimpleName().length() == 0) {
      // anonymous classes have empty names
      return handleAnonymousClass(tree, state, analysis);
    }
    AnnotationInfo annotation = getImmutableAnnotation(analysis, tree, state);
    if (annotation == null) {
      // If the type isn't annotated, and doesn't extend anything annotated, there's nothing to do.
      // An earlier version of the check required an explicit annotation on classes that extended
      // @Immutable classes, but didn't enforce the subtyping requirement for interfaces. We now
      // don't require the explicit annotations on any subtypes.
      return NO_MATCH;
    }
    // Special-case visiting declarations of known-immutable types; these uses
    // of the annotation are "trusted".
    if (wellKnownMutability.getKnownImmutableClasses().containsValue(annotation)) {
      return NO_MATCH;
    }
    // Check that the types in containerOf actually exist
    Map<String, TypeVariableSymbol> typarams = new HashMap<>();
    for (TypeParameterTree typaram : tree.getTypeParameters()) {
      typarams.put(typaram.getName().toString(), (TypeVariableSymbol) getSymbol(typaram));
    }
    SetView<String> difference = Sets.difference(annotation.containerOf(), typarams.keySet());
    if (!difference.isEmpty()) {
      return buildDescription(tree)
          .setMessage(
              format(
                  "could not find type(s) referenced by containerOf: %s",
                  Joiner.on("', '").join(difference)))
          .build();
    }
    // A type parameter carrying @ImmutableTypeParameter already implies the stronger guarantee,
    // so also listing it in containerOf is redundant and flagged.
    ImmutableSet<String> immutableAndContainer =
        typarams.entrySet().stream()
            .filter(
                e ->
                    annotation.containerOf().contains(e.getKey())
                        && analysis.hasThreadSafeTypeParameterAnnotation(e.getValue()))
            .map(Map.Entry::getKey)
            .collect(toImmutableSet());
    if (!immutableAndContainer.isEmpty()) {
      return buildDescription(tree)
          .setMessage(
              format(
                  "using both @ImmutableTypeParameter and containerOf is redundant: %s",
                  Joiner.on("', '").join(immutableAndContainer)))
          .build();
    }
    // Main path for @Immutable-annotated types:
    //
    // Check that the fields (including inherited fields) are immutable, and
    // validate the type hierarchy superclass.
    ClassSymbol sym = getSymbol(tree);
    Violation info =
        analysis.checkForImmutability(
            Optional.of(tree),
            immutableTypeParametersInScope(getSymbol(tree), state, analysis),
            ASTHelpers.getType(tree),
            (Tree matched, Violation violation) ->
                describeClass(matched, sym, annotation, violation));
    // Local classes can capture state from the enclosing scope; validate those captures too.
    Type superType = immutableSupertype(sym, state);
    if (superType != null && sym.isDirectlyOrIndirectlyLocal()) {
      checkClosedTypes(tree, state, superType.tsym, analysis);
    }
    if (!info.isPresent()) {
      return NO_MATCH;
    }
    return describeClass(tree, sym, annotation, info).build();
  }
  /**
   * Checks every supertype use in a class declaration (implements and extends clauses) as a type
   * instantiation, enforcing immutability constraints on the supertypes' type parameters.
   */
  private void checkClassTreeInstantiation(
      ClassTree tree, VisitorState state, ImmutableAnalysis analysis) {
    for (Tree implementTree : tree.getImplementsClause()) {
      checkInstantiation(
          tree,
          state,
          analysis,
          getSymbol(implementTree).getTypeParameters(),
          ASTHelpers.getType(implementTree).getTypeArguments());
    }
    Tree extendsClause = tree.getExtendsClause();
    if (extendsClause != null) {
      checkInstantiation(
          tree,
          state,
          analysis,
          getSymbol(extendsClause).getTypeParameters(),
          ASTHelpers.getType(extendsClause).getTypeArguments());
    }
  }
private Description.Builder describeClass(
Tree tree, ClassSymbol sym, AnnotationInfo annotation, Violation info) {
String message;
if (sym.getQualifiedName().contentEquals(annotation.typeName())) {
message = "type annotated with @Immutable could not be proven immutable: " + info.message();
} else {
message =
format(
"Class extends @Immutable type %s, but is not immutable: %s",
annotation.typeName(), info.message());
}
return buildDescription(tree).setMessage(message);
}
  // Anonymous classes

  /** Check anonymous implementations of {@code @Immutable} types. */
  private Description handleAnonymousClass(
      ClassTree tree, VisitorState state, ImmutableAnalysis analysis) {
    ClassSymbol sym = getSymbol(tree);
    Type superType = immutableSupertype(sym, state);
    if (superType == null) {
      // Not implementing/extending anything @Immutable: nothing to enforce.
      return NO_MATCH;
    }
    // Validate captured variables and implicit `this` usage from the enclosing scope.
    checkClosedTypes(tree, state, superType.tsym, analysis);
    // We don't need to check that the superclass has an immutable instantiation.
    // The anonymous instance can only be referred to using a superclass type, so
    // the type arguments will be validated at any type use site where we care about
    // the instance's immutability.
    //
    // Also, we have no way to express something like:
    //
    // public static <@Immutable T> ImmutableBox<T> create(T t) {
    //   return new ImmutableBox<>(t);
    // }
    ImmutableSet<String> typarams = immutableTypeParametersInScope(sym, state, analysis);
    Violation info =
        analysis.areFieldsImmutable(
            Optional.of(tree),
            typarams,
            ASTHelpers.getType(tree),
            (t, i) -> describeAnonymous(t, superType, i));
    if (!info.isPresent()) {
      return NO_MATCH;
    }
    return describeAnonymous(tree, superType, info).build();
  }
  /**
   * Checks that a lambda or anonymous class implementing an {@code @Immutable} interface does not
   * capture mutable state: closed-over variables must be immutable, and receiver-less calls on an
   * enclosing instance are only allowed when that enclosing type is itself {@code @Immutable}.
   */
  private void checkClosedTypes(
      Tree lambdaOrAnonymousClass,
      VisitorState state,
      TypeSymbol lambdaType,
      ImmutableAnalysis analysis) {
    Set<VarSymbol> variablesClosed = new HashSet<>();
    SetMultimap<ClassSymbol, MethodSymbol> typesClosed = LinkedHashMultimap.create();
    // Variables declared inside the lambda/anonymous class itself are not captures.
    Set<VarSymbol> variablesOwnedByLambda = new HashSet<>();
    new TreePathScanner<Void, Void>() {
      @Override
      public Void visitVariable(VariableTree tree, Void unused) {
        var symbol = getSymbol(tree);
        variablesOwnedByLambda.add(symbol);
        return super.visitVariable(tree, null);
      }

      @Override
      public Void visitMethodInvocation(MethodInvocationTree tree, Void unused) {
        // A receiver-less call to an instance method implicitly captures the enclosing `this`.
        if (getReceiver(tree) == null) {
          var symbol = getSymbol(tree);
          if (!symbol.isStatic() && !symbol.isConstructor()) {
            effectiveTypeOfThis(symbol, getCurrentPath(), state)
                .filter(t -> !isSameType(t.type, getType(lambdaOrAnonymousClass), state))
                .ifPresent(t -> typesClosed.put(t, symbol));
          }
        }
        return super.visitMethodInvocation(tree, null);
      }

      @Override
      public Void visitMemberSelect(MemberSelectTree tree, Void unused) {
        // Note: member selects are not intrinsically problematic; the issue is what might be on the
        // LHS of them, which is going to be handled by another visit* method.
        // If we're only seeing a field access, don't complain about the fact we closed around
        // `this`. This is special-case as it would otherwise be vexing to complain about accessing
        // a field of type ImmutableList.
        if (tree.getExpression() instanceof IdentifierTree identifierTree
            && getSymbol(tree) instanceof VarSymbol
            && identifierTree.getName().contentEquals("this")) {
          handleIdentifier(getSymbol(tree));
          return null;
        }
        return super.visitMemberSelect(tree, null);
      }

      @Override
      public Void visitIdentifier(IdentifierTree tree, Void unused) {
        handleIdentifier(getSymbol(tree));
        return super.visitIdentifier(tree, null);
      }

      // Records a capture: a non-static variable referenced here but declared outside the lambda.
      private void handleIdentifier(Symbol symbol) {
        if (symbol instanceof VarSymbol varSymbol
            && !variablesOwnedByLambda.contains(symbol)
            && !isStatic(symbol)) {
          variablesClosed.add(varSymbol);
        }
      }
    }.scan(state.getPath(), null);
    ImmutableSet<String> typarams =
        immutableTypeParametersInScope(getSymbol(lambdaOrAnonymousClass), state, analysis);
    // Report each mutable captured variable.
    for (VarSymbol closedVariable : variablesClosed) {
      Violation v = checkClosedVariable(closedVariable, lambdaOrAnonymousClass, typarams, analysis);
      if (!v.isPresent()) {
        continue;
      }
      String message =
          format(
              "%s, but closes over '%s', which is not @Immutable because %s",
              formAnonymousReason(lambdaOrAnonymousClass, lambdaType), closedVariable, v.message());
      state.reportMatch(buildDescription(lambdaOrAnonymousClass).setMessage(message).build());
    }
    // Report each implicit capture of a non-@Immutable enclosing instance.
    for (var entry : typesClosed.asMap().entrySet()) {
      var classSymbol = entry.getKey();
      var methods = entry.getValue();
      if (!typeOrSuperHasImmutableAnnotation(classSymbol.type.tsym, state)) {
        String message =
            format(
                "%s, but accesses instance method(s) '%s' on '%s' which is not @Immutable.",
                formAnonymousReason(lambdaOrAnonymousClass, lambdaType),
                methods.stream().map(Symbol::getSimpleName).collect(joining(", ")),
                classSymbol.getSimpleName());
        state.reportMatch(buildDescription(lambdaOrAnonymousClass).setMessage(message).build());
      }
    }
  }
  /**
   * Gets the effective type of `this`, had the bare invocation of {@code symbol} been qualified
   * with it.
   */
  private static Optional<ClassSymbol> effectiveTypeOfThis(
      MethodSymbol symbol, TreePath currentPath, VisitorState state) {
    // Walk outwards and take the nearest enclosing class that is a subtype of the method's owner.
    return stream(currentPath.iterator())
        .filter(ClassTree.class::isInstance)
        .map(t -> ASTHelpers.getSymbol((ClassTree) t))
        .filter(c -> isSubtype(c.type, symbol.owner.type, state))
        .findFirst();
  }
  /**
   * Checks whether a variable captured by a lambda/anonymous class is immutable. Locals and
   * parameters are checked by their type alone; fields go through full field-immutability
   * analysis on the owning class.
   */
  private Violation checkClosedVariable(
      VarSymbol closedVariable,
      Tree tree,
      ImmutableSet<String> typarams,
      ImmutableAnalysis analysis) {
    if (!closedVariable.getKind().equals(ElementKind.FIELD)) {
      return analysis.isThreadSafeType(false, typarams, closedVariable.type);
    }
    return analysis.isFieldImmutable(
        Optional.empty(),
        typarams,
        (ClassSymbol) closedVariable.owner,
        (ClassType) closedVariable.owner.type,
        closedVariable,
        (t, v) -> buildDescription(tree));
  }
private static String formAnonymousReason(Tree tree, TypeSymbol typeSymbol) {
return "This "
+ (tree instanceof LambdaExpressionTree ? "lambda" : "anonymous class")
+ " implements @Immutable interface '"
+ typeSymbol.getSimpleName()
+ "'";
}
  /** Builds the diagnostic for an anonymous class or lambda that fails the immutability check. */
  private Description.Builder describeAnonymous(Tree tree, Type superType, Violation info) {
    String message =
        format(
            "Class extends @Immutable type %s, but is not immutable: %s",
            superType, info.message());
    return buildDescription(tree).setMessage(message);
  }
// Strong behavioural subtyping
/**
* Check for classes with {@code @Immutable}, or that inherited it from a super
|
constructor
|
java
|
apache__camel
|
components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/multimap/HazelcastMultimapConsumer.java
|
{
"start": 1188,
"end": 1962
}
|
class ____ extends HazelcastDefaultConsumer {
private final MultiMap<Object, Object> cache;
private UUID listener;
public HazelcastMultimapConsumer(HazelcastInstance hazelcastInstance, Endpoint endpoint, Processor processor,
String cacheName) {
super(hazelcastInstance, endpoint, processor, cacheName);
cache = hazelcastInstance.getMultiMap(cacheName);
}
@Override
protected void doStart() throws Exception {
super.doStart();
listener = cache.addEntryListener(new CamelEntryListener(this, cacheName), true);
}
@Override
protected void doStop() throws Exception {
cache.removeEntryListener(listener);
super.doStop();
}
}
|
HazelcastMultimapConsumer
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultMessageHistoryFactory.java
|
{
"start": 1416,
"end": 3636
}
|
/**
 * Default {@code MessageHistoryFactory}: creates a {@code MessageHistory} entry per processed
 * node, optionally filtered by a comma-separated node-name pattern and optionally including a
 * copy of the message. (Class name restored from the corrupted {@code ____} placeholder.)
 */
class DefaultMessageHistoryFactory extends ServiceSupport implements MessageHistoryFactory {

    private CamelContext camelContext;
    private boolean copyMessage;
    private String nodePattern;
    // Pre-split pattern parts; volatile so concurrently processed exchanges observe updates.
    private volatile String[] nodePatternParts;

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    @Override
    public MessageHistory newMessageHistory(String routeId, NamedNode node, Exchange exchange) {
        // Read the volatile field once so the filter is applied consistently within this call.
        String[] parts = this.nodePatternParts;
        if (parts != null) {
            String name = node.getShortName();
            for (String part : parts) {
                // Every pattern part must match, otherwise no history is recorded for this node.
                if (!PatternHelper.matchPattern(name, part)) {
                    return null;
                }
            }
        }
        Message msg = null;
        if (copyMessage) {
            msg = exchange.getMessage().copy();
        }
        DefaultMessageHistory answer = new DefaultMessageHistory(routeId, node, msg);
        answer.setAcceptDebugger(node.acceptDebugger(exchange));
        return answer;
    }

    @ManagedAttribute(description = "Whether message history is enabled")
    public boolean isEnabled() {
        return camelContext != null ? camelContext.isMessageHistory() : false;
    }

    @Override
    @ManagedAttribute(description = "Whether a copy of the message is included in the message history")
    public boolean isCopyMessage() {
        return copyMessage;
    }

    @Override
    @ManagedAttribute(description = "Whether a copy of the message is included in the message history")
    public void setCopyMessage(boolean copyMessage) {
        this.copyMessage = copyMessage;
    }

    @Override
    @ManagedAttribute(description = "Pattern to filter EIPs")
    public String getNodePattern() {
        return nodePattern;
    }

    @Override
    @ManagedAttribute(description = "Pattern to filter EIPs")
    public void setNodePattern(String nodePattern) {
        this.nodePattern = nodePattern;
        // Fix: previously a null pattern left the old parts in place, so a stale filter kept
        // applying after the pattern was cleared. Clear the parts together with the pattern.
        this.nodePatternParts = nodePattern != null ? nodePattern.split(",") : null;
    }
}
|
DefaultMessageHistoryFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/IntIntBucketedSort.java
|
{
"start": 1207,
"end": 15611
}
|
/**
 * Bucketed top-N sort of {@code int} values with an {@code int} tie-breaker ("extra") value per
 * entry; each bucket retains its best {@code bucketSize} entries according to the sort order.
 * NOTE(review): this appears to belong to a generated "X-Y BucketedSort" family — confirm and
 * prefer editing the generating template over this file.
 */
class ____ implements Releasable {

    private final BigArrays bigArrays;
    private final SortOrder order;
    private final int bucketSize;
    /**
     * {@code true} if the bucket is in heap mode, {@code false} if
     * it is still gathering.
     */
    private final BitArray heapMode;
    /**
     * An array containing all the values on all buckets. The structure is as follows:
     * <p>
     * For each bucket, there are bucketSize elements, based on the bucket id (0, 1, 2...).
     * Then, for each bucket, it can be in 2 states:
     * </p>
     * <ul>
     * <li>
     * Gather mode: All buckets start in gather mode, and remain here while they have less than bucketSize elements.
     * In gather mode, the elements are stored in the array from the highest index to the lowest index.
     * The lowest index contains the offset to the next slot to be filled.
     * <p>
     * This allows us to insert elements in O(1) time.
     * </p>
     * <p>
     * When the bucketSize-th element is collected, the bucket transitions to heap mode, by heapifying its contents.
     * </p>
     * </li>
     * <li>
     * Heap mode: The bucket slots are organized as a min heap structure.
     * <p>
     * The root of the heap is the minimum value in the bucket,
     * which allows us to quickly discard new values that are not in the top N.
     * </p>
     * </li>
     * </ul>
     */
    private IntArray values;
    // Tie-breaker values, kept index-aligned with `values` at all times (see swap()).
    private IntArray extraValues;

    public IntIntBucketedSort(BigArrays bigArrays, SortOrder order, int bucketSize) {
        this.bigArrays = bigArrays;
        this.order = order;
        this.bucketSize = bucketSize;
        heapMode = new BitArray(0, bigArrays);
        // If either allocation fails, release whatever was already allocated.
        boolean success = false;
        try {
            values = bigArrays.newIntArray(0, false);
            extraValues = bigArrays.newIntArray(0, false);
            success = true;
        } finally {
            if (success == false) {
                close();
            }
        }
    }

    /**
     * Collects a {@code value} into a {@code bucket}.
     * <p>
     * It may or may not be inserted in the heap, depending on if it is better than the current root.
     * </p>
     */
    public void collect(int value, int extraValue, int bucket) {
        long rootIndex = (long) bucket * bucketSize;
        if (inHeapMode(bucket)) {
            // Heap mode: only accept the value if it beats the current worst (the heap root).
            if (betterThan(value, values.get(rootIndex), extraValue, extraValues.get(rootIndex))) {
                values.set(rootIndex, value);
                extraValues.set(rootIndex, extraValue);
                downHeap(rootIndex, 0, bucketSize);
            }
            return;
        }
        // Gathering mode
        long requiredSize = rootIndex + bucketSize;
        if (values.size() < requiredSize) {
            grow(bucket);
        }
        int next = getNextGatherOffset(rootIndex);
        assert 0 <= next && next < bucketSize
            : "Expected next to be in the range of valid buckets [0 <= " + next + " < " + bucketSize + "]";
        long index = next + rootIndex;
        values.set(index, value);
        extraValues.set(index, extraValue);
        if (next == 0) {
            // Bucket is full: switch it to heap mode.
            heapMode.set(bucket);
            heapify(rootIndex, bucketSize);
        } else {
            setNextGatherOffset(rootIndex, next - 1);
        }
    }

    /**
     * The order of the sort.
     */
    public SortOrder getOrder() {
        return order;
    }

    /**
     * The number of values to store per bucket.
     */
    public int getBucketSize() {
        return bucketSize;
    }

    /**
     * Get the first and last indexes (inclusive, exclusive) of the values for a bucket.
     * Returns [0, 0] if the bucket has never been collected.
     */
    private Tuple<Long, Long> getBucketValuesIndexes(int bucket) {
        long rootIndex = (long) bucket * bucketSize;
        if (rootIndex >= values.size()) {
            // We've never seen this bucket.
            return Tuple.tuple(0L, 0L);
        }
        long start = inHeapMode(bucket) ? rootIndex : (rootIndex + getNextGatherOffset(rootIndex) + 1);
        long end = rootIndex + bucketSize;
        return Tuple.tuple(start, end);
    }

    /**
     * Merge the values from {@code other}'s {@code otherGroupId} into {@code groupId}.
     */
    public void merge(int groupId, IntIntBucketedSort other, int otherGroupId) {
        var otherBounds = other.getBucketValuesIndexes(otherGroupId);
        // TODO: This can be improved for heapified buckets by making use of the heap structures
        for (long i = otherBounds.v1(); i < otherBounds.v2(); i++) {
            collect(other.values.get(i), other.extraValues.get(i), groupId);
        }
    }

    /**
     * Creates a block with the values from the {@code selected} groups.
     */
    public void toBlocks(BlockFactory blockFactory, Block[] blocks, int offset, IntVector selected) {
        // Check if the selected groups are all empty, to avoid allocating extra memory
        if (allSelectedGroupsAreEmpty(selected)) {
            Block constantNullBlock = blockFactory.newConstantNullBlock(selected.getPositionCount());
            // Both output slots share the same null block, hence the extra reference.
            constantNullBlock.incRef();
            blocks[offset] = constantNullBlock;
            blocks[offset + 1] = constantNullBlock;
            return;
        }
        try (
            var builder = blockFactory.newIntBlockBuilder(selected.getPositionCount());
            var extraBuilder = blockFactory.newIntBlockBuilder(selected.getPositionCount())
        ) {
            for (int s = 0; s < selected.getPositionCount(); s++) {
                int bucket = selected.getInt(s);
                var bounds = getBucketValuesIndexes(bucket);
                var rootIndex = bounds.v1();
                var size = bounds.v2() - bounds.v1();
                if (size == 0) {
                    builder.appendNull();
                    extraBuilder.appendNull();
                    continue;
                }
                if (size == 1) {
                    builder.appendInt(values.get(rootIndex));
                    extraBuilder.appendInt(extraValues.get(rootIndex));
                    continue;
                }
                // If we are in the gathering mode, we need to heapify before sorting.
                if (inHeapMode(bucket) == false) {
                    heapify(rootIndex, (int) size);
                }
                heapSort(rootIndex, (int) size);
                builder.beginPositionEntry();
                extraBuilder.beginPositionEntry();
                for (int i = 0; i < size; i++) {
                    builder.appendInt(values.get(rootIndex + i));
                    extraBuilder.appendInt(extraValues.get(rootIndex + i));
                }
                builder.endPositionEntry();
                extraBuilder.endPositionEntry();
            }
            blocks[offset] = builder.build();
            blocks[offset + 1] = extraBuilder.build();
        }
    }

    /**
     * Checks if the selected groups are all empty.
     */
    private boolean allSelectedGroupsAreEmpty(IntVector selected) {
        return IntStream.range(0, selected.getPositionCount()).map(selected::getInt).noneMatch(bucket -> {
            var bounds = this.getBucketValuesIndexes(bucket);
            var size = bounds.v2() - bounds.v1();
            return size > 0;
        });
    }

    /**
     * Is this bucket a min heap {@code true} or in gathering mode {@code false}?
     */
    private boolean inHeapMode(int bucket) {
        return heapMode.get(bucket);
    }

    /**
     * Get the next index that should be "gathered" for a bucket rooted
     * at {@code rootIndex}.
     */
    private int getNextGatherOffset(long rootIndex) {
        return values.get(rootIndex);
    }

    /**
     * Set the next index that should be "gathered" for a bucket rooted
     * at {@code rootIndex}.
     */
    private void setNextGatherOffset(long rootIndex, int offset) {
        values.set(rootIndex, offset);
    }

    /**
     * {@code true} if the entry at index {@code lhs} is "better" than
     * the entry at {@code rhs}. "Better" in this means "lower" for
     * {@link SortOrder#ASC} and "higher" for {@link SortOrder#DESC}.
     */
    private boolean betterThan(int lhs, int rhs, int lhsExtra, int rhsExtra) {
        int res = Integer.compare(lhs, rhs);
        if (res != 0) {
            return getOrder().reverseMul() * res < 0;
        }
        // Equal primary values: fall back to the extra value as tie-breaker.
        res = Integer.compare(lhsExtra, rhsExtra);
        return getOrder().reverseMul() * res < 0;
    }

    /**
     * Swap the data at two indices.
     */
    private void swap(long lhs, long rhs) {
        var tmp = values.get(lhs);
        values.set(lhs, values.get(rhs));
        values.set(rhs, tmp);
        var tmpExtra = extraValues.get(lhs);
        extraValues.set(lhs, extraValues.get(rhs));
        extraValues.set(rhs, tmpExtra);
    }

    /**
     * Allocate storage for more buckets and store the "next gather offset"
     * for those new buckets. We always grow the storage by whole bucket's
     * worth of slots at a time. We never allocate space for partial buckets.
     */
    private void grow(int bucket) {
        long oldMax = values.size();
        assert oldMax % bucketSize == 0;
        long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.INT_PAGE_SIZE, Integer.BYTES);
        // Round up to the next full bucket.
        newSize = (newSize + bucketSize - 1) / bucketSize;
        values = bigArrays.resize(values, newSize * bucketSize);
        // Keep extraValues sized in lockstep with values.
        extraValues = bigArrays.resize(extraValues, newSize * bucketSize);
        // Set the next gather offsets for all newly allocated buckets.
        fillGatherOffsets(oldMax);
    }

    /**
     * Maintain the "next gather offsets" for newly allocated buckets.
     */
    private void fillGatherOffsets(long startingAt) {
        int nextOffset = getBucketSize() - 1;
        for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
            setNextGatherOffset(bucketRoot, nextOffset);
        }
    }

    /**
     * Heapify a bucket whose entries are in random order.
     * <p>
     * This works by validating the heap property on each node, iterating
     * "upwards", pushing any out of order parents "down". Check out the
     * <a href="https://en.wikipedia.org/w/index.php?title=Binary_heap&oldid=940542991#Building_a_heap">wikipedia</a>
     * entry on binary heaps for more about this.
     * </p>
     * <p>
     * While this *looks* like it could easily be {@code O(n * log n)}, it is
     * a fairly well studied algorithm attributed to Floyd. There's
     * been a bunch of work that puts this at {@code O(n)}, close to 1.88n worst
     * case.
     * </p>
     * <ul>
     * <li>Hayward, Ryan; McDiarmid, Colin (1991).
     * <a href="https://web.archive.org/web/20160205023201/http://www.stats.ox.ac.uk/__data/assets/pdf_file/0015/4173/heapbuildjalg.pdf">
     * Average Case Analysis of Heap Building byRepeated Insertion</a> J. Algorithms.
     * <li>D.E. Knuth, ”The Art of Computer Programming, Vol. 3, Sorting and Searching”</li>
     * </ul>
     * @param rootIndex the index the start of the bucket
     */
    private void heapify(long rootIndex, int heapSize) {
        int maxParent = heapSize / 2 - 1;
        for (int parent = maxParent; parent >= 0; parent--) {
            downHeap(rootIndex, parent, heapSize);
        }
    }

    /**
     * Sorts all the values in the heap using heap sort algorithm.
     * This runs in {@code O(n log n)} time.
     * @param rootIndex index of the start of the bucket
     * @param heapSize Number of values that belong to the heap.
     *                 Can be less than bucketSize.
     *                 In such a case, the remaining values in range
     *                 (rootIndex + heapSize, rootIndex + bucketSize)
     *                 are *not* considered part of the heap.
     */
    private void heapSort(long rootIndex, int heapSize) {
        while (heapSize > 0) {
            swap(rootIndex, rootIndex + heapSize - 1);
            heapSize--;
            downHeap(rootIndex, 0, heapSize);
        }
    }

    /**
     * Correct the heap invariant of a parent and its children. This
     * runs in {@code O(log n)} time.
     * @param rootIndex index of the start of the bucket
     * @param parent Index within the bucket of the parent to check.
     *               For example, 0 is the "root".
     * @param heapSize Number of values that belong to the heap.
     *                 Can be less than bucketSize.
     *                 In such a case, the remaining values in range
     *                 (rootIndex + heapSize, rootIndex + bucketSize)
     *                 are *not* considered part of the heap.
     */
    private void downHeap(long rootIndex, int parent, int heapSize) {
        while (true) {
            long parentIndex = rootIndex + parent;
            int worst = parent;
            long worstIndex = parentIndex;
            int leftChild = parent * 2 + 1;
            long leftIndex = rootIndex + leftChild;
            if (leftChild < heapSize) {
                if (betterThan(values.get(worstIndex), values.get(leftIndex), extraValues.get(worstIndex), extraValues.get(leftIndex))) {
                    worst = leftChild;
                    worstIndex = leftIndex;
                }
                int rightChild = leftChild + 1;
                long rightIndex = rootIndex + rightChild;
                if (rightChild < heapSize
                    && betterThan(
                        values.get(worstIndex),
                        values.get(rightIndex),
                        extraValues.get(worstIndex),
                        extraValues.get(rightIndex)
                    )) {
                    worst = rightChild;
                    worstIndex = rightIndex;
                }
            }
            if (worst == parent) {
                break;
            }
            swap(worstIndex, parentIndex);
            parent = worst;
        }
    }

    @Override
    public final void close() {
        Releasables.close(values, extraValues, heapMode);
    }
}
|
IntIntBucketedSort
|
java
|
apache__camel
|
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyRouteSimpleDynamicURITest.java
|
{
"start": 1163,
"end": 2433
}
|
/**
 * Tests that a dynamic {@code toD} netty-http endpoint, whose host and path are resolved from
 * message headers, routes correctly between two local netty-http consumers.
 */
class ____ extends BaseNettyTest {

    // Second listen port; the first comes from the {{port}} property of the base test.
    @RegisterExtension
    AvailablePortFinder.Port port2 = AvailablePortFinder.find();

    @Test
    public void testHttpSimple() throws Exception {
        getMockEndpoint("mock:input1").expectedBodiesReceived("Hello World");
        getMockEndpoint("mock:input2").expectedBodiesReceived("Hello World");
        String out = template.requestBody("netty-http:http://localhost:{{port}}/foo", "Hello World", String.class);
        assertEquals("Bye World", out);
        MockEndpoint.assertIsSatisfied(context);
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // First consumer forwards to a dynamic endpoint built from the "host"/"id" headers.
                from("netty-http:http://0.0.0.0:{{port}}/foo")
                        .to("mock:input1")
                        .setHeader("id", constant("bar"))
                        .setHeader("host", constant("localhost"))
                        .toD("netty-http:http://${header.host}:" + port2 + "/${header.id}");
                // Second consumer receives the dynamically routed request and replies.
                from("netty-http:http://0.0.0.0:" + port2 + "/bar")
                        .to("mock:input2")
                        .transform().constant("Bye World");
            }
        };
    }
}
|
NettyRouteSimpleDynamicURITest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
|
{
"start": 3042,
"end": 13257
}
|
enum ____ to prevent repeated allocation of new
// arrays during deserialization.
private static final AclEntryType[] ACL_ENTRY_TYPES = AclEntryType.values();
private static final AclEntryScope[] ACL_ENTRY_SCOPES = AclEntryScope.values();
private static final FsAction[] FS_ACTIONS = FsAction.values();
private List<AclEntry> aclEntries;
private Map<String, byte[]> xAttrs;
// <chunkOffset, chunkLength> represents the offset and length of a file
// chunk in number of bytes.
// used when splitting a large file to chunks to copy in parallel.
// If a file is not large enough to split, chunkOffset would be 0 and
// chunkLength would be the length of the file.
private long chunkOffset = 0;
private long chunkLength = Long.MAX_VALUE;
/**
 * Default constructor. Creates an empty, zero-length, non-directory status;
 * permission/owner/group receive safe defaults from the main constructor.
 */
public CopyListingFileStatus() {
    this(0, false, 0, 0, 0, 0, null, null, null, null);
}
/**
 * Creates a new CopyListingFileStatus by copying the members of the given
 * FileStatus.
 *
 * @param fileStatus FileStatus to copy
 */
public CopyListingFileStatus(FileStatus fileStatus) {
    this(fileStatus.getLen(), fileStatus.isDirectory(),
        fileStatus.getReplication(), fileStatus.getBlockSize(),
        fileStatus.getModificationTime(), fileStatus.getAccessTime(),
        fileStatus.getPermission(), fileStatus.getOwner(),
        fileStatus.getGroup(),
        fileStatus.getPath());
}
/**
 * Copies the given FileStatus and additionally records the chunk of the file
 * this status represents (used when a large file is split for parallel copy).
 *
 * @param fileStatus FileStatus to copy
 * @param chunkOffset byte offset of this chunk within the file
 * @param chunkLength length of this chunk in bytes
 */
public CopyListingFileStatus(FileStatus fileStatus,
    long chunkOffset, long chunkLength) {
    this(fileStatus.getLen(), fileStatus.isDirectory(),
        fileStatus.getReplication(), fileStatus.getBlockSize(),
        fileStatus.getModificationTime(), fileStatus.getAccessTime(),
        fileStatus.getPermission(), fileStatus.getOwner(),
        fileStatus.getGroup(),
        fileStatus.getPath());
    this.chunkOffset = chunkOffset;
    this.chunkLength = chunkLength;
}
/**
 * Full constructor without chunk information; the chunk defaults to
 * (0, Long.MAX_VALUE), i.e. "the whole file".
 */
@SuppressWarnings("checkstyle:parameternumber")
public CopyListingFileStatus(long length, boolean isdir,
    int blockReplication, long blocksize, long modificationTime,
    long accessTime, FsPermission permission, String owner, String group,
    Path path) {
    this(length, isdir, blockReplication, blocksize, modificationTime,
        accessTime, permission, owner, group, path, 0, Long.MAX_VALUE);
}
/**
 * Full constructor. Null permission/owner/group are replaced with defaults
 * so the object is always fully populated.
 */
@SuppressWarnings("checkstyle:parameternumber")
public CopyListingFileStatus(long length, boolean isdir,
    int blockReplication, long blocksize, long modificationTime,
    long accessTime, FsPermission permission, String owner, String group,
    Path path, long chunkOffset, long chunkLength) {
    this.length = length;
    this.isdir = isdir;
    this.blockReplication = (short)blockReplication;
    this.blocksize = blocksize;
    this.modificationTime = modificationTime;
    this.accessTime = accessTime;
    if (permission != null) {
        this.permission = permission;
    } else {
        // Fall back to HDFS default permissions for the entry type.
        this.permission = isdir
            ? FsPermission.getDirDefault()
            : FsPermission.getFileDefault();
    }
    this.owner = (owner == null) ? "" : owner;
    this.group = (group == null) ? "" : group;
    this.path = path;
    this.chunkOffset = chunkOffset;
    this.chunkLength = chunkLength;
}
/**
 * Copy constructor. The Path is re-created from its URI so the copy does not
 * share the original's Path instance.
 */
public CopyListingFileStatus(CopyListingFileStatus other) {
    this.length = other.length;
    this.isdir = other.isdir;
    this.blockReplication = other.blockReplication;
    this.blocksize = other.blocksize;
    this.modificationTime = other.modificationTime;
    this.accessTime = other.accessTime;
    this.permission = other.permission;
    this.owner = other.owner;
    this.group = other.group;
    this.path = new Path(other.path.toUri());
    this.chunkOffset = other.chunkOffset;
    this.chunkLength = other.chunkLength;
}
// --- Plain accessors mirroring the FileStatus fields captured above ---
public Path getPath() {
    return path;
}
public long getLen() {
    return length;
}
public long getBlockSize() {
    return blocksize;
}
public boolean isDirectory() {
    return isdir;
}
public short getReplication() {
    return blockReplication;
}
public long getModificationTime() {
    return modificationTime;
}
public String getOwner() {
    return owner;
}
public String getGroup() {
    return group;
}
public long getAccessTime() {
    return accessTime;
}
public FsPermission getPermission() {
    return permission;
}
/** @return true if the permission bits carry the erasure-coded flag. */
public boolean isErasureCoded() {
    return getPermission().getErasureCodedBit();
}
/**
 * Returns the full logical ACL: the permission-derived base entries combined
 * with any explicitly set extended entries.
 *
 * @return List containing full logical ACL
 */
public List<AclEntry> getAclEntries() {
    return AclUtil.getAclFromPermAndEntries(getPermission(),
        aclEntries != null ? aclEntries : Collections.<AclEntry>emptyList());
}
/**
 * Sets optional ACL entries.
 *
 * @param aclEntries List containing all ACL entries
 */
public void setAclEntries(List<AclEntry> aclEntries) {
    this.aclEntries = aclEntries;
}
/**
 * Returns all xAttrs, or an empty map if none were set.
 *
 * @return Map containing all xAttrs
 */
public Map<String, byte[]> getXAttrs() {
    return xAttrs != null ? xAttrs : Collections.<String, byte[]>emptyMap();
}
/**
 * Sets optional xAttrs.
 *
 * @param xAttrs Map containing all xAttrs
 */
public void setXAttrs(Map<String, byte[]> xAttrs) {
    this.xAttrs = xAttrs;
}
/** @return byte offset of this chunk within the file (0 if unsplit). */
public long getChunkOffset() {
    return chunkOffset;
}
public void setChunkOffset(long offset) {
    this.chunkOffset = offset;
}
/** @return chunk length in bytes; Long.MAX_VALUE means "whole file". */
public long getChunkLength() {
    return chunkLength;
}
public void setChunkLength(long chunkLength) {
    this.chunkLength = chunkLength;
}
/**
 * @return true if this status represents a proper sub-range of a file,
 * i.e. the chunk length is set and does not cover the entire file.
 */
public boolean isSplit() {
    return getChunkLength() != Long.MAX_VALUE &&
        getChunkLength() != getLen();
}
/** @return the number of bytes to copy: the chunk size if split, else the file length. */
public long getSizeToCopy() {
    return isSplit()? getChunkLength() : getLen();
}
@Override
public void write(DataOutput out) throws IOException {
    // Serialization format: path, len, isdir, replication, blocksize,
    // mtime, atime, permission, owner, group, ACLs, xAttrs, chunk info.
    // readFields() must deserialize in exactly this order.
    Text.writeString(out, getPath().toString(), Text.DEFAULT_MAX_LEN);
    out.writeLong(getLen());
    out.writeBoolean(isDirectory());
    out.writeShort(getReplication());
    out.writeLong(getBlockSize());
    out.writeLong(getModificationTime());
    out.writeLong(getAccessTime());
    out.writeShort(getPermission().toShort());
    Text.writeString(out, getOwner(), Text.DEFAULT_MAX_LEN);
    Text.writeString(out, getGroup(), Text.DEFAULT_MAX_LEN);
    if (aclEntries != null) {
        // byte is sufficient, because 32 ACL entries is the max enforced by HDFS.
        out.writeByte(aclEntries.size());
        for (AclEntry entry: aclEntries) {
            // Scope/type/permission are encoded by enum ordinal; see the
            // cached *_VALUES arrays used for decoding.
            out.writeByte(entry.getScope().ordinal());
            out.writeByte(entry.getType().ordinal());
            WritableUtils.writeString(out, entry.getName());
            out.writeByte(entry.getPermission().ordinal());
        }
    } else {
        // Sentinel marking "no ACL entries present".
        out.writeByte(NO_ACL_ENTRIES);
    }
    if (xAttrs != null) {
        out.writeInt(xAttrs.size());
        Iterator<Entry<String, byte[]>> iter = xAttrs.entrySet().iterator();
        while (iter.hasNext()) {
            Entry<String, byte[]> entry = iter.next();
            WritableUtils.writeString(out, entry.getKey());
            final byte[] value = entry.getValue();
            if (value != null) {
                out.writeInt(value.length);
                if (value.length > 0) {
                    out.write(value);
                }
            } else {
                // -1 length distinguishes a null value from an empty one.
                out.writeInt(-1);
            }
        }
    } else {
        // Sentinel marking "no xAttrs present".
        out.writeInt(NO_XATTRS);
    }
    out.writeLong(chunkOffset);
    out.writeLong(chunkLength);
}
@Override
public void readFields(DataInput in) throws IOException {
    // Deserialization: must mirror the field order written by write().
    String strPath = Text.readString(in, Text.DEFAULT_MAX_LEN);
    this.path = new Path(strPath);
    this.length = in.readLong();
    this.isdir = in.readBoolean();
    this.blockReplication = in.readShort();
    blocksize = in.readLong();
    modificationTime = in.readLong();
    accessTime = in.readLong();
    permission.fromShort(in.readShort());
    owner = Text.readString(in, Text.DEFAULT_MAX_LEN);
    group = Text.readString(in, Text.DEFAULT_MAX_LEN);
    byte aclEntriesSize = in.readByte();
    if (aclEntriesSize != NO_ACL_ENTRIES) {
        aclEntries = Lists.newArrayListWithCapacity(aclEntriesSize);
        for (int i = 0; i < aclEntriesSize; ++i) {
            // Ordinals are decoded through the cached enum-values arrays to
            // avoid allocating values() on every entry.
            aclEntries.add(new AclEntry.Builder()
                .setScope(ACL_ENTRY_SCOPES[in.readByte()])
                .setType(ACL_ENTRY_TYPES[in.readByte()])
                .setName(WritableUtils.readString(in))
                .setPermission(FS_ACTIONS[in.readByte()])
                .build());
        }
    } else {
        aclEntries = null;
    }
    int xAttrsSize = in.readInt();
    if (xAttrsSize != NO_XATTRS) {
        xAttrs = Maps.newHashMap();
        for (int i = 0; i < xAttrsSize; ++i) {
            final String name = WritableUtils.readString(in);
            final int valueLen = in.readInt();
            byte[] value = null;
            // -1 encodes a null value; 0 encodes an empty array.
            if (valueLen > -1) {
                value = new byte[valueLen];
                if (valueLen > 0) {
                    in.readFully(value);
                }
            }
            xAttrs.put(name, value);
        }
    } else {
        xAttrs = null;
    }
    chunkOffset = in.readLong();
    chunkLength = in.readLong();
}
@Override
public boolean equals(Object o) {
    if (null == o) {
        return false;
    }
    // Exact-class comparison (not instanceof), so subclasses never compare
    // equal to this class.
    if (getClass() != o.getClass()) {
        return false;
    }
    CopyListingFileStatus other = (CopyListingFileStatus)o;
    // Only path, ACL entries and xAttrs participate in equality; length,
    // times etc. are excluded. NOTE(review): presumably intentional for the
    // distcp listing comparison -- confirm against callers before changing.
    return getPath().equals(other.getPath())
        && Objects.equal(aclEntries, other.aclEntries)
        && Objects.equal(xAttrs, other.xAttrs);
}
@Override
public int hashCode() {
    // Hash exactly the fields that equals() compares (path, aclEntries,
    // xAttrs). The previous implementation mixed in super.hashCode() --
    // Object's identity hash, since this class does not inherit a
    // value-based hashCode -- which made equal instances hash differently
    // and broke the equals/hashCode contract for hash-based collections.
    return Objects.hashCode(getPath(), aclEntries, xAttrs);
}
@Override
public String toString() {
    // Extend the inherited representation with the distcp-specific fields;
    // chunk info is only shown when the file is actually split.
    StringBuilder sb = new StringBuilder(super.toString());
    sb.append('{');
    sb.append(this.getPath() == null ? "" : this.getPath().toString())
        .append(" length = ").append(this.getLen())
        .append(" aclEntries = ").append(aclEntries)
        .append(", xAttrs = ").append(xAttrs)
        .append(", modTime = ").append(modificationTime);
    if (isSplit()) {
        sb.append(", chunkOffset = ").append(this.getChunkOffset())
            .append(", chunkLength = ").append(this.getChunkLength());
    }
    sb.append('}');
    return sb.toString();
}
}
|
values
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/targetclass/mixed/AroundInvokeOnTargetClassAndOutsideAndManySuperclassesWithOverridesTest.java
|
{
"start": 1698,
"end": 2086
}
|
class ____ extends Bravo {
    // Target-class interceptor method: prefixes the result of proceeding
    // down the remaining interceptor chain.
    @AroundInvoke
    Object superIntercept(InvocationContext ctx) throws Exception {
        return "super-target: " + ctx.proceed();
    }
    // Overrides the superclass method WITHOUT re-declaring @AroundInvoke,
    // so (per interceptor inheritance rules) it should no longer act as an
    // interceptor method -- the test relies on this.
    @Override
    Object specialIntercept(InvocationContext ctx) {
        return "this is not an interceptor method";
    }
}
@Singleton
@MyInterceptorBinding
static
|
Charlie
|
java
|
apache__camel
|
components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsTemporaryTopicEndpoint.java
|
{
"start": 1224,
"end": 2486
}
|
class ____ extends JmsEndpoint implements DestinationEndpoint {
    // Lazily created temporary topic; guarded by the inherited lock.
    private Destination jmsDestination;
    public JmsTemporaryTopicEndpoint(String uri, JmsComponent component, String destination, JmsConfiguration configuration) {
        super(uri, component, destination, true, configuration);
    }
    public JmsTemporaryTopicEndpoint(String endpointUri, String destination) {
        super(endpointUri, destination);
        setDestinationType("temp-topic");
    }
    /**
     * This endpoint is a singleton so that the temporary destination instances are shared across all producers and
     * consumers of the same endpoint URI
     *
     * @return true
     */
    @Override
    public boolean isSingleton() {
        return true;
    }
    @Override
    public Destination getJmsDestination(Session session) throws JMSException {
        // Create the temporary topic once and reuse it; the lock makes the
        // lazy initialization safe when multiple threads resolve it.
        lock.lock();
        try {
            if (jmsDestination == null) {
                jmsDestination = createJmsDestination(session);
            }
            return jmsDestination;
        } finally {
            lock.unlock();
        }
    }
    // Overridable factory hook: the temporary-queue sibling endpoint uses
    // the same shape with a different destination type.
    protected Destination createJmsDestination(Session session) throws JMSException {
        return session.createTemporaryTopic();
    }
}
|
JmsTemporaryTopicEndpoint
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Classpath.java
|
{
"start": 2159,
"end": 4621
}
|
class ____ {
    private static final String usage =
        "classpath [--glob|--jar <path>|-h|--help] :\n"
        + "  Prints the classpath needed to get the Hadoop jar and the required\n"
        + "  libraries.\n"
        + "  Options:\n"
        + "\n"
        + "  --glob       expand wildcards\n"
        + "  --jar <path> write classpath as manifest in jar named <path>\n"
        + "  -h, --help   print help\n";
    /**
     * Main entry point. Prints the JVM classpath, optionally writing it as a
     * Class-Path manifest into a jar (useful when the classpath exceeds OS
     * command-line length limits).
     *
     * @param args command-line arguments
     */
    public static void main(String[] args) {
        if (args.length < 1 || args[0].equals("-h") || args[0].equals("--help")) {
            System.out.println(usage);
            return;
        }
        // Copy args, because CommandFormat mutates the list.
        List<String> argsList = new ArrayList<String>(Arrays.asList(args));
        CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "-glob", "-jar");
        try {
            cf.parse(argsList);
        } catch (UnknownOptionException e) {
            terminate(1, "unrecognized option");
            return;
        }
        String classPath = System.getProperty("java.class.path");
        if (cf.getOpt("-glob")) {
            // The classpath returned from the property has been globbed already.
            System.out.println(classPath);
        } else if (cf.getOpt("-jar")) {
            if (argsList.isEmpty() || argsList.get(0) == null ||
                argsList.get(0).isEmpty()) {
                terminate(1, "-jar option requires path of jar file to write");
                return;
            }
            // Write the classpath into the manifest of a temporary jar file.
            Path workingDir = new Path(System.getProperty("user.dir"));
            final String tmpJarPath;
            try {
                tmpJarPath = FileUtil.createJarWithClassPath(classPath, workingDir,
                    System.getenv())[0];
            } catch (IOException e) {
                terminate(1, "I/O error creating jar: " + e.getMessage());
                return;
            }
            // Rename the temporary file to its final location.
            String jarPath = argsList.get(0);
            try {
                FileUtil.replaceFile(new File(tmpJarPath), new File(jarPath));
            } catch (IOException e) {
                terminate(1, "I/O error renaming jar temporary file to path: " +
                    e.getMessage());
                return;
            }
        }
    }
    /**
     * Prints a message to stderr and exits with a status code.
     *
     * @param status exit code
     * @param msg message
     */
    private static void terminate(int status, String msg) {
        System.err.println(msg);
        ExitUtil.terminate(status, msg);
    }
}
|
Classpath
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldNotHaveThrownExcept.java
|
{
"start": 720,
"end": 1327
}
|
class ____ extends BasicErrorMessageFactory {
    /**
     * Creates the error message shown when code threw a throwable that is not
     * among the accepted types.
     *
     * @param throwable the throwable that was actually caught
     * @param throwableClasses the throwable types that were allowed
     * @return the error message factory
     */
    @SafeVarargs
    public static ErrorMessageFactory shouldNotHaveThrownExcept(Throwable throwable,
                                                                Class<? extends Throwable>... throwableClasses) {
        return new ShouldNotHaveThrownExcept(throwable, throwableClasses);
    }
    // The caught throwable is rendered as its full stack trace for context.
    private ShouldNotHaveThrownExcept(Throwable throwable, Class<? extends Throwable>[] throwableClasses) {
        super("%nExpecting code not to raise a throwable except%n  %s%nbut caught%n  %s",
              throwableClasses, getStackTrace(throwable));
    }
}
|
ShouldNotHaveThrownExcept
|
java
|
apache__camel
|
components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/IncomingAudio.java
|
{
"start": 1076,
"end": 2993
}
|
class ____ {

    private static final long serialVersionUID = 2716544815581270395L;

    /** Telegram file identifier used to download the audio. */
    @JsonProperty("file_id")
    private String fileId;

    /** Duration of the audio in seconds as reported by Telegram. */
    @JsonProperty("duration")
    private Integer durationSeconds;

    private String performer;

    private String title;

    @JsonProperty("mime_type")
    private String mimeType;

    @JsonProperty("file_size")
    private Long fileSize;

    public IncomingAudio() {
    }

    // Plain bean accessors for Jackson (de)serialization.
    public String getFileId() { return fileId; }

    public void setFileId(String fileId) { this.fileId = fileId; }

    public Integer getDurationSeconds() { return durationSeconds; }

    public void setDurationSeconds(Integer durationSeconds) { this.durationSeconds = durationSeconds; }

    public String getPerformer() { return performer; }

    public void setPerformer(String performer) { this.performer = performer; }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getMimeType() { return mimeType; }

    public void setMimeType(String mimeType) { this.mimeType = mimeType; }

    public Long getFileSize() { return fileSize; }

    public void setFileSize(Long fileSize) { this.fileSize = fileSize; }

    /** Debug-friendly rendering of every field; format matches the original. */
    @Override
    public String toString() {
        return "IncomingAudio{"
               + "fileId='" + fileId + '\''
               + ", durationSeconds=" + durationSeconds
               + ", performer='" + performer + '\''
               + ", title='" + title + '\''
               + ", mimeType='" + mimeType + '\''
               + ", fileSize=" + fileSize
               + '}';
    }
}
|
IncomingAudio
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/suppress/testdata/UnsuppressiblePositiveCases.java
|
{
"start": 804,
"end": 928
}
|
class ____ {
    /**
     * Positive test case: {@code @SuppressWarnings("MyChecker")} must not
     * silence an unsuppressible check. (The redundant trailing {@code return;}
     * in the void method body was removed; it had no effect.)
     */
    @SuppressWarnings("MyChecker")
    public void testUnsuppressible() {
    }
}
|
UnsuppressiblePositiveCases
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e1/b/specjmapid/lazy/CustomerInventoryTwo.java
|
{
"start": 1090,
"end": 3123
}
|
class ____ implements Serializable,
        Comparator<CustomerInventoryTwo> {
    // Table-generator-backed surrogate id; part of a composite @Id with custId.
    @Id
    @TableGenerator(name = "inventory", table = "U_SEQUENCES", pkColumnName = "S_ID", valueColumnName = "S_NEXTNUM", pkColumnValue = "inventory", allocationSize = 1000)
    @GeneratedValue(strategy = GenerationType.TABLE, generator = "inventory")
    @Column(name = "CI_ID")
    private Integer id;
    // Derived identity: maps the same column as the customer association,
    // hence insertable/updatable = false.
    @Id
    @Column(name = "CI_CUSTOMERID", insertable = false, updatable = false)
    private int custId;
    @ManyToOne(cascade = CascadeType.MERGE)
    @JoinColumn(name = "CI_CUSTOMERID", nullable = false)
    private CustomerTwo customer;
    @ManyToOne(cascade = CascadeType.MERGE)
    @JoinColumn(name = "CI_ITEMID")
    private Item vehicle;
    @Column(name = "CI_VALUE")
    private BigDecimal totalCost;
    @Column(name = "CI_QUANTITY")
    private int quantity;
    @Version
    @Column(name = "CI_VERSION")
    private int version;
    // No-arg constructor required by JPA.
    protected CustomerInventoryTwo() {
    }
    CustomerInventoryTwo(CustomerTwo customer, Item vehicle, int quantity,
                         BigDecimal totalValue) {
        this.customer = customer;
        this.vehicle = vehicle;
        this.quantity = quantity;
        this.totalCost = totalValue;
    }
    public Item getVehicle() {
        return vehicle;
    }
    public BigDecimal getTotalCost() {
        return totalCost;
    }
    public int getQuantity() {
        return quantity;
    }
    public Integer getId() {
        return id;
    }
    public CustomerTwo getCustomer() {
        return customer;
    }
    public int getCustId() {
        return custId;
    }
    public int getVersion() {
        return version;
    }
    // Comparator implementation: orders by surrogate id.
    public int compare(CustomerInventoryTwo cdb1, CustomerInventoryTwo cdb2) {
        return cdb1.id.compareTo(cdb2.id);
    }
    // NOTE(review): equals is overridden without hashCode -- equal instances
    // fall back to identity hashes, which breaks hash-based collections.
    // Also, the boxed `this.id == other.id` below is a reference comparison
    // on Integer; it only short-circuits the both-null / same-instance cases
    // and the value comparison is done by the final equals() call.
    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || !(obj instanceof CustomerInventoryTwo)) {
            return false;
        }
        if (this.id == ((CustomerInventoryTwo) obj).id) {
            return true;
        }
        if (this.id != null && ((CustomerInventoryTwo) obj).id == null) {
            return false;
        }
        if (this.id == null && ((CustomerInventoryTwo) obj).id != null) {
            return false;
        }
        return this.id.equals(((CustomerInventoryTwo) obj).id);
    }
}
|
CustomerInventoryTwo
|
java
|
apache__dubbo
|
dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/state/TailStateRouter.java
|
{
"start": 1125,
"end": 2182
}
|
class ____<T> implements StateRouter<T> {
    // Shared stateless terminal router. NOTE(review): the field uses the raw
    // type; the unchecked cast in getInstance() is safe only because the
    // router carries no T-typed state.
    private static final TailStateRouter INSTANCE = new TailStateRouter();
    @SuppressWarnings("unchecked")
    public static <T> TailStateRouter<T> getInstance() {
        return INSTANCE;
    }
    private TailStateRouter() {}
    // Terminal node: there is never a next router after the tail.
    @Override
    public void setNextRouter(StateRouter<T> nextRouter) {}
    @Override
    public URL getUrl() {
        return null;
    }
    // Pass-through: the tail performs no filtering of invokers.
    @Override
    public BitList<Invoker<T>> route(
            BitList<Invoker<T>> invokers,
            URL url,
            Invocation invocation,
            boolean needToPrintMessage,
            Holder<RouterSnapshotNode<T>> nodeHolder)
            throws RpcException {
        return invokers;
    }
    @Override
    public boolean isRuntime() {
        return false;
    }
    @Override
    public boolean isForce() {
        return false;
    }
    // No state to refresh on invoker-list changes.
    @Override
    public void notify(BitList<Invoker<T>> invokers) {}
    @Override
    public String buildSnapshot() {
        return "TailStateRouter End";
    }
}
|
TailStateRouter
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/UnpooledSlicedByteBuf.java
|
{
"start": 823,
"end": 3322
}
|
class ____ extends AbstractUnpooledSlicedByteBuf {
    // A slice view over an AbstractByteBuf: every accessor translates the
    // slice-relative index through idx() and delegates to the wrapped buffer.
    UnpooledSlicedByteBuf(AbstractByteBuf buffer, int index, int length) {
        super(buffer, index, length);
    }
    // A slice has a fixed extent: its capacity equals its max capacity.
    @Override
    public int capacity() {
        return maxCapacity();
    }
    @Override
    public AbstractByteBuf unwrap() {
        return (AbstractByteBuf) super.unwrap();
    }
    @Override
    protected byte _getByte(int index) {
        return unwrap()._getByte(idx(index));
    }
    @Override
    protected short _getShort(int index) {
        return unwrap()._getShort(idx(index));
    }
    @Override
    protected short _getShortLE(int index) {
        return unwrap()._getShortLE(idx(index));
    }
    @Override
    protected int _getUnsignedMedium(int index) {
        return unwrap()._getUnsignedMedium(idx(index));
    }
    @Override
    protected int _getUnsignedMediumLE(int index) {
        return unwrap()._getUnsignedMediumLE(idx(index));
    }
    @Override
    protected int _getInt(int index) {
        return unwrap()._getInt(idx(index));
    }
    @Override
    protected int _getIntLE(int index) {
        return unwrap()._getIntLE(idx(index));
    }
    @Override
    protected long _getLong(int index) {
        return unwrap()._getLong(idx(index));
    }
    @Override
    protected long _getLongLE(int index) {
        return unwrap()._getLongLE(idx(index));
    }
    @Override
    protected void _setByte(int index, int value) {
        unwrap()._setByte(idx(index), value);
    }
    @Override
    protected void _setShort(int index, int value) {
        unwrap()._setShort(idx(index), value);
    }
    @Override
    protected void _setShortLE(int index, int value) {
        unwrap()._setShortLE(idx(index), value);
    }
    @Override
    protected void _setMedium(int index, int value) {
        unwrap()._setMedium(idx(index), value);
    }
    @Override
    protected void _setMediumLE(int index, int value) {
        unwrap()._setMediumLE(idx(index), value);
    }
    @Override
    protected void _setInt(int index, int value) {
        unwrap()._setInt(idx(index), value);
    }
    @Override
    protected void _setIntLE(int index, int value) {
        unwrap()._setIntLE(idx(index), value);
    }
    @Override
    protected void _setLong(int index, long value) {
        unwrap()._setLong(idx(index), value);
    }
    @Override
    protected void _setLongLE(int index, long value) {
        unwrap()._setLongLE(idx(index), value);
    }
}
|
UnpooledSlicedByteBuf
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/proxy/pojo/BasicLazyInitializer.java
|
{
"start": 565,
"end": 3716
}
|
class ____ extends AbstractLazyInitializer {
    // Marker returned by invoke() when the call must be forwarded to the
    // real (lazily loaded) entity implementation.
    protected static final Object INVOKE_IMPLEMENTATION = new MarkerObject( "INVOKE_IMPLEMENTATION" );
    protected final Class<?> persistentClass;
    protected final Method getIdentifierMethod;
    protected final Method setIdentifierMethod;
    protected final boolean overridesEquals;
    protected final CompositeType componentIdType;
    // Cached serializable replacement, created at most once per proxy.
    private Object replacement;
    protected BasicLazyInitializer(
            String entityName,
            Class<?> persistentClass,
            Object id,
            Method getIdentifierMethod,
            Method setIdentifierMethod,
            CompositeType componentIdType,
            SharedSessionContractImplementor session,
            boolean overridesEquals) {
        super( entityName, id, session );
        this.persistentClass = persistentClass;
        this.getIdentifierMethod = getIdentifierMethod;
        this.setIdentifierMethod = setIdentifierMethod;
        this.componentIdType = componentIdType;
        this.overridesEquals = overridesEquals;
    }
    protected abstract Object serializableProxy();
    /**
     * Handles a method call intercepted on the proxy. Calls that can be
     * answered without initializing the entity (identity hashCode/equals,
     * identifier access, serialization replacement) are answered here;
     * everything else returns INVOKE_IMPLEMENTATION so the caller forwards
     * to the real entity.
     */
    protected final Object invoke(Method method, Object[] args, Object proxy) throws Throwable {
        final String methodName = method.getName();
        switch ( args.length ) {
            case 0:
                if ( "writeReplace".equals( methodName ) ) {
                    // Java serialization hook: replace the proxy with a
                    // serializable stand-in (or the target if loaded).
                    return getReplacement();
                }
                else if ( !overridesEquals && "hashCode".equals( methodName ) ) {
                    // Entity relies on identity hashCode: no need to initialize.
                    return identityHashCode( proxy );
                }
                else if ( isUninitialized() && method.equals( getIdentifierMethod ) ) {
                    // Identifier is known without loading the entity.
                    return getIdentifier();
                }
                else if ( "getHibernateLazyInitializer".equals( methodName ) ) {
                    return this;
                }
                break;
            case 1:
                if ( !overridesEquals && "equals".equals( methodName ) ) {
                    // Identity equality: compare against the proxy itself.
                    return args[0] == proxy;
                }
                else if ( method.equals( setIdentifierMethod ) ) {
                    // Setting the id forces initialization so target and
                    // proxy stay consistent.
                    initialize();
                    setIdentifier( args[0] );
                    return INVOKE_IMPLEMENTATION;
                }
                break;
        }
        //if it is a property of an embedded component, invoke on the "identifier"
        if ( componentIdType != null && componentIdType.isMethodOf( method ) ) {
            return method.invoke( getIdentifier(), args );
        }
        // otherwise:
        return INVOKE_IMPLEMENTATION;
    }
    private Object getReplacement() {
        // If the target has already been loaded somewhere, just not
        // set on the proxy, then use it to initialize the proxy so
        // that we will serialize that instead of the proxy.
        initializeWithoutLoadIfPossible();
        if ( isUninitialized() ) {
            if ( replacement == null ) {
                prepareForPossibleLoadingOutsideTransaction();
                replacement = serializableProxy();
            }
            return replacement;
        }
        else {
            return getTarget();
        }
    }
    @Override
    public final Class<?> getPersistentClass() {
        return persistentClass;
    }
    @Override
    public Class<?> getImplementationClass() {
        if ( !isUninitialized() ) {
            return getImplementation().getClass();
        }
        else if ( getSession() == null ) {
            throw new LazyInitializationException( "could not retrieve real entity class ["
                    + getEntityName() + "#" + getInternalIdentifier() + "] - no Session" );
        }
        else {
            // Only load the entity to discover its class when subclasses
            // exist; otherwise the mapped class is the answer.
            return getEntityDescriptor().hasSubclasses()
                    ? getImplementation().getClass()
                    : persistentClass;
        }
    }
}
|
BasicLazyInitializer
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/errors/WorkerErrantRecordReporter.java
|
{
"start": 7633,
"end": 8746
}
|
class ____ implements Future<Void> {
    // One future per underlying producer send; the aggregate is done only
    // when all of them are.
    private final List<Future<RecordMetadata>> futures;
    public ErrantRecordFuture(List<Future<RecordMetadata>> producerFutures) {
        futures = producerFutures;
    }
    /** Reporting an errant record cannot be cancelled once started. */
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        throw new UnsupportedOperationException("Reporting an errant record cannot be cancelled.");
    }
    @Override
    public boolean isCancelled() {
        return false;
    }
    @Override
    public boolean isDone() {
        return futures.stream().allMatch(Future::isDone);
    }
    /** Blocks until every underlying send completes; always returns null. */
    @Override
    public Void get() throws InterruptedException, ExecutionException {
        for (Future<RecordMetadata> future: futures) {
            future.get();
        }
        return null;
    }
    /**
     * Blocks until every underlying send completes or the timeout elapses.
     * Note: the timeout is applied per underlying future, not in aggregate.
     */
    @Override
    public Void get(long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
        for (Future<RecordMetadata> future: futures) {
            future.get(timeout, unit);
        }
        return null;
    }
}
}
|
ErrantRecordFuture
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/RouterAsyncRpcClient.java
|
{
"start": 10351,
"end": 28626
}
|
class ____ the method to be invoked.
* @param method The method to be invoked on the NameNodes.
* @param params The parameters for the method invocation.
*/
private void invokeMethodAsync(
    final UserGroupInformation ugi,
    final List<FederationNamenodeContext> namenodes,
    boolean useObserver,
    final Class<?> protocol, final Method method, final Object... params) {
  addClientInfoToCallerContext(ugi);
  if (rpcMonitor != null) {
    rpcMonitor.proxyOp();
  }
  // Tracks completion and whether observer reads are still allowed for
  // this attempt sequence.
  final ExecutionStatus status = new ExecutionStatus(false, useObserver);
  // Exceptions collected per namenode, in attempt order.
  Map<FederationNamenodeContext, IOException> ioes = new LinkedHashMap<>();
  // Single-element holder so the connection can be released in the async
  // finally-callback of each iteration.
  final ConnectionContext[] connection = new ConnectionContext[1];
  asyncForEach(namenodes.iterator(),
      (foreach, namenode) -> {
        // Skip observers once observer reads have been disabled.
        if (!status.isShouldUseObserver()
            && (namenode.getState() == FederationNamenodeServiceState.OBSERVER)) {
          asyncComplete(null);
          return;
        }
        String nsId = namenode.getNameserviceId();
        String rpcAddress = namenode.getRpcAddress();
        asyncTry(() -> {
          connection[0] = getConnection(ugi, nsId, rpcAddress, protocol);
          NameNodeProxiesClient.ProxyAndInfo<?> client = connection[0].getClient();
          invoke(namenode, status.isShouldUseObserver(), 0, method,
              client.getProxy(), params);
          asyncApply(res -> {
            // First successful namenode wins: record, post-process, stop
            // iterating the remaining namenodes.
            status.setComplete(true);
            postProcessResult(method, status, namenode, nsId, client);
            foreach.breakNow();
            return res;
          });
        });
        asyncCatch((res, ioe) -> {
          // Remember the failure and decide (inside the handler) whether to
          // keep trying the remaining namenodes.
          ioes.put(namenode, ioe);
          handleInvokeMethodIOException(namenode, ioe, status, useObserver);
          return res;
        }, IOException.class);
        asyncFinally(res -> {
          if (connection[0] != null) {
            connection[0].release();
          }
          return res;
        });
      });
  asyncApply(res -> {
    if (status.isComplete()) {
      return res;
    }
    // Every namenode failed: surface the aggregated failure.
    return handlerAllNamenodeFail(namenodes, method, ioes, params);
  });
}
/**
 * Asynchronously invokes a method on a specified NameNode in the context of the given
 * namespace and NameNode information. This method is designed to handle the invocation
 * in a non-blocking manner, allowing for improved performance and scalability when
 * interacting with the NameNode.
 *
 * @param namenode The context information for the NameNode.
 * @param listObserverFirst Whether to list the observer node first in the invocation list.
 * @param retryCount The current retry count for the operation.
 * @param method The method to be invoked on the NameNode.
 * @param obj The proxy object through which the method will be invoked.
 * @param params The parameters for the method invocation.
 */
protected Object invoke(
    FederationNamenodeContext namenode, Boolean listObserverFirst,
    int retryCount, final Method method,
    final Object obj, final Object... params) throws IOException {
  try {
    // Switch the RPC client into asynchronous mode only for the duration of
    // the reflective call; the real result arrives through the async chain,
    // so this method always returns null.
    Client.setAsynchronousMode(true);
    method.invoke(obj, params);
    Client.setAsynchronousMode(false);
    // Registered failure handler for the async call: delegates retry /
    // failover decisions.
    asyncCatch((AsyncCatchFunction<Object, Throwable>) (o, e) -> {
      handlerInvokeException(namenode, listObserverFirst,
          retryCount, method, obj, e, params);
    }, Throwable.class);
  } catch (InvocationTargetException e) {
    // Unwrap the target exception thrown by the proxied method itself.
    asyncThrowException(e.getCause());
  } catch (IllegalAccessException | IllegalArgumentException e) {
    // Reflection misuse: should not happen for well-formed RemoteMethods.
    LOG.error("Unexpected exception while proxying API", e);
    asyncThrowException(e);
  }
  return null;
}
/**
* Invokes sequential proxy calls to different locations. Continues to invoke
* calls until the success condition is met, or until all locations have been
* attempted.
*
* The success condition may be specified by:
* <ul>
* <li>An expected result class
* <li>An expected result value
* </ul>
*
* If no expected result class/values are specified, the success condition is
* a call that does not throw a remote exception.
*
* @param <T> The type of the remote method return.
* @param locations List of locations/nameservices to call concurrently.
* @param remoteMethod The remote method and parameters to invoke.
* @param expectedResultClass In order to be considered a positive result, the
* return type must be of this class.
* @param expectedResultValue In order to be considered a positive result, the
* return value must equal the value of this object.
* @return The result of the first successful call, or if no calls are
* successful, the result of the first RPC call executed.
* @throws IOException if the success condition is not met, return the first
* remote exception generated.
*/
@Override
public <T> T invokeSequential(
    final List<? extends RemoteLocationContext> locations,
    final RemoteMethod remoteMethod, Class<T> expectedResultClass,
    Object expectedResultValue) throws IOException {
  // Delegate to the RemoteResult-returning overload, then unwrap the bare
  // result value from the async chain before returning it to the caller.
  invokeSequential(remoteMethod, locations, expectedResultClass, expectedResultValue);
  asyncApply((ApplyFunction<RemoteResult, Object>) RemoteResult::getResult);
  return asyncReturn(expectedResultClass);
}
/**
 * Invokes sequential proxy calls to different locations. Continues to invoke
 * calls until the success condition is met, or until all locations have been
 * attempted.
 *
 * The success condition may be specified by:
 * <ul>
 * <li>An expected result class
 * <li>An expected result value
 * </ul>
 *
 * If no expected result class/values are specified, the success condition is
 * a call that does not throw a remote exception.
 *
 * This returns RemoteResult, which contains the invoked location as well
 * as the result.
 *
 * @param <R> The type of the remote location.
 * @param <T> The type of the remote method return.
 * @param remoteMethod The remote method and parameters to invoke.
 * @param locations List of locations/nameservices to call concurrently.
 * @param expectedResultClass In order to be considered a positive result, the
 *          return type must be of this class.
 * @param expectedResultValue In order to be considered a positive result, the
 *          return value must equal the value of this object.
 * @return The result of the first successful call, or if no calls are
 *         successful, the result of the first RPC call executed, along with
 *         the invoked location in form of RemoteResult.
 * @throws IOException if the success condition is not met, return the first
 *           remote exception generated.
 */
@Override
public <R extends RemoteLocationContext, T> RemoteResult invokeSequential(
    final RemoteMethod remoteMethod, final List<R> locations,
    Class<T> expectedResultClass, Object expectedResultValue)
    throws IOException {
  final UserGroupInformation ugi = RouterRpcServer.getRemoteUser();
  final Method m = remoteMethod.getMethod();
  List<IOException> thrownExceptions = new ArrayList<>();
  // One-element holder: lets the async lambdas below record the first
  // result even though captured locals must be effectively final.
  final Object[] firstResult = {null};
  final ExecutionStatus status = new ExecutionStatus();
  Iterator<RemoteLocationContext> locationIterator =
      (Iterator<RemoteLocationContext>) locations.iterator();
  // Invoke in priority order
  asyncForEach(locationIterator,
      (foreach, loc) -> {
        String ns = loc.getNameserviceId();
        boolean isObserverRead = isObserverReadEligible(ns, m);
        List<? extends FederationNamenodeContext> namenodes =
            getOrderedNamenodes(ns, isObserverRead);
        asyncTry(() -> {
          Class<?> proto = remoteMethod.getProtocol();
          Object[] params = remoteMethod.getParams(loc);
          invokeMethod(ugi, namenodes, isObserverRead, proto, m, params);
          asyncApply(result -> {
            // Check if the result is what we expected
            if (isExpectedClass(expectedResultClass, result) &&
                isExpectedValue(expectedResultValue, result)) {
              // Valid result, stop here
              @SuppressWarnings("unchecked") R location = (R) loc;
              @SuppressWarnings("unchecked") T ret = (T) result;
              foreach.breakNow();
              status.setComplete(true);
              return new RemoteResult<>(location, ret);
            }
            if (firstResult[0] == null) {
              firstResult[0] = result;
            }
            return null;
          });
        });
        asyncCatch((ret, e) -> {
          if (e instanceof IOException) {
            IOException ioe = (IOException) e;
            // Localize the exception
            ioe = processException(ioe, loc);
            // Record it and move on
            thrownExceptions.add(ioe);
          } else {
            // Unusual error, ClientProtocol calls always use IOException (or
            // RemoteException). Re-wrap in IOException for compatibility with
            // ClientProtocol.
            LOG.error("Unexpected exception {} proxying {} to {}",
                e.getClass(), m.getName(), ns, e);
            IOException ioe = new IOException(
                "Unexpected exception proxying API " + e.getMessage(), e);
            thrownExceptions.add(ioe);
          }
          return ret;
        }, Exception.class);
      });
  asyncApply(result -> {
    if (status.isComplete()) {
      // A location met the success condition; propagate its RemoteResult.
      return result;
    }
    if (!thrownExceptions.isEmpty()) {
      // An unavailable subcluster may be the actual cause
      // We cannot surface other exceptions (e.g., FileNotFoundException)
      for (IOException ioe : thrownExceptions) {
        if (isUnavailableException(ioe)) {
          throw ioe;
        }
      }
      // re-throw the first exception thrown for compatibility
      throw thrownExceptions.get(0);
    }
    // Return the first result, whether it is the value or not
    @SuppressWarnings("unchecked") T ret = (T) firstResult[0];
    return new RemoteResult<>(locations.get(0), ret);
  });
  return asyncReturn(RemoteResult.class);
}
/**
 * Invokes multiple concurrent proxy calls to different clients. Returns an
 * array of results.
 * <p>
 * Re-throws exceptions generated by the remote RPC call as either
 * RemoteException or IOException.
 *
 * @param <T> The type of the remote location.
 * @param <R> The type of the remote method return.
 * @param locations List of remote locations to call concurrently.
 * @param method The remote method and parameters to invoke.
 * @param requireResponse If true an exception will be thrown if all calls do
 * not complete. If false exceptions are ignored and all data results
 * successfully received are returned.
 * @param standby If the requests should go to the standby namenodes too.
 * @param timeOutMs Timeout for each individual call.
 * @param clazz Type of the remote return type.
 * @return Result of invoking the method per subcluster: nsId to result.
 * @throws IOException If requiredResponse=true and any of the calls throw an
 * exception.
 */
@Override
public <T extends RemoteLocationContext, R> Map<T, R> invokeConcurrent(
final Collection<T> locations, final RemoteMethod method,
boolean requireResponse, boolean standby, long timeOutMs, Class<R> clazz)
throws IOException {
// Start the concurrent invocations; completion is tracked by the async
// framework rather than by this thread.
invokeConcurrent(locations, method, standby, timeOutMs, clazz);
// Once all per-location results are in, collapse them into a map keyed by
// location (postProcessResult decides whether failures are fatal based on
// requireResponse).
asyncApply((ApplyFunction<List<RemoteResult<T, R>>, Object>)
results -> postProcessResult(requireResponse, results));
// NOTE(review): asyncReturn appears to yield a placeholder for the pending
// async result rather than the finished map — confirm callers retrieve the
// real value through the async context.
return asyncReturn(Map.class);
}
/**
 * Invokes multiple concurrent proxy calls to different clients. Returns an
 * array of results.
 *
 * @param <T> The type of the remote location.
 * @param <R> The type of the remote method return.
 * @param method The remote method and parameters to invoke.
 * @param timeOutMs Timeout for each individual call.
 * @param controller Fairness manager to control handlers assigned per NS.
 * @param orderedLocations List of remote locations to call concurrently.
 * @param callables Invoke method for each NameNode.
 * @return Result of invoking the method per subcluster (list of results),
 * This includes the exception for each remote location.
 * @throws IOException If there are errors invoking the method.
 */
@Override
protected <T extends RemoteLocationContext, R> List<RemoteResult<T, R>> getRemoteResults(
RemoteMethod method, long timeOutMs, RouterRpcFairnessPolicyController controller,
List<T> orderedLocations, List<Callable<Object>> callables) throws IOException {
final Method m = method.getMethod();
final CompletableFuture<Object>[] futures =
new CompletableFuture[callables.size()];
int i = 0;
for (Callable<Object> callable : callables) {
CompletableFuture<Object> future = null;
try {
// Each callable kicks off an async invocation; fetch the future it
// registered with the async context.
callable.call();
future = getCompletableFuture();
} catch (Exception e) {
// Synchronous failure: represent it as an already-failed future so it
// is handled uniformly with asynchronous failures below.
future = new CompletableFuture<>();
future.completeExceptionally(warpCompletionException(e));
}
futures[i++] = future;
}
// Once every call has settled (successfully or not), convert the futures
// into per-location results; per-location exceptions are captured by
// processFutures rather than thrown here.
asyncCompleteWith(CompletableFuture.allOf(futures)
.handle((unused, throwable) -> {
try {
return processFutures(method, m, orderedLocations, Arrays.asList(futures));
} catch (InterruptedException e) {
LOG.error("Unexpected error while invoking API: {}", e.getMessage());
throw warpCompletionException(new IOException(
"Unexpected error while invoking API " + e.getMessage(), e));
}
}));
return asyncReturn(List.class);
}
/**
 * Invokes a ClientProtocol method against the specified namespace.
 * <p>
 * Re-throws exceptions generated by the remote RPC call as either
 * RemoteException or IOException.
 *
 * @param <T> The type of the remote location.
 * @param <R> The type of the remote method return.
 * @param location RemoteLocation to invoke.
 * @param method The remote method and parameters to invoke.
 * @return Result of invoking the method per subcluster (list of results),
 * This includes the exception for each remote location.
 * @throws IOException If there are errors invoking the method.
 */
@Override
public <T extends RemoteLocationContext, R> List<RemoteResult<T, R>> invokeSingle(
T location, RemoteMethod method) throws IOException {
final UserGroupInformation ugi = RouterRpcServer.getRemoteUser();
final Method m = method.getMethod();
String ns = location.getNameserviceId();
boolean isObserverRead = isObserverReadEligible(ns, m);
final List<? extends FederationNamenodeContext> namenodes =
getOrderedNamenodes(ns, isObserverRead);
asyncTry(() -> {
Class<?> proto = method.getProtocol();
Object[] paramList = method.getParams(location);
invokeMethod(ugi, namenodes, isObserverRead, proto, m, paramList);
// Pair the raw result with the location it came from.
asyncApply((ApplyFunction<R, Object>) result -> {
RemoteResult<T, R> remoteResult = new RemoteResult<>(location, result);
return Collections.singletonList(remoteResult);
});
});
// Localize any IOException to the invoked location before rethrowing.
asyncCatch((o, ioe) -> {
throw processException(ioe, location);
}, IOException.class);
return asyncReturn(List.class);
}
/**
 * Invokes a ClientProtocol method against the specified namespace.
 * <p>
 * Re-throws exceptions generated by the remote RPC call as either
 * RemoteException or IOException.
 *
 * @param nsId Target namespace for the method.
 * @param method The remote method and parameters to invoke.
 * @return The result of invoking the method.
 * @throws IOException If the invoke generated an error.
 */
@Override
public Object invokeSingle(final String nsId, RemoteMethod method)
throws IOException {
UserGroupInformation ugi = RouterRpcServer.getRemoteUser();
boolean isObserverRead = isObserverReadEligible(nsId, method.getMethod());
List<? extends FederationNamenodeContext> nns = getOrderedNamenodes(nsId, isObserverRead);
// Synthetic root location: this is a namespace-level call with no real path.
RemoteLocationContext loc = new RemoteLocation(nsId, "/", "/");
Class<?> proto = method.getProtocol();
Method m = method.getMethod();
Object[] params = method.getParams(loc);
invokeMethod(ugi, nns, isObserverRead, proto, m, params);
// NOTE(review): returns null while sibling methods use asyncReturn —
// presumably the result is delivered through the async context; confirm
// that callers obtain it there rather than from this return value.
return null;
}
/**
 * Invokes a single proxy call for a single location.
 * <p>
 * Re-throws exceptions generated by the remote RPC call as either
 * RemoteException or IOException.
 *
 * @param location RemoteLocation to invoke.
 * @param remoteMethod The remote method and parameters to invoke.
 * @param clazz Class for the return type.
 * @param <T> The type of the remote method return.
 * @return The result of invoking the method if successful.
 * @throws IOException If the invoke generated an error.
 */
public <T> T invokeSingle(
    final RemoteLocationContext location,
    RemoteMethod remoteMethod, Class<T> clazz) throws IOException {
  // Delegate to the sequential path with a one-element location list.
  invokeSequential(Collections.singletonList(location), remoteMethod);
  return asyncReturn(clazz);
}
/**
 * Release permit for specific nsId after processing against downstream
 * nsId is completed.
 * @param nsId Identifier of the block pool.
 * @param ugi UserGroupIdentifier associated with the user.
 * @param m Remote method that needs to be invoked.
 * @param controller fairness policy controller to release permit from
 */
protected void releasePermit(final String nsId, final UserGroupInformation ugi,
    final Method m, RouterRpcFairnessPolicyController controller) {
  // Nothing to release when fairness control is disabled.
  if (controller == null) {
    return;
  }
  controller.releasePermit(nsId);
  LOG.trace("Permit released for ugi: {} for method: {}", ugi, m.getName());
}
}
|
defining
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeUnsubscribeOnTest.java
|
{
"start": 1103,
"end": 3986
}
|
// Tests for Maybe.unsubscribeOn: disposal must be moved onto the supplied
// scheduler and the operator must otherwise be transparent to signals.
class ____ extends RxJavaTest {
// Cancelling the downstream consumer must propagate upstream on the
// single() scheduler's thread, not on the test (caller) thread.
@Test
public void normal() throws Exception {
PublishProcessor<Integer> pp = PublishProcessor.create();
// One-element holders let the cancel callback publish the executing
// thread's name back to the test.
final String[] name = { null };
final CountDownLatch cdl = new CountDownLatch(1);
pp.doOnCancel(new Action() {
@Override
public void run() throws Exception {
name[0] = Thread.currentThread().getName();
cdl.countDown();
}
})
.singleElement()
.unsubscribeOn(Schedulers.single())
.test(true)
;
assertTrue(cdl.await(5, TimeUnit.SECONDS));
// Poll up to ~1s for the asynchronous cancellation to reach the processor.
int times = 10;
while (times-- > 0 && pp.hasSubscribers()) {
Thread.sleep(100);
}
assertFalse(pp.hasSubscribers());
// The cancel callback must have run on a scheduler thread, not this one.
assertNotEquals(Thread.currentThread().getName(), name[0]);
}
// A success value passes through unchanged.
@Test
public void just() {
Maybe.just(1)
.unsubscribeOn(Schedulers.single())
.test()
.assertResult(1);
}
// An error passes through unchanged.
@Test
public void error() {
Maybe.<Integer>error(new TestException())
.unsubscribeOn(Schedulers.single())
.test()
.assertFailure(TestException.class);
}
// Completion passes through unchanged.
@Test
public void empty() {
Maybe.empty()
.unsubscribeOn(Schedulers.single())
.test()
.assertResult();
}
// Standard disposed-state conformance check from TestHelper.
@Test
public void dispose() {
TestHelper.checkDisposed(Maybe.just(1)
.unsubscribeOn(Schedulers.single()));
}
// Protocol check: the operator must not call onSubscribe twice downstream.
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybe(new Function<Maybe<Object>, MaybeSource<Object>>() {
@Override
public MaybeSource<Object> apply(Maybe<Object> v) throws Exception {
return v.unsubscribeOn(Schedulers.single());
}
});
}
// Racing two concurrent dispose() calls must be safe (idempotent disposal).
@Test
public void disposeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
PublishProcessor<Integer> pp = PublishProcessor.create();
final Disposable[] ds = { null };
pp.singleElement().unsubscribeOn(Schedulers.computation())
.subscribe(new MaybeObserver<Integer>() {
@Override
public void onSubscribe(Disposable d) {
ds[0] = d;
}
@Override
public void onSuccess(Integer value) {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onComplete() {
}
});
Runnable r = new Runnable() {
@Override
public void run() {
ds[0].dispose();
}
};
TestHelper.race(r, r);
}
}
}
|
MaybeUnsubscribeOnTest
|
java
|
elastic__elasticsearch
|
distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/PluginCliProvider.java
|
{
"start": 601,
"end": 812
}
|
/**
 * CLI tool provider that exposes the plugin management command under the
 * tool name "plugin".
 */
class ____ implements CliToolProvider {
@Override
public String name() {
// Name the CLI launcher uses to select this tool.
return "plugin";
}
@Override
public Command create() {
// A fresh command instance per invocation; no state is shared.
return new PluginCli();
}
}
|
PluginCliProvider
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test-autoconfigure/src/test/java/org/springframework/boot/test/autoconfigure/ImportsContextCustomizerFactoryWithAutoConfigurationTests.java
|
{
"start": 1776,
"end": 3456
}
|
class ____ {
static @Nullable ApplicationContext contextFromTest;
@Test
void testClassesThatHaveSameAnnotationsShareAContext() {
	// Two test classes annotated identically must hit the same cached context.
	executeTests(ExampleTest1.class);
	ApplicationContext firstContext = contextFromTest;
	assertThat(firstContext).isNotNull();
	executeTests(ExampleTest3.class);
	ApplicationContext secondContext = contextFromTest;
	assertThat(secondContext).isNotNull();
	assertThat(firstContext).isSameAs(secondContext);
}
@Test
void testClassesThatOnlyHaveDifferingUnrelatedAnnotationsShareAContext() {
	// Annotations that do not affect the context key must not prevent sharing.
	executeTests(ExampleTest1.class);
	ApplicationContext firstContext = contextFromTest;
	assertThat(firstContext).isNotNull();
	executeTests(ExampleTest2.class);
	ApplicationContext secondContext = contextFromTest;
	assertThat(secondContext).isNotNull();
	assertThat(firstContext).isSameAs(secondContext);
}
@Test
void testClassesThatOnlyHaveDifferingPropertyMappedAnnotationAttributesDoNotShareAContext() {
	// Differing property-mapped attributes change the context key, so each
	// test class must get its own context.
	executeTests(ExampleTest1.class);
	ApplicationContext firstContext = contextFromTest;
	assertThat(firstContext).isNotNull();
	executeTests(ExampleTest4.class);
	ApplicationContext secondContext = contextFromTest;
	assertThat(secondContext).isNotNull();
	assertThat(firstContext).isNotSameAs(secondContext);
}
// Discovers and runs the given test class on the JUnit Platform so that its
// static contextFromTest field gets populated as a side effect.
private void executeTests(Class<?> testClass) {
	LauncherDiscoveryRequest request = LauncherDiscoveryRequestBuilder.request()
		.selectors(DiscoverySelectors.selectClass(testClass))
		.build();
	LauncherFactory.create().execute(request);
}
@ExampleTest
@ContextConfiguration(classes = EmptyConfig.class)
@Unrelated1
static
|
ImportsContextCustomizerFactoryWithAutoConfigurationTests
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/util/pattern/InternalPathPatternParser.java
|
{
"start": 1194,
"end": 15132
}
|
/**
 * Stateful, single-use parser that turns a path pattern string into a linked
 * chain of {@code PathElement}s wrapped in a {@code PathPattern}. One
 * instance is created per parse via {@code PathPatternParser}; it holds
 * per-element scanning state and is not safe for concurrent use.
 */
class ____ {
private final PathPatternParser parser;
// The input data for parsing
private char[] pathPatternData = new char[0];
// The length of the input data
private int pathPatternLength;
// Current parsing position
int pos;
// How many ? characters in a particular path element
private int singleCharWildcardCount;
// Is the path pattern using * characters in a particular path element
private boolean wildcard = false;
// Is the construct {*...} being used in a particular path element
private boolean isCaptureSegmentsVariable = false;
// Has the parser entered a {...} variable capture block in a particular
// path element
private boolean insideVariableCapture = false;
// How many variable captures are occurring in a particular path element
private int variableCaptureCount = 0;
// Start of the most recent path element in a particular path element
private int pathElementStart;
// Start of the most recent variable capture in a particular path element
private int variableCaptureStart;
// Did we parse a WildcardSegments(**) or CaptureSegments({*foo}) PathElement already?
private boolean hasMultipleSegmentsElement = false;
// Variables captures in this path pattern
private @Nullable List<String> capturedVariableNames;
// The head of the path element chain currently being built
private @Nullable PathElement headPE;
// The most recently constructed path element in the chain
private @Nullable PathElement currentPE;
/**
 * Package private constructor for use in {@link PathPatternParser#parse}.
 * @param parentParser reference back to the stateless, public parser
 */
InternalPathPatternParser(PathPatternParser parentParser) {
this.parser = parentParser;
}
/**
 * Package private delegate for {@link PathPatternParser#parse(String)}.
 */
public PathPattern parse(String pathPattern) throws PatternParseException {
Assert.notNull(pathPattern, "Path pattern must not be null");
this.pathPatternData = pathPattern.toCharArray();
this.pathPatternLength = this.pathPatternData.length;
this.headPE = null;
this.currentPE = null;
this.capturedVariableNames = null;
this.pathElementStart = -1;
this.pos = 0;
resetPathElementState();
// Scan character by character; a PathElement is emitted at each separator
// and once more at the end of the input for any trailing element.
while (this.pos < this.pathPatternLength) {
char ch = this.pathPatternData[this.pos];
char separator = this.parser.getPathOptions().separator();
if (ch == separator) {
if (this.pathElementStart != -1) {
pushPathElement(createPathElement());
}
pushPathElement(new SeparatorPathElement(this.pos, separator));
}
else {
if (this.pathElementStart == -1) {
this.pathElementStart = this.pos;
}
if (ch == '?') {
this.singleCharWildcardCount++;
}
else if (ch == '{') {
if (this.insideVariableCapture) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.ILLEGAL_NESTED_CAPTURE);
}
// If we enforced that adjacent captures weren't allowed,
// this would do it (this would be an error: /foo/{bar}{boo}/)
// } else if (pos > 0 && pathPatternData[pos - 1] == '}') {
// throw new PatternParseException(pos, pathPatternData,
// PatternMessage.CANNOT_HAVE_ADJACENT_CAPTURES);
this.insideVariableCapture = true;
this.variableCaptureStart = this.pos;
}
else if (ch == '}') {
if (!this.insideVariableCapture) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.MISSING_OPEN_CAPTURE);
}
this.insideVariableCapture = false;
this.variableCaptureCount++;
}
else if (ch == ':') {
// ':' starts an inline regex constraint, e.g. {var:[a-z]+}, unless
// we are in a {*...} capture where ':' has no special meaning.
if (this.insideVariableCapture && !this.isCaptureSegmentsVariable) {
skipCaptureRegex();
this.insideVariableCapture = false;
this.variableCaptureCount++;
}
}
else if (isDoubleWildcard(separator)) {
checkValidMultipleSegmentsElements(this.pos, this.pos + 1);
pushPathElement(new WildcardSegmentsPathElement(this.pos, separator));
this.hasMultipleSegmentsElement = true;
this.pos++;
}
else if (ch == '*') {
// '*' right after '{' marks the {*...} capture-segments construct.
if (this.insideVariableCapture && this.variableCaptureStart == this.pos - 1) {
this.isCaptureSegmentsVariable = true;
}
this.wildcard = true;
}
// Check that the characters used for captured variable names are like java identifiers
if (this.insideVariableCapture) {
if ((this.variableCaptureStart + 1 + (this.isCaptureSegmentsVariable ? 1 : 0)) == this.pos &&
!Character.isJavaIdentifierStart(ch)) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.ILLEGAL_CHARACTER_AT_START_OF_CAPTURE_DESCRIPTOR,
Character.toString(ch));
}
else if ((this.pos > (this.variableCaptureStart + 1 + (this.isCaptureSegmentsVariable ? 1 : 0)) &&
!Character.isJavaIdentifierPart(ch) && ch != '-')) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.ILLEGAL_CHARACTER_IN_CAPTURE_DESCRIPTOR,
Character.toString(ch));
}
}
}
this.pos++;
}
// Flush a trailing element that was not terminated by a separator.
if (this.pathElementStart != -1) {
pushPathElement(createPathElement());
}
return new PathPattern(pathPattern, this.parser, this.headPE);
}
/**
 * Just hit a ':' and want to jump over the regex specification for this
 * variable. pos will be pointing at the ':', we want to skip until the }.
 * <p>
 * Nested {...} pairs don't have to be escaped: <code>/abc/{var:x{1,2}}/def</code>
 * <p>An escaped } will not be treated as the end of the regex: <code>/abc/{var:x\\{y:}/def</code>
 * <p>A separator that should not indicate the end of the regex can be escaped
 * with a backslash; an unescaped separator inside the regex is reported as a
 * missing close capture.
 */
private void skipCaptureRegex() {
this.pos++;
int regexStart = this.pos;
int curlyBracketDepth = 0; // how deep in nested {...} pairs
boolean previousBackslash = false;
while (this.pos < this.pathPatternLength) {
char ch = this.pathPatternData[this.pos];
if (ch == '\\' && !previousBackslash) {
this.pos++;
previousBackslash = true;
continue;
}
if (ch == '{' && !previousBackslash) {
curlyBracketDepth++;
}
else if (ch == '}' && !previousBackslash) {
if (curlyBracketDepth == 0) {
if (regexStart == this.pos) {
throw new PatternParseException(regexStart, this.pathPatternData,
PatternMessage.MISSING_REGEX_CONSTRAINT);
}
return;
}
curlyBracketDepth--;
}
if (ch == this.parser.getPathOptions().separator() && !previousBackslash) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.MISSING_CLOSE_CAPTURE);
}
this.pos++;
previousBackslash = false;
}
throw new PatternParseException(this.pos - 1, this.pathPatternData,
PatternMessage.MISSING_CLOSE_CAPTURE);
}
/**
 * Whether the scanner is positioned on a standalone "**" path element:
 * two '*' characters bounded by separators (or the pattern edges).
 */
private boolean isDoubleWildcard(char separator) {
// next char is present
if ((this.pos + 1) >= this.pathPatternLength) {
return false;
}
// current char and next char are '*'
if (this.pathPatternData[this.pos] != '*' || this.pathPatternData[this.pos + 1] != '*') {
return false;
}
// previous char is a separator, if any
if ((this.pos - 1 >= 0) && (this.pathPatternData[this.pos - 1] != separator)) {
return false;
}
// next char is a separator, if any
if (((this.pos + 2) < this.pathPatternLength) &&
this.pathPatternData[this.pos + 2] != separator) {
return false;
}
return true;
}
/**
 * Verify a multi-segment element ({@code **} or <code>{*...}</code>) is the
 * only one in the pattern and, unless it starts the pattern, that it is the
 * final element.
 */
private void checkValidMultipleSegmentsElements(int startPosition, int endPosition) {
if (this.hasMultipleSegmentsElement) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.CANNOT_HAVE_MANY_MULTISEGMENT_PATHELEMENTS);
}
if (startPosition > 1 && endPosition != this.pathPatternLength - 1) {
throw new PatternParseException(this.pos, this.pathPatternData,
PatternMessage.INVALID_LOCATION_FOR_MULTISEGMENT_PATHELEMENT);
}
}
/**
 * Push a path element to the chain being build.
 * @param newPathElement the new path element to add
 */
private void pushPathElement(PathElement newPathElement) {
if (newPathElement instanceof CaptureSegmentsPathElement ||
newPathElement instanceof WildcardSegmentsPathElement) {
// There must be a separator ahead of this thing
// currentPE SHOULD be a SeparatorPathElement
if (this.currentPE == null) {
this.headPE = newPathElement;
this.currentPE = newPathElement;
}
else if (this.currentPE instanceof SeparatorPathElement) {
// Multi-segment elements absorb the preceding separator: link the
// new element directly to whatever came before the separator.
PathElement peBeforeSeparator = this.currentPE.prev;
if (peBeforeSeparator == null) {
// /{*foobar} is at the start
this.headPE = newPathElement;
newPathElement.prev = null;
}
else {
peBeforeSeparator.next = newPathElement;
newPathElement.prev = peBeforeSeparator;
}
this.currentPE = newPathElement;
}
else {
throw new IllegalStateException("Expected SeparatorPathElement before " +
newPathElement.getClass().getName() +" but was " + this.currentPE);
}
}
else {
// Ordinary elements are simply appended to the doubly-linked chain.
if (this.headPE == null) {
this.headPE = newPathElement;
this.currentPE = newPathElement;
}
else if (this.currentPE != null) {
this.currentPE.next = newPathElement;
newPathElement.prev = this.currentPE;
this.currentPE = newPathElement;
}
}
resetPathElementState();
}
// Copies the raw characters of the element currently being scanned
// ([pathElementStart, pos)).
private char[] getPathElementText() {
char[] pathElementText = new char[this.pos - this.pathElementStart];
System.arraycopy(this.pathPatternData, this.pathElementStart, pathElementText, 0,
this.pos - this.pathElementStart);
return pathElementText;
}
/**
 * Used the knowledge built up whilst processing since the last path element to determine what kind of path
 * element to create.
 * @return the new path element
 */
private PathElement createPathElement() {
if (this.insideVariableCapture) {
throw new PatternParseException(this.pos, this.pathPatternData, PatternMessage.MISSING_CLOSE_CAPTURE);
}
PathElement newPE = null;
char separator = this.parser.getPathOptions().separator();
if (this.variableCaptureCount > 0) {
if (this.variableCaptureCount == 1 && this.pathElementStart == this.variableCaptureStart &&
this.pathPatternData[this.pos - 1] == '}') {
if (this.isCaptureSegmentsVariable) {
// It is {*....}
checkValidMultipleSegmentsElements(this.pathElementStart, this.pos -1);
this.hasMultipleSegmentsElement = true;
newPE = new CaptureSegmentsPathElement(
this.pathElementStart, getPathElementText(), separator);
}
else {
// It is a full capture of this element (possibly with constraint), for example: /foo/{abc}/
try {
newPE = new CaptureVariablePathElement(this.pathElementStart, getPathElementText(),
this.parser.isCaseSensitive(), separator);
}
catch (PatternSyntaxException pse) {
// Translate the regex error to a parse error positioned at the
// offending character within the overall pattern.
throw new PatternParseException(pse,
findRegexStart(this.pathPatternData, this.pathElementStart) + pse.getIndex(),
this.pathPatternData, PatternMessage.REGEX_PATTERN_SYNTAX_EXCEPTION);
}
recordCapturedVariable(this.pathElementStart,
((CaptureVariablePathElement) newPE).getVariableName());
}
}
else {
if (this.isCaptureSegmentsVariable) {
throw new PatternParseException(this.pathElementStart, this.pathPatternData,
PatternMessage.CAPTURE_ALL_IS_STANDALONE_CONSTRUCT);
}
// Mixed literal/capture element, for example /foo/a{bar}b/ — handled
// as a regex element.
RegexPathElement newRegexSection = new RegexPathElement(this.pathElementStart,
getPathElementText(), this.parser.isCaseSensitive(),
this.pathPatternData, separator);
for (String variableName : newRegexSection.getVariableNames()) {
recordCapturedVariable(this.pathElementStart, variableName);
}
newPE = newRegexSection;
}
}
else {
if (this.wildcard) {
if (this.pos - 1 == this.pathElementStart) {
// The element is exactly "*".
newPE = new WildcardPathElement(this.pathElementStart, separator);
}
else {
newPE = new RegexPathElement(this.pathElementStart, getPathElementText(),
this.parser.isCaseSensitive(), this.pathPatternData, separator);
}
}
else if (this.singleCharWildcardCount != 0) {
newPE = new SingleCharWildcardedPathElement(this.pathElementStart, getPathElementText(),
this.singleCharWildcardCount, this.parser.isCaseSensitive(), separator);
}
else {
newPE = new LiteralPathElement(this.pathElementStart, getPathElementText(),
this.parser.isCaseSensitive(), separator);
}
}
return newPE;
}
/**
 * For a path element representing a captured variable, locate the constraint pattern.
 * Assumes there is a constraint pattern.
 * @param data a complete path expression, for example, /aaa/bbb/{ccc:...}
 * @param offset the start of the capture pattern of interest
 * @return the index of the character after the ':' within
 * the pattern expression relative to the start of the whole expression
 */
private int findRegexStart(char[] data, int offset) {
int pos = offset;
while (pos < data.length) {
if (data[pos] == ':') {
return pos + 1;
}
pos++;
}
return -1;
}
/**
 * Reset all the flags and position markers computed during path element processing.
 */
private void resetPathElementState() {
this.pathElementStart = -1;
this.singleCharWildcardCount = 0;
this.insideVariableCapture = false;
this.variableCaptureCount = 0;
this.wildcard = false;
this.isCaptureSegmentsVariable = false;
this.variableCaptureStart = -1;
}
/**
 * Record a new captured variable. If it clashes with an existing one then report an error.
 */
private void recordCapturedVariable(int pos, String variableName) {
if (this.capturedVariableNames == null) {
this.capturedVariableNames = new ArrayList<>();
}
if (this.capturedVariableNames.contains(variableName)) {
throw new PatternParseException(pos, this.pathPatternData,
PatternMessage.ILLEGAL_DOUBLE_CAPTURE, variableName);
}
this.capturedVariableNames.add(variableName);
}
}
|
InternalPathPatternParser
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/common/CamelJBangConstants.java
|
{
"start": 1059,
"end": 12739
}
|
class ____ {
// internal options which is not intended for Camel users
public static final String BACKGROUND = "camel.jbang.background";
public static final String BACKGROUND_WAIT = "camel.jbang.backgroundWait";
public static final String JVM_DEBUG = "camel.jbang.jvmDebug";
public static final String TRANSFORM = "camel.jbang.transform";
public static final String EXPORT = "camel.jbang.export";
public static final String DEBUG = "camel.jbang.debug";
@Metadata(description = "Additional files to add to classpath (Use commas to separate multiple files).",
javaType = "String")
public static final String CLASSPATH_FILES = "camel.jbang.classpathFiles";
@Metadata(description = "Local file directory for loading custom Kamelets",
javaType = "String")
public static final String LOCAL_KAMELET_DIR = "camel.jbang.localKameletDir";
@Metadata(description = "Additional groovy source files to export to src/main/resources/camel-groovy directory (Use commas to separate multiple files)",
javaType = "String")
public static final String GROOVY_FILES = "camel.jbang.groovyFiles";
@Metadata(description = "Additional shell script files to export to src/main/scripts directory",
javaType = "String")
public static final String SCRIPT_FILES = "camel.jbang.scriptFiles";
@Metadata(description = "Additional SSL/TLS files to export to src/main/tls directory",
javaType = "String")
public static final String TLS_FILES = "camel.jbang.tlsFiles";
@Metadata(description = "Resource YAML fragments for Kubernetes using Eclipse JKube tool (Use commas to separate multiple files).",
javaType = "String", label = "kubernetes")
public static final String JKUBE_FILES = "camel.jbang.jkubeFiles";
@Metadata(description = "Which runtime to use (camel-main, spring-boot, quarkus)",
javaType = "String", enums = "camel-main,spring-boot,quarkus")
public static final String RUNTIME = "camel.jbang.runtime";
@Metadata(description = "Maven coordinate (groupId:artifactId:version)",
javaType = "String")
public static final String GAV = "camel.jbang.gav";
@Metadata(description = "Java version (17 or 21)",
javaType = "String", enums = "17,21", defaultValue = "21")
public static final String JAVA_VERSION = "camel.jbang.javaVersion";
@Metadata(description = "Apache Camel Kamelets version. By default the Kamelets are the same version as Camel.",
javaType = "String")
public static final String KAMELETS_VERSION = "camel.jbang.kameletsVersion";
@Metadata(description = "Quarkus Platform Maven groupId",
javaType = "String", label = "quarkus")
public static final String QUARKUS_GROUP_ID = "camel.jbang.quarkusGroupId";
@Metadata(description = "Quarkus Platform Maven artifactId",
javaType = "String", label = "quarkus")
public static final String QUARKUS_ARTIFACT_ID = "camel.jbang.quarkusArtifactId";
@Metadata(description = "Quarkus Platform version",
javaType = "String", label = "quarkus")
public static final String QUARKUS_VERSION = "camel.jbang.quarkusVersion";
@Metadata(description = "Spring Boot version",
javaType = "String", label = "spring-boot")
public static final String SPRING_BOOT_VERSION = "camel.jbang.springBootVersion";
@Metadata(description = "Include Maven Wrapper files in the exported project",
javaType = "boolean", defaultValue = "true")
public static final String MAVEN_WRAPPER = "camel.jbang.mavenWrapper";
@Metadata(description = "Include Gradle Wrapper files in the exported project",
javaType = "boolean", defaultValue = "true")
public static final String GRADLE_WRAPPER = "camel.jbang.gradleWrapper";
@Metadata(description = "Build tool to use (Maven or Gradle)",
javaType = "String", defaultValue = "Maven")
public static final String BUILD_TOOL = "camel.jbang.buildTool";
@Metadata(description = "Directory where the project will be exported",
javaType = "String", defaultValue = ".")
public static final String EXPORT_DIR = "camel.jbang.exportDir";
@Metadata(description = "File name of open-api spec file (JSON or YAML) to generate routes from the swagger/openapi API spec file.",
javaType = "String")
public static final String OPEN_API = "camel.jbang.openApi";
@Metadata(description = "Additional Maven repositories for download on-demand (Use commas to separate multiple repositories)",
javaType = "String", label = "maven")
public static final String REPOS = "camel.jbang.repos";
@Metadata(description = "Optional location of Maven settings.xml file to configure servers, repositories, mirrors, and proxies. If set to false, not even the default ~/.m2/settings.xml will be used.",
javaType = "String", label = "maven")
public static final String MAVEN_SETTINGS = "camel.jbang.maven-settings";
@Metadata(description = "Optional location of Maven settings-security.xml file to decrypt Maven Settings (settings.xml) file",
javaType = "String", label = "maven")
public static final String MAVEN_SETTINGS_SECURITY = "camel.jbang.maven-settings-security";
@Metadata(description = "Whether downloading JARs from Maven Central repository is enabled",
javaType = "boolean", defaultValue = "true", label = "maven")
public static final String MAVEN_CENTRAL_ENABLED = "camel.jbang.maven-central-enabled";
@Metadata(description = "Whether downloading JARs from ASF Maven Snapshot repository is enabled",
javaType = "boolean", defaultValue = "true", label = "maven")
public static final String MAVEN_APACHE_SNAPSHOTS = "camel.jbang.maven-apache-snapshot-enabled";
@Metadata(description = "Exclude files by name or pattern (Use commas to separate multiple files)",
javaType = "String")
public static final String EXCLUDES = "camel.jbang.excludes";
@Metadata(description = "Additional dependencies (Use commas to separate multiple dependencies).",
javaType = "String")
public static final String DEPENDENCIES = "camel.jbang.dependencies";
@Metadata(description = "Additional dependencies for Camel Main runtime only", javaType = "String")
public static final String DEPENDENCIES_MAIN = "camel.jbang.dependencies.main";
@Metadata(description = "Additional dependencies for Spring Boot runtime only", javaType = "String")
public static final String DEPENDENCIES_SPRING_BOOT = "camel.jbang.dependencies.spring-boot";
@Metadata(description = "Additional dependencies for Quarkus runtime only", javaType = "String")
public static final String DEPENDENCIES_QUARKUS = "camel.jbang.dependencies.quarkus";
@Metadata(description = "Version to use for jib-maven-plugin if exporting to camel-main and have Kubernetes enabled (jkube.xxx options)",
javaType = "String", defaultValue = "3.4.5", label = "kubernetes")
public static final String JIB_MAVEN_PLUGIN_VERSION = "camel.jbang.jib-maven-plugin-version";
@Metadata(description = "Version to use for jkube-maven-plugin if exporting to camel-main and have Kubernetes enabled (jkube.xxx options)",
javaType = "String", defaultValue = "1.18.1", label = "kubernetes")
public static final String JKUBE_MAVEN_PLUGIN_VERSION = "camel.jbang.jkube-maven-plugin-version";
@Metadata(description = "Stubs all the matching endpoint with the given component name or pattern. Multiple names can be separated by comma. (all = everything).",
javaType = "String")
public static final String STUB = "camel.jbang.stub";
@Metadata(description = "Source directory for dynamically loading Camel file(s) to run. When using this, then files cannot be specified at the same time.",
javaType = "String", label = "advanced")
public static final String SOURCE_DIR = "camel.jbang.sourceDir";
@Metadata(description = "Whether to ignore route loading and compilation errors (use this with care!)",
javaType = "boolean", label = "advanced")
public static final String IGNORE_LOADING_ERROR = "camel.jbang.ignoreLoadingError";
@Metadata(description = "Whether to use lazy bean initialization (can help with complex classloading issues)",
javaType = "boolean", label = "advanced")
public static final String LAZY_BEAN = "camel.jbang.lazyBean";
@Metadata(description = "Allow user to type in required parameters in prompt if not present in application",
javaType = "boolean", label = "advanced")
public static final String PROMPT = "camel.jbang.prompt";
@Metadata(description = "Work directory for compiler. Can be used to write compiled classes or other resources.",
javaType = "String", defaultValue = ".camel-jbang/compile", label = "advanced")
public static final String COMPILE_WORK_DIR = "camel.jbang.compileWorkDir";
@Deprecated
@Metadata(description = "Health check at /observe/health on local HTTP server (port 8080 by default)",
javaType = "boolean", defaultValue = ".camel-jbang/compile", deprecationNote = "Deprecated: use observe instead")
public static final String HEALTH = "camel.jbang.health";
@Deprecated
@Metadata(description = "Metrics (Micrometer and Prometheus) at /observe/metrics on local HTTP server (port 8080 by default) when running standalone Camel",
javaType = "boolean", defaultValue = ".camel-jbang/compile", deprecationNote = "Deprecated: use observe instead")
public static final String METRICS = "camel.jbang.metrics";
@Metadata(description = "Developer console at /q/dev on local HTTP server (port 8080 by default)",
javaType = "boolean")
public static final String CONSOLE = "camel.jbang.console";
@Metadata(description = "Verbose output of startup activity (dependency resolution and downloading",
javaType = "boolean")
public static final String VERBOSE = "camel.jbang.verbose";
@Metadata(description = "The version of Apache Camel to use",
javaType = "String")
public static final String CAMEL_VERSION = "camel.jbang.camel-version";
@Metadata(description = "Enables Java Flight Recorder saving recording to disk on exit",
javaType = "boolean")
public static final String JFR = "camel.jbang.jfr";
@Metadata(description = "Java Flight Recorder profile to use (such as default or profile)",
javaType = "String", defaultValue = "default")
public static final String JFR_PROFILE = "camel.jbang.jfr-profile";
@Metadata(description = "Whether to allow automatic downloading JAR dependencies (over the internet)",
javaType = "boolean", defaultValue = "true")
public static final String DOWNLOAD = "camel.jbang.download";
@Metadata(description = "Whether to automatic package scan JARs for custom Spring or Quarkus beans making them available for Camel JBang",
javaType = "boolean", label = "advanced")
public static final String PACKAGE_SCAN_JARS = "camel.jbang.packageScanJars";
@Metadata(description = "To use a custom Camel version when running or export to Spring Boot",
javaType = "String", label = "spring-boot")
public static final String CAMEL_SPRING_BOOT_VERSION = "camel.jbang.camelSpringBootVersion";
private CamelJBangConstants() {
}
}
|
CamelJBangConstants
|
java
|
apache__maven
|
compat/maven-toolchain-builder/src/test/java/org/apache/maven/toolchain/building/DefaultToolchainsBuilderTest.java
|
{
"start": 13996,
"end": 14344
}
|
class ____ implements OperatingSystemUtils.EnvVarSource {
private final Map<String, String> envVarMap;
TestEnvVarSource(Map<String, String> envVarMap) {
this.envVarMap = envVarMap;
}
@Override
public Map<String, String> getEnvMap() {
return envVarMap;
}
}
}
|
TestEnvVarSource
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ClassUtils.java
|
{
"start": 16074,
"end": 17124
}
|
class ____)
*/
public static boolean isCacheSafe(Class<?> clazz, @Nullable ClassLoader classLoader) {
Assert.notNull(clazz, "Class must not be null");
try {
ClassLoader target = clazz.getClassLoader();
// Common cases
if (target == classLoader || target == null) {
return true;
}
if (classLoader == null) {
return false;
}
// Check for match in ancestors -> positive
ClassLoader current = classLoader;
while (current != null) {
current = current.getParent();
if (current == target) {
return true;
}
}
// Check for match in children -> negative
while (target != null) {
target = target.getParent();
if (target == classLoader) {
return false;
}
}
}
catch (SecurityException ex) {
// Fall through to loadable check below
}
// Fallback for ClassLoaders without parent/child relationship:
// safe if same Class can be loaded from given ClassLoader
return (classLoader != null && isLoadable(clazz, classLoader));
}
/**
* Check whether the given
|
loader
|
java
|
spring-projects__spring-boot
|
module/spring-boot-transaction/src/test/java/org/springframework/boot/transaction/autoconfigure/ExecutionListenersTransactionManagerCustomizerTests.java
|
{
"start": 1078,
"end": 1734
}
|
class ____ {
@Test
void whenTransactionManagerIsCustomizedThenExecutionListenersAreAddedToIt() {
TransactionExecutionListener listener1 = mock(TransactionExecutionListener.class);
TransactionExecutionListener listener2 = mock(TransactionExecutionListener.class);
ConfigurableTransactionManager transactionManager = mock(ConfigurableTransactionManager.class);
new ExecutionListenersTransactionManagerCustomizer(List.of(listener1, listener2)).customize(transactionManager);
then(transactionManager).should().addListener(listener1);
then(transactionManager).should().addListener(listener2);
}
}
|
ExecutionListenersTransactionManagerCustomizerTests
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/function/support/HandlerFunctionAdapter.java
|
{
"start": 6268,
"end": 6616
}
|
class ____ implements ServerResponse.Context {
private final ServerRequest serverRequest;
public ServerRequestContext(ServerRequest serverRequest) {
this.serverRequest = serverRequest;
}
@Override
public List<HttpMessageConverter<?>> messageConverters() {
return this.serverRequest.messageConverters();
}
}
}
|
ServerRequestContext
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/errors/ProcessingContext.java
|
{
"start": 1180,
"end": 2217
}
|
class ____<T> {
private final T original;
private Stage position;
private Class<?> klass;
private int attempt;
private Throwable error;
/**
* Construct a context associated with the processing of a particular record
* @param original The original record before processing, as received from either Kafka or a Source Task
*/
public ProcessingContext(T original) {
this.original = original;
}
/**
* @return The original record before processing, as received from either Kafka or a Source Task
*/
public T original() {
return original;
}
/**
* Set the stage in the connector pipeline which is currently executing.
*
* @param position the stage
*/
public void position(Stage position) {
this.position = position;
}
/**
* @return the stage in the connector pipeline which is currently executing.
*/
public Stage stage() {
return position;
}
/**
* @return the
|
ProcessingContext
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/support/ibatis/DruidDataSourceFactoryTest.java
|
{
"start": 229,
"end": 664
}
|
class ____ extends TestCase {
public void test_facttory() throws Exception {
DruidDataSourceFactory factory = new DruidDataSourceFactory();
Properties properties = new Properties();
properties.setProperty("url", "jdbc:mock:xx");
factory.initialize(properties);
DruidDataSource dataSource = (DruidDataSource) factory.getDataSource();
dataSource.close();
}
}
|
DruidDataSourceFactoryTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/Type.java
|
{
"start": 1774,
"end": 4859
}
|
interface ____ extends Serializable {
/**
* Return true if the implementation is castable to {@link AssociationType}. This does not
* necessarily imply that the type actually represents an association. Shortcut for
* {@code type instanceof AssociationType}.
*
* @return True if this type is also an {@link AssociationType} implementor; false otherwise.
*/
boolean isAssociationType();
/**
* Return true if the implementation is castable to {@link CollectionType}. Shortcut for
* {@code type instanceof CollectionType}
* <p>
* A {@link CollectionType} is additionally an {@link AssociationType}; so if this method
* returns true, {@link #isAssociationType()} should also return true.
*
* @return True if this type is also a {@link CollectionType} implementor; false otherwise.
*/
boolean isCollectionType();
/**
* Return true if the implementation is castable to {@link EntityType}. Shortcut for
* {@code type instanceof EntityType}.
* <p>
* An {@link EntityType} is additionally an {@link AssociationType}; so if this method
* returns true, {@link #isAssociationType()} should also return true.
*
* @return True if this type is also an {@link EntityType} implementor; false otherwise.
*/
boolean isEntityType();
/**
* Return true if the implementation is castable to {@link AnyType}. Shortcut for
* {@code type instanceof AnyType}.
* <p>
* An {@link AnyType} is additionally an {@link AssociationType}; so if this method
* returns true, then {@link #isAssociationType()} should also return true.
*
* @return True if this type is also an {@link AnyType} implementor; false otherwise.
*/
boolean isAnyType();
/**
* Return true if the implementation is castable to {@link CompositeType}. Shortcut for
* {@code type instanceof CompositeType}.
* <p>
* A component type may own collections or associations and hence must provide certain
* extra functionality.
*
* @return True if this type is also a {@link CompositeType} implementor; false otherwise.
*/
boolean isComponentType();
/**
* How many columns are used to persist this type?
* <p>
* Always the same as {@link #getSqlTypeCodes(MappingContext) getSqlTypCodes(mappingContext).length}.
*
* @param mappingContext The mapping Context object {@link MappingContext}
*
* @return The number of columns
*
* @throws MappingException Generally indicates an issue accessing the passed mappingContext object.
*/
int getColumnSpan(MappingContext mappingContext) throws MappingException;
/**
* Return the JDBC types codes as defined by {@link java.sql.Types} or {@link SqlTypes}
* for the columns mapped by this type.
* <p>
* The number of elements in this array must match the return from {@link #getColumnSpan}.
*
* @param mappingContext The mapping context {@link MappingContext} :/
*
* @return The JDBC type codes.
*
* @throws MappingException Generally indicates an issue accessing the passed mapping object.
*/
int[] getSqlTypeCodes(MappingContext mappingContext) throws MappingException;
/**
* The
|
Type
|
java
|
playframework__playframework
|
documentation/manual/working/commonGuide/filters/code/javaguide/detailed/filters/csp/CSPReportController.java
|
{
"start": 442,
"end": 994
}
|
class ____ extends Controller {
private final Logger logger = LoggerFactory.getLogger(getClass());
@BodyParser.Of(CSPReportBodyParser.class)
public Result cspReport(Http.Request request) {
JavaCSPReport cspReport = request.body().as(JavaCSPReport.class);
logger.warn(
"CSP violation: violatedDirective = {}, blockedUri = {}, originalPolicy = {}",
cspReport.violatedDirective(),
cspReport.blockedUri(),
cspReport.originalPolicy());
return Results.ok();
}
}
// #csp-report-controller
|
CSPReportController
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenIT0143TransitiveDependencyScopesTest.java
|
{
"start": 5096,
"end": 7995
}
|
class ____ when mediated from
* a provided-scope dependency.
*
* @throws Exception in case of failure
*/
@Test
public void testitProvidedScope() throws Exception {
Verifier verifier = run("provided");
String targetDir = "target-provided";
List<String> compileArtifacts = verifier.loadLines(targetDir + "/compile-artifacts.txt");
assertTrue(
compileArtifacts.contains("org.apache.maven.its.it0143:direct:jar:0.1"), compileArtifacts.toString());
assertTrue(
compileArtifacts.contains("org.apache.maven.its.it0143:compile:jar:0.1"), compileArtifacts.toString());
assertTrue(
compileArtifacts.contains("org.apache.maven.its.it0143:runtime:jar:0.1"), compileArtifacts.toString());
assertEquals(3, compileArtifacts.size());
List<String> compileClassPath = verifier.loadLines(targetDir + "/compile-cp.txt");
assertTrue(compileClassPath.contains("classes"), compileClassPath.toString());
assertTrue(compileClassPath.contains("direct-0.1.jar"), compileClassPath.toString());
assertTrue(compileClassPath.contains("compile-0.1.jar"), compileClassPath.toString());
assertTrue(compileClassPath.contains("runtime-0.1.jar"), compileClassPath.toString());
assertEquals(4, compileClassPath.size());
List<String> runtimeArtifacts = verifier.loadLines(targetDir + "/runtime-artifacts.txt");
assertEquals(0, runtimeArtifacts.size());
List<String> runtimeClassPath = verifier.loadLines(targetDir + "/runtime-cp.txt");
assertTrue(runtimeClassPath.contains("classes"), runtimeClassPath.toString());
assertEquals(1, runtimeClassPath.size());
List<String> testArtifacts = verifier.loadLines(targetDir + "/test-artifacts.txt");
assertTrue(testArtifacts.contains("org.apache.maven.its.it0143:direct:jar:0.1"), testArtifacts.toString());
assertTrue(testArtifacts.contains("org.apache.maven.its.it0143:compile:jar:0.1"), testArtifacts.toString());
assertTrue(testArtifacts.contains("org.apache.maven.its.it0143:runtime:jar:0.1"), testArtifacts.toString());
assertEquals(3, testArtifacts.size());
List<String> testClassPath = verifier.loadLines(targetDir + "/test-cp.txt");
assertTrue(testClassPath.contains("classes"), testClassPath.toString());
assertTrue(testClassPath.contains("test-classes"), testClassPath.toString());
assertTrue(testClassPath.contains("direct-0.1.jar"), testClassPath.toString());
assertTrue(testClassPath.contains("compile-0.1.jar"), testClassPath.toString());
assertTrue(testClassPath.contains("runtime-0.1.jar"), testClassPath.toString());
assertEquals(5, testClassPath.size());
}
/**
* Test that the different scopes of transitive dependencies end up on the right
|
paths
|
java
|
netty__netty
|
codec-http/src/test/java/io/netty/handler/codec/http/websocketx/extensions/compression/PerMessageDeflateServerExtensionHandshakerTest.java
|
{
"start": 1368,
"end": 6792
}
|
class ____ {
@Test
public void testNormalHandshake() {
WebSocketServerExtension extension;
WebSocketExtensionData data;
Map<String, String> parameters;
// initialize
PerMessageDeflateServerExtensionHandshaker handshaker =
new PerMessageDeflateServerExtensionHandshaker(0);
// execute
extension = handshaker.handshakeExtension(
new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, Collections.<String, String>emptyMap()));
// test
assertNotNull(extension);
assertEquals(WebSocketServerExtension.RSV1, extension.rsv());
assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
// execute
data = extension.newReponseData();
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
assertTrue(data.parameters().isEmpty());
// initialize
parameters = new HashMap<String, String>();
parameters.put(CLIENT_MAX_WINDOW, null);
parameters.put(CLIENT_NO_CONTEXT, null);
// execute
extension = handshaker.handshakeExtension(
new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, Collections.<String, String>emptyMap()));
// test
assertNotNull(extension);
assertEquals(WebSocketServerExtension.RSV1, extension.rsv());
assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
// execute
data = extension.newReponseData();
// test
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
assertTrue(data.parameters().isEmpty());
// initialize
parameters = new HashMap<String, String>();
parameters.put(SERVER_MAX_WINDOW, "12");
parameters.put(SERVER_NO_CONTEXT, null);
// execute
extension = handshaker.handshakeExtension(
new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
// test
assertNull(extension);
}
@Test
public void testCustomHandshake() {
WebSocketServerExtension extension;
Map<String, String> parameters;
WebSocketExtensionData data;
// initialize
PerMessageDeflateServerExtensionHandshaker handshaker =
new PerMessageDeflateServerExtensionHandshaker(6, true, 10, true, true, 0);
parameters = new HashMap<String, String>();
parameters.put(CLIENT_MAX_WINDOW, null);
parameters.put(SERVER_MAX_WINDOW, "12");
parameters.put(CLIENT_NO_CONTEXT, null);
parameters.put(SERVER_NO_CONTEXT, null);
// execute
extension = handshaker.handshakeExtension(
new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
// test
assertNotNull(extension);
assertEquals(WebSocketServerExtension.RSV1, extension.rsv());
assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
// execute
data = extension.newReponseData();
// test
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
assertTrue(data.parameters().containsKey(CLIENT_MAX_WINDOW));
assertEquals("10", data.parameters().get(CLIENT_MAX_WINDOW));
assertTrue(data.parameters().containsKey(SERVER_MAX_WINDOW));
assertEquals("12", data.parameters().get(SERVER_MAX_WINDOW));
assertTrue(data.parameters().containsKey(CLIENT_MAX_WINDOW));
assertTrue(data.parameters().containsKey(SERVER_MAX_WINDOW));
// initialize
parameters = new HashMap<String, String>();
parameters.put(SERVER_MAX_WINDOW, "12");
parameters.put(SERVER_NO_CONTEXT, null);
// execute
extension = handshaker.handshakeExtension(
new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
// test
assertNotNull(extension);
assertEquals(WebSocketServerExtension.RSV1, extension.rsv());
assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
// execute
data = extension.newReponseData();
// test
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
assertEquals(2, data.parameters().size());
assertTrue(data.parameters().containsKey(SERVER_MAX_WINDOW));
assertEquals("12", data.parameters().get(SERVER_MAX_WINDOW));
assertTrue(data.parameters().containsKey(SERVER_NO_CONTEXT));
// initialize
parameters = new HashMap<String, String>();
// execute
extension = handshaker.handshakeExtension(
new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
// test
assertNotNull(extension);
// execute
data = extension.newReponseData();
// test
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
assertTrue(data.parameters().isEmpty());
}
}
|
PerMessageDeflateServerExtensionHandshakerTest
|
java
|
micronaut-projects__micronaut-core
|
http-client/src/main/java/io/micronaut/http/client/netty/ConnectionManager.java
|
{
"start": 45495,
"end": 50416
}
|
class ____ extends ChannelOutboundHandlerAdapter {
private final PoolHolder pool;
private final String host;
private final int port;
private NettyClientCustomizer bootstrappedCustomizer;
Http3ChannelInitializer(PoolHolder pool, String host, int port) {
this.pool = pool;
this.host = host;
this.port = port;
}
// delay channel initialization until bind is complete. This is required so that we can see
// the local address
@Override
public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) throws Exception {
ChannelPromise downstreamPromise = ctx.newPromise();
super.bind(ctx, localAddress, downstreamPromise);
downstreamPromise.addListener(future -> {
if (future.isSuccess()) {
try {
initChannel(promise.channel());
ctx.pipeline().remove(this);
promise.setSuccess();
} catch (Exception e) {
promise.setFailure(e);
}
} else {
promise.setFailure(future.cause());
}
});
}
private void initChannel(Channel ch) {
NettyClientCustomizer channelCustomizer = bootstrappedCustomizer.specializeForChannel(ch, NettyClientCustomizer.ChannelRole.CONNECTION);
insertPcapLoggingHandlerLazy(ch, "outer");
QuicSslContext quicSslContext = sslContextWrapper.takeRetained().quicSslContext();
try {
ch.pipeline()
.addLast(Http3.newQuicClientCodecBuilder()
.sslEngineProvider(c -> quicSslContext.newEngine(c.alloc(), host, port))
.initialMaxData(10000000)
.initialMaxStreamDataBidirectionalLocal(1000000)
.build())
.addLast(ChannelPipelineCustomizer.HANDLER_INITIAL_ERROR, pool.initialErrorHandler);
} finally {
ReferenceCountUtil.release(quicSslContext);
}
channelCustomizer.onInitialPipelineBuilt();
QuicChannel.newBootstrap(ch)
.handler(new ChannelInboundHandlerAdapter() {
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
QuicChannel quicChannel = (QuicChannel) ctx.channel();
ctx.pipeline().addLast(ChannelPipelineCustomizer.HANDLER_HTTP2_CONNECTION, new Http3ClientConnectionHandler(
// control stream handler
new ChannelInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg instanceof Http3SettingsFrame) {
ch.pipeline().remove(ChannelPipelineCustomizer.HANDLER_INITIAL_ERROR);
pool.new Http3ConnectionHolder(ch, quicChannel, channelCustomizer).init();
}
super.channelRead(ctx, msg);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
ch.pipeline().remove(ChannelPipelineCustomizer.HANDLER_INITIAL_ERROR);
ch.close();
pool.pool.onNewConnectionFailure(ctx.channel().eventLoop(), cause);
}
},
null,
null,
null,
false
));
ctx.pipeline().remove(this);
}
})
.remoteAddress(new InetSocketAddress(this.host, this.port))
.localAddress(ch.localAddress())
.connect()
.addListener((GenericFutureListener<Future<QuicChannel>>) future -> {
if (!future.isSuccess()) {
pool.pool.onNewConnectionFailure(ch.eventLoop(), future.cause());
}
});
}
}
/**
* Handle for a pooled connection. One pool handle generally corresponds to one request, and
* once the request and response are done, the handle is {@link #release() released} and a new
* request can claim the same connection.
*/
public abstract static
|
Http3ChannelInitializer
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/hint/support/PathMatchingResourcePatternResolverRuntimeHints.java
|
{
"start": 1071,
"end": 1354
}
|
class ____ implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
hints.reflection().registerType(TypeReference.of("org.eclipse.core.runtime.FileLocator"));
}
}
|
PathMatchingResourcePatternResolverRuntimeHints
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-rest/src/main/java/org/apache/camel/component/cxf/jaxrs/BindingStyle.java
|
{
"start": 857,
"end": 1689
}
|
enum ____ {
/**
* <i>Only available for consumers.</i> This binding style processes request parameters, multiparts, etc. and maps
* them to IN headers, IN attachments and to the message body. It aims to eliminate low-level processing of
* {@link org.apache.cxf.message.MessageContentsList}. It also also adds more flexibility and simplicity to the
* response mapping.
*/
SimpleConsumer,
/**
* This is the traditional binding style, which simply dumps the {@link org.apache.cxf.message.MessageContentsList}
* coming in from the CXF stack onto the IN message body. The user is then responsible for processing it according
* to the contract defined by the JAX-RS method signature.
*/
Default,
/**
* A custom binding set by the user.
*/
Custom
}
|
BindingStyle
|
java
|
quarkusio__quarkus
|
integration-tests/jsonb/src/test/java/io/quarkus/it/jsonb/JsonInStaticBlockTestCase.java
|
{
"start": 167,
"end": 300
}
|
class ____ {
static {
JsonbProvider.provider().create();
}
@Test
void get() {
}
}
|
JsonInStaticBlockTestCase
|
java
|
spring-projects__spring-boot
|
module/spring-boot-servlet/src/main/java/org/springframework/boot/servlet/actuate/web/mappings/FilterRegistrationMappingDescription.java
|
{
"start": 911,
"end": 1738
}
|
class ____ extends RegistrationMappingDescription<FilterRegistration> {
/**
* Creates a new {@code FilterRegistrationMappingDescription} derived from the given
* {@code filterRegistration}.
* @param filterRegistration the filter registration
*/
public FilterRegistrationMappingDescription(FilterRegistration filterRegistration) {
super(filterRegistration);
}
/**
* Returns the servlet name mappings for the registered filter.
* @return the mappings
*/
public Collection<String> getServletNameMappings() {
return getRegistration().getServletNameMappings();
}
/**
* Returns the URL pattern mappings for the registered filter.
* @return the mappings
*/
public Collection<String> getUrlPatternMappings() {
return getRegistration().getUrlPatternMappings();
}
}
|
FilterRegistrationMappingDescription
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/MapTest.java
|
{
"start": 26369,
"end": 26810
}
|
class ____ {
private final int x;
private final int y;
Point(int x, int y) {
this.x = x;
this.y = y;
}
@Override
public boolean equals(Object o) {
return o instanceof Point && x == ((Point) o).x && y == ((Point) o).y;
}
@Override
public int hashCode() {
return x * 37 + y;
}
@Override
public String toString() {
return x + "," + y;
}
}
static final
|
Point
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ErrorTranslation.java
|
{
"start": 9927,
"end": 9976
}
|
class ____ isolation.
*/
public static final
|
for
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/cluster/api/async/NodeSelectionAclAsyncCommands.java
|
{
"start": 1235,
"end": 5952
}
|
interface ____<K, V> {
/**
* The command shows the available ACL categories if called without arguments.
*
* @return List<AclCategory> a list of ACL categories or
*/
AsyncExecutions<Set<AclCategory>> aclCat();
/**
* The command shows all the Redis commands in the specified category.
*
* @param category the specified category
* @return List<CommandType> a list of commands inside a given category
*/
AsyncExecutions<Set<CommandType>> aclCat(AclCategory category);
/**
* Delete all the specified ACL users and terminate all the connections that are authenticated with such users.
*
* @param usernames the specified usernames
* @return Long The number of users that were deleted
*/
AsyncExecutions<Long> aclDeluser(String... usernames);
/**
* Simulate the execution of a given command by a given user.
*
* @param username the specified username
* @param command the specified command
* @param args the specified args of command
* @return String reply: OK on success.
* @since 6.2
*/
AsyncExecutions<String> aclDryRun(String username, String command, String... args);
/**
* Simulate the execution of a given command by a given user.
*
* @param username the specified username
* @param command the specified command to inspect
* @return String reply: OK on success.
* @since 6.2
*/
AsyncExecutions<String> aclDryRun(String username, RedisCommand<K, V, ?> command);
/**
* The command generates a password.
*
* @return String bulk-string-reply 64 bytes string password representing 256 bits of pseudorandom data.
*/
AsyncExecutions<String> aclGenpass();
/**
* The command generates a password.
*
* @param bits amount of bits
* @return String bulk-string-reply N/4 bytes string password representing N bits of pseudorandom data.
*/
AsyncExecutions<String> aclGenpass(int bits);
/**
* The command returns all the rules defined for an existing ACL user.
*
* @param username the specified username
* @return Map<String, Object> a map of ACL rule definitions for the user.
*/
AsyncExecutions<List<Object>> aclGetuser(String username);
/**
* The command shows the currently active ACL rules in the Redis server.
*
* @return List<String> a list of strings.
*/
AsyncExecutions<List<String>> aclList();
/**
* When Redis is configured to use an ACL file (with the aclfile configuration option), this command will reload the ACLs
* from the file, replacing all the current ACL rules with the ones defined in the file.
*
* @return String simple-string-reply OK or error message.
*/
AsyncExecutions<String> aclLoad();
/**
* The command shows a list of recent ACL security events.
*
* @return List<Map<K,Object>> list of security events.
*/
AsyncExecutions<List<Map<String, Object>>> aclLog();
/**
* The command shows a list of recent ACL security events.
*
* @param count max count of events
* @return List<Map<K, Object>> list of security events.
*/
AsyncExecutions<List<Map<String, Object>>> aclLog(int count);
/**
* The command clears ACL security events.
*
* @return String simple-string-reply OK if the security log was cleared.
*/
AsyncExecutions<String> aclLogReset();
/**
* When Redis is configured to use an ACL file (with the aclfile configuration option), this command will save the currently
* defined ACLs from the server memory to the ACL file.
*
* @return String simple-string-reply OK or error message.
*/
AsyncExecutions<String> aclSave();
/**
* Create an ACL user with the specified rules or modify the rules of an existing user.
*
* @param username the specified username
* @param setuserArgs rules
* @return String simple-string-reply OK or error message.
*/
AsyncExecutions<String> aclSetuser(String username, AclSetuserArgs setuserArgs);
/**
* The command shows a list of all the usernames of the currently configured users in the Redis ACL system.
*
* @return List<K> a list of usernames.
*/
AsyncExecutions<List<String>> aclUsers();
/**
* The command shows a list of all the usernames of the currently configured users in the Redis ACL system.
*
* @return K bulk-string-reply the username of the current connection.
*/
AsyncExecutions<String> aclWhoami();
}
|
NodeSelectionAclAsyncCommands
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/mixins/MapperMixinsCopy1998Test.java
|
{
"start": 1176,
"end": 2005
}
|
class ____ {
@JsonProperty
private String format = "1.0";
public String getFormat() {
return format;
}
@JsonProperty
private MyModelChildBase child;
public MyModelChildBase getChild() {
return child;
}
public void setChild(MyModelChildBase child) {
this.child = child;
}
@JsonProperty
private String notVisible = "should not be present";
public String getNotVisible() {
return notVisible;
}
}
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes({
@JsonSubTypes.Type(value = MyChildA.class, name = "CHILD_A"),
@JsonSubTypes.Type(value = MyChildB.class, name = "CHILD_B")
})
abstract static
|
MyModelRoot
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/SubcomponentValidationTest.java
|
{
"start": 11574,
"end": 11889
}
|
interface ____");
});
}
@Test public void subcomponentOnConcreteType() {
Source subcomponentFile =
CompilerTests.javaSource("test.NotASubcomponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"final
|
TestComponent
|
java
|
dropwizard__dropwizard
|
dropwizard-health/src/test/java/io/dropwizard/health/ScheduleTest.java
|
{
"start": 437,
"end": 1242
}
|
class ____ {
private final ObjectMapper objectMapper = Jackson.newObjectMapper();
private final Validator validator = Validators.newValidator();
private final YamlConfigurationFactory<Schedule> configFactory =
new YamlConfigurationFactory<>(Schedule.class, validator, objectMapper, "dw");
@Test
void shouldBuildAScheduleFromYaml() throws Exception {
final Schedule schedule = configFactory.build(new ResourceConfigurationSourceProvider(), "/yml/schedule.yml");
assertThat(schedule.getCheckInterval().toMilliseconds()).isEqualTo(2500L);
assertThat(schedule.getDowntimeInterval().toSeconds()).isEqualTo(25L);
assertThat(schedule.getFailureAttempts()).isEqualTo(2);
assertThat(schedule.getSuccessAttempts()).isEqualTo(1);
}
}
|
ScheduleTest
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/AvailablePortFinder.java
|
{
"start": 1368,
"end": 5439
}
|
class ____ {
/**
* The minimum server currentMinPort number for IPv4.
* Set at 1100 to avoid returning privileged currentMinPort numbers.
*/
public static final int MIN_PORT_NUMBER = 1100;
/**
* The maximum server currentMinPort number for IPv4.
*/
public static final int MAX_PORT_NUMBER = 65535;
private static final Logger LOG = StatusLogger.getLogger();
/**
* We'll hold open the lowest port in this process
* so parallel processes won't use the same block
* of ports. They'll go up to the next block.
*/
private static final ServerSocket LOCK;
/**
* Incremented to the next lowest available port when getNextAvailable() is called.
*/
private static final AtomicInteger currentMinPort = new AtomicInteger(MIN_PORT_NUMBER);
/**
* Creates a new instance.
*/
private AvailablePortFinder() {
// Do nothing
}
static {
int port = MIN_PORT_NUMBER;
ServerSocket ss = null;
while (ss == null) {
try {
ss = new ServerSocket(port);
} catch (final Exception e) {
ss = null;
port += 200;
}
}
LOCK = ss;
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
Closer.closeSilently(LOCK);
}
});
currentMinPort.set(port + 1);
}
/**
* Gets the next available port starting at the lowest number. This is the preferred
* method to use. The port return is immediately marked in use and doesn't rely on the caller actually opening
* the port.
*
* @throws IllegalArgumentException is thrown if the port number is out of range
* @throws NoSuchElementException if there are no ports available
* @return the available port
*/
public static synchronized int getNextAvailable() {
final int next = getNextAvailable(currentMinPort.get());
currentMinPort.set(next + 1);
return next;
}
/**
* Gets the next available port starting at a given from port.
*
* @param fromPort the from port to scan for availability
* @throws IllegalArgumentException is thrown if the port number is out of range
* @throws NoSuchElementException if there are no ports available
* @return the available port
*/
public static synchronized int getNextAvailable(final int fromPort) {
if (fromPort < currentMinPort.get() || fromPort > MAX_PORT_NUMBER) {
throw new IllegalArgumentException("From port number not in valid range: " + fromPort);
}
for (int i = fromPort; i <= MAX_PORT_NUMBER; i++) {
if (available(i)) {
LOG.debug("getNextAvailable({}) -> {}", fromPort, i);
return i;
}
}
throw new NoSuchElementException("Could not find an available port above " + fromPort);
}
/**
* Checks to see if a specific port is available.
*
* @param port the port number to check for availability
* @return {@code true} if the port is available, or {@code false} if not
* @throws IllegalArgumentException is thrown if the port number is out of range
*/
public static synchronized boolean available(final int port) throws IllegalArgumentException {
if (port < currentMinPort.get() || port > MAX_PORT_NUMBER) {
throw new IllegalArgumentException("Invalid start currentMinPort: " + port);
}
ServerSocket ss = null;
DatagramSocket ds = null;
try {
ss = new ServerSocket(port);
ss.setReuseAddress(true);
ds = new DatagramSocket(port);
ds.setReuseAddress(true);
return true;
} catch (final IOException e) {
// Do nothing
} finally {
Closer.closeSilently(ds);
Closer.closeSilently(ss);
}
return false;
}
}
|
AvailablePortFinder
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java
|
{
"start": 14377,
"end": 14556
}
|
class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.useFollowOnLocking( null, null );
}
}
public static
|
SupportFollowOnLocking
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ModuleFactoryGeneratorTest.java
|
{
"start": 36031,
"end": 37194
}
|
class ____<T extends Bar> "
+ "must be publicly accessible.");
} else {
// Note: In this case, when calling the factory the component will use the requested
// type, Foo, e.g. "ParameterizedModule_ProvideListTFactory.<Foo>create()" since Foo
// is publicly accessible. It doesn't matter that the bound type, Bar, is
// package-private.
subject.hasErrorCount(0);
assertSourceMatchesGolden(subject, "test/ParameterizedModule_ProvideListTFactory");
subject.generatedSource(goldenFileRule.goldenSource("other/DaggerMyComponent"));
}
});
}
@Test
public void parameterizedModule_withPackagePrivateTypeArgumentAndPublicBounds() {
Source component =
CompilerTests.javaSource(
"other.MyComponent",
"package other;",
"",
"import dagger.Component;",
"import test.ConcreteModule;",
"import test.Usage;",
"",
"@Component(modules = {ConcreteModule.class})",
"
|
ParameterizedModule
|
java
|
google__guice
|
extensions/dagger-adapter/test/com/google/inject/daggeradapter/BindsTest.java
|
{
"start": 3892,
"end": 3940
}
|
interface ____ {}
@Module
|
JakartaBindsQualifier
|
java
|
junit-team__junit5
|
junit-vintage-engine/src/main/java/org/junit/vintage/engine/descriptor/VintageEngineDescriptor.java
|
{
"start": 703,
"end": 933
}
|
class ____ extends EngineDescriptor {
public VintageEngineDescriptor(UniqueId uniqueId) {
super(uniqueId, "JUnit Vintage");
}
public Set<TestDescriptor> getModifiableChildren() {
return children;
}
}
|
VintageEngineDescriptor
|
java
|
spring-projects__spring-boot
|
module/spring-boot-hibernate/src/main/java/org/springframework/boot/hibernate/autoconfigure/HibernatePropertiesCustomizer.java
|
{
"start": 1001,
"end": 1236
}
|
interface ____ {
/**
* Customize the specified JPA vendor properties.
* @param hibernateProperties the JPA vendor properties to customize
*/
void customize(Map<String, Object> hibernateProperties);
}
|
HibernatePropertiesCustomizer
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java
|
{
"start": 1557,
"end": 10677
}
|
class ____ extends ESTestCase {
public void testFetchSource() throws IOException {
XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject();
HitContext hitContext = hitExecute(source, true, null, null);
assertEquals(Collections.singletonMap("field", "value"), hitContext.hit().getSourceAsMap());
}
public void testBasicFiltering() throws IOException {
XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field1", "value").field("field2", "value2").endObject();
HitContext hitContext = hitExecute(source, false, null, null);
assertNull(hitContext.hit().getSourceAsMap());
hitContext = hitExecute(source, true, "field1", null);
assertEquals(Collections.singletonMap("field1", "value"), hitContext.hit().getSourceAsMap());
hitContext = hitExecute(source, true, "hello", null);
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
hitContext = hitExecute(source, true, "*", "field2");
assertEquals(Collections.singletonMap("field1", "value"), hitContext.hit().getSourceAsMap());
}
public void testExcludesAll() throws IOException {
XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field1", "value").field("field2", "value2").endObject();
HitContext hitContext = hitExecute(source, false, null, null);
assertNull(hitContext.hit().getSourceAsMap());
hitContext = hitExecute(source, true, "field1", "*");
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
hitContext = hitExecute(source, true, null, "*");
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
hitContext = hitExecute(source, true, "*", "*");
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
hitContext = hitExecuteMultiple(source, true, new String[] { "field1", "field2" }, new String[] { "*", "field1" });
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
hitContext = hitExecuteMultiple(source, true, null, new String[] { "field2", "*", "field1" });
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
}
public void testMultipleFiltering() throws IOException {
XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").field("field2", "value2").endObject();
HitContext hitContext = hitExecuteMultiple(source, true, new String[] { "*.notexisting", "field" }, null);
assertEquals(Collections.singletonMap("field", "value"), hitContext.hit().getSourceAsMap());
hitContext = hitExecuteMultiple(source, true, new String[] { "field.notexisting.*", "field" }, null);
assertEquals(Collections.singletonMap("field", "value"), hitContext.hit().getSourceAsMap());
}
public void testNestedSource() throws IOException {
Map<String, Object> expectedNested = Collections.singletonMap("nested2", Collections.singletonMap("field", "value0"));
XContentBuilder source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.field("field2", "value2")
.field("nested1", expectedNested)
.endObject();
HitContext hitContext = hitExecuteMultiple(source, true, null, null, new SearchHit.NestedIdentity("nested1", 0, null));
assertEquals(expectedNested, hitContext.hit().getSourceAsMap());
hitContext = hitExecuteMultiple(source, true, new String[] { "invalid" }, null, new SearchHit.NestedIdentity("nested1", 0, null));
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
hitContext = hitExecuteMultiple(
source,
true,
null,
null,
new SearchHit.NestedIdentity("nested1", 0, new SearchHit.NestedIdentity("nested2", 0, null))
);
assertEquals(Collections.singletonMap("field", "value0"), hitContext.hit().getSourceAsMap());
hitContext = hitExecuteMultiple(
source,
true,
new String[] { "invalid" },
null,
new SearchHit.NestedIdentity("nested1", 0, new SearchHit.NestedIdentity("nested2", 0, null))
);
assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap());
}
public void testSourceDisabled() throws IOException {
HitContext hitContext = hitExecute(null, true, null, null);
assertNull(hitContext.hit().getSourceAsMap());
hitContext = hitExecute(null, false, null, null);
assertNull(hitContext.hit().getSourceAsMap());
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> hitExecute(null, true, "field1", null));
assertEquals(
"unable to fetch fields from _source field: _source is disabled in the mappings " + "for index [index]",
exception.getMessage()
);
exception = expectThrows(
IllegalArgumentException.class,
() -> hitExecuteMultiple(null, true, new String[] { "*" }, new String[] { "field2" })
);
assertEquals(
"unable to fetch fields from _source field: _source is disabled in the mappings " + "for index [index]",
exception.getMessage()
);
}
public void testNestedSourceWithSourceDisabled() throws IOException {
HitContext hitContext = hitExecute(null, true, null, null, new SearchHit.NestedIdentity("nested1", 0, null));
assertNull(hitContext.hit().getSourceAsMap());
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> hitExecute(null, true, "field1", null, new SearchHit.NestedIdentity("nested1", 0, null))
);
assertEquals(
"unable to fetch fields from _source field: _source is disabled in the mappings " + "for index [index]",
e.getMessage()
);
}
private HitContext hitExecute(XContentBuilder source, boolean fetchSource, String include, String exclude) throws IOException {
return hitExecute(source, fetchSource, include, exclude, null);
}
private HitContext hitExecute(
XContentBuilder source,
boolean fetchSource,
String include,
String exclude,
SearchHit.NestedIdentity nestedIdentity
) throws IOException {
return hitExecuteMultiple(
source,
fetchSource,
include == null ? Strings.EMPTY_ARRAY : new String[] { include },
exclude == null ? Strings.EMPTY_ARRAY : new String[] { exclude },
nestedIdentity
);
}
private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes)
throws IOException {
return hitExecuteMultiple(source, fetchSource, includes, excludes, null);
}
private HitContext hitExecuteMultiple(
XContentBuilder sourceBuilder,
boolean fetchSource,
String[] includes,
String[] excludes,
SearchHit.NestedIdentity nestedIdentity
) throws IOException {
FetchSourceContext fetchSourceContext = FetchSourceContext.of(fetchSource, includes, excludes);
FetchContext fetchContext = mock(FetchContext.class);
when(fetchContext.fetchSourceContext()).thenReturn(fetchSourceContext);
when(fetchContext.getIndexName()).thenReturn("index");
SearchExecutionContext sec = mock(SearchExecutionContext.class);
when(sec.isSourceEnabled()).thenReturn(sourceBuilder != null);
IndexSettings indexSettings = new IndexSettings(
IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(),
Settings.EMPTY
);
when(sec.indexVersionCreated()).thenReturn(indexSettings.getIndexVersionCreated());
when(sec.getIndexSettings()).thenReturn(indexSettings);
when(fetchContext.getSearchExecutionContext()).thenReturn(sec);
final SearchHit searchHit = SearchHit.unpooled(1, null, nestedIdentity);
// We don't need a real index, just a LeafReaderContext which cannot be mocked.
MemoryIndex index = new MemoryIndex();
LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);
Source source = sourceBuilder == null ? Source.empty(null) : Source.fromBytes(BytesReference.bytes(sourceBuilder));
HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1, Map.of(), source, null);
FetchSourcePhase phase = new FetchSourcePhase();
FetchSubPhaseProcessor processor = phase.getProcessor(fetchContext);
if (fetchSource == false) {
assertNull(processor);
} else {
assertNotNull(processor);
processor.process(hitContext);
}
return hitContext;
}
}
|
FetchSourcePhaseTests
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/UriAssert.java
|
{
"start": 730,
"end": 929
}
|
class ____ extends AbstractUriAssert<UriAssert> {
/**
* Constructor
*
* @param actual the url to test
*/
public UriAssert(URI actual) {
super(actual, UriAssert.class);
}
}
|
UriAssert
|
java
|
apache__kafka
|
raft/src/main/java/org/apache/kafka/raft/ElectionState.java
|
{
"start": 1145,
"end": 7223
}
|
class ____ {
private static final int UNKNOWN_LEADER_ID = -1;
private static final int NOT_VOTED = -1;
private final int epoch;
private final OptionalInt leaderId;
private final Optional<ReplicaKey> votedKey;
// This is deprecated. It is only used when writing version 0 of the quorum state file
private final Set<Integer> voters;
ElectionState(
int epoch,
OptionalInt leaderId,
Optional<ReplicaKey> votedKey,
Set<Integer> voters
) {
this.epoch = epoch;
this.leaderId = leaderId;
this.votedKey = votedKey;
this.voters = voters;
}
public int epoch() {
return epoch;
}
public boolean isLeader(int nodeId) {
if (nodeId < 0)
throw new IllegalArgumentException("Invalid negative nodeId: " + nodeId);
return leaderIdOrSentinel() == nodeId;
}
/**
* Return if the replica has voted for the given candidate.
*
* A replica has voted for a candidate if all the following are true:
* 1. the node's id and voted id match and
* 2. if the voted directory id is set, it matches the node's directory id
*
* @param nodeKey the id and directory id of the replica
* @return true when the arguments match, otherwise false
*/
public boolean isVotedCandidate(ReplicaKey nodeKey) {
if (nodeKey.id() < 0) {
throw new IllegalArgumentException("Invalid node key " + nodeKey);
} else if (votedKey.isEmpty()) {
return false;
} else if (votedKey.get().id() != nodeKey.id()) {
return false;
} else if (votedKey.get().directoryId().isEmpty()) {
// when the persisted voted directory id is not present assume that we voted for this candidate;
// this happens when the kraft version is 0.
return true;
}
return votedKey.get().directoryId().equals(nodeKey.directoryId());
}
public int leaderId() {
if (leaderId.isEmpty())
throw new IllegalStateException("Attempt to access nil leaderId");
return leaderId.getAsInt();
}
public int leaderIdOrSentinel() {
return leaderId.orElse(UNKNOWN_LEADER_ID);
}
public OptionalInt optionalLeaderId() {
return leaderId;
}
public ReplicaKey votedKey() {
if (votedKey.isEmpty()) {
throw new IllegalStateException("Attempt to access nil votedId");
}
return votedKey.get();
}
public Optional<ReplicaKey> optionalVotedKey() {
return votedKey;
}
public boolean hasLeader() {
return leaderId.isPresent();
}
public boolean hasVoted() {
return votedKey.isPresent();
}
public QuorumStateData toQuorumStateData(short version) {
QuorumStateData data = new QuorumStateData()
.setLeaderEpoch(epoch)
.setLeaderId(leaderIdOrSentinel())
.setVotedId(votedKey.map(ReplicaKey::id).orElse(NOT_VOTED));
if (version == 0) {
List<QuorumStateData.Voter> dataVoters = voters
.stream()
.map(voterId -> new QuorumStateData.Voter().setVoterId(voterId))
.collect(Collectors.toList());
data.setCurrentVoters(dataVoters);
} else if (version == 1) {
data.setVotedDirectoryId(
votedKey.flatMap(ReplicaKey::directoryId).orElse(ReplicaKey.NO_DIRECTORY_ID)
);
} else {
throw new IllegalStateException(
String.format(
"File quorum state store doesn't handle supported version %d", version
)
);
}
return data;
}
@Override
public String toString() {
return String.format(
"Election(epoch=%d, leaderId=%s, votedKey=%s, voters=%s)",
epoch,
leaderId,
votedKey,
voters
);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ElectionState that = (ElectionState) o;
if (epoch != that.epoch) return false;
if (!leaderId.equals(that.leaderId)) return false;
if (!votedKey.equals(that.votedKey)) return false;
return voters.equals(that.voters);
}
@Override
public int hashCode() {
return Objects.hash(epoch, leaderId, votedKey, voters);
}
public static ElectionState withVotedCandidate(int epoch, ReplicaKey votedKey, Set<Integer> voters) {
if (votedKey.id() < 0) {
throw new IllegalArgumentException("Illegal voted Id " + votedKey.id() + ": must be non-negative");
}
return new ElectionState(epoch, OptionalInt.empty(), Optional.of(votedKey), voters);
}
public static ElectionState withElectedLeader(
int epoch,
int leaderId,
Optional<ReplicaKey> votedKey,
Set<Integer> voters
) {
if (leaderId < 0) {
throw new IllegalArgumentException("Illegal leader Id " + leaderId + ": must be non-negative");
}
return new ElectionState(epoch, OptionalInt.of(leaderId), votedKey, voters);
}
public static ElectionState withUnknownLeader(int epoch, Set<Integer> voters) {
return new ElectionState(epoch, OptionalInt.empty(), Optional.empty(), voters);
}
public static ElectionState fromQuorumStateData(QuorumStateData data) {
Optional<ReplicaKey> votedKey = data.votedId() == NOT_VOTED ?
Optional.empty() :
Optional.of(ReplicaKey.of(data.votedId(), data.votedDirectoryId()));
return new ElectionState(
data.leaderEpoch(),
data.leaderId() == UNKNOWN_LEADER_ID ? OptionalInt.empty() : OptionalInt.of(data.leaderId()),
votedKey,
data.currentVoters().stream().map(QuorumStateData.Voter::voterId).collect(Collectors.toSet())
);
}
}
|
ElectionState
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/v2/AbstractReducingState.java
|
{
"start": 1712,
"end": 7490
}
|
class ____<K, N, V> extends AbstractKeyedState<K, N, V>
implements InternalReducingState<K, N, V> {
protected final ReduceFunction<V> reduceFunction;
public AbstractReducingState(
StateRequestHandler stateRequestHandler,
ReduceFunction<V> reduceFunction,
TypeSerializer<V> valueSerializer) {
super(stateRequestHandler, valueSerializer);
this.reduceFunction = reduceFunction;
}
@Override
public StateFuture<V> asyncGet() {
return asyncGetInternal();
}
@Override
public StateFuture<Void> asyncAdd(V value) {
return asyncGetInternal()
.thenCompose(
oldValue -> {
V newValue =
oldValue == null
? value
: reduceFunction.reduce((V) oldValue, value);
return asyncUpdateInternal(newValue);
});
}
@Override
public V get() {
return getInternal();
}
@Override
public void add(V value) {
V oldValue = getInternal();
try {
V newValue = oldValue == null ? value : reduceFunction.reduce(oldValue, value);
updateInternal(newValue);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public StateFuture<Void> asyncMergeNamespaces(N target, Collection<N> sources) {
if (sources == null || sources.isEmpty()) {
return StateFutureUtils.completedVoidFuture();
}
// phase 1: read from the sources and target
List<StateFuture<V>> futures = new ArrayList<>(sources.size() + 1);
for (N source : sources) {
if (source != null) {
setCurrentNamespace(source);
futures.add(asyncGetInternal());
}
}
setCurrentNamespace(target);
futures.add(asyncGetInternal());
// phase 2: merge the sources to the target
return StateFutureUtils.combineAll(futures)
.thenCompose(
values -> {
List<StateFuture<Void>> updateFutures =
new ArrayList<>(sources.size() + 1);
V current = null;
Iterator<V> valueIterator = values.iterator();
for (N source : sources) {
V value = valueIterator.next();
if (value != null) {
setCurrentNamespace(source);
updateFutures.add(asyncUpdateInternal(null));
if (current != null) {
current = reduceFunction.reduce(current, value);
} else {
current = value;
}
}
}
V targetValue = valueIterator.next();
if (current != null) {
if (targetValue != null) {
current = reduceFunction.reduce(current, targetValue);
}
setCurrentNamespace(target);
updateFutures.add(asyncUpdateInternal(current));
}
return StateFutureUtils.combineAll(updateFutures)
.thenAccept(ignores -> {});
});
}
@Override
public void mergeNamespaces(N target, Collection<N> sources) {
if (sources == null || sources.isEmpty()) {
return;
}
try {
V current = null;
// merge the sources to the target
for (N source : sources) {
if (source != null) {
setCurrentNamespace(source);
V oldValue = getInternal();
if (oldValue != null) {
updateInternal(null);
if (current != null) {
current = reduceFunction.reduce(current, oldValue);
} else {
current = oldValue;
}
}
}
}
// if something came out of merging the sources, merge it or write it to the target
if (current != null) {
// create the target full-binary-key
setCurrentNamespace(target);
V targetValue = getInternal();
if (targetValue != null) {
current = reduceFunction.reduce(current, targetValue);
}
updateInternal(current);
}
} catch (Exception e) {
throw new RuntimeException("merge namespace fail.", e);
}
}
@Override
public StateFuture<V> asyncGetInternal() {
return handleRequest(StateRequestType.REDUCING_GET, null);
}
@Override
public StateFuture<Void> asyncUpdateInternal(V valueToStore) {
return handleRequest(StateRequestType.REDUCING_ADD, valueToStore);
}
@Override
public V getInternal() {
return handleRequestSync(StateRequestType.REDUCING_GET, null);
}
@Override
public void updateInternal(V valueToStore) {
handleRequestSync(StateRequestType.REDUCING_ADD, valueToStore);
}
}
|
AbstractReducingState
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/Employee.java
|
{
"start": 531,
"end": 1838
}
|
class ____ implements Serializable {
@Id
@GeneratedValue
@Column(name="id_emp")
private Integer id;
private String firstName;
private String lastName;
@OneToOne
@JoinColumn(name="id_title")
private Title title;
@ManyToOne(fetch=FetchType.LAZY)
@JoinColumn(name="id_depto")
private Department department;
public Employee() {}
public Employee(Integer _id, String _lastName, Integer _idTitle, String _descriptionTitle, Department _dept, String _fname) {
setId(_id);
setLastName(_lastName);
Title _title = new Title();
_title.setId(_idTitle);
_title.setDescription(_descriptionTitle);
setTitle(_title);
setDepartment(_dept);
setFirstName(_fname);
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public Title getTitle() {
return title;
}
public void setTitle(Title title) {
this.title = title;
}
public Department getDepartment() {
return department;
}
public void setDepartment(Department department) {
this.department = department;
}
}
|
Employee
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecycleMetadata.java
|
{
"start": 1553,
"end": 6591
}
|
class ____ implements Metadata.ProjectCustom {
public static final String TYPE = "snapshot_lifecycle";
private static final ParseField OPERATION_MODE_FIELD = new ParseField("operation_mode");
private static final ParseField POLICIES_FIELD = new ParseField("policies");
private static final ParseField STATS_FIELD = new ParseField("stats");
public static final SnapshotLifecycleMetadata EMPTY = new SnapshotLifecycleMetadata(
Collections.emptyMap(),
OperationMode.RUNNING,
new SnapshotLifecycleStats()
);
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<SnapshotLifecycleMetadata, Void> PARSER = new ConstructingObjectParser<>(
TYPE,
a -> new SnapshotLifecycleMetadata(
((List<SnapshotLifecyclePolicyMetadata>) a[0]).stream()
.collect(Collectors.toMap(m -> m.getPolicy().getId(), Function.identity())),
OperationMode.valueOf((String) a[1]),
(SnapshotLifecycleStats) a[2]
)
);
static {
PARSER.declareNamedObjects(
ConstructingObjectParser.constructorArg(),
(p, c, n) -> SnapshotLifecyclePolicyMetadata.parse(p, n),
v -> {
throw new IllegalArgumentException("ordered " + POLICIES_FIELD.getPreferredName() + " are not supported");
},
POLICIES_FIELD
);
PARSER.declareString(ConstructingObjectParser.constructorArg(), OPERATION_MODE_FIELD);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (v, o) -> SnapshotLifecycleStats.parse(v), STATS_FIELD);
}
private final Map<String, SnapshotLifecyclePolicyMetadata> snapshotConfigurations;
private final OperationMode operationMode;
private final SnapshotLifecycleStats slmStats;
public SnapshotLifecycleMetadata(
Map<String, SnapshotLifecyclePolicyMetadata> snapshotConfigurations,
OperationMode operationMode,
SnapshotLifecycleStats slmStats
) {
this.snapshotConfigurations = new HashMap<>(snapshotConfigurations);
this.operationMode = operationMode;
this.slmStats = slmStats != null ? slmStats : new SnapshotLifecycleStats();
}
public SnapshotLifecycleMetadata(StreamInput in) throws IOException {
this.snapshotConfigurations = in.readMap(SnapshotLifecyclePolicyMetadata::new);
this.operationMode = in.readEnum(OperationMode.class);
this.slmStats = new SnapshotLifecycleStats(in);
}
public Map<String, SnapshotLifecyclePolicyMetadata> getSnapshotConfigurations() {
return Collections.unmodifiableMap(this.snapshotConfigurations);
}
/**
* @deprecated use {@link LifecycleOperationMetadata#getSLMOperationMode()} instead. This may be incorrect.
*/
@Deprecated(since = "8.7.0")
public OperationMode getOperationMode() {
return operationMode;
}
public SnapshotLifecycleStats getStats() {
return this.slmStats;
}
@Override
public EnumSet<Metadata.XContentContext> context() {
return Metadata.ALL_CONTEXTS;
}
@Override
public Diff<Metadata.ProjectCustom> diff(Metadata.ProjectCustom previousState) {
return new SnapshotLifecycleMetadataDiff((SnapshotLifecycleMetadata) previousState, this);
}
@Override
public String getWriteableName() {
return TYPE;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeMap(this.snapshotConfigurations, StreamOutput::writeWriteable);
out.writeEnum(this.operationMode);
this.slmStats.writeTo(out);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return Iterators.concat(
ChunkedToXContentHelper.xContentObjectFields(POLICIES_FIELD.getPreferredName(), this.snapshotConfigurations),
Iterators.single(
(builder, p) -> builder.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode)
.field(STATS_FIELD.getPreferredName(), this.slmStats)
)
);
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public int hashCode() {
return Objects.hash(this.snapshotConfigurations, this.operationMode, this.slmStats);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
SnapshotLifecycleMetadata other = (SnapshotLifecycleMetadata) obj;
return this.snapshotConfigurations.equals(other.snapshotConfigurations)
&& this.operationMode.equals(other.operationMode)
&& this.slmStats.equals(other.slmStats);
}
public static
|
SnapshotLifecycleMetadata
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/foreach/introduction/XTransactionalInterceptor.java
|
{
"start": 378,
"end": 833
}
|
class ____ implements MethodInterceptor<Object, Object> {
private final XSessionFactory sessionFactory;
XTransactionalInterceptor(BeanContext beanContext, Qualifier<XSessionFactory> qualifier) {
this.sessionFactory = beanContext.getBean(XSessionFactory.class, qualifier);
}
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
return sessionFactory.name();
}
}
|
XTransactionalInterceptor
|
java
|
apache__dubbo
|
dubbo-compatible/src/test/java/org/apache/dubbo/metadata/annotation/processing/builder/EnumTypeDefinitionBuilderTest.java
|
{
"start": 1474,
"end": 2542
}
|
class ____ extends AbstractAnnotationProcessingTest {
private EnumTypeDefinitionBuilder builder;
@Override
protected void addCompiledClasses(Set<Class<?>> classesToBeCompiled) {
classesToBeCompiled.add(Color.class);
}
@Override
protected void beforeEach() {
builder = new EnumTypeDefinitionBuilder();
}
@Test
void testAccept() {
TypeElement typeElement = getType(Color.class);
assertTrue(builder.accept(processingEnv, typeElement.asType()));
}
@Test
void testBuild() {
TypeElement typeElement = getType(Color.class);
Map<String, TypeDefinition> typeCache = new HashMap<>();
TypeDefinition typeDefinition = TypeDefinitionBuilder.build(processingEnv, typeElement, typeCache);
assertEquals(Color.class.getName(), typeDefinition.getType());
assertEquals(asList("RED", "YELLOW", "BLUE"), typeDefinition.getEnums());
// assertEquals(typeDefinition.getTypeBuilderName(), builder.getClass().getName());
}
}
|
EnumTypeDefinitionBuilderTest
|
java
|
dropwizard__dropwizard
|
dropwizard-validation/src/main/java/io/dropwizard/validation/DataSizeRange.java
|
{
"start": 1174,
"end": 2789
}
|
interface ____ {
/**
* The minimum value of the range the validated {@link io.dropwizard.util.DataSize} must be in.
*
* @return the minimum value
*/
@OverridesAttribute(constraint = MinDataSize.class, name = "value")
long min() default 0;
/**
* The maximum value of the range the validated {@link io.dropwizard.util.DataSize} must be in.
*
* @return the maximum value
*/
@OverridesAttribute(constraint = MaxDataSize.class, name = "value")
long max() default Long.MAX_VALUE;
/**
* The unit of the validated range.
*
* @return the {@link DataSizeUnit}
*/
@OverridesAttribute(constraint = MinDataSize.class, name = "unit")
@OverridesAttribute(constraint = MaxDataSize.class, name = "unit")
DataSizeUnit unit() default DataSizeUnit.BYTES;
/**
* The validation message for this constraint.
*
* @return the message
*/
String message() default "must be between {min} {unit} and {max} {unit}";
/**
* The groups the constraint belongs to.
*
* @return an array of classes representing the groups
*/
Class<?>[] groups() default { };
/**
* The payloads of this constraint.
*
* @return the array of payload classes
*/
@SuppressWarnings("UnusedDeclaration") Class<? extends Payload>[] payload() default { };
/**
* Defines several {@code @DataSizeRange} annotations on the same element.
*/
@Target({ METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE })
@Retention(RUNTIME)
@Documented
@
|
DataSizeRange
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/security/permission/AbstractHttpSecurityPolicyGrantingPermissionsTest.java
|
{
"start": 13054,
"end": 14944
}
|
enum ____ implements AuthenticatedUser {
ADMIN(AuthenticatedUserImpl::useAdminRole),
ROOT(AuthenticatedUserImpl::useRootRole),
USER(AuthenticatedUserImpl::useUserRole),
TEST(AuthenticatedUserImpl::useTestRole),
TEST2(AuthenticatedUserImpl::useTest2Role);
private final Runnable authenticate;
AuthenticatedUserImpl(Runnable authenticate) {
this.authenticate = authenticate;
}
public void authenticate() {
authenticate.run();
}
public String role() {
return this.toString().toLowerCase();
}
private static void useTestRole() {
TestIdentityController.resetRoles().add("test", "test", "test");
}
private static void useTest2Role() {
TestIdentityController.resetRoles().add("test2", "test2", "test2");
}
private static void useRootRole() {
TestIdentityController.resetRoles().add("root", "root", "root", "Admin1");
}
private static void useAdminRole() {
TestIdentityController.resetRoles().add("admin", "admin", "admin");
}
private static void useUserRole() {
TestIdentityController.resetRoles().add("user", "user", "user");
}
}
private static AuthenticatedUser withOtherPermissions(String permissionName) {
return new AuthenticatedUser() {
private static final String OTHER = "other";
@Override
public String role() {
return OTHER;
}
@Override
public void authenticate() {
// we grant additional permissions to the user directly
TestIdentityController.resetRoles().add(OTHER, OTHER, new StringPermission(permissionName));
}
};
}
}
|
AuthenticatedUserImpl
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ClassCanBeStaticTest.java
|
{
"start": 12646,
"end": 12745
}
|
class ____ {
static Runnable r =
new Runnable() {
|
A
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/pc/WhereTest.java
|
{
"start": 4304,
"end": 5392
}
|
class ____ {
@Id
private Long id;
@ManyToOne
private Client client;
@Column(name = "account_type")
@Enumerated(EnumType.STRING)
private AccountType type;
private Double amount;
private Double rate;
private boolean active;
//Getters and setters omitted for brevity
//end::pc-where-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Client getClient() {
return client;
}
public void setClient(Client client) {
this.client = client;
}
public AccountType getType() {
return type;
}
public void setType(AccountType type) {
this.type = type;
}
public Double getAmount() {
return amount;
}
public void setAmount(Double amount) {
this.amount = amount;
}
public Double getRate() {
return rate;
}
public void setRate(Double rate) {
this.rate = rate;
}
public boolean isActive() {
return active;
}
public void setActive(boolean active) {
this.active = active;
}
//tag::pc-where-example[]
}
//end::pc-where-example[]
}
|
Account
|
java
|
spring-projects__spring-boot
|
test-support/spring-boot-test-support/src/test/java/org/springframework/boot/testsupport/classpath/resources/OnSuperClassWithPackageResourcesTests.java
|
{
"start": 1021,
"end": 1568
}
|
class ____ extends WithPackageResourcesClass {
@Test
void whenWithPackageResourcesIsUsedOnASuperClassThenResourcesAreAvailable() throws IOException {
assertThat(new ClassPathResource("resource-1.txt").getContentAsString(StandardCharsets.UTF_8)).isEqualTo("one");
assertThat(new ClassPathResource("resource-2.txt").getContentAsString(StandardCharsets.UTF_8)).isEqualTo("two");
assertThat(new ClassPathResource("sub/resource-3.txt").getContentAsString(StandardCharsets.UTF_8))
.isEqualTo("three");
}
}
|
OnSuperClassWithPackageResourcesTests
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileNoOpProcessStrategy.java
|
{
"start": 861,
"end": 951
}
|
class ____<T> extends GenericFileProcessStrategySupport<T> {
}
|
GenericFileNoOpProcessStrategy
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/issues/Issue5686.java
|
{
"start": 450,
"end": 1542
}
|
class ____ {
@Test
public void test_column_comment() throws Exception {
String sql = "INSERT INTO TABLE_TEST_1(\n" + "\tDATE_ID,-- qianzhushi\n" + "\tCUS_NO -- houzhushi\n,\n" + "\tCUS_NAME\n" + ")\n" + "SELECT A.DATE_ID,\n" + "\tA.CUS_NO,\n"
+ "\tA.CUS_NAME\n" + "FROM TABLE_TEST_2 \n" + "WHERE COL1='1';";
System.out.println("原始的sql===" + sql);
MySqlInsertStatement sqlStatement = (MySqlInsertStatement) SQLUtils.parseSingleStatement(sql, DbType.mysql, true);
int ccc = 0;
for (SQLExpr column : sqlStatement.getColumns()) {
column.addAfterComment("-- comment注释" + (ccc++));
}
System.out.println(sqlStatement);
String newSql = sqlStatement.toString();
System.out.println("首次解析后生成的sql===" + newSql);
MySqlInsertStatement sqlStatementNew = (MySqlInsertStatement) SQLUtils.parseSingleStatement(newSql, DbType.mysql, true);
String newSql2 = sqlStatement.toString();
System.out.println("再次解析后生成的sql===" + newSql2);
assertEquals(newSql, newSql2);
}
}
|
Issue5686
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/validation/ValidationBindHandlerTests.java
|
{
"start": 12518,
"end": 13039
}
|
class ____ {
private @Nullable String name;
@Min(5)
private int age;
@NotNull
@SuppressWarnings("NullAway.Init")
private String address;
@Nullable String getName() {
return this.name;
}
void setName(@Nullable String name) {
this.name = name;
}
int getAge() {
return this.age;
}
void setAge(int age) {
this.age = age;
}
String getAddress() {
return this.address;
}
void setAddress(String address) {
this.address = address;
}
}
@Validated
static
|
ExampleNested
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/ByteBuddyCrossClassLoaderSerializationSupport.java
|
{
"start": 19358,
"end": 19442
}
|
interface ____ {
Object writeReplace();
}
}
|
CrossClassLoaderSerializableMock
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/ai/listener/NacosAgentCardEvent.java
|
{
"start": 801,
"end": 1264
}
|
class ____ implements NacosAiEvent {
private final String agentName;
private final AgentCardDetailInfo agentCard;
public NacosAgentCardEvent(AgentCardDetailInfo agentCard) {
this.agentName = agentCard.getName();
this.agentCard = agentCard;
}
public String getAgentName() {
return agentName;
}
public AgentCardDetailInfo getAgentCard() {
return agentCard;
}
}
|
NacosAgentCardEvent
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 1136864,
"end": 1141278
}
|
class ____ extends YamlDeserializerBase<ThrottleDefinition> {
public ThrottleDefinitionDeserializer() {
super(ThrottleDefinition.class);
}
@Override
protected ThrottleDefinition newInstance() {
return new ThrottleDefinition();
}
@Override
protected boolean setProperty(ThrottleDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "asyncDelayed": {
String val = asText(node);
target.setAsyncDelayed(val);
break;
}
case "callerRunsWhenRejected": {
String val = asText(node);
target.setCallerRunsWhenRejected(val);
break;
}
case "correlationExpression": {
org.apache.camel.model.ExpressionSubElementDefinition val = asType(node, org.apache.camel.model.ExpressionSubElementDefinition.class);
target.setCorrelationExpression(val);
break;
}
case "disabled": {
String val = asText(node);
target.setDisabled(val);
break;
}
case "executorService": {
String val = asText(node);
target.setExecutorService(val);
break;
}
case "expression": {
org.apache.camel.model.language.ExpressionDefinition val = asType(node, org.apache.camel.model.language.ExpressionDefinition.class);
target.setExpression(val);
break;
}
case "mode": {
String val = asText(node);
target.setMode(val);
break;
}
case "rejectExecution": {
String val = asText(node);
target.setRejectExecution(val);
break;
}
case "timePeriodMillis": {
String val = asText(node);
target.setTimePeriodMillis(val);
break;
}
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "note": {
String val = asText(node);
target.setNote(val);
break;
}
default: {
ExpressionDefinition ed = target.getExpressionType();
if (ed != null) {
throw new org.apache.camel.dsl.yaml.common.exception.DuplicateFieldException(node, propertyName, "as an expression");
}
ed = ExpressionDeserializers.constructExpressionType(propertyKey, node);
if (ed != null) {
target.setExpressionType(ed);
} else {
return false;
}
}
}
return true;
}
}
@YamlType(
nodes = "throwException",
types = org.apache.camel.model.ThrowExceptionDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Throw Exception",
description = "Throws an exception",
deprecated = false,
properties = {
@YamlProperty(name = "description", type = "string", description = "Sets the description of this node", displayName = "Description"),
@YamlProperty(name = "disabled", type = "boolean", defaultValue = "false", description = "Disables this EIP from the route.", displayName = "Disabled"),
@YamlProperty(name = "exceptionType", type = "string", description = "The
|
ThrottleDefinitionDeserializer
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/ConfigurationClassProcessingTests.java
|
{
"start": 15791,
"end": 16033
}
|
class ____ {
static TestBean testBean = new TestBean(ConfigWithBeanWithCustomName.class.getSimpleName());
@Bean("customName")
public TestBean methodName() {
return testBean;
}
}
@Configuration
static
|
ConfigWithBeanWithCustomName
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/resolution/TypedTest.java
|
{
"start": 2128,
"end": 2351
}
|
class ____ {
void myObserver(@Observes String event) {
EVENT.set(event);
}
}
@Typed(MyOtherBean.class) // -> bean types = [MyOtherBean.class, Object.class]
@Singleton
static
|
MyBean
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/enums/EnumSerializationTest.java
|
{
"start": 4891,
"end": 5022
}
|
enum ____, some users
* prefer calling toString() instead. So let's verify that
* this can be done using annotation for
|
names
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/QdrantEndpointBuilderFactory.java
|
{
"start": 15646,
"end": 15963
}
|
class ____ extends AbstractEndpointBuilder implements QdrantEndpointBuilder, AdvancedQdrantEndpointBuilder {
public QdrantEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new QdrantEndpointBuilderImpl(path);
}
}
|
QdrantEndpointBuilderImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/components/MappedSuperclassComponentWithCollectionTest.java
|
{
"start": 4740,
"end": 4921
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
protected Long id;
public Long getId() {
return id;
}
}
@MappedSuperclass
public static
|
AbstractEntity
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.