language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/features/devservices/testcontainers/atdevelopmenttime/test/MyContainersConfiguration.java | {
"start": 1028,
"end": 1178
} | class ____ {
@Bean
@ServiceConnection
public Neo4jContainer neo4jContainer() {
return new Neo4jContainer("neo4j:5");
}
}
| MyContainersConfiguration |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringFromVariableTest.java | {
"start": 1037,
"end": 1287
} | class ____ extends FromVariableTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/FromVariableTest.xml");
}
}
| SpringFromVariableTest |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/autoconfigure/DataSourceConfiguration.java | {
"start": 3083,
"end": 4530
} | class ____ {
@Bean
@ConditionalOnMissingBean(PropertiesJdbcConnectionDetails.class)
static TomcatJdbcConnectionDetailsBeanPostProcessor tomcatJdbcConnectionDetailsBeanPostProcessor(
ObjectProvider<JdbcConnectionDetails> connectionDetailsProvider) {
return new TomcatJdbcConnectionDetailsBeanPostProcessor(connectionDetailsProvider);
}
@Bean
@ConfigurationProperties("spring.datasource.tomcat")
org.apache.tomcat.jdbc.pool.DataSource dataSource(DataSourceProperties properties,
JdbcConnectionDetails connectionDetails) {
Class<? extends DataSource> dataSourceType = org.apache.tomcat.jdbc.pool.DataSource.class;
org.apache.tomcat.jdbc.pool.DataSource dataSource = createDataSource(connectionDetails, dataSourceType,
properties.getClassLoader());
String validationQuery;
DatabaseDriver databaseDriver = DatabaseDriver.fromJdbcUrl(connectionDetails.getJdbcUrl());
validationQuery = databaseDriver.getValidationQuery();
if (validationQuery != null) {
dataSource.setTestOnBorrow(true);
dataSource.setValidationQuery(validationQuery);
}
return dataSource;
}
}
/**
* Hikari DataSource configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(HikariDataSource.class)
@ConditionalOnMissingBean(DataSource.class)
@ConditionalOnProperty(name = "spring.datasource.type", havingValue = "com.zaxxer.hikari.HikariDataSource",
matchIfMissing = true)
static | Tomcat |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/security/RolesAllowedTestCase.java | {
"start": 568,
"end": 2383
} | class ____ extends AbstractRolesAllowedTestCase {
private static final String APP_PROPS = "" +
"quarkus.http.auth.basic=true\n" +
"quarkus.http.limits.max-body-size=100m\n" +
"quarkus.http.auth.policy.r1.roles-allowed=test\n" +
"quarkus.http.auth.policy.r2.roles-allowed=admin\n" +
"quarkus.http.auth.permission.roles1.paths=/roles1,/deny,/permit,/combined,/wildcard1/*,/wildcard2*\n" +
"quarkus.http.auth.permission.roles1.policy=r1\n" +
"quarkus.http.auth.permission.roles2.paths=/roles2,/deny,/permit/combined,/wildcard3/*\n" +
"quarkus.http.auth.permission.roles2.policy=r2\n" +
"quarkus.http.auth.permission.permit1.paths=/permit\n" +
"quarkus.http.auth.permission.permit1.policy=permit\n" +
"quarkus.http.auth.permission.deny1.paths=/deny,/combined\n" +
"quarkus.http.auth.permission.deny1.policy=deny\n";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(TestIdentityController.class, TestIdentityProvider.class, PathHandler.class)
.addAsResource(new StringAsset(APP_PROPS), "application.properties");
}
});
@Test
public void testUnauthenticatedPath() {
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/public")
.then()
.assertThat()
.statusCode(200)
.body(equalTo("test:/public"));
}
}
| RolesAllowedTestCase |
java | apache__camel | components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/deployments/KubernetesDeploymentsConsumer.java | {
"start": 3235,
"end": 6453
} | class ____ implements Runnable {
private Watch watch;
@Override
public void run() {
FilterWatchListDeletable<Deployment, DeploymentList, RollableScalableResource<Deployment>> w;
/*
Valid options are (according to how the client can be constructed):
- inAnyNamespace
- inAnyNamespace + withLabel
- inNamespace
- inNamespace + withLabel
- inNamespace + withName
*/
String namespace = getEndpoint().getKubernetesConfiguration().getNamespace();
String labelKey = getEndpoint().getKubernetesConfiguration().getLabelKey();
String labelValue = getEndpoint().getKubernetesConfiguration().getLabelValue();
String resourceName = getEndpoint().getKubernetesConfiguration().getResourceName();
if (ObjectHelper.isEmpty(namespace)) {
w = getEndpoint().getKubernetesClient().apps().deployments().inAnyNamespace();
if (ObjectHelper.isNotEmpty(labelKey) && ObjectHelper.isNotEmpty(labelValue)) {
w = w.withLabel(labelKey, labelValue);
}
} else {
final NonNamespaceOperation<Deployment, DeploymentList, RollableScalableResource<Deployment>> client
= getEndpoint().getKubernetesClient().apps().deployments().inNamespace(namespace);
w = client;
if (ObjectHelper.isNotEmpty(labelKey) && ObjectHelper.isNotEmpty(labelValue)) {
w = client.withLabel(labelKey, labelValue);
} else if (ObjectHelper.isNotEmpty(resourceName)) {
w = (FilterWatchListDeletable<Deployment, DeploymentList, RollableScalableResource<Deployment>>) client
.withName(resourceName);
}
}
watch = w.watch(new Watcher<>() {
@Override
public void eventReceived(Action action, Deployment resource) {
Exchange exchange = createExchange(false);
exchange.getIn().setBody(resource);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_EVENT_ACTION, action);
exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_EVENT_TIMESTAMP, System.currentTimeMillis());
try {
processor.process(exchange);
} catch (Exception e) {
getExceptionHandler().handleException("Error during processing", exchange, e);
} finally {
releaseExchange(exchange, false);
}
}
@Override
public void onClose(WatcherException cause) {
if (cause != null) {
LOG.error(cause.getMessage(), cause);
}
}
});
}
public Watch getWatch() {
return watch;
}
public void setWatch(Watch watch) {
this.watch = watch;
}
}
}
| DeploymentsConsumerTask |
java | apache__flink | flink-clients/src/test/java/org/apache/flink/client/cli/util/DummyClusterClientServiceLoader.java | {
"start": 1310,
"end": 1949
} | class ____<ClusterID> implements ClusterClientServiceLoader {
private final ClusterClient<ClusterID> clusterClient;
public DummyClusterClientServiceLoader(final ClusterClient<ClusterID> clusterClient) {
this.clusterClient = checkNotNull(clusterClient);
}
@Override
public <C> ClusterClientFactory<C> getClusterClientFactory(final Configuration configuration) {
checkNotNull(configuration);
return new DummyClusterClientFactory<>(clusterClient);
}
@Override
public Stream<String> getApplicationModeTargetNames() {
return Stream.empty();
}
}
| DummyClusterClientServiceLoader |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/jdbc/proxy/WrappedNClob.java | {
"start": 244,
"end": 445
} | interface ____ extends WrappedClob {
/**
* Retrieve the wrapped {@link java.sql.Blob} reference
*
* @return The wrapped {@link java.sql.Blob} reference
*/
NClob getWrappedNClob();
}
| WrappedNClob |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/repository/metadata/GraphConflictResolver.java | {
"start": 1078,
"end": 1773
} | interface ____ {
String ROLE = GraphConflictResolver.class.getName();
/**
* Cleanses the supplied graph by leaving only one directed versioned edge\
* between any two nodes, if multiple exists. Uses scope relationships, defined
* in <code>ArtifactScopeEnum</code>
*
* @param graph the "dirty" graph to be simplified via conflict resolution
* @param scope scope for which the graph should be resolved
*
* @return resulting "clean" graph for the specified scope
*
* @since 3.0
*/
MetadataGraph resolveConflicts(MetadataGraph graph, ArtifactScopeEnum scope)
throws GraphConflictResolutionException;
}
| GraphConflictResolver |
java | playframework__playframework | core/play/src/main/java/play/libs/F.java | {
"start": 9492,
"end": 9971
} | class ____<T> implements Supplier<T> {
private T value;
private final Supplier<T> instantiator;
private LazySupplier(Supplier<T> instantiator) {
this.instantiator = instantiator;
}
@Override
public T get() {
if (this.value == null) {
this.value = instantiator.get();
}
return this.value;
}
public static <T> Supplier<T> lazy(Supplier<T> creator) {
return new LazySupplier<>(creator);
}
}
}
| LazySupplier |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/IndexVersion.java | {
"start": 1068,
"end": 3594
} | class ____ the index format version
* from the running node version.
* <p>
* Each index version constant has an id number, which for versions prior to 8.9.0 is the same as the release version
* for backwards compatibility. In 8.9.0 this is changed to an incrementing number, disconnected from the release version.
* <p>
* Each version constant has a unique id string. This is not actually stored in the index, but is there to ensure
* each index version is only added to the source file once. This string needs to be unique (normally a UUID,
* but can be any other unique nonempty string).
* If two concurrent PRs add the same index version, the different unique ids cause a git conflict, ensuring the second PR to be merged
* must be updated with the next free version first. Without the unique id string, git will happily merge the two versions together,
* resulting in the same index version being used across multiple commits,
* causing problems when you try to upgrade between those two merged commits.
* <h2>Version compatibility</h2>
* The earliest compatible version is hardcoded in the {@link IndexVersions#MINIMUM_COMPATIBLE} field. Previously, this was dynamically
* calculated from the major/minor versions of {@link Version}, but {@code IndexVersion} does not have separate major/minor version
* numbers. So the minimum compatible version is hard-coded as the index version used by the first version of the previous major release.
* {@link IndexVersions#MINIMUM_COMPATIBLE} should be updated appropriately whenever a major release happens.
* <h2>Adding a new version</h2>
* A new index version should be added <em>every time</em> a change is made to the serialization protocol of one or more classes.
* Each index version should only be used in a single merged commit (apart from BwC versions copied from {@link Version}).
* <p>
* To add a new index version, add a new constant at the bottom of the list that is one greater than the current highest version,
* ensure it has a unique id, and update the {@link #current()} constant to point to the new version.
* <h2>Reverting an index version</h2>
* If you revert a commit with an index version change, you <em>must</em> ensure there is a <em>new</em> index version
* representing the reverted change. <em>Do not</em> let the index version go backwards, it must <em>always</em> be incremented.
*/
public record IndexVersion(int id, Version luceneVersion) implements VersionId<IndexVersion>, ToXContentFragment {
private static | separates |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java | {
"start": 71126,
"end": 71630
} | class ____ {
@Bean
public Repository<?> genericRepo() {
return new Repository<String>();
}
@Bean
public Repository<? extends Number> numberRepo() {
return new Repository<>();
}
@Bean
public Object repoConsumer(Repository<String> repo) {
return repo;
}
}
@Configuration
@ComponentScan(basePackages = "org.springframework.context.annotation.componentscan.simple")
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @ | WildcardWithGenericExtendsConfiguration |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/parser/deserializer/SqlDateDeserializer.java | {
"start": 474,
"end": 5242
} | class ____ extends AbstractDateDeserializer implements ObjectDeserializer {
public final static SqlDateDeserializer instance = new SqlDateDeserializer();
public final static SqlDateDeserializer instance_timestamp = new SqlDateDeserializer(true);
private boolean timestamp = false;
public SqlDateDeserializer() {
}
public SqlDateDeserializer(boolean timestmap) {
this.timestamp = true;
}
@SuppressWarnings("unchecked")
protected <T> T cast(DefaultJSONParser parser, Type clazz, Object fieldName, Object val) {
if (timestamp) {
return castTimestamp(parser, clazz, fieldName, val);
}
if (val == null) {
return null;
}
if (val instanceof java.util.Date) {
val = new java.sql.Date(((Date) val).getTime());
} else if (val instanceof BigDecimal) {
val = (T) new java.sql.Date(TypeUtils.longValue((BigDecimal) val));
} else if (val instanceof Number) {
val = (T) new java.sql.Date(((Number) val).longValue());
} else if (val instanceof String) {
String strVal = (String) val;
if (strVal.length() == 0) {
return null;
}
long longVal;
JSONScanner dateLexer = new JSONScanner(strVal);
try {
if (dateLexer.scanISO8601DateIfMatch()) {
longVal = dateLexer.getCalendar().getTimeInMillis();
} else {
DateFormat dateFormat = parser.getDateFormat();
try {
java.util.Date date = (java.util.Date) dateFormat.parse(strVal);
java.sql.Date sqlDate = new java.sql.Date(date.getTime());
return (T) sqlDate;
} catch (ParseException e) {
// skip
}
longVal = Long.parseLong(strVal);
}
} finally {
dateLexer.close();
}
return (T) new java.sql.Date(longVal);
} else {
throw new JSONException("parse error : " + val);
}
return (T) val;
}
@SuppressWarnings("unchecked")
protected <T> T castTimestamp(DefaultJSONParser parser, Type clazz, Object fieldName, Object val) {
if (val == null) {
return null;
}
if (val instanceof java.util.Date) {
return (T) new java.sql.Timestamp(((Date) val).getTime());
}
if (val instanceof BigDecimal) {
return (T) new java.sql.Timestamp(TypeUtils.longValue((BigDecimal) val));
}
if (val instanceof Number) {
return (T) new java.sql.Timestamp(((Number) val).longValue());
}
if (val instanceof String) {
String strVal = (String) val;
if (strVal.length() == 0) {
return null;
}
long longVal;
JSONScanner dateLexer = new JSONScanner(strVal);
try {
if (strVal.length() > 19
&& strVal.charAt(4) == '-'
&& strVal.charAt(7) == '-'
&& strVal.charAt(10) == ' '
&& strVal.charAt(13) == ':'
&& strVal.charAt(16) == ':'
&& strVal.charAt(19) == '.') {
String dateFomartPattern = parser.getDateFomartPattern();
if (dateFomartPattern.length() != strVal.length() && dateFomartPattern == JSON.DEFFAULT_DATE_FORMAT) {
return (T) java.sql.Timestamp.valueOf(strVal);
}
}
if (dateLexer.scanISO8601DateIfMatch(false)) {
longVal = dateLexer.getCalendar().getTimeInMillis();
} else {
DateFormat dateFormat = parser.getDateFormat();
try {
java.util.Date date = (java.util.Date) dateFormat.parse(strVal);
java.sql.Timestamp sqlDate = new java.sql.Timestamp(date.getTime());
return (T) sqlDate;
} catch (ParseException e) {
// skip
}
longVal = Long.parseLong(strVal);
}
} finally {
dateLexer.close();
}
return (T) new java.sql.Timestamp(longVal);
}
throw new JSONException("parse error");
}
public int getFastMatchToken() {
return JSONToken.LITERAL_INT;
}
}
| SqlDateDeserializer |
java | apache__camel | test-infra/camel-test-infra-cli/src/test/java/org/apache/camel/test/infra/cli/services/CliLocalContainerService.java | {
"start": 1418,
"end": 9033
} | class ____ implements CliService, ContainerService<CliBuiltContainer> {
public static final String CONTAINER_NAME = "camel-cli";
private static final Logger LOG = LoggerFactory.getLogger(CliLocalContainerService.class);
private final CliBuiltContainer container;
private String version;
private String forceToRunVersion;
private String mavenRepos;
public CliLocalContainerService() {
this(new CliBuiltContainer.CliBuiltContainerParams()
.setCamelRepo(System.getProperty(CliProperties.REPO, "apache/camel"))
.setCamelRef(System.getProperty(CliProperties.BRANCH, "main"))
.setCamelJBangVersion(System.getProperty(CliProperties.VERSION, "default"))
.setKeepContainerRunning(true)
.setDataFolder(System.getProperty(CliProperties.DATA_FOLDER))
.setSshPassword(System.getProperty(CliProperties.SSH_PASSWORD, "jbang"))
.setExtraHosts(getHostsMap())
.setTrustedCertPaths(getCertPaths())
.setLocalMavenRepo(System.getProperty(CliProperties.MVN_LOCAL_REPO))
.setDockerFile(System.getProperty(CliProperties.DOCKERFILE)),
System.getProperty(CliProperties.FORCE_RUN_VERSION, ""), System.getProperty(CliProperties.MVN_REPOS));
}
protected CliLocalContainerService(CliBuiltContainer.CliBuiltContainerParams containerParams,
String forceToRunVersion, String mavenRepos) {
container = new CliBuiltContainer(containerParams);
this.forceToRunVersion = forceToRunVersion;
this.mavenRepos = mavenRepos;
}
@Override
public void registerProperties() {
//do nothing
}
@Override
public void initialize() {
if (!container.isRunning()) {
LOG.info("Trying to start the {} container", CONTAINER_NAME);
container.start();
registerProperties();
LOG.info("{} instance running", CONTAINER_NAME);
if (ObjectHelper.isNotEmpty(forceToRunVersion)) {
LOG.info("force to use version {}", forceToRunVersion);
execute("version set " + forceToRunVersion);
}
if (ObjectHelper.isNotEmpty(mavenRepos)) {
LOG.info("set repositories {}", mavenRepos);
execute(String.format("config set repos=%s", mavenRepos));
}
if (LOG.isDebugEnabled()) {
LOG.debug("Camel JBang version {}", version());
}
} else {
LOG.debug("the container is already running");
}
}
@Override
public void shutdown() {
if (container.isRunning()) {
LOG.info("Stopping the {} container", CONTAINER_NAME);
container.stop();
} else {
LOG.debug("the container is already stopped");
}
}
@Override
public CliBuiltContainer getContainer() {
return container;
}
@Override
public String execute(String command) {
return executeGenericCommand(String.format("camel %s", command));
}
@Override
public String executeBackground(String command) {
final String pid = StringHelper.after(execute(command.concat(" --background")), "PID:").trim();
return org.apache.camel.support.ObjectHelper.isNumber(pid) ? pid : StringHelper.before(pid, " ");
}
@Override
public String executeGenericCommand(String command) {
try {
LOG.debug("Executing command: {}", command);
Container.ExecResult execResult = container.execInContainer("/bin/bash", "-c", command);
if (execResult.getExitCode() != 0) {
Assertions.fail(String.format("command %s failed with output %s and error %s", command, execResult.getStdout(),
execResult.getStderr()));
}
if (LOG.isDebugEnabled()) {
if (ObjectHelper.isNotEmpty(execResult.getStdout())) {
LOG.debug("result out {}", execResult.getStdout());
}
if (ObjectHelper.isNotEmpty(execResult.getStderr())) {
LOG.debug("result error {}", execResult.getStderr());
}
}
return execResult.getStdout();
} catch (Exception e) {
LOG.error("ERROR running generic command: {}", command, e);
throw new RuntimeException(e);
}
}
@Override
public void copyFileInternally(String source, String destination) {
try {
Assertions.assertEquals(0,
container.execInContainer(String.format("cp %s %s", source, destination).split(" ")).getExitCode(),
"copy file exit code");
} catch (IOException | InterruptedException e) {
Assertions.fail(String.format("unable to copy file %s to %s", source, destination), e);
throw new RuntimeException(e);
}
}
@Override
public String getMountPoint() {
return container.getMountPoint();
}
@Override
public String getContainerLogs() {
return container.getLogs();
}
@Override
public int getDevConsolePort() {
return container.getMappedPort(container.DEV_CONSOLE_PORT);
}
@Override
public Stream<String> listDirectory(String directoryPath) {
try {
Container.ExecResult result = container.execInContainer("ls", "-m", directoryPath);
Assertions.assertEquals(0, result.getExitCode(), "list folder exit code");
return Arrays.stream(result.getStdout().split(",")).map(String::trim);
} catch (IOException | InterruptedException e) {
Assertions.fail("unable to list " + directoryPath, e);
throw new RuntimeException(e);
}
}
@Override
public String id() {
return container.getContainerId().substring(0, 13);
}
@Override
public String version() {
return Optional.ofNullable(version)
.orElseGet(() -> {
final String versionSummary = execute("version");
if (versionSummary.contains("User configuration") && versionSummary.contains("camel-version = ")) {
version = StringHelper.between(versionSummary, "camel-version = ", "\n").trim();
}
if (version == null) {
version = StringHelper.between(versionSummary, "Camel JBang version:", "\n").trim();
}
return version;
});
}
@Override
public int getSshPort() {
return container.getMappedPort(container.SSH_PORT);
}
@Override
public String getSshPassword() {
return container.getSshPassword();
}
private static Map<String, String> getHostsMap() {
return Optional.ofNullable(System.getProperty(CliProperties.EXTRA_HOSTS))
.map(p -> p.split(","))
.stream().flatMap(strings -> Arrays.asList(strings).stream())
.map(s -> s.split("="))
.collect(Collectors.toMap(entry -> entry[0], entry -> entry[1]));
}
private static List<String> getCertPaths() {
return Optional.ofNullable(System.getProperty(CliProperties.TRUSTED_CERT_PATHS))
.map(p -> p.split(","))
.stream().flatMap(strings -> Arrays.asList(strings).stream())
.collect(Collectors.toList());
}
}
| CliLocalContainerService |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UngroupedOverloadsTest.java | {
"start": 18401,
"end": 18819
} | class ____ {
AboveLimit() {}
void foo() {}
void foo(int x) {}
void bar() {}
void baz() {}
}
""")
.doTest();
}
@Ignore // TODO(b/71818169): fix and re-enable
@Test
public void staticAndNonStatic() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| AboveLimit |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/annotation/MergedAnnotations.java | {
"start": 5884,
"end": 6582
} | class ____ of the annotation type
* to check
* @return {@code true} if the annotation is present
*/
boolean isPresent(String annotationType);
/**
* Determine if the specified annotation type is directly present.
* <p>Equivalent to calling {@code get(annotationType).isDirectlyPresent()}.
* @param annotationType the annotation type to check
* @return {@code true} if the annotation is directly present
*/
<A extends Annotation> boolean isDirectlyPresent(Class<A> annotationType);
/**
* Determine if the specified annotation type is directly present.
* <p>Equivalent to calling {@code get(annotationType).isDirectlyPresent()}.
* @param annotationType the fully qualified | name |
java | netty__netty | transport/src/test/java/io/netty/channel/AbstractChannelTest.java | {
"start": 1325,
"end": 6886
} | class ____ {
@Test
public void ensureInitialRegistrationFiresActive() throws Throwable {
EventLoop eventLoop = mock(EventLoop.class);
// This allows us to have a single-threaded test
when(eventLoop.inEventLoop()).thenReturn(true);
TestChannel channel = new TestChannel();
ChannelInboundHandler handler = mock(ChannelInboundHandler.class);
channel.pipeline().addLast(handler);
registerChannel(eventLoop, channel);
verify(handler).handlerAdded(any(ChannelHandlerContext.class));
verify(handler).channelRegistered(any(ChannelHandlerContext.class));
verify(handler).channelActive(any(ChannelHandlerContext.class));
}
@Test
public void ensureSubsequentRegistrationDoesNotFireActive() throws Throwable {
final EventLoop eventLoop = mock(EventLoop.class);
// This allows us to have a single-threaded test
when(eventLoop.inEventLoop()).thenReturn(true);
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) {
((Runnable) invocationOnMock.getArgument(0)).run();
return null;
}
}).when(eventLoop).execute(any(Runnable.class));
final TestChannel channel = new TestChannel();
ChannelInboundHandler handler = mock(ChannelInboundHandler.class);
channel.pipeline().addLast(handler);
registerChannel(eventLoop, channel);
channel.unsafe().deregister(new DefaultChannelPromise(channel));
registerChannel(eventLoop, channel);
verify(handler).handlerAdded(any(ChannelHandlerContext.class));
// Should register twice
verify(handler, times(2)) .channelRegistered(any(ChannelHandlerContext.class));
verify(handler).channelActive(any(ChannelHandlerContext.class));
verify(handler).channelUnregistered(any(ChannelHandlerContext.class));
}
@Test
public void ensureDefaultChannelId() {
TestChannel channel = new TestChannel();
final ChannelId channelId = channel.id();
assertTrue(channelId instanceof DefaultChannelId);
}
@Test
@EnabledForJreRange(min = JRE.JAVA_9)
void processIdWithProcessHandleJava9() {
ClassLoader loader = PlatformDependent.getClassLoader(DefaultChannelId.class);
int processHandlePid = DefaultChannelId.processHandlePid(loader);
assertTrue(processHandlePid != -1);
assertEquals(DefaultChannelId.jmxPid(loader), processHandlePid);
assertEquals(DefaultChannelId.defaultProcessId(), processHandlePid);
}
@Test
@EnabledForJreRange(max = JRE.JAVA_8)
void processIdWithJmxPrejava9() {
ClassLoader loader = PlatformDependent.getClassLoader(DefaultChannelId.class);
int processHandlePid = DefaultChannelId.processHandlePid(loader);
assertEquals(-1, processHandlePid);
assertEquals(DefaultChannelId.defaultProcessId(), DefaultChannelId.jmxPid(loader));
}
@Test
public void testClosedChannelExceptionCarryIOException() throws Exception {
final IOException ioException = new IOException();
final Channel channel = new TestChannel() {
private boolean open = true;
private boolean active;
@Override
protected AbstractUnsafe newUnsafe() {
return new AbstractUnsafe() {
@Override
public void connect(
SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) {
active = true;
promise.setSuccess();
}
};
}
@Override
protected void doClose() {
active = false;
open = false;
}
@Override
protected void doWrite(ChannelOutboundBuffer in) throws Exception {
throw ioException;
}
@Override
public boolean isOpen() {
return open;
}
@Override
public boolean isActive() {
return active;
}
};
EventLoop loop = new DefaultEventLoop();
try {
registerChannel(loop, channel);
channel.connect(new InetSocketAddress(NetUtil.LOCALHOST, 8888)).sync();
assertSame(ioException, channel.writeAndFlush("").await().cause());
assertClosedChannelException(channel.writeAndFlush(""), ioException);
assertClosedChannelException(channel.write(""), ioException);
assertClosedChannelException(channel.bind(new InetSocketAddress(NetUtil.LOCALHOST, 8888)), ioException);
} finally {
channel.close();
loop.shutdownGracefully();
}
}
private static void assertClosedChannelException(ChannelFuture future, IOException expected)
throws InterruptedException {
Throwable cause = future.await().cause();
assertTrue(cause instanceof ClosedChannelException);
assertSame(expected, cause.getCause());
}
private static void registerChannel(EventLoop eventLoop, Channel channel) throws Exception {
DefaultChannelPromise future = new DefaultChannelPromise(channel);
channel.unsafe().register(eventLoop, future);
future.sync(); // Cause any exceptions to be thrown
}
private static | AbstractChannelTest |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/creation/proxy/MethodHandleProxy.java | {
"start": 1445,
"end": 2662
} | class ____ implements ProxyRealMethod {
private final Constructor<MethodHandles.Lookup> constructor;
LegacyVersion() throws Throwable {
try {
Class.forName("java.lang.Module");
throw new RuntimeException("Must not be used when modules are available");
} catch (ClassNotFoundException ignored) {
}
constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class);
constructor.setAccessible(true);
}
@Override
public RealMethod resolve(Object proxy, Method method, Object[] args) {
try {
return new MethodHandleRealMethod(
constructor
.newInstance(method.getDeclaringClass())
.in(method.getDeclaringClass())
.unreflectSpecial(method, method.getDeclaringClass())
.bindTo(proxy),
args);
} catch (Throwable ignored) {
return RealMethod.IsIllegal.INSTANCE;
}
}
}
@SuppressSignatureCheck
private static | LegacyVersion |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/TripleConstants.java | {
"start": 894,
"end": 1886
} | class ____ {
public static final String DEFAULT_VERSION = "1.0.0";
public static final String SERIALIZATION_KEY = "serialization";
public static final String HESSIAN4 = "hessian4";
public static final String HESSIAN2 = "hessian2";
public static final String HEADER_BIN_SUFFIX = "-bin";
public static final AsciiString HTTPS_SCHEME = AsciiString.of("https");
public static final AsciiString HTTP_SCHEME = AsciiString.of("http");
public static final String REMOTE_ADDRESS_KEY = "tri.remote.address";
public static final String HANDLER_TYPE_KEY = "tri.handler.type";
public static final String HTTP_REQUEST_KEY = "tri.http.request";
public static final String HTTP_RESPONSE_KEY = "tri.http.response";
public static final String TRIPLE_HANDLER_TYPE_REST = "rest";
public static final String TRIPLE_HANDLER_TYPE_GRPC = "grpc";
public static final String UPGRADE_HEADER_KEY = "Upgrade";
private TripleConstants() {}
}
| TripleConstants |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/request/RequestListener.java | {
"start": 405,
"end": 498
} | class ____ monitoring the status of a request while images load.
*
* <p>All methods in this | for |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/context/HttpClientOptionsContextResolver.java | {
"start": 154,
"end": 615
} | class ____ implements ContextResolver<HttpClientOptions> {
private final HttpClientOptions component;
public HttpClientOptionsContextResolver(HttpClientOptions component) {
this.component = component;
}
@Override
public HttpClientOptions getContext(Class<?> wantedClass) {
if (wantedClass.equals(HttpClientOptions.class)) {
return component;
}
return null;
}
}
| HttpClientOptionsContextResolver |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRequestReplyFixedReplyToInEndpointTest.java | {
"start": 1621,
"end": 5077
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testJmsRequestReplyTempReplyTo() {
Exchange reply = template.request("activemq:queue:JmsRequestReplyFixedReplyToInEndpointTest",
exchange -> exchange.getIn().setBody("World"));
assertEquals("Hello World", reply.getMessage().getBody());
assertTrue(reply.getMessage().hasHeaders(), "Should have headers");
String replyTo = reply.getMessage().getHeader("JMSReplyTo", String.class);
assertTrue(replyTo.contains("ActiveMQTemporaryQueue"), "Should be a temp queue");
}
@Test
public void testJmsRequestReplyFixedReplyToInEndpoint() {
Exchange reply = template.request(
"activemq:queue:JmsRequestReplyFixedReplyToInEndpointTest?replyTo=JmsRequestReplyFixedReplyToInEndpointTest.reply",
exchange -> exchange.getIn().setBody("World"));
assertEquals("Hello World", reply.getMessage().getBody());
assertTrue(reply.getMessage().hasHeaders(), "Should have headers");
assertEquals("ActiveMQQueue[JmsRequestReplyFixedReplyToInEndpointTest.reply]",
reply.getMessage().getHeader("JMSReplyTo", String.class));
}
@Test
public void testJmsRequestReplyFixedReplyToInEndpointTwoMessages() {
Exchange reply = template.request(
"activemq:queue:JmsRequestReplyFixedReplyToInEndpointTest?replyTo=JmsRequestReplyFixedReplyToInEndpointTest.reply",
exchange -> exchange.getIn().setBody("World"));
assertEquals("Hello World", reply.getMessage().getBody());
assertTrue(reply.getMessage().hasHeaders(), "Should have headers");
assertEquals("ActiveMQQueue[JmsRequestReplyFixedReplyToInEndpointTest.reply]",
reply.getMessage().getHeader("JMSReplyTo", String.class));
reply = template.request(
"activemq:queue:JmsRequestReplyFixedReplyToInEndpointTest?replyTo=JmsRequestReplyFixedReplyToInEndpointTest.reply",
exchange -> exchange.getIn().setBody("Moon"));
assertEquals("Hello Moon", reply.getMessage().getBody());
assertTrue(reply.getMessage().hasHeaders(), "Should have headers");
assertEquals("ActiveMQQueue[JmsRequestReplyFixedReplyToInEndpointTest.reply]",
reply.getMessage().getHeader("JMSReplyTo", String.class));
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:queue:JmsRequestReplyFixedReplyToInEndpointTest")
.transform(body().prepend("Hello "));
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| JmsRequestReplyFixedReplyToInEndpointTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java | {
"start": 2904,
"end": 5111
} | class ____
implements DancingLinks.SolutionAcceptor<ColumnName> {
int size;
public SolutionPrinter(int size) {
this.size = size;
}
/**
* A debugging aid that just prints the raw information about the
* dancing link columns that were selected for each row.
* @param solution a list of list of column names
*/
void rawWrite(List solution) {
for (Iterator itr=solution.iterator(); itr.hasNext(); ) {
Iterator subitr = ((List) itr.next()).iterator();
while (subitr.hasNext()) {
System.out.print(subitr.next().toString() + " ");
}
System.out.println();
}
}
public void solution(List<List<ColumnName>> names) {
System.out.println(stringifySolution(size, names));
}
}
/**
* Set up a puzzle board to the given size.
* Boards may be asymmetric, but the squares will always be divided to be
* more cells wide than they are tall. For example, a 6x6 puzzle will make
* sub-squares that are 3x2 (3 cells wide, 2 cells tall). Clearly that means
* the board is made up of 2x3 sub-squares.
* @param stream The input stream to read the data from
*/
public Sudoku(InputStream stream) throws IOException {
BufferedReader file = new BufferedReader(
new InputStreamReader(stream, StandardCharsets.UTF_8));
String line = file.readLine();
List<int[]> result = new ArrayList<int[]>();
while (line != null) {
StringTokenizer tokenizer = new StringTokenizer(line);
int size = tokenizer.countTokens();
int[] col = new int[size];
int y = 0;
while(tokenizer.hasMoreElements()) {
String word = tokenizer.nextToken();
if ("?".equals(word)) {
col[y] = - 1;
} else {
col[y] = Integer.parseInt(word);
}
y += 1;
}
result.add(col);
line = file.readLine();
}
size = result.size();
board = result.toArray(new int [size][]);
squareYSize = (int) Math.sqrt(size);
squareXSize = size / squareYSize;
file.close();
}
/**
* A constraint that each number can appear just once in a column.
*/
static private | SolutionPrinter |
java | apache__avro | lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java | {
"start": 65932,
"end": 71221
} | enum ____ implements org.apache.thrift.TFieldIdEnum {
;
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception if it
* is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null)
throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(zip_args.class, metaDataMap);
}
public zip_args() {
}
/**
* Performs a deep copy on <i>other</i>.
*/
public zip_args(zip_args other) {
}
public zip_args deepCopy() {
return new zip_args(this);
}
@Override
public void clear() {
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
}
throw new java.lang.IllegalStateException();
}
/**
* Returns true if field corresponding to fieldID is set (has been assigned a
* value) and false otherwise
*/
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof zip_args)
return this.equals((zip_args) that);
return false;
}
public boolean equals(zip_args that) {
if (that == null)
return false;
if (this == that)
return true;
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
return hashCode;
}
@Override
public int compareTo(zip_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("zip_args(");
boolean first = true;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static | _Fields |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/protocol/MergedBlockMetaRequest.java | {
"start": 1119,
"end": 3123
} | class ____ extends AbstractMessage implements RequestMessage {
public final long requestId;
public final String appId;
public final int shuffleId;
public final int shuffleMergeId;
public final int reduceId;
public MergedBlockMetaRequest(
long requestId,
String appId,
int shuffleId,
int shuffleMergeId,
int reduceId) {
super(null, false);
this.requestId = requestId;
this.appId = appId;
this.shuffleId = shuffleId;
this.shuffleMergeId = shuffleMergeId;
this.reduceId = reduceId;
}
@Override
public Type type() {
return Type.MergedBlockMetaRequest;
}
@Override
public int encodedLength() {
return 8 + Encoders.Strings.encodedLength(appId) + 4 + 4 + 4;
}
@Override
public void encode(ByteBuf buf) {
buf.writeLong(requestId);
Encoders.Strings.encode(buf, appId);
buf.writeInt(shuffleId);
buf.writeInt(shuffleMergeId);
buf.writeInt(reduceId);
}
public static MergedBlockMetaRequest decode(ByteBuf buf) {
long requestId = buf.readLong();
String appId = Encoders.Strings.decode(buf);
int shuffleId = buf.readInt();
int shuffleMergeId = buf.readInt();
int reduceId = buf.readInt();
return new MergedBlockMetaRequest(requestId, appId, shuffleId, shuffleMergeId, reduceId);
}
@Override
public int hashCode() {
return Objects.hash(requestId, appId, shuffleId, shuffleMergeId, reduceId);
}
@Override
public boolean equals(Object other) {
if (other instanceof MergedBlockMetaRequest o) {
return requestId == o.requestId && shuffleId == o.shuffleId &&
shuffleMergeId == o.shuffleMergeId && reduceId == o.reduceId &&
Objects.equals(appId, o.appId);
}
return false;
}
@Override
public String toString() {
return "MergedBlockMetaRequest[requestId=" + requestId + ",appId=" + appId + ",shuffleId=" +
shuffleId + ",shuffleMergeId=" + shuffleMergeId + ",reduceId=" + reduceId + "]";
}
}
| MergedBlockMetaRequest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/LambdaExtractionTest.java | {
"start": 4902,
"end": 11768
} | class ____ {
private final String s = "mystring";
public MapFunction<Integer, String> getMapFunction() {
return (i) -> s;
}
}
@Test
void testLambdaWithMemberVariable() {
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(new MyClass().getMapFunction(), Types.INT);
assertThat(ti).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
@Test
void testLambdaWithLocalVariable() {
String s = "mystring";
final int k = 24;
int j = 26;
MapFunction<Integer, String> f = (i) -> s + k + j;
TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(f, Types.INT);
assertThat(ti).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
@Test
void testLambdaWithNonGenericResultType() {
MapFunction<Tuple2<Tuple1<Integer>, Boolean>, Boolean> f = (i) -> null;
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(f, NESTED_TUPLE_BOOLEAN_TYPE, null, true);
assertThat(ti).isInstanceOf(BasicTypeInfo.class);
assertThat(ti).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO);
}
@Test
void testMapLambda() {
MapFunction<Tuple2<Tuple1<Integer>, Boolean>, Tuple2<Tuple1<Integer>, String>> f =
(i) -> null;
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(f, NESTED_TUPLE_BOOLEAN_TYPE, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
@Test
void testFlatMapLambda() {
FlatMapFunction<Tuple2<Tuple1<Integer>, Boolean>, Tuple2<Tuple1<Integer>, String>> f =
(i, out) -> out.collect(null);
TypeInformation<?> ti =
TypeExtractor.getFlatMapReturnTypes(f, NESTED_TUPLE_BOOLEAN_TYPE, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
@Test
void testMapPartitionLambda() {
MapPartitionFunction<Tuple2<Tuple1<Integer>, Boolean>, Tuple2<Tuple1<Integer>, String>> f =
(i, o) -> {};
TypeInformation<?> ti =
TypeExtractor.getMapPartitionReturnTypes(f, NESTED_TUPLE_BOOLEAN_TYPE, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
@Test
void testJoinLambda() {
JoinFunction<
Tuple2<Tuple1<Integer>, Boolean>,
Tuple2<Tuple1<Integer>, Double>,
Tuple2<Tuple1<Integer>, String>>
f = (i1, i2) -> null;
TypeInformation<?> ti =
TypeExtractor.getJoinReturnTypes(
f, NESTED_TUPLE_BOOLEAN_TYPE, NESTED_TUPLE_DOUBLE_TYPE, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
@Test
void testCoGroupLambda() {
CoGroupFunction<
Tuple2<Tuple1<Integer>, Boolean>,
Tuple2<Tuple1<Integer>, Double>,
Tuple2<Tuple1<Integer>, String>>
f = (i1, i2, o) -> {};
TypeInformation<?> ti =
TypeExtractor.getCoGroupReturnTypes(
f, NESTED_TUPLE_BOOLEAN_TYPE, NESTED_TUPLE_DOUBLE_TYPE, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
@Test
void testKeySelectorLambda() {
KeySelector<Tuple2<Tuple1<Integer>, Boolean>, Tuple2<Tuple1<Integer>, String>> f =
(i) -> null;
TypeInformation<?> ti =
TypeExtractor.getKeySelectorTypes(f, NESTED_TUPLE_BOOLEAN_TYPE, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0).isTupleType()).isTrue();
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
@SuppressWarnings("rawtypes")
@Test
void testLambdaTypeErasure() {
MapFunction<Tuple1<Integer>, Tuple1> f = (i) -> null;
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
f, new TypeHint<Tuple1<Integer>>() {}.getTypeInfo(), null, true);
assertThat(ti).isInstanceOf(MissingTypeInfo.class);
}
@Test
void testLambdaWithoutTypeErasure() {
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
Tuple1::of, BasicTypeInfo.STRING_TYPE_INFO, null, true);
assertThat(ti).isInstanceOf(MissingTypeInfo.class);
}
@Test
void testPartitionerLambda() {
Partitioner<Tuple2<Integer, String>> partitioner =
(key, numPartitions) -> key.f1.length() % numPartitions;
final TypeInformation<?> ti = TypeExtractor.getPartitionerTypes(partitioner, null, true);
if (!(ti instanceof MissingTypeInfo)) {
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(0)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) ti).getTypeAt(1))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
}
private static | MyClass |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianPointValuesSource.java | {
"start": 2345,
"end": 3014
} | class ____ extends MultiPointValues<CartesianPoint> {
private final CartesianPoint point = new CartesianPoint();
public MultiCartesianPointValues(SortedNumericLongValues numericValues) {
super(numericValues);
}
@Override
public CartesianPoint nextValue() throws IOException {
return point.resetFromEncoded(numericValues.nextValue());
}
}
/**
* Return the internal representation of point doc values as a {@link SortedNumericDocValues}.
*/
public abstract SortedNumericLongValues sortedNumericLongValues(LeafReaderContext context);
public static | MultiCartesianPointValues |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardResponse.java | {
"start": 763,
"end": 2799
} | class ____ extends ActionResponse {
final List<Integer> locations;
final List<TermVectorsResponse> responses;
final List<MultiTermVectorsResponse.Failure> failures;
MultiTermVectorsShardResponse() {
locations = new ArrayList<>();
responses = new ArrayList<>();
failures = new ArrayList<>();
}
MultiTermVectorsShardResponse(StreamInput in) throws IOException {
int size = in.readVInt();
locations = new ArrayList<>(size);
responses = new ArrayList<>(size);
failures = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
locations.add(in.readVInt());
if (in.readBoolean()) {
responses.add(new TermVectorsResponse(in));
} else {
responses.add(null);
}
if (in.readBoolean()) {
failures.add(new MultiTermVectorsResponse.Failure(in));
} else {
failures.add(null);
}
}
}
public void add(int location, TermVectorsResponse response) {
locations.add(location);
responses.add(response);
failures.add(null);
}
public void add(int location, MultiTermVectorsResponse.Failure failure) {
locations.add(location);
responses.add(null);
failures.add(failure);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(locations.size());
for (int i = 0; i < locations.size(); i++) {
out.writeVInt(locations.get(i));
if (responses.get(i) == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
responses.get(i).writeTo(out);
}
if (failures.get(i) == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
failures.get(i).writeTo(out);
}
}
}
}
| MultiTermVectorsShardResponse |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/concurrent/DelegatingSecurityContextRunnableTests.java | {
"start": 1908,
"end": 7172
} | class ____ {
@Mock
private Runnable delegate;
@Mock
private SecurityContext securityContext;
@Mock
private Object callableResult;
private Runnable runnable;
private ExecutorService executor;
private SecurityContext originalSecurityContext;
@BeforeEach
public void setUp() {
this.originalSecurityContext = SecurityContextHolder.createEmptyContext();
this.executor = Executors.newFixedThreadPool(1);
}
private void givenDelegateRunWillAnswerWithCurrentSecurityContext() {
willAnswer((Answer<Object>) (invocation) -> {
assertThat(SecurityContextHolder.getContext()).isEqualTo(this.securityContext);
return null;
}).given(this.delegate).run();
}
private void givenDelegateRunWillAnswerWithCurrentSecurityContext(SecurityContextHolderStrategy strategy) {
willAnswer((Answer<Object>) (invocation) -> {
assertThat(strategy.getContext()).isEqualTo(this.securityContext);
return null;
}).given(this.delegate).run();
}
@AfterEach
public void tearDown() {
SecurityContextHolder.clearContext();
}
@Test
public void constructorNullDelegate() {
assertThatIllegalArgumentException().isThrownBy(() -> new DelegatingSecurityContextRunnable(null));
}
@Test
public void constructorNullDelegateNonNullSecurityContext() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new DelegatingSecurityContextRunnable(null, this.securityContext));
}
@Test
public void constructorNullDelegateAndSecurityContext() {
assertThatIllegalArgumentException().isThrownBy(() -> new DelegatingSecurityContextRunnable(null, null));
}
@Test
public void constructorNullSecurityContext() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new DelegatingSecurityContextRunnable(this.delegate, null));
}
@Test
public void call() throws Exception {
givenDelegateRunWillAnswerWithCurrentSecurityContext();
this.runnable = new DelegatingSecurityContextRunnable(this.delegate, this.securityContext);
assertWrapped(this.runnable);
}
@Test
public void callDefaultSecurityContext() throws Exception {
givenDelegateRunWillAnswerWithCurrentSecurityContext();
SecurityContextHolder.setContext(this.securityContext);
this.runnable = new DelegatingSecurityContextRunnable(this.delegate);
SecurityContextHolder.clearContext(); // ensure runnable is what sets up the
// SecurityContextHolder
assertWrapped(this.runnable);
}
@Test
public void callDefaultSecurityContextWithCustomSecurityContextHolderStrategy() throws Exception {
SecurityContextHolderStrategy securityContextHolderStrategy = spy(new MockSecurityContextHolderStrategy());
givenDelegateRunWillAnswerWithCurrentSecurityContext(securityContextHolderStrategy);
securityContextHolderStrategy.setContext(this.securityContext);
DelegatingSecurityContextRunnable runnable = new DelegatingSecurityContextRunnable(this.delegate);
runnable.setSecurityContextHolderStrategy(securityContextHolderStrategy);
this.runnable = runnable;
// ensure callable is what sets up the SecurityContextHolder
securityContextHolderStrategy.clearContext();
assertWrapped(this.runnable);
verify(securityContextHolderStrategy, atLeastOnce()).getContext();
}
// SEC-3031
@Test
public void callOnSameThread() throws Exception {
givenDelegateRunWillAnswerWithCurrentSecurityContext();
this.originalSecurityContext = this.securityContext;
SecurityContextHolder.setContext(this.originalSecurityContext);
this.executor = synchronousExecutor();
this.runnable = new DelegatingSecurityContextRunnable(this.delegate, this.securityContext);
assertWrapped(this.runnable);
}
@Test
public void createNullDelegate() {
assertThatIllegalArgumentException()
.isThrownBy(() -> DelegatingSecurityContextRunnable.create(null, this.securityContext));
}
@Test
public void createNullDelegateAndSecurityContext() {
assertThatIllegalArgumentException().isThrownBy(() -> DelegatingSecurityContextRunnable.create(null, null));
}
@Test
public void createNullSecurityContext() throws Exception {
givenDelegateRunWillAnswerWithCurrentSecurityContext();
SecurityContextHolder.setContext(this.securityContext);
this.runnable = DelegatingSecurityContextRunnable.create(this.delegate, null);
SecurityContextHolder.clearContext(); // ensure runnable is what sets up the
// SecurityContextHolder
assertWrapped(this.runnable);
}
@Test
public void create() throws Exception {
givenDelegateRunWillAnswerWithCurrentSecurityContext();
this.runnable = DelegatingSecurityContextRunnable.create(this.delegate, this.securityContext);
assertWrapped(this.runnable);
}
// SEC-2682
@Test
public void toStringDelegates() {
this.runnable = new DelegatingSecurityContextRunnable(this.delegate, this.securityContext);
assertThat(this.runnable.toString()).isEqualTo(this.delegate.toString());
}
private void assertWrapped(Runnable runnable) throws Exception {
Future<?> submit = this.executor.submit(runnable);
submit.get();
verify(this.delegate).run();
assertThat(SecurityContextHolder.getContext()).isEqualTo(this.originalSecurityContext);
}
private static ExecutorService synchronousExecutor() {
return new ExecutorServiceAdapter(new SyncTaskExecutor());
}
}
| DelegatingSecurityContextRunnableTests |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/write/BoundsChecksWithGeneratorTest.java | {
"start": 1183,
"end": 1313
} | interface ____ {
void call(JsonGenerator g, byte[] data, int offset, int len) throws Exception;
}
| ByteBackedOperation |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MysqlSelectTest_issue_5140.java | {
"start": 907,
"end": 1211
} | class ____ extends MysqlTest {
@Test
public void test_error_sql() {
String sql = "select col1, from table1";
ParserException ex = assertThrows(ParserException.class, () -> SQLUtils.parseSingleStatement(sql, DbType.mysql));
assertNotNull(ex);
}
}
| MysqlSelectTest_issue_5140 |
java | junit-team__junit5 | junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/descriptor/ExclusiveResourceCollector.java | {
"start": 1197,
"end": 2589
} | class ____ {
private static final ExclusiveResourceCollector NO_EXCLUSIVE_RESOURCES = new ExclusiveResourceCollector() {
@Override
Stream<ExclusiveResource> getAllExclusiveResources(
Function<ResourceLocksProvider, Set<ResourceLocksProvider.Lock>> providerToLocks) {
return Stream.empty();
}
@Override
Stream<ExclusiveResource> getStaticResourcesFor(ResourceLockTarget target) {
return Stream.empty();
}
@Override
Stream<ExclusiveResource> getDynamicResources(
Function<ResourceLocksProvider, Set<ResourceLocksProvider.Lock>> providerToLocks) {
return Stream.empty();
}
};
Stream<ExclusiveResource> getAllExclusiveResources(
Function<ResourceLocksProvider, Set<ResourceLocksProvider.Lock>> providerToLocks) {
return Stream.concat(getStaticResourcesFor(SELF), getDynamicResources(providerToLocks));
}
abstract Stream<ExclusiveResource> getStaticResourcesFor(ResourceLockTarget target);
abstract Stream<ExclusiveResource> getDynamicResources(
Function<ResourceLocksProvider, Set<ResourceLocksProvider.Lock>> providerToLocks);
static ExclusiveResourceCollector from(AnnotatedElement element) {
List<ResourceLock> annotations = findRepeatableAnnotations(element, ResourceLock.class);
return annotations.isEmpty() ? NO_EXCLUSIVE_RESOURCES : new DefaultExclusiveResourceCollector(annotations);
}
private static | ExclusiveResourceCollector |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/std/StdValueInstantiatorTest.java | {
"start": 4277,
"end": 4901
} | class ____ {
final int creatorType;
B2(BigInteger value) {
this.creatorType = 3;
}
B2(double value) {
this.creatorType = 4;
}
}
@Test
public void testJsonIntegerIntoDoubleConstructorThrows() throws Exception {
try {
MAPPER.readValue("5", D.class);
fail("Should not pass");
} catch (ValueInstantiationException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
assertEquals("boo", e.getCause().getMessage());
}
}
static final | B2 |
java | apache__flink | flink-metrics/flink-metrics-jmx/src/test/java/org/apache/flink/runtime/jobmanager/JMXJobManagerMetricTest.java | {
"start": 5830,
"end": 6247
} | class ____ extends AbstractInvokable {
private static final OneShotLatch LATCH = new OneShotLatch();
public BlockingInvokable(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
LATCH.await();
}
public static void unblock() {
LATCH.trigger();
}
}
}
| BlockingInvokable |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/odps/visitor/OdpsOutputVisitor.java | {
"start": 1407,
"end": 33186
} | class ____ extends HiveOutputVisitor implements OdpsASTVisitor {
private Set<String> builtInFunctions = new HashSet<String>();
{
builtInFunctions.add("IF");
builtInFunctions.add("COALESCE");
builtInFunctions.add("TO_DATE");
builtInFunctions.add("SUBSTR");
builtInFunctions.add("INSTR");
builtInFunctions.add("LENGTH");
builtInFunctions.add("SPLIT");
builtInFunctions.add("TOLOWER");
builtInFunctions.add("TOUPPER");
builtInFunctions.add("EXPLODE");
builtInFunctions.add("LEAST");
builtInFunctions.add("GREATEST");
groupItemSingleLine = true;
}
public OdpsOutputVisitor() {
this(new StringBuilder());
}
public OdpsOutputVisitor(boolean ucase) {
this(new StringBuilder());
config(VisitorFeature.OutputUCase, ucase);
}
public OdpsOutputVisitor(StringBuilder appender) {
super(appender, DbType.odps, Odps.DIALECT);
}
@Override
public boolean visit(SQLMergeStatement.WhenUpdate x) {
print0(ucase ? "WHEN MATCHED" : "when matched");
this.indentCount++;
SQLExpr where = x.getWhere();
if (where != null) {
this.indentCount++;
if (SQLBinaryOpExpr.isAnd(where)) {
println();
} else {
print(' ');
}
print0(ucase ? "AND " : "and ");
printExpr(where, parameterized);
this.indentCount--;
println();
} else {
print(' ');
}
println(ucase ? "THEN UPDATE" : "then update");
incrementIndent();
print(ucase ? "SET " : "set ");
printlnAndAccept(x.getItems(), ",");
decrementIndent();
this.indentCount--;
return false;
}
@Override
public boolean visit(OdpsCreateTableStatement x) {
return visit((SQLCreateTableStatement) x);
}
@Override
public boolean visit(SQLCreateTableStatement x) {
List<SQLCommentHint> headHints = x.getHeadHintsDirect();
if (headHints != null) {
for (SQLCommentHint hint : headHints) {
visit((SQLCommentHint) hint);
println();
}
}
if (x.hasBeforeComment()) {
printlnComments(x.getBeforeCommentsDirect());
}
print0(ucase ? "CREATE " : "create ");
printCreateTableFeatures(x);
if (x.isIfNotExists()) {
print0(ucase ? "TABLE IF NOT EXISTS " : "table if not exists ");
} else {
print0(ucase ? "TABLE " : "table ");
}
x.getName().accept(this);
printCreateTableLike(x);
printTableElementsWithComment(x);
printComment(x.getComment());
printPartitionedBy(x);
printClusteredBy(x);
printSortedBy(x.getSortedBy());
printIntoBuckets(x.getBuckets());
printIntoShards(x.getShards());
printRowFormat(x);
printStoredBy(x.getStoredBy());
printStoredAs(x);
if (x instanceof OdpsCreateTableStatement) {
printSerdeProperties((OdpsCreateTableStatement) x);
}
printLocation(x);
printTableOptions(x);
printLifeCycle(x.getLifeCycle());
if (x instanceof HiveCreateTableStatement) {
printUsing((HiveCreateTableStatement) x);
}
printSelectAs(x, true);
return false;
}
protected void printPartitionedBy(SQLCreateTableStatement x) {
super.printPartitionedBy(x);
if (x instanceof OdpsCreateTableStatement) {
SQLAliasedExpr autoPartitionedBy = ((OdpsCreateTableStatement) x).getAutoPartitionedBy();
if (autoPartitionedBy != null) {
println();
print0(ucase ? "AUTO PARTITIONED BY (" : "auto partitioned by (");
autoPartitionedBy.accept(this);
print(")");
}
}
}
protected void printSerdeProperties(SQLCreateTableStatement x) {
if (x instanceof OdpsCreateTableStatement) {
List<SQLExpr> withSerdeproperties = ((OdpsCreateTableStatement) x).getWithSerdeproperties();
if (!withSerdeproperties.isEmpty()) {
println();
print0(ucase ? "WITH SERDEPROPERTIES (" : "with serdeproperties (");
printAndAccept(withSerdeproperties, ", ");
print(')');
}
}
}
public boolean visit(SQLDecimalExpr x) {
BigDecimal value = x.getValue();
print(value.toString());
print("BD");
return false;
}
// protected void printSelectList(List<SQLSelectItem> selectList) {
// this.indentCount++;
// for (int i = 0, size = selectList.size(); i < size; ++i) {
// SQLSelectItem selectItem = selectList.get(i);
//
// if (i != 0) {
// SQLSelectItem preSelectItem = selectList.get(i - 1);
// if (preSelectItem.hasAfterComment()) {
// print(' ');
// printlnComment(preSelectItem.getAfterCommentsDirect());
// }
//
// println();
// print0(", ");
// }
//
// selectItem.accept(this);
//
// if (i == selectList.size() - 1 && selectItem.hasAfterComment()) {
// print(' ');
// printlnComments(selectItem.getAfterCommentsDirect());
// }
// }
// this.indentCount--;
// }
@Override
public boolean visit(SQLSubqueryTableSource x) {
print('(');
this.indentCount++;
println();
x.getSelect().accept(this);
this.indentCount--;
println();
print(')');
SQLPivot pivot = x.getPivot();
if (pivot != null) {
println();
pivot.accept(this);
}
SQLUnpivot unpivot = x.getUnpivot();
if (unpivot != null) {
println();
unpivot.accept(this);
}
if (x.getAlias() != null) {
print(' ');
print0(x.getAlias());
}
return false;
}
// Prints a join. CROSS JOIN against an UNNEST source is rewritten into the
// ODPS/Hive LATERAL VIEW EXPLODE / POSEXPLODE form; every other join uses
// the conventional "<left> <JOIN TYPE> <right> ON ..." layout.
@Override
public boolean visit(SQLJoinTableSource x) {
    SQLTableSource left = x.getLeft();
    left.accept(this);

    SQLTableSource right = x.getRight();
    JoinType joinType = x.getJoinType();
    if (joinType == JoinType.CROSS_JOIN
            && right instanceof SQLUnnestTableSource
    ) {
        SQLUnnestTableSource unnest = (SQLUnnestTableSource) right;
        // WITH ORDINALITY maps to POSEXPLODE (adds a position column).
        if (unnest.isOrdinality()) {
            print0(ucase ? " LATERAL VIEW POSEXPLODE(" : " lateral view posexplode(");
        } else {
            print0(ucase ? " LATERAL VIEW EXPLODE(" : " lateral view explode(");
        }
        List<SQLExpr> items = unnest.getItems();
        printAndAccept(items, ", ");
        print(')');

        if (right.getAlias() != null) {
            print(' ');
            print0(right.getAlias());
        }

        final List<SQLName> columns = unnest.getColumns();
        if (columns != null && columns.size() > 0) {
            print0(ucase ? " AS " : " as ");
            printAndAccept(unnest.getColumns(), ", ");
        }
        return false;
    }

    // COMMA joins stay on the same line; keyword joins start a new line.
    if (joinType == JoinType.COMMA) {
        print(',');
    } else {
        println();
        printJoinType(joinType);
    }
    // LATERAL VIEW sources emit their own leading whitespace.
    if (!(right instanceof SQLLateralViewTableSource)) {
        print(' ');
    }
    right.accept(this);

    if (x.getCondition() != null) {
        println();
        print0(ucase ? "ON " : "on ");
        this.indentCount++;
        x.getCondition().accept(this);
        this.indentCount--;
        if (x.getAfterCommentsDirect() != null) {
            printAfterComments(x.getAfterCommentsDirect());
            println();
        }
    }

    if (!x.getUsing().isEmpty()) {
        print0(ucase ? " USING (" : " using (");
        printAndAccept(x.getUsing(), ", ");
        print(')');
    }

    if (x.getAlias() != null) {
        print0(ucase ? " AS " : " as ");
        print0(x.getAlias());
    }

    // User-defined-join clause, if present.
    SQLJoinTableSource.UDJ udj = x.getUdj();
    if (udj != null) {
        println();
        udj.accept(this);
    }
    return false;
}
/**
 * Prints a UDTF select item: the expression followed by an "AS (a, b, ...)"
 * alias list. When there are more than five aliases, the list is wrapped
 * one per line at an extra indent level.
 */
@Override
public boolean visit(OdpsUDTFSQLSelectItem x) {
    x.getExpr().accept(this);
    println();
    print0(ucase ? "AS (" : "as (");

    int aliasSize = x.getAliasList().size();
    boolean wrap = aliasSize > 5;
    if (wrap) {
        this.indentCount++;
        println();
    }
    for (int i = 0; i < aliasSize; ++i) {
        if (i != 0) {
            if (wrap) {
                println(",");
            } else {
                print0(", ");
            }
        }
        print0(x.getAliasList().get(i));
    }
    if (wrap) {
        this.indentCount--;
        println();
    }
    print(')');
    return false;
}
/**
 * Prints SHOW STATISTIC with an optional table source and an optional
 * PARTITION (...) specification.
 */
@Override
public boolean visit(SQLShowStatisticStmt x) {
    print0(ucase ? "SHOW STATISTIC" : "show statistic");

    SQLExprTableSource table = x.getTableSource();
    if (table != null) {
        print(' ');
        table.accept(this);
    }

    List<SQLAssignItem> parts = x.getPartitions();
    if (!parts.isEmpty()) {
        print0(ucase ? " PARTITION (" : " partition (");
        printAndAccept(parts, ", ");
        print(')');
    }
    return false;
}
// Prints SET LABEL <label> TO USER <user> | TABLE <table>[(cols)].
// User and table targets are mutually exclusive; nothing follows "TO"
// if neither is set.
@Override
public boolean visit(OdpsSetLabelStatement x) {
    print0(ucase ? "SET LABEL " : "set label ");
    print0(x.getLabel());
    print0(ucase ? " TO " : " to ");
    if (x.getUser() != null) {
        print0(ucase ? "USER " : "user ");
        x.getUser().accept(this);
    } else if (x.getTable() != null) {
        print0(ucase ? "TABLE " : "table ");
        x.getTable().accept(this);
        if (x.getColumns().size() > 0) {
            print('(');
            printAndAccept(x.getColumns(), ", ");
            print(')');
        }
    }
    return false;
}
/**
 * Prints an ODPS SELECT query block. The clause order matters and mirrors
 * the ODPS grammar: select list, FROM, WHERE, GROUP BY, WINDOW, QUALIFY,
 * ORDER BY, ZORDER BY, DISTRIBUTE BY, SORT BY, CLUSTER BY, LIMIT.
 */
@Override
public boolean visit(OdpsSelectQueryBlock x) {
    if (isPrettyFormat() && x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }

    print0(ucase ? "SELECT " : "select ");

    List<SQLCommentHint> hints = x.getHintsDirect();
    if (hints != null) {
        printAndAccept(hints, " ");
        print(' ');
    }

    // Set quantifier: ALL / DISTINCT / UNIQUE (anything else prints nothing).
    if (SQLSetQuantifier.ALL == x.getDistionOption()) {
        print0(ucase ? "ALL " : "all ");
    } else if (SQLSetQuantifier.DISTINCT == x.getDistionOption()) {
        print0(ucase ? "DISTINCT " : "distinct ");
    } else if (SQLSetQuantifier.UNIQUE == x.getDistionOption()) {
        print0(ucase ? "UNIQUE " : "unique ");
    }

    printSelectList(x.getSelectList());

    printFrom(x);
    printWhere(x);
    printGroupBy(x);
    printWindow(x);
    printQualify(x);
    printOrderBy(x);

    SQLZOrderBy zorderBy = x.getZOrderBy();
    if (zorderBy != null) {
        println();
        zorderBy.accept(this);
    }

    final List<SQLSelectOrderByItem> distributeBy = x.getDistributeByDirect();
    if (distributeBy.size() > 0) {
        println();
        print0(ucase ? "DISTRIBUTE BY " : "distribute by ");
        printAndAccept(distributeBy, ", ");
    }

    final List<SQLSelectOrderByItem> sortBy = x.getSortByDirect();
    if (!sortBy.isEmpty()) {
        println();
        print0(ucase ? "SORT BY " : "sort by ");
        printAndAccept(sortBy, ", ");
    }

    final List<SQLSelectOrderByItem> clusterBy = x.getClusterByDirect();
    if (clusterBy.size() > 0) {
        println();
        print0(ucase ? "CLUSTER BY " : "cluster by ");
        printAndAccept(clusterBy, ", ");
    }

    printLimit(x);
    return false;
}
/**
 * Prints ORDER BY with each item after the first on its own line, one
 * indent level deeper. Prints nothing when the item list is empty.
 */
public boolean visit(SQLOrderBy x) {
    int size = x.getItems().size();
    if (size == 0) {
        return false;
    }
    print0(ucase ? "ORDER BY " : "order by ");
    this.indentCount++;
    for (int i = 0; i < size; ++i) {
        if (i != 0) {
            println(", ");
        }
        x.getItems().get(i).accept(this);
    }
    this.indentCount--;
    return false;
}
/**
 * Prints ZORDER BY exactly like ORDER BY but with the ZORDER keyword:
 * items after the first on their own lines, one indent level deeper.
 */
public boolean visit(SQLZOrderBy x) {
    int size = x.getItems().size();
    if (size == 0) {
        return false;
    }
    print0(ucase ? "ZORDER BY " : "zorder by ");
    this.indentCount++;
    for (int i = 0; i < size; ++i) {
        if (i != 0) {
            println(", ");
        }
        x.getItems().get(i).accept(this);
    }
    this.indentCount--;
    return false;
}
// Prints ADD STATISTIC <table> <clause>.
@Override
public boolean visit(OdpsAddStatisticStatement x) {
    print0(ucase ? "ADD STATISTIC " : "add statistic ");
    x.getTable().accept(this);
    print(' ');
    x.getStatisticClause().accept(this);
    return false;
}

// Prints REMOVE STATISTIC <table> <clause>.
@Override
public boolean visit(OdpsRemoveStatisticStatement x) {
    print0(ucase ? "REMOVE STATISTIC " : "remove statistic ");
    x.getTable().accept(this);
    print(' ');
    x.getStatisticClause().accept(this);
    return false;
}

// The statistic-clause visitors below print the clause keyword followed by
// its target expression/column, as used in ADD/REMOVE STATISTIC above.

@Override
public boolean visit(OdpsStatisticClause.TableCount x) {
    print0(ucase ? "TABLE_COUNT" : "table_count");
    return false;
}

@Override
public boolean visit(OdpsStatisticClause.ExpressionCondition x) {
    print0(ucase ? "EXPRESSION_CONDITION " : "expression_condition ");
    x.getExpr().accept(this);
    return false;
}

@Override
public boolean visit(OdpsStatisticClause.NullValue x) {
    print0(ucase ? "NULL_VALUE " : "null_value ");
    x.getColumn().accept(this);
    return false;
}

@Override
public boolean visit(OdpsStatisticClause.DistinctValue x) {
    print0(ucase ? "DISTINCT_VALUE " : "distinct_value ");
    x.getColumn().accept(this);
    return false;
}

@Override
public boolean visit(OdpsStatisticClause.ColumnSum x) {
    print0(ucase ? "COLUMN_SUM " : "column_sum ");
    x.getColumn().accept(this);
    return false;
}

@Override
public boolean visit(OdpsStatisticClause.ColumnMax x) {
    print0(ucase ? "COLUMN_MAX " : "column_max ");
    x.getColumn().accept(this);
    return false;
}

@Override
public boolean visit(OdpsStatisticClause.ColumnMin x) {
    print0(ucase ? "COLUMN_MIN " : "column_min ");
    x.getColumn().accept(this);
    return false;
}
// Prints READ <table> [(cols)] [PARTITION (...)] [<row-count>], preceded
// by any head hints and leading comments attached to the statement.
@Override
public boolean visit(OdpsReadStatement x) {
    List<SQLCommentHint> headHints = x.getHeadHintsDirect();
    if (headHints != null) {
        for (SQLCommentHint hint : headHints) {
            visit((SQLCommentHint) hint);
            println();
        }
    }
    if (x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }

    print0(ucase ? "READ " : "read ");
    x.getTable().accept(this);

    if (!x.getColumns().isEmpty()) {
        print0(" (");
        printAndAccept(x.getColumns(), ", ");
        print(')');
    }

    if (!x.getPartition().isEmpty()) {
        print0(ucase ? " PARTITION (" : " partition (");
        printAndAccept(x.getPartition(), ", ");
        print(')');
    }

    // Optional trailing row-count limit.
    if (x.getRowCount() != null) {
        print(' ');
        x.getRowCount().accept(this);
    }
    return false;
}
// Prints the owner of a method invocation. ODPS separates a plain owner
// expression from the method name with ':' (e.g. res:func()), but uses '.'
// after a method call or property-access chain.
protected void printMethodOwner(SQLExpr owner) {
    owner.accept(this);
    if (owner instanceof SQLMethodInvokeExpr || owner instanceof SQLPropertyExpr) {
        print('.');
    } else {
        print(':');
    }
}
/**
 * Prints the textual form of a join type. LEFT/RIGHT/FULL outer joins are
 * always spelled with the explicit OUTER keyword; any other join type
 * falls back to its predefined upper/lower-case name.
 */
protected void printJoinType(JoinType joinType) {
    // JoinType is an enum constant set: identity comparison is the
    // idiomatic form and, unlike equals(), cannot NPE on a null argument.
    if (joinType == JoinType.LEFT_OUTER_JOIN) {
        print0(ucase ? "LEFT OUTER JOIN" : "left outer join");
    } else if (joinType == JoinType.RIGHT_OUTER_JOIN) {
        print0(ucase ? "RIGHT OUTER JOIN" : "right outer join");
    } else if (joinType == JoinType.FULL_OUTER_JOIN) {
        print0(ucase ? "FULL OUTER JOIN" : "full outer join");
    } else {
        print0(ucase ? joinType.name : joinType.nameLCase);
    }
}
/**
 * Prints a data type name. Parameterized/complex names (containing '<')
 * and the special name "Object" are emitted verbatim; simple names follow
 * the visitor's case setting. Type arguments are appended as "(a, b)".
 */
public boolean visit(SQLDataType x) {
    String name = x.getName();
    boolean verbatim = name.indexOf('<') != -1 || name.equals("Object");
    print0(verbatim ? name : (ucase ? name.toUpperCase() : name.toLowerCase()));

    if (!x.getArguments().isEmpty()) {
        print('(');
        printAndAccept(x.getArguments(), ", ");
        print(')');
    }
    return false;
}
/**
 * Prints a function name, applying the visitor's case setting only to
 * known built-in functions; user-defined names keep their original casing.
 * Null names are ignored.
 */
protected void printFunctionName(String name) {
    if (name == null) {
        return;
    }
    String upper = name.toUpperCase();
    // Only built-ins honor the ucase flag; everything else passes through.
    print0(builtInFunctions.contains(upper) && ucase ? upper : name);
}
// Prints either SHOW LABEL GRANTS [ON TABLE <t>] [FOR USER <u>] (label
// security) or SHOW GRANTS [FOR <u>] [ON TYPE <t>], preceded by any head
// hints and leading comments. Note the clause order differs between forms.
@Override
public boolean visit(OdpsShowGrantsStmt x) {
    List<SQLCommentHint> headHints = x.getHeadHintsDirect();
    if (headHints != null) {
        for (SQLCommentHint hint : headHints) {
            visit((SQLCommentHint) hint);
            println();
        }
    }
    if (x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }

    if (x.isLabel()) {
        print0(ucase ? "SHOW LABEL GRANTS" : "show label grants");
        if (x.getObjectType() != null) {
            print0(ucase ? " ON TABLE " : " on table ");
            x.getObjectType().accept(this);
        }
        if (x.getUser() != null) {
            print0(ucase ? " FOR USER " : " for user ");
            x.getUser().accept(this);
        }
    } else {
        print0(ucase ? "SHOW GRANTS" : "show grants");
        if (x.getUser() != null) {
            print0(ucase ? " FOR " : " for ");
            x.getUser().accept(this);
        }
        if (x.getObjectType() != null) {
            print0(ucase ? " ON TYPE " : " on type ");
            x.getObjectType().accept(this);
        }
    }
    return false;
}
/** Prints LIST followed by the listed object, when one is set. */
@Override
public boolean visit(OdpsListStmt x) {
    print0(ucase ? "LIST " : "list ");
    if (null != x.getObject()) {
        x.getObject().accept(this);
    }
    return false;
}
// Prints GRANT: either "GRANT [SUPER] LABEL <n>" (label security) or a
// privilege list, then optional ON [<type>] <resource>[(cols)],
// TO [<subject-type>] <users> and WITH EXP <expire> clauses.
@Override
public boolean visit(OdpsGrantStmt x) {
    print0(ucase ? "GRANT " : "grant ");
    if (x.isSuper()) {
        print0(ucase ? "SUPER " : "super ");
    }

    if (x.isLabel()) {
        print0(ucase ? "LABEL " : "label ");
        x.getLabel().accept(this);
    } else {
        printAndAccept(x.getPrivileges(), ", ");
    }

    if (x.getResource() != null) {
        print0(ucase ? " ON " : " on ");

        if (x.getResourceType() != null) {
            print0(ucase ? x.getResourceType().name() : x.getResourceType().name().toLowerCase());
            print(' ');
        }

        x.getResource().accept(this);

        if (x.getColumns().size() > 0) {
            print('(');
            printAndAccept(x.getColumns(), ", ");
            print(')');
        }
    }

    if (x.getUsers() != null) {
        print0(ucase ? " TO " : " to ");
        // Subject type (e.g. USER/ROLE) is printed verbatim, never lowered.
        if (x.getSubjectType() != null) {
            print0(x.getSubjectType().name());
            print(' ');
        }
        printAndAccept(x.getUsers(), ",");
    }

    if (x.getExpire() != null) {
        print0(ucase ? " WITH EXP " : " with exp ");
        x.getExpire().accept(this);
    }
    return false;
}
/**
 * Prints a character literal as a single-quoted string, escaping
 * backslash, single quote, NUL and newline. A null text value prints the
 * NULL keyword instead. Comments attached before/after the node are
 * emitted around the literal; the parameterized flag is not used here.
 */
public boolean visit(SQLCharExpr x, boolean parameterized) {
    if (x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }

    String text = x.getText();
    if (text == null) {
        print0(ucase ? "NULL" : "null");
    } else {
        StringBuilder out = new StringBuilder(text.length() + 2);
        out.append('\'');
        for (char ch : text.toCharArray()) {
            switch (ch) {
                case '\\':
                    out.append("\\\\");
                    break;
                case '\'':
                    out.append("\\'");
                    break;
                case '\0':
                    out.append("\\0");
                    break;
                case '\n':
                    out.append("\\n");
                    break;
                default:
                    out.append(ch);
                    break;
            }
        }
        out.append('\'');
        print0(out.toString());
    }

    if (x.hasAfterComment()) {
        printAfterComments(x.getAfterCommentsDirect());
    }
    return false;
}
// ODPS renames a column with "CHANGE COLUMN <old> RENAME TO <new>".
@Override
public boolean visit(SQLAlterTableRenameColumn x) {
    print0(ucase ? "CHANGE COLUMN " : "change column ");
    x.getColumn().accept(this);
    print0(ucase ? " RENAME TO " : " rename to ");
    x.getTo().accept(this);
    return false;
}
// Prints ADD TABLE <t> [PARTITION (...)] [COMMENT <expr>] [-f]
// [TO PACKAGE <pkg> [WITH PRIVILEGES <p1, ...>]]. "-f" is the force flag.
@Override
public boolean visit(OdpsAddTableStatement x) {
    print0(ucase ? "ADD TABLE " : "add table ");
    x.getTable().accept(this);

    List<SQLAssignItem> partitions = x.getPartitions();
    if (partitions.size() > 0) {
        print0(ucase ? " PARTITION (" : " partition (");
        printAndAccept(partitions, ", ");
        print(')');
    }

    SQLExpr comment = x.getComment();
    if (comment != null) {
        print0(ucase ? " COMMENT " : " comment ");
        printExpr(comment);
    }

    if (x.isForce()) {
        print0(" -f");
    }

    SQLName toPackage = x.getToPackage();
    if (toPackage != null) {
        print0(ucase ? " TO PACKAGE " : " to package ");
        printExpr(toPackage);

        List<SQLPrivilegeItem> privileges = x.getPrivileges();
        if (!privileges.isEmpty()) {
            print0(ucase ? " WITH PRIVILEGES " : " with privileges ");
            printAndAccept(privileges, ", ");
        }
    }
    return false;
}
// Prints ADD JAR|ARCHIVE|PY|FILE <path> [COMMENT <expr>] [-f]. The file
// path is emitted verbatim; "-f" is the force flag.
@Override
public boolean visit(OdpsAddFileStatement x) {
    print0(ucase ? "ADD " : "add ");

    OdpsAddFileStatement.FileType type = x.getType();
    switch (type) {
        case JAR:
            print0(ucase ? "JAR " : "jar ");
            break;
        case ARCHIVE:
            print0(ucase ? "ARCHIVE " : "archive ");
            break;
        case PY:
            print0(ucase ? "PY " : "py ");
            break;
        default:
            // Any other type (including FILE) prints as FILE.
            print0(ucase ? "FILE " : "file ");
            break;
    }

    print0(x.getFile());

    SQLExpr comment = x.getComment();
    if (comment != null) {
        print0(ucase ? " COMMENT " : " comment ");
        printExpr(comment);
    }

    if (x.isForce()) {
        print0(" -f");
    }
    return false;
}
// Prints ADD USER <user>.
@Override
public boolean visit(OdpsAddUserStatement x) {
    print0(ucase ? "ADD USER " : "add user ");
    printExpr(x.getUser());
    return false;
}

// Prints REMOVE USER <user>.
@Override
public boolean visit(OdpsRemoveUserStatement x) {
    print0(ucase ? "REMOVE USER " : "remove user ");
    printExpr(x.getUser());
    return false;
}

// Prints the WHOAMI command (no operands).
@Override
public boolean visit(SQLWhoamiStatement x) {
    print0(ucase ? "WHOAMI" : "whoami");
    return false;
}

// ALTER TABLE sub-clause: SET CHANGELOGS <value>.
@Override
public boolean visit(OdpsAlterTableSetChangeLogs x) {
    print0(ucase ? "SET CHANGELOGS " : "set changelogs ");
    x.getValue().accept(this);
    return false;
}

// ALTER TABLE sub-clause: CHANGEOWNER TO <value>.
@Override
public boolean visit(OdpsAlterTableChangeOwner x) {
    print0(ucase ? "CHANGEOWNER TO " : "changeowner to ");
    x.getValue().accept(this);
    return false;
}

// ALTER TABLE sub-clause: SET FILEFORMAT <value>.
@Override
public boolean visit(OdpsAlterTableSetFileFormat x) {
    print0(ucase ? "SET FILEFORMAT " : "set fileformat ");
    x.getValue().accept(this);
    return false;
}
/**
 * Prints COUNT <table> [PARTITION (...)], preceded by any head hints and
 * leading comments attached to the statement.
 */
@Override
public boolean visit(OdpsCountStatement x) {
    List<SQLCommentHint> headHints = x.getHeadHintsDirect();
    if (headHints != null) {
        for (SQLCommentHint hint : headHints) {
            visit((SQLCommentHint) hint);
            println();
        }
    }
    if (x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }

    print0(ucase ? "COUNT " : "count ");
    x.getTable().accept(this);

    List<SQLAssignItem> partitions = x.getPartitions();
    if (!partitions.isEmpty()) {
        print0(ucase ? " PARTITION (" : " partition (");
        printAndAccept(partitions, ", ");
        print(')');
    }
    return false;
}
// Prints the ODPS script-mode assignment form: "<variable> := <statement>".
@Override
public boolean visit(OdpsQueryAliasStatement x) {
    print0(x.getVariant());
    print0(" := ");
    x.getStatement().accept(this);
    return false;
}
// Prints a TRANSFORM(...) expression with its optional USING, RESOURCES,
// AS (output columns) and ROW FORMAT DELIMITED clauses, each on a new line.
@Override
public boolean visit(OdpsTransformExpr x) {
    print0(ucase ? "TRANSFORM(" : "transform(");
    printAndAccept(x.getInputColumns(), ", ");
    print(')');

    SQLExpr using = x.getUsing();
    if (using != null) {
        println();
        print0(ucase ? "USING " : "using ");
        using.accept(this);
    }

    List<SQLExpr> resources = x.getResources();
    if (!resources.isEmpty()) {
        println();
        print0(ucase ? "RESOURCES " : "resources ");
        printAndAccept(resources, ", ");
    }

    List<SQLColumnDefinition> outputColumns = x.getOutputColumns();
    if (!outputColumns.isEmpty()) {
        println();
        print0(ucase ? "AS (" : "as (");
        printAndAccept(outputColumns, ", ");
        print(')');
    }

    SQLExternalRecordFormat inputRowFormat = x.getInputRowFormat();
    if (inputRowFormat != null) {
        println();
        print0(ucase ? "ROW FORMAT DELIMITED" : "row format delimited");
        inputRowFormat.accept(this);
    }
    return false;
}
// Prints EXSTORE <table> PARTITION (<items>). The PARTITION clause is
// always emitted; assumes the partition list is non-empty — TODO confirm
// the parser guarantees this.
@Override
public boolean visit(OdpsExstoreStatement x) {
    print0(ucase ? "EXSTORE " : "exstore ");
    x.getTable().accept(this);
    print0(ucase ? " PARTITION (" : " partition (");
    printAndAccept(x.getPartitions(), ", ");
    print(')');
    return false;
}
// Prints the ODPS variant of LOAD [OVERWRITE] INTO TABLE: the target table
// and partitions first, then LOCATION, STORED BY, ROW FORMAT SERDE,
// SERDEPROPERTIES, STORED AS and USING clauses, each on its own line.
@Override
public boolean visit(HiveLoadDataStatement x) {
    print0(ucase ? "LOAD " : "load ");
    if (x.isOverwrite()) {
        print0(ucase ? "OVERWRITE " : "overwrite ");
    }
    print0(ucase ? "INTO TABLE " : "into table ");
    x.getInto().accept(this);

    if (x.getPartition().size() > 0) {
        print0(ucase ? " PARTITION (" : " partition (");
        printAndAccept(x.getPartition(), ", ");
        print(')');
    }

    println();
    // NOTE(review): the inpath expression is printed under a LOCATION
    // keyword here (ODPS form), not Hive's INPATH — confirm intentional.
    print0(ucase ? "LOCATION " : "location ");
    x.getInpath().accept(this);

    printStoredBy(x.getStoredBy());

    SQLExpr rowFormat = x.getRowFormat();
    if (rowFormat != null) {
        println();
        print0(ucase ? "ROW FORMAT SERDE " : "row format serde ");
        rowFormat.accept(this);
    }

    printSerdeProperties(x.getSerdeProperties());

    SQLExpr storedAs = x.getStoredAs();
    if (storedAs != null) {
        println();
        print0(ucase ? "STORED AS " : "stored as ");
        printExpr(storedAs);
    }

    SQLExpr using = x.getUsing();
    if (using != null) {
        println();
        print0(ucase ? "USING " : "using ");
        printExpr(using);
    }
    return false;
}
// Prints "new <method-call>": ODPS allows Java-style constructor
// expressions; the invocation itself is rendered by the superclass.
// Note "new " is emitted in lower case regardless of the ucase setting.
@Override
public boolean visit(OdpsNewExpr x) {
    print0("new ");
    super.visit((SQLMethodInvokeExpr) x);
    return false;
}

// Prints INSTALL PACKAGE <name>.
public boolean visit(OdpsInstallPackageStatement x) {
    print0(ucase ? "INSTALL PACKAGE " : "install package ");
    printExpr(x.getPackageName());
    return false;
}

// PAI and COPY commands carry a raw argument string, printed unchanged.
public boolean visit(OdpsPAIStmt x) {
    print0(ucase ? "PAI " : "pai ");
    print0(x.getArguments());
    return false;
}

public boolean visit(OdpsCopyStmt x) {
    print0(ucase ? "COPY " : "copy ");
    print0(x.getArguments());
    return false;
}

// ODPS spells CURRENT_TIMESTAMP-style expressions as calls with empty parens.
@Override
public boolean visit(SQLCurrentTimeExpr x) {
    final SQLCurrentTimeExpr.Type type = x.getType();
    print(ucase ? type.name : type.nameLCase);
    print0("()");
    return false;
}
/**
 * Prints DROP [PHYSICAL] SCHEMA [IF EXISTS] <db> [RESTRICT] [CASCADE].
 */
@Override
public boolean visit(SQLDropDatabaseStatement x) {
    print0(ucase ? "DROP " : "drop ");
    if (x.isPhysical()) {
        print0(ucase ? "PHYSICAL " : "physical ");
    }
    print0(ucase ? "SCHEMA " : "schema ");
    if (x.isIfExists()) {
        print0(ucase ? "IF EXISTS " : "if exists ");
    }
    x.getDatabase().accept(this);

    // getRestrict() is a nullable Boolean; only an explicit TRUE prints.
    if (Boolean.TRUE.equals(x.getRestrict())) {
        print0(ucase ? " RESTRICT" : " restrict");
    }
    if (x.isCascade()) {
        print0(ucase ? " CASCADE" : " cascade");
    }
    return false;
}
/**
 * Prints a method invocation's argument list. NAMED_STRUCT and MAP calls
 * with a long, even-sized argument list are wrapped one (key, value) pair
 * per line; all other calls fall back to the default superclass layout.
 */
protected void printMethodParameters(SQLMethodInvokeExpr x) {
    List<SQLExpr> arguments = x.getArguments();

    // Only wrap key/value-style invocations: more than ten arguments, an
    // even count, and a NAMED_STRUCT or MAP method name.
    boolean wrapPairs = arguments.size() > 10
            && (arguments.size() % 2) == 0
            && (x.methodNameHashCode64() == FnvHash.Constants.NAMED_STRUCT
                || x.methodNameHashCode64() == FnvHash.Constants.MAP);

    if (!wrapPairs) {
        super.printMethodParameters(x);
        return;
    }

    // Single characters must go through print(char): print0 takes a
    // String, so print0('(') would not resolve against the base visitor.
    print('(');
    incrementIndent();
    println();
    for (int i = 0, size = arguments.size(); i < size; i += 2) {
        if (i != 0) {
            print(',');
            println();
        }
        printExpr(arguments.get(i));
        this.print0(", ");
        printExpr(arguments.get(i + 1));
    }
    decrementIndent();
    println();
    print(')');
}
// ODPS MERGE ... WHEN NOT MATCHED THEN INSERT uses " *" for the full row.
@Override
public void printMergeInsertRow() {
    print(" *");
}
}
| OdpsOutputVisitor |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/core/CodeEmitter.java | {
"start": 904,
"end": 18928
} | class ____ extends LocalVariablesSorter {
// SPRING PATCH BEGIN
private static final Signature BOOLEAN_VALUE_OF =
TypeUtils.parseSignature("Boolean valueOf(boolean)");
private static final Signature BYTE_VALUE_OF =
TypeUtils.parseSignature("Byte valueOf(byte)");
private static final Signature CHARACTER_VALUE_OF =
TypeUtils.parseSignature("Character valueOf(char)");
private static final Signature DOUBLE_VALUE_OF =
TypeUtils.parseSignature("Double valueOf(double)");
private static final Signature FLOAT_VALUE_OF =
TypeUtils.parseSignature("Float valueOf(float)");
private static final Signature INTEGER_VALUE_OF =
TypeUtils.parseSignature("Integer valueOf(int)");
private static final Signature LONG_VALUE_OF =
TypeUtils.parseSignature("Long valueOf(long)");
private static final Signature SHORT_VALUE_OF =
TypeUtils.parseSignature("Short valueOf(short)");
// SPRING PATCH END
private static final Signature BOOLEAN_VALUE =
TypeUtils.parseSignature("boolean booleanValue()");
private static final Signature CHAR_VALUE =
TypeUtils.parseSignature("char charValue()");
private static final Signature DOUBLE_VALUE =
TypeUtils.parseSignature("double doubleValue()");
private static final Signature FLOAT_VALUE =
TypeUtils.parseSignature("float floatValue()");
private static final Signature INT_VALUE =
TypeUtils.parseSignature("int intValue()");
private static final Signature LONG_VALUE =
TypeUtils.parseSignature("long longValue()");
private static final Signature CSTRUCT_NULL =
TypeUtils.parseConstructor("");
private static final Signature CSTRUCT_STRING =
TypeUtils.parseConstructor("String");
public static final int ADD = Constants.IADD;
public static final int MUL = Constants.IMUL;
public static final int XOR = Constants.IXOR;
public static final int USHR = Constants.IUSHR;
public static final int SUB = Constants.ISUB;
public static final int DIV = Constants.IDIV;
public static final int NEG = Constants.INEG;
public static final int REM = Constants.IREM;
public static final int AND = Constants.IAND;
public static final int OR = Constants.IOR;
public static final int GT = Constants.IFGT;
public static final int LT = Constants.IFLT;
public static final int GE = Constants.IFGE;
public static final int LE = Constants.IFLE;
public static final int NE = Constants.IFNE;
public static final int EQ = Constants.IFEQ;
private ClassEmitter ce;
private State state;
CodeEmitter(ClassEmitter ce, MethodVisitor mv, int access, Signature sig, Type[] exceptionTypes) {
super(access, sig.getDescriptor(), mv);
this.ce = ce;
state = new State(ce.getClassInfo(), access, sig, exceptionTypes);
}
public CodeEmitter(CodeEmitter wrap) {
super(wrap);
this.ce = wrap.ce;
this.state = wrap.state;
}
public boolean isStaticHook() {
return false;
}
public Signature getSignature() {
return state.sig;
}
public Type getReturnType() {
return state.sig.getReturnType();
}
public MethodInfo getMethodInfo() {
return state;
}
public ClassEmitter getClassEmitter() {
return ce;
}
public void end_method() {
visitMaxs(0, 0);
}
public Block begin_block() {
return new Block(this);
}
/**
 * Registers an exception handler covering the given block. The handler's
 * entry point is the current code position (a fresh label marked here).
 *
 * @throws IllegalStateException if the block was never ended
 */
public void catch_exception(Block block, Type exception) {
    if (block.getEnd() == null) {
        throw new IllegalStateException("end of block is unset");
    }
    mv.visitTryCatchBlock(block.getStart(),
            block.getEnd(),
            mark(),
            exception.getInternalName());
}
public void goTo(Label label) {
mv.visitJumpInsn(Constants.GOTO, label);
}
public void ifnull(Label label) {
mv.visitJumpInsn(Constants.IFNULL, label);
}
public void ifnonnull(Label label) {
mv.visitJumpInsn(Constants.IFNONNULL, label);
}
public void if_jump(int mode, Label label) {
mv.visitJumpInsn(mode, label);
}
public void if_icmp(int mode, Label label) {
if_cmp(Type.INT_TYPE, mode, label);
}
/**
 * Emits a comparison-and-branch on the top two stack values of the given
 * type, jumping to {@code label} when the comparison in {@code mode}
 * holds. long/double/float first emit a CMP instruction and branch on its
 * int result; object/array types support only EQ/NE (IF_ACMPxx); int-like
 * types use the fused IF_ICMPxx opcodes. GE and LE are synthesized by
 * swapping the operands and branching with LT/GT — note the deliberate
 * switch fall-throughs below.
 */
public void if_cmp(Type type, int mode, Label label) {
    int intOp = -1;
    int jumpmode = mode;
    // Remap GE/LE for the CMP-then-branch path (operands get swapped).
    switch (mode) {
        case GE:
            jumpmode = LT;
            break;
        case LE:
            jumpmode = GT;
            break;
    }
    switch (type.getSort()) {
        case Type.LONG:
            mv.visitInsn(Constants.LCMP);
            break;
        case Type.DOUBLE:
            mv.visitInsn(Constants.DCMPG);
            break;
        case Type.FLOAT:
            mv.visitInsn(Constants.FCMPG);
            break;
        case Type.ARRAY:
        case Type.OBJECT:
            // References only support identity equality comparisons.
            switch (mode) {
                case EQ:
                    mv.visitJumpInsn(Constants.IF_ACMPEQ, label);
                    return;
                case NE:
                    mv.visitJumpInsn(Constants.IF_ACMPNE, label);
                    return;
            }
            throw new IllegalArgumentException("Bad comparison for type " + type);
        default:
            switch (mode) {
                case EQ:
                    intOp = Constants.IF_ICMPEQ;
                    break;
                case NE:
                    intOp = Constants.IF_ICMPNE;
                    break;
                case GE:
                    swap(); /* fall through */
                case LT:
                    intOp = Constants.IF_ICMPLT;
                    break;
                case LE:
                    swap(); /* fall through */
                case GT:
                    intOp = Constants.IF_ICMPGT;
                    break;
            }
            mv.visitJumpInsn(intOp, label);
            return;
    }
    // long/double/float reach here: branch on the int result of the CMP.
    if_jump(jumpmode, label);
}
public void pop() {
mv.visitInsn(Constants.POP);
}
public void pop2() {
mv.visitInsn(Constants.POP2);
}
public void dup() {
mv.visitInsn(Constants.DUP);
}
public void dup2() {
mv.visitInsn(Constants.DUP2);
}
public void dup_x1() {
mv.visitInsn(Constants.DUP_X1);
}
public void dup_x2() {
mv.visitInsn(Constants.DUP_X2);
}
public void dup2_x1() {
mv.visitInsn(Constants.DUP2_X1);
}
public void dup2_x2() {
mv.visitInsn(Constants.DUP2_X2);
}
public void swap() {
mv.visitInsn(Constants.SWAP);
}
public void aconst_null() {
mv.visitInsn(Constants.ACONST_NULL);
}
/**
 * Swaps the top two stack values, handling category-2 (long/double)
 * operands that occupy two slots and cannot use the plain SWAP opcode.
 *
 * @param prev the type of the value beneath the top of the stack
 * @param type the type of the value on top of the stack
 */
public void swap(Type prev, Type type) {
    if (type.getSize() == 1) {
        if (prev.getSize() == 1) {
            swap(); // same as dup_x1(), pop();
        }
        else {
            // 1-slot value over a 2-slot value.
            dup_x2();
            pop();
        }
    }
    else {
        if (prev.getSize() == 1) {
            // 2-slot value over a 1-slot value.
            dup2_x1();
            pop2();
        }
        else {
            // 2-slot value over a 2-slot value.
            dup2_x2();
            pop2();
        }
    }
}
public void monitorenter() {
mv.visitInsn(Constants.MONITORENTER);
}
public void monitorexit() {
mv.visitInsn(Constants.MONITOREXIT);
}
public void math(int op, Type type) {
mv.visitInsn(type.getOpcode(op));
}
public void array_load(Type type) {
mv.visitInsn(type.getOpcode(Constants.IALOAD));
}
public void array_store(Type type) {
mv.visitInsn(type.getOpcode(Constants.IASTORE));
}
/**
 * Casts the value on top of the stack from one primitive numeric type to
 * another, emitting the corresponding conversion opcode. Conversions with
 * no direct JVM opcode (e.g. double to byte) are routed through an
 * intermediate conversion to int. A same-type cast emits nothing.
 */
public void cast_numeric(Type from, Type to) {
    if (from != to) {
        if (from == Type.DOUBLE_TYPE) {
            if (to == Type.FLOAT_TYPE) {
                mv.visitInsn(Constants.D2F);
            }
            else if (to == Type.LONG_TYPE) {
                mv.visitInsn(Constants.D2L);
            }
            else {
                // No direct opcode: go via int.
                mv.visitInsn(Constants.D2I);
                cast_numeric(Type.INT_TYPE, to);
            }
        }
        else if (from == Type.FLOAT_TYPE) {
            if (to == Type.DOUBLE_TYPE) {
                mv.visitInsn(Constants.F2D);
            }
            else if (to == Type.LONG_TYPE) {
                mv.visitInsn(Constants.F2L);
            }
            else {
                mv.visitInsn(Constants.F2I);
                cast_numeric(Type.INT_TYPE, to);
            }
        }
        else if (from == Type.LONG_TYPE) {
            if (to == Type.DOUBLE_TYPE) {
                mv.visitInsn(Constants.L2D);
            }
            else if (to == Type.FLOAT_TYPE) {
                mv.visitInsn(Constants.L2F);
            }
            else {
                mv.visitInsn(Constants.L2I);
                cast_numeric(Type.INT_TYPE, to);
            }
        }
        else {
            // from is int-like: byte/char/short widen implicitly, so only
            // the narrowing/widening target opcodes are needed.
            if (to == Type.BYTE_TYPE) {
                mv.visitInsn(Constants.I2B);
            }
            else if (to == Type.CHAR_TYPE) {
                mv.visitInsn(Constants.I2C);
            }
            else if (to == Type.DOUBLE_TYPE) {
                mv.visitInsn(Constants.I2D);
            }
            else if (to == Type.FLOAT_TYPE) {
                mv.visitInsn(Constants.I2F);
            }
            else if (to == Type.LONG_TYPE) {
                mv.visitInsn(Constants.I2L);
            }
            else if (to == Type.SHORT_TYPE) {
                mv.visitInsn(Constants.I2S);
            }
        }
    }
}
/**
 * Pushes an int constant using the smallest encoding: ICONST_M1..ICONST_5
 * for -1..5, BIPUSH for the byte range, SIPUSH for the short range, and a
 * constant-pool LDC otherwise.
 */
public void push(int i) {
    if (i < -1) {
        mv.visitLdcInsn(i);
    }
    else if (i <= 5) {
        // Covers -1..5 (i >= -1 is guaranteed by the branch above).
        mv.visitInsn(TypeUtils.ICONST(i));
    }
    else if (i <= Byte.MAX_VALUE) {
        mv.visitIntInsn(Constants.BIPUSH, i);
    }
    else if (i <= Short.MAX_VALUE) {
        mv.visitIntInsn(Constants.SIPUSH, i);
    }
    else {
        mv.visitLdcInsn(i);
    }
}
public void push(long value) {
if (value == 0L || value == 1L) {
mv.visitInsn(TypeUtils.LCONST(value));
}
else {
mv.visitLdcInsn(value);
}
}
public void push(float value) {
if (value == 0f || value == 1f || value == 2f) {
mv.visitInsn(TypeUtils.FCONST(value));
}
else {
mv.visitLdcInsn(value);
}
}
public void push(double value) {
if (value == 0d || value == 1d) {
mv.visitInsn(TypeUtils.DCONST(value));
}
else {
mv.visitLdcInsn(value);
}
}
public void push(String value) {
mv.visitLdcInsn(value);
}
public void newarray() {
newarray(Constants.TYPE_OBJECT);
}
public void newarray(Type type) {
if (TypeUtils.isPrimitive(type)) {
mv.visitIntInsn(Constants.NEWARRAY, TypeUtils.NEWARRAY(type));
}
else {
emit_type(Constants.ANEWARRAY, type);
}
}
public void arraylength() {
mv.visitInsn(Constants.ARRAYLENGTH);
}
public void load_this() {
if (TypeUtils.isStatic(state.access)) {
throw new IllegalStateException("no 'this' pointer within static method");
}
mv.visitVarInsn(Constants.ALOAD, 0);
}
/**
 * Pushes all of the arguments of the current method onto the stack.
 */
public void load_args() {
    load_args(0, state.argumentTypes.length);
}

/**
 * Pushes the specified argument of the current method onto the stack.
 *
 * @param index the zero-based index into the argument list
 */
public void load_arg(int index) {
    load_local(state.argumentTypes[index],
            state.localOffset + skipArgs(index));
}

// Pushes count consecutive arguments starting at fromArg.
// zero-based (see load_this)
public void load_args(int fromArg, int count) {
    int pos = state.localOffset + skipArgs(fromArg);
    for (int i = 0; i < count; i++) {
        Type t = state.argumentTypes[fromArg + i];
        load_local(t, pos);
        pos += t.getSize();
    }
}

// Returns the local-slot offset of argument numArgs, accounting for
// category-2 (long/double) arguments that occupy two slots each.
private int skipArgs(int numArgs) {
    int amount = 0;
    for (int i = 0; i < numArgs; i++) {
        amount += state.argumentTypes[i].getSize();
    }
    return amount;
}
private void load_local(Type t, int pos) {
// TODO: make t == null ok?
mv.visitVarInsn(t.getOpcode(Constants.ILOAD), pos);
}
private void store_local(Type t, int pos) {
// TODO: make t == null ok?
mv.visitVarInsn(t.getOpcode(Constants.ISTORE), pos);
}
public void iinc(Local local, int amount) {
mv.visitIincInsn(local.getIndex(), amount);
}
public void store_local(Local local) {
store_local(local.getType(), local.getIndex());
}
public void load_local(Local local) {
load_local(local.getType(), local.getIndex());
}
public void return_value() {
mv.visitInsn(state.sig.getReturnType().getOpcode(Constants.IRETURN));
}
public void getfield(String name) {
ClassEmitter.FieldInfo info = ce.getFieldInfo(name);
int opcode = TypeUtils.isStatic(info.access) ? Constants.GETSTATIC : Constants.GETFIELD;
emit_field(opcode, ce.getClassType(), name, info.type);
}
public void putfield(String name) {
ClassEmitter.FieldInfo info = ce.getFieldInfo(name);
int opcode = TypeUtils.isStatic(info.access) ? Constants.PUTSTATIC : Constants.PUTFIELD;
emit_field(opcode, ce.getClassType(), name, info.type);
}
public void super_getfield(String name, Type type) {
emit_field(Constants.GETFIELD, ce.getSuperType(), name, type);
}
public void super_putfield(String name, Type type) {
emit_field(Constants.PUTFIELD, ce.getSuperType(), name, type);
}
public void super_getstatic(String name, Type type) {
emit_field(Constants.GETSTATIC, ce.getSuperType(), name, type);
}
public void super_putstatic(String name, Type type) {
emit_field(Constants.PUTSTATIC, ce.getSuperType(), name, type);
}
public void getfield(Type owner, String name, Type type) {
emit_field(Constants.GETFIELD, owner, name, type);
}
public void putfield(Type owner, String name, Type type) {
emit_field(Constants.PUTFIELD, owner, name, type);
}
public void getstatic(Type owner, String name, Type type) {
emit_field(Constants.GETSTATIC, owner, name, type);
}
public void putstatic(Type owner, String name, Type type) {
emit_field(Constants.PUTSTATIC, owner, name, type);
}
// package-protected for EmitUtils, try to fix
void emit_field(int opcode, Type ctype, String name, Type ftype) {
mv.visitFieldInsn(opcode,
ctype.getInternalName(),
name,
ftype.getDescriptor());
}
public void super_invoke() {
super_invoke(state.sig);
}
public void super_invoke(Signature sig) {
emit_invoke(Constants.INVOKESPECIAL, ce.getSuperType(), sig, false);
}
public void invoke_constructor(Type type) {
invoke_constructor(type, CSTRUCT_NULL);
}
public void super_invoke_constructor() {
invoke_constructor(ce.getSuperType());
}
public void invoke_constructor_this() {
invoke_constructor(ce.getClassType());
}
// Core invoke emitter. Constructors are only valid via INVOKESPECIAL; the
// empty branch below marks where virtual/static constructor calls should
// be rejected (upstream TODO — currently ignored).
private void emit_invoke(int opcode, Type type, Signature sig, boolean isInterface) {
    if (sig.getName().equals(Constants.CONSTRUCTOR_NAME) &&
            ((opcode == Constants.INVOKEVIRTUAL) ||
            (opcode == Constants.INVOKESTATIC))) {
        // TODO: error
    }
    mv.visitMethodInsn(opcode,
            type.getInternalName(),
            sig.getName(),
            sig.getDescriptor(),
            isInterface);
}
public void invoke_interface(Type owner, Signature sig) {
emit_invoke(Constants.INVOKEINTERFACE, owner, sig, true);
}
public void invoke_virtual(Type owner, Signature sig) {
emit_invoke(Constants.INVOKEVIRTUAL, owner, sig, false);
}
@Deprecated
public void invoke_static(Type owner, Signature sig) {
invoke_static(owner, sig, false);
}
public void invoke_static(Type owner, Signature sig, boolean isInterface) {
emit_invoke(Constants.INVOKESTATIC, owner, sig, isInterface);
}
public void invoke_virtual_this(Signature sig) {
invoke_virtual(ce.getClassType(), sig);
}
public void invoke_static_this(Signature sig) {
invoke_static(ce.getClassType(), sig);
}
public void invoke_constructor(Type type, Signature sig) {
emit_invoke(Constants.INVOKESPECIAL, type, sig, false);
}
public void invoke_constructor_this(Signature sig) {
invoke_constructor(ce.getClassType(), sig);
}
public void super_invoke_constructor(Signature sig) {
invoke_constructor(ce.getSuperType(), sig);
}
public void new_instance_this() {
new_instance(ce.getClassType());
}
public void new_instance(Type type) {
emit_type(Constants.NEW, type);
}
private void emit_type(int opcode, Type type) {
String desc;
if (TypeUtils.isArray(type)) {
desc = type.getDescriptor();
}
else {
desc = type.getInternalName();
}
mv.visitTypeInsn(opcode, desc);
}
public void aaload(int index) {
push(index);
aaload();
}
public void aaload() {
mv.visitInsn(Constants.AALOAD);
}
public void aastore() {
mv.visitInsn(Constants.AASTORE);
}
public void athrow() {
mv.visitInsn(Constants.ATHROW);
}
public Label make_label() {
return new Label();
}
public Local make_local() {
return make_local(Constants.TYPE_OBJECT);
}
public Local make_local(Type type) {
return new Local(newLocal(type.getSize()), type);
}
public void checkcast_this() {
checkcast(ce.getClassType());
}
public void checkcast(Type type) {
if (!type.equals(Constants.TYPE_OBJECT)) {
emit_type(Constants.CHECKCAST, type);
}
}
public void instance_of(Type type) {
emit_type(Constants.INSTANCEOF, type);
}
public void instance_of_this() {
instance_of(ce.getClassType());
}
public void process_switch(int[] keys, ProcessSwitchCallback callback) {
float density;
if (keys.length == 0) {
density = 0;
}
else {
density = (float) keys.length / (keys[keys.length - 1] - keys[0] + 1);
}
process_switch(keys, callback, density >= 0.5f);
}
public void process_switch(int[] keys, ProcessSwitchCallback callback, boolean useTable) {
if (!isSorted(keys)) {
throw new IllegalArgumentException("keys to switch must be sorted ascending");
}
Label def = make_label();
Label end = make_label();
try {
if (keys.length > 0) {
int len = keys.length;
int min = keys[0];
int max = keys[len - 1];
int range = max - min + 1;
if (useTable) {
Label[] labels = new Label[range];
Arrays.fill(labels, def);
for (int i = 0; i < len; i++) {
labels[keys[i] - min] = make_label();
}
mv.visitTableSwitchInsn(min, max, def, labels);
for (int i = 0; i < range; i++) {
Label label = labels[i];
if (label != def) {
mark(label);
callback.processCase(i + min, end);
}
}
}
else {
Label[] labels = new Label[len];
for (int i = 0; i < len; i++) {
labels[i] = make_label();
}
mv.visitLookupSwitchInsn(def, keys, labels);
for (int i = 0; i < len; i++) {
mark(labels[i]);
callback.processCase(keys[i], end);
}
}
}
mark(def);
callback.processDefault();
mark(end);
}
catch (RuntimeException | Error e) {
throw e;
}
catch (Exception e) {
throw new CodeGenerationException(e);
}
}
private static boolean isSorted(int[] keys) {
for (int i = 1; i < keys.length; i++) {
if (keys[i] < keys[i - 1]) {
return false;
}
}
return true;
}
public void mark(Label label) {
mv.visitLabel(label);
}
Label mark() {
Label label = make_label();
mv.visitLabel(label);
return label;
}
public void push(boolean value) {
push(value ? 1 : 0);
}
/**
* Toggles the integer on the top of the stack from 1 to 0 or vice versa
*/
public void not() {
push(1);
math(XOR, Type.INT_TYPE);
}
public void throw_exception(Type type, String msg) {
new_instance(type);
dup();
push(msg);
invoke_constructor(type, CSTRUCT_STRING);
athrow();
}
/**
* If the argument is a primitive class, replaces the primitive value
* on the top of the stack with the wrapped (Object) equivalent using valueOf() methods.
* For example, char -> Character.
* If the | CodeEmitter |
java | apache__logging-log4j2 | log4j-core-java9/src/main/java/org/apache/logging/log4j/core/util/Integers.java | {
"start": 861,
"end": 939
} | class ____ for compilation. It will not be copied to `log4j-core`.
*/
public | used |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java | {
"start": 2151,
"end": 7365
} | enum ____ {
/**
* Allocated as primary
*/
PRIMARY((byte) 0),
/**
* Allocated as a replica
*/
REPLICA((byte) 1),
/**
* Not allocated
*/
UNUSED((byte) 2);
private final byte id;
AllocationStatus(byte id) {
this.id = id;
}
private static AllocationStatus fromId(byte id) {
return switch (id) {
case 0 -> PRIMARY;
case 1 -> REPLICA;
case 2 -> UNUSED;
default -> throw new IllegalArgumentException("unknown id for allocation status [" + id + "]");
};
}
public String value() {
return switch (id) {
case 0 -> "primary";
case 1 -> "replica";
case 2 -> "unused";
default -> throw new IllegalArgumentException("unknown id for allocation status [" + id + "]");
};
}
private static AllocationStatus readFrom(StreamInput in) throws IOException {
return fromId(in.readByte());
}
private void writeTo(StreamOutput out) throws IOException {
out.writeByte(id);
}
}
public StoreStatus(StreamInput in) throws IOException {
node = new DiscoveryNode(in);
allocationId = in.readOptionalString();
allocationStatus = AllocationStatus.readFrom(in);
if (in.readBoolean()) {
storeException = in.readException();
}
}
public StoreStatus(DiscoveryNode node, String allocationId, AllocationStatus allocationStatus, Exception storeException) {
this.node = node;
this.allocationId = allocationId;
this.allocationStatus = allocationStatus;
this.storeException = storeException;
}
/**
* Node the store belongs to
*/
public DiscoveryNode getNode() {
return node;
}
/**
* AllocationStatus id of the store, used to select the store that will be
* used as a primary.
*/
public String getAllocationId() {
return allocationId;
}
/**
* Exception while trying to open the
* shard index or from when the shard failed
*/
public Exception getStoreException() {
return storeException;
}
/**
* The allocationStatus status of the store.
* {@link AllocationStatus#PRIMARY} indicates a primary shard copy
* {@link AllocationStatus#REPLICA} indicates a replica shard copy
* {@link AllocationStatus#UNUSED} indicates an unused shard copy
*/
public AllocationStatus getAllocationStatus() {
return allocationStatus;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
node.writeTo(out);
out.writeOptionalString(allocationId);
allocationStatus.writeTo(out);
if (storeException != null) {
out.writeBoolean(true);
out.writeException(storeException);
} else {
out.writeBoolean(false);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
node.toXContent(builder, params);
if (allocationId != null) {
builder.field(Fields.ALLOCATION_ID, allocationId);
}
builder.field(Fields.ALLOCATED, allocationStatus.value());
if (storeException != null) {
builder.startObject(Fields.STORE_EXCEPTION);
ElasticsearchException.generateThrowableXContent(builder, params, storeException);
builder.endObject();
}
return builder;
}
@Override
public int compareTo(StoreStatus other) {
if (storeException != null && other.storeException == null) {
return 1;
} else if (other.storeException != null && storeException == null) {
return -1;
}
if (allocationId != null && other.allocationId == null) {
return -1;
} else if (allocationId == null && other.allocationId != null) {
return 1;
} else if (allocationId == null && other.allocationId == null) {
return Integer.compare(allocationStatus.id, other.allocationStatus.id);
} else {
int compare = Integer.compare(allocationStatus.id, other.allocationStatus.id);
if (compare == 0) {
return allocationId.compareTo(other.allocationId);
}
return compare;
}
}
}
/**
* Single node failure while retrieving shard store information
*/
public static | AllocationStatus |
java | junit-team__junit5 | junit-platform-console/src/main/java/org/junit/platform/console/output/FlatPrintingListener.java | {
"start": 861,
"end": 4167
} | class ____ implements DetailsPrintingListener {
static final String INDENTATION = " ";
private final PrintWriter out;
private final ColorPalette colorPalette;
public FlatPrintingListener(PrintWriter out, ColorPalette colorPalette) {
this.out = out;
this.colorPalette = colorPalette;
}
@Override
public void testPlanExecutionStarted(TestPlan testPlan) {
this.out.printf("Test execution started. Number of static tests: %d%n",
testPlan.countTestIdentifiers(TestIdentifier::isTest));
}
@Override
public void testPlanExecutionFinished(TestPlan testPlan) {
this.out.println("Test execution finished.");
}
@Override
public void dynamicTestRegistered(TestIdentifier testIdentifier) {
printlnTestDescriptor(Style.DYNAMIC, "Registered:", testIdentifier);
}
@Override
public void executionSkipped(TestIdentifier testIdentifier, String reason) {
printlnTestDescriptor(Style.SKIPPED, "Skipped:", testIdentifier);
printlnMessage(Style.SKIPPED, "Reason", reason);
}
@Override
public void executionStarted(TestIdentifier testIdentifier) {
printlnTestDescriptor(Style.valueOf(testIdentifier), "Started:", testIdentifier);
}
@Override
public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) {
Style style = Style.valueOf(testExecutionResult);
printlnTestDescriptor(style, "Finished:", testIdentifier);
testExecutionResult.getThrowable().ifPresent(t -> printlnException(style, t));
}
@Override
public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry entry) {
printlnTestDescriptor(Style.REPORTED, "Reported:", testIdentifier);
printlnMessage(Style.REPORTED, "Reported values", entry.toString());
}
@Override
public void fileEntryPublished(TestIdentifier testIdentifier, FileEntry file) {
printlnTestDescriptor(Style.REPORTED, "Reported:", testIdentifier);
printlnMessage(Style.REPORTED, "Reported file", file.toString());
}
private void printlnTestDescriptor(Style style, String message, TestIdentifier testIdentifier) {
println(style, "%-10s %s (%s)", message, testIdentifier.getDisplayName(), testIdentifier.getUniqueId());
}
private void printlnException(Style style, Throwable throwable) {
printlnMessage(style, "Exception", ExceptionUtils.readStackTrace(throwable));
}
private void printlnMessage(Style style, String message, String detail) {
println(style, INDENTATION + "=> " + message + ": %s", indented(detail));
}
private void println(Style style, String format, Object... args) {
this.out.println(colorPalette.paint(style, format.formatted(args)));
}
/**
* Indent the given message if it is a multi-line string.
*
* <p>{@link #INDENTATION} is used to prefix the start of each new line
* except the first one.
*
* @param message the message to indent
* @return indented message
*/
private static String indented(String message) {
return DetailsPrintingListener.indented(message, INDENTATION);
}
@Override
public void listTests(TestPlan testPlan) {
testPlan.accept(new TestPlan.Visitor() {
@Override
public void visit(TestIdentifier testIdentifier) {
println(Style.valueOf(testIdentifier), "%s (%s)", testIdentifier.getDisplayName(),
testIdentifier.getUniqueId());
}
});
}
}
| FlatPrintingListener |
java | apache__flink | flink-test-utils-parent/flink-test-utils/src/main/java/org/apache/flink/connector/upserttest/sink/UpsertTestSink.java | {
"start": 1802,
"end": 3278
} | class ____<IN> implements Sink<IN> {
private final File outputFile;
private final SerializationSchema<IN> keySerializationSchema;
private final SerializationSchema<IN> valueSerializationSchema;
UpsertTestSink(
File outputFile,
SerializationSchema<IN> keySerializationSchema,
SerializationSchema<IN> valueSerializationSchema) {
this.outputFile = checkNotNull(outputFile);
this.keySerializationSchema = checkNotNull(keySerializationSchema);
this.valueSerializationSchema = checkNotNull(valueSerializationSchema);
}
/**
* Create a {@link UpsertTestSinkBuilder} to construct a new {@link UpsertTestSink}.
*
* @param <IN> type of incoming records
* @return {@link UpsertTestSinkBuilder}
*/
public static <IN> UpsertTestSinkBuilder<IN> builder() {
return new UpsertTestSinkBuilder<>();
}
@Internal
@Override
public SinkWriter<IN> createWriter(WriterInitContext context) {
try {
keySerializationSchema.open(context.asSerializationSchemaInitializationContext());
valueSerializationSchema.open(context.asSerializationSchemaInitializationContext());
} catch (Exception e) {
throw new FlinkRuntimeException("Failed to initialize schema.", e);
}
return new UpsertTestSinkWriter<>(
outputFile, keySerializationSchema, valueSerializationSchema);
}
}
| UpsertTestSink |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertIsSorted_Test.java | {
"start": 1601,
"end": 4220
} | class ____ extends IntArraysBaseTest {
@Override
protected void initActualArray() {
actual = arrayOf(1, 2, 3, 4, 4);
}
@Test
void should_pass_if_actual_is_sorted_in_ascending_order() {
arrays.assertIsSorted(someInfo(), actual);
}
@Test
void should_pass_if_actual_is_empty() {
arrays.assertIsSorted(someInfo(), emptyArray());
}
@Test
void should_pass_if_actual_contains_only_one_element() {
arrays.assertIsSorted(someInfo(), arrayOf(1));
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertIsSorted(someInfo(), null))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_not_sorted_in_ascending_order() {
AssertionInfo info = someInfo();
actual = arrayOf(1, 3, 2);
Throwable error = catchThrowable(() -> arrays.assertIsSorted(info, actual));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeSorted(1, actual));
}
@Test
void should_pass_if_actual_is_sorted_in_ascending_order_according_to_custom_comparison_strategy() {
actual = arrayOf(1, -2, 3, 4, -4);
arraysWithCustomComparisonStrategy.assertIsSorted(someInfo(), actual);
}
@Test
void should_pass_if_actual_is_empty_whatever_custom_comparison_strategy_is() {
arraysWithCustomComparisonStrategy.assertIsSorted(someInfo(), emptyArray());
}
@Test
void should_pass_if_actual_contains_only_one_element_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertIsSorted(someInfo(), arrayOf(1));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertIsSorted(someInfo(),
null))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_not_sorted_in_ascending_order_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
actual = arrayOf(1, 3, 2);
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertIsSorted(info, actual));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeSortedAccordingToGivenComparator(1, actual, comparatorForCustomComparisonStrategy()));
}
}
| IntArrays_assertIsSorted_Test |
java | apache__flink | flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarPlanRequestBody.java | {
"start": 1470,
"end": 2266
} | class ____ extends JarRequestBody {
@VisibleForTesting
public JarPlanRequestBody() {
super(null, null, null, null, null);
}
@JsonCreator
public JarPlanRequestBody(
@Nullable @JsonProperty(FIELD_NAME_ENTRY_CLASS) String entryClassName,
@Nullable @JsonProperty(FIELD_NAME_PROGRAM_ARGUMENTS_LIST)
List<String> programArgumentsList,
@Nullable @JsonProperty(FIELD_NAME_PARALLELISM) Integer parallelism,
@Nullable @JsonProperty(FIELD_NAME_JOB_ID) JobID jobId,
@Nullable @JsonProperty(FIELD_NAME_FLINK_CONFIGURATION)
Map<String, String> flinkConfiguration) {
super(entryClassName, programArgumentsList, parallelism, jobId, flinkConfiguration);
}
}
| JarPlanRequestBody |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/FuturesTest.java | {
"start": 51525,
"end": 52405
} | class ____ implements Function<Throwable, Object> {
@SuppressWarnings("nullness:initialization.field.uninitialized")
ListenableFuture<Object> output;
@Override
public Object apply(Throwable input) {
output.cancel(false);
throw new SomeUncheckedException();
}
}
Fallback fallback = new Fallback();
SettableFuture<Object> input = SettableFuture.create();
ListenableFuture<Object> output = catching(input, Throwable.class, fallback, directExecutor());
fallback.output = output;
input.setException(new MyException());
assertTrue(output.isCancelled());
}
public void testCatching_getThrowsRuntimeException() throws Exception {
ListenableFuture<Object> input =
UncheckedThrowingFuture.throwingRuntimeException(new SomeUncheckedException());
// We'd catch only SomeUncheckedException. | Fallback |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InSerializationTests.java | {
"start": 597,
"end": 1508
} | class ____ extends AbstractExpressionSerializationTests<In> {
@Override
protected In createTestInstance() {
Source source = randomSource();
Expression value = randomChild();
List<Expression> list = randomList(10, AbstractExpressionSerializationTests::randomChild);
return new In(source, value, list);
}
@Override
protected In mutateInstance(In instance) throws IOException {
Source source = instance.source();
Expression value = instance.value();
List<Expression> list = instance.list();
if (randomBoolean()) {
value = randomValueOtherThan(value, AbstractExpressionSerializationTests::randomChild);
} else {
list = randomValueOtherThan(list, () -> randomList(10, AbstractExpressionSerializationTests::randomChild));
}
return new In(source, value, list);
}
}
| InSerializationTests |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/resource/EventLoopGroupProvider.java | {
"start": 1701,
"end": 3232
} | class ____ the {@link EventLoopGroup}, must not be {@code null}
* @param <T> type of the {@link EventLoopGroup}
* @return the {@link EventLoopGroup}.
*/
<T extends EventLoopGroup> T allocate(Class<T> type);
/**
* Returns the pool size (number of threads) for IO threads. The indicated size does not reflect the number for all IO
* threads, it is the number of threads that are used to create a particular thread pool.
*
* @return the pool size (number of threads) for all IO tasks.
*/
int threadPoolSize();
/**
* Release a {@code eventLoopGroup} instance. The method will shutdown/terminate the {@link EventExecutorGroup} if it is no
* longer needed.
*
* @param eventLoopGroup the eventLoopGroup instance, must not be {@code null}
* @param quietPeriod the quiet period
* @param timeout the timeout
* @param unit time unit for the quiet period/the timeout
* @return a close future to synchronize the called for shutting down.
*/
Future<Boolean> release(EventExecutorGroup eventLoopGroup, long quietPeriod, long timeout, TimeUnit unit);
/**
* Shutdown the provider and release all instances.
*
* @param quietPeriod the quiet period
* @param timeout the timeout
* @param timeUnit the unit of {@code quietPeriod} and {@code timeout}
* @return a close future to synchronize the called for shutting down.
*/
Future<Boolean> shutdown(long quietPeriod, long timeout, TimeUnit timeUnit);
}
| of |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/codegen/BufferHolder.java | {
"start": 1511,
"end": 1744
} | class ____ writing program, so that the memory segment/data buffer can be reused. Note that
* for each incoming record, we should call `reset` of BufferHolder instance before write the record
* and reuse the data buffer.
*/
final | per |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/internal/StandardLockingClauseStrategy.java | {
"start": 1646,
"end": 11156
} | class ____ extends AbstractLockingClauseStrategy {
private final Dialect dialect;
private final RowLockStrategy rowLockStrategy;
private final PessimisticLockKind lockKind;
private final Timeout timeout;
private boolean queryHasOuterJoins = false;
private Set<TableGroup> rootsToLock;
private Set<TableGroupJoin> joinsToLock;
public StandardLockingClauseStrategy(
Dialect dialect,
PessimisticLockKind lockKind,
RowLockStrategy rowLockStrategy,
LockOptions lockOptions,
Set<NavigablePath> rootsForLocking) {
super( lockOptions.getScope(), rootsForLocking );
assert lockKind != PessimisticLockKind.NONE;
this.dialect = dialect;
this.rowLockStrategy = rowLockStrategy;
this.lockKind = lockKind;
this.timeout = lockOptions.getTimeout();
}
@Override
public boolean registerRoot(TableGroup root) {
if ( !queryHasOuterJoins ) {
if ( CollectionHelper.isNotEmpty( root.getTableReferenceJoins() ) ) {
// joined inheritance and/or secondary tables - inherently has outer joins
queryHasOuterJoins = true;
}
}
return super.registerRoot( root );
}
@Override
protected void trackRoot(TableGroup root) {
super.trackRoot( root );
if ( rootsToLock == null ) {
rootsToLock = new HashSet<>();
}
rootsToLock.add( root );
}
@Override
public boolean registerJoin(TableGroupJoin join) {
checkForOuterJoins( join );
return super.registerJoin( join );
}
@Override
protected void trackJoin(TableGroupJoin join) {
super.trackJoin( join );
if ( joinsToLock == null ) {
joinsToLock = new LinkedHashSet<>();
}
joinsToLock.add( join );
}
private void checkForOuterJoins(TableGroupJoin join) {
if ( queryHasOuterJoins ) {
// perf out
return;
}
queryHasOuterJoins = hasOuterJoin( join );
}
private boolean hasOuterJoin(TableGroupJoin join) {
final TableGroup joinedGroup = join.getJoinedGroup();
if ( join.isInitialized()
&& join.getJoinType() != SqlAstJoinType.INNER
&& !joinedGroup.isVirtual() ) {
return true;
}
if ( !CollectionHelper.isEmpty( joinedGroup.getTableReferenceJoins() ) ) {
for ( TableReferenceJoin tableReferenceJoin : joinedGroup.getTableReferenceJoins() ) {
if ( tableReferenceJoin.getJoinType() != SqlAstJoinType.INNER ) {
return true;
}
}
}
return false;
}
@Override
public boolean containsOuterJoins() {
return queryHasOuterJoins;
}
@Override
public void render(SqlAppender sqlAppender) {
renderLockFragment( sqlAppender );
renderResultSetOptions( sqlAppender );
}
protected void renderLockFragment(SqlAppender sqlAppender) {
final String fragment;
if ( rowLockStrategy == RowLockStrategy.NONE ) {
fragment = lockKind == PessimisticLockKind.SHARE
? dialect.getReadLockString( timeout )
: dialect.getWriteLockString( timeout );
}
else if ( CollectionHelper.isEmpty( rootsToLock )
&& CollectionHelper.isEmpty( joinsToLock ) ) {
// this might happen with locking and scalar queries. e.g.
// session.createQuery( "select p.unitCost * .049 from Product p" )
// .setLockMode(...)
//
// the spec says:
// > (if) the query returns scalar data ..., the underlying database rows will be locked
//
// so we use a simple `for update`, with no `of`. aka, we treat it the same as RowLockStrategy.NONE
assert CollectionHelper.isEmpty( rootsForLocking );
fragment = lockKind == PessimisticLockKind.SHARE
? dialect.getReadLockString( timeout )
: dialect.getWriteLockString( timeout );
}
else {
final String lockItemsFragment = collectLockItems();
fragment = lockKind == PessimisticLockKind.SHARE
? dialect.getReadLockString( lockItemsFragment, timeout )
: dialect.getWriteLockString( lockItemsFragment, timeout );
}
sqlAppender.append( fragment );
}
private String collectLockItems() {
if ( rowLockStrategy == null ) {
return "";
}
final List<String> lockItems = new ArrayList<>();
if ( rootsToLock != null ) {
for ( TableGroup root : rootsToLock ) {
collectLockItems( root, lockItems );
}
}
if ( joinsToLock != null ) {
for ( TableGroupJoin join : joinsToLock ) {
collectLockItems( join.getJoinedGroup(), lockItems );
}
}
final StringBuilder buffer = new StringBuilder();
boolean first = true;
for ( String lockItem : lockItems ) {
if ( first ) {
first = false;
}
else {
buffer.append( ',' );
}
buffer.append( lockItem );
}
return buffer.toString();
}
protected void renderResultSetOptions(SqlAppender sqlAppender) {
// hook for Derby
}
private void collectLockItems(TableGroup tableGroup, List<String> lockItems) {
if ( rowLockStrategy == RowLockStrategy.TABLE ) {
addTableAliases( tableGroup, lockItems );
}
else if ( rowLockStrategy == RowLockStrategy.COLUMN ) {
addColumnRefs( tableGroup, lockItems );
}
}
private void addTableAliases(TableGroup tableGroup, List<String> lockItems) {
final String tableAlias = tableGroup.getPrimaryTableReference().getIdentificationVariable();
lockItems.add( tableAlias );
final List<TableReferenceJoin> tableReferenceJoins = tableGroup.getTableReferenceJoins();
if ( CollectionHelper.isNotEmpty( tableReferenceJoins ) ) {
for ( int i = 0; i < tableReferenceJoins.size(); i++ ) {
lockItems.add( tableReferenceJoins.get(i).getJoinedTableReference().getIdentificationVariable() );
}
}
}
private void addColumnRefs(TableGroup tableGroup, List<String> lockItems) {
final String[] keyColumns = determineKeyColumnNames( tableGroup );
final String tableAlias = tableGroup.getPrimaryTableReference().getIdentificationVariable();
for ( int i = 0; i < keyColumns.length; i++ ) {
lockItems.add( tableAlias + "." + keyColumns[i] );
}
final List<TableReferenceJoin> tableReferenceJoins = tableGroup.getTableReferenceJoins();
if ( CollectionHelper.isNotEmpty( tableReferenceJoins ) ) {
final EntityPersister entityPersister = determineEntityPersister( tableGroup.getModelPart() );
for ( int i = 0; i < tableReferenceJoins.size(); i++ ) {
final TableReferenceJoin tableReferenceJoin = tableReferenceJoins.get( i );
final NamedTableReference joinedTableReference = tableReferenceJoin.getJoinedTableReference();
final String tableJoinAlias = joinedTableReference.getIdentificationVariable();
final TableMapping tableMapping = determineTableMapping( entityPersister, tableReferenceJoin );
for ( TableDetails.KeyColumn keyColumn : tableMapping.getKeyDetails().getKeyColumns() ) {
lockItems.add( tableJoinAlias + "." + keyColumn.getColumnName() );
}
}
}
}
private TableMapping determineTableMapping(EntityPersister entityPersister, TableReferenceJoin tableReferenceJoin) {
final NamedTableReference joinedTableReference = tableReferenceJoin.getJoinedTableReference();
for ( EntityTableMapping tableMapping : entityPersister.getTableMappings() ) {
if ( joinedTableReference.containsAffectedTableName( tableMapping.getTableName() ) ) {
return tableMapping;
}
}
for ( EntityMappingType subMappingType : entityPersister.getSubMappingTypes() ) {
for ( EntityTableMapping tableMapping : subMappingType.getEntityPersister().getTableMappings() ) {
if ( joinedTableReference.containsAffectedTableName( tableMapping.getTableName() ) ) {
return tableMapping;
}
}
}
throw new IllegalArgumentException( "Couldn't find subclass index for joined table reference " + joinedTableReference );
}
private EntityPersister determineEntityPersister(ModelPartContainer modelPart) {
if ( modelPart instanceof EntityPersister entityPersister ) {
return entityPersister;
}
else if ( modelPart instanceof PluralAttributeMapping pluralAttributeMapping ) {
return pluralAttributeMapping.getCollectionDescriptor().getElementPersister();
}
else if ( modelPart instanceof EntityAssociationMapping entityAssociationMapping ) {
return entityAssociationMapping.getAssociatedEntityMappingType().getEntityPersister();
}
else {
throw new IllegalArgumentException( "Expected table group with table joins to have an entity typed model part but got: " + modelPart );
}
}
private String[] determineKeyColumnNames(TableGroup tableGroup) {
if ( tableGroup instanceof LockingTableGroup lockingTableGroup ) {
return extractColumnNames( lockingTableGroup.getKeyColumnMappings() );
}
else if ( tableGroup.getModelPart() instanceof EntityPersister entityPersister ) {
return entityPersister.getIdentifierColumnNames();
}
else if ( tableGroup.getModelPart() instanceof PluralAttributeMapping pluralAttributeMapping ) {
return extractColumnNames( pluralAttributeMapping.getKeyDescriptor() );
}
else if ( tableGroup.getModelPart() instanceof EntityAssociationMapping entityAssociationMapping ) {
return extractColumnNames( entityAssociationMapping.getAssociatedEntityMappingType().getIdentifierMapping() );
}
else {
throw new AssertionFailure( "Unable to determine columns for locking" );
}
}
private static String[] extractColumnNames(SelectableMappings keyColumnMappings) {
if ( keyColumnMappings.getJdbcTypeCount() == 1 ) {
return new String[] { keyColumnMappings.getSelectable( 0 ).getSelectableName() };
}
final String[] results = new String[ keyColumnMappings.getJdbcTypeCount() ];
keyColumnMappings.forEachSelectable( (selectionIndex, selectableMapping) -> {
results[selectionIndex] = selectableMapping.getSelectableName();
} );
return results;
}
}
| StandardLockingClauseStrategy |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/cascade/MergeWithTransientNonCascadedAssociationTest.java | {
"start": 1674,
"end": 1935
} | class ____ {
@Id
@GeneratedValue(generator = "increment")
@GenericGenerator(name = "increment", strategy = "increment")
private Integer id;
@ManyToOne
private Address address;
public Person() {
}
}
@Entity(name = "Address")
public static | Person |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ComparableAndComparatorTest.java | {
"start": 2146,
"end": 2499
} | class ____ implements Comparator<SuperClass> {
@Override
public int compare(SuperClass o1, SuperClass o2) {
return 0;
}
}
/** SubClass test class */
// BUG: Diagnostic contains: Class should not implement both
public static | SuperClass |
java | apache__spark | common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockStoreClient.java | {
"start": 2156,
"end": 13784
} | class ____ extends BlockStoreClient {
private static final ErrorHandler PUSH_ERROR_HANDLER = new ErrorHandler.BlockPushErrorHandler();
private final boolean authEnabled;
private final SecretKeyHolder secretKeyHolder;
private final long registrationTimeoutMs;
// Push based shuffle requires a comparable Id to distinguish the shuffle data among multiple
// application attempts. This variable is derived from the String typed appAttemptId. If no
// appAttemptId is set, the default comparableAppAttemptId is -1.
private int comparableAppAttemptId = -1;
/**
* Creates an external shuffle client, with SASL optionally enabled. If SASL is not enabled,
* then secretKeyHolder may be null.
*/
public ExternalBlockStoreClient(
TransportConf conf,
SecretKeyHolder secretKeyHolder,
boolean authEnabled,
long registrationTimeoutMs) {
this.transportConf = conf;
this.secretKeyHolder = secretKeyHolder;
this.authEnabled = authEnabled;
this.registrationTimeoutMs = registrationTimeoutMs;
}
/**
* Initializes the BlockStoreClient, specifying this Executor's appId.
* Must be called before any other method on the BlockStoreClient.
*/
public void init(String appId) {
this.appId = appId;
TransportContext context = new TransportContext(
transportConf, new NoOpRpcHandler(), true, true);
List<TransportClientBootstrap> bootstraps = new ArrayList<>();
if (authEnabled) {
bootstraps.add(new AuthClientBootstrap(transportConf, appId, secretKeyHolder));
}
clientFactory = context.createClientFactory(bootstraps);
}
@Override
public void setAppAttemptId(String appAttemptId) {
super.setAppAttemptId(appAttemptId);
setComparableAppAttemptId(appAttemptId);
}
private void setComparableAppAttemptId(String appAttemptId) {
// For now, push based shuffle only supports running in YARN.
// Application attemptId in YARN is integer and it can be safely parsed
// to integer here. For the application attemptId from other cluster set up
// which is not numeric, it needs to generate this comparableAppAttemptId
// from the String typed appAttemptId through some other customized logic.
try {
this.comparableAppAttemptId = Integer.parseInt(appAttemptId);
} catch (NumberFormatException e) {
logger.warn("Push based shuffle requires comparable application attemptId, " +
"but the appAttemptId {} cannot be parsed to Integer", e,
MDC.of(LogKeys.APP_ATTEMPT_ID, appAttemptId));
}
}
@Override
public void fetchBlocks(
String host,
int port,
String execId,
String[] blockIds,
BlockFetchingListener listener,
DownloadFileManager downloadFileManager) {
checkInit();
logger.debug("External shuffle fetch from {}:{} (executor id {})", host, port, execId);
try {
int maxRetries = transportConf.maxIORetries();
RetryingBlockTransferor.BlockTransferStarter blockFetchStarter =
(inputBlockId, inputListener) -> {
// Unless this client is closed.
if (clientFactory != null) {
assert inputListener instanceof BlockFetchingListener :
"Expecting a BlockFetchingListener, but got " + inputListener.getClass();
TransportClient client = clientFactory.createClient(host, port, maxRetries > 0);
new OneForOneBlockFetcher(client, appId, execId, inputBlockId,
(BlockFetchingListener) inputListener, transportConf, downloadFileManager).start();
} else {
logger.info("This clientFactory was closed. Skipping further block fetch retries.");
}
};
if (maxRetries > 0) {
// Note this Fetcher will correctly handle maxRetries == 0; we avoid it just in case there's
// a bug in this code. We should remove the if statement once we're sure of the stability.
new RetryingBlockTransferor(transportConf, blockFetchStarter, blockIds, listener).start();
} else {
blockFetchStarter.createAndStart(blockIds, listener);
}
} catch (Exception e) {
logger.error("Exception while beginning fetchBlocks", e);
for (String blockId : blockIds) {
listener.onBlockFetchFailure(blockId, e);
}
}
}
@Override
public void pushBlocks(
String host,
int port,
String[] blockIds,
ManagedBuffer[] buffers,
BlockPushingListener listener) {
checkInit();
assert blockIds.length == buffers.length : "Number of block ids and buffers do not match.";
Map<String, ManagedBuffer> buffersWithId = new HashMap<>();
for (int i = 0; i < blockIds.length; i++) {
buffersWithId.put(blockIds[i], buffers[i]);
}
logger.debug("Push {} shuffle blocks to {}:{}", blockIds.length, host, port);
try {
RetryingBlockTransferor.BlockTransferStarter blockPushStarter =
(inputBlockId, inputListener) -> {
if (clientFactory != null) {
assert inputListener instanceof BlockPushingListener :
"Expecting a BlockPushingListener, but got " + inputListener.getClass();
TransportClient client = clientFactory.createClient(host, port);
new OneForOneBlockPusher(client, appId, comparableAppAttemptId, inputBlockId,
(BlockPushingListener) inputListener, buffersWithId).start();
} else {
logger.info("This clientFactory was closed. Skipping further block push retries.");
}
};
int maxRetries = transportConf.maxIORetries();
if (maxRetries > 0) {
new RetryingBlockTransferor(
transportConf, blockPushStarter, blockIds, listener, PUSH_ERROR_HANDLER).start();
} else {
blockPushStarter.createAndStart(blockIds, listener);
}
} catch (Exception e) {
logger.error("Exception while beginning pushBlocks", e);
for (String blockId : blockIds) {
listener.onBlockPushFailure(blockId, e);
}
}
}
@Override
public void finalizeShuffleMerge(
String host,
int port,
int shuffleId,
int shuffleMergeId,
MergeFinalizerListener listener) {
checkInit();
try {
TransportClient client = clientFactory.createClient(host, port);
ByteBuffer finalizeShuffleMerge =
new FinalizeShuffleMerge(
appId, comparableAppAttemptId, shuffleId, shuffleMergeId).toByteBuffer();
client.sendRpc(finalizeShuffleMerge, new RpcResponseCallback() {
@Override
public void onSuccess(ByteBuffer response) {
listener.onShuffleMergeSuccess(
(MergeStatuses) BlockTransferMessage.Decoder.fromByteBuffer(response));
}
@Override
public void onFailure(Throwable e) {
listener.onShuffleMergeFailure(e);
}
});
} catch (Exception e) {
logger.error("Exception while sending finalizeShuffleMerge request to {}:{}", e,
MDC.of(LogKeys.HOST, host),
MDC.of(LogKeys.PORT, port));
listener.onShuffleMergeFailure(e);
}
}
@Override
public void getMergedBlockMeta(
String host,
int port,
int shuffleId,
int shuffleMergeId,
int reduceId,
MergedBlocksMetaListener listener) {
checkInit();
logger.debug("Get merged blocks meta from {}:{} for shuffleId {} shuffleMergeId {}"
+ " reduceId {}", host, port, shuffleId, shuffleMergeId, reduceId);
try {
TransportClient client = clientFactory.createClient(host, port);
client.sendMergedBlockMetaReq(appId, shuffleId, shuffleMergeId, reduceId,
new MergedBlockMetaResponseCallback() {
@Override
public void onSuccess(int numChunks, ManagedBuffer buffer) {
logger.trace("Successfully got merged block meta for shuffleId {} shuffleMergeId {}"
+ " reduceId {}", shuffleId, shuffleMergeId, reduceId);
listener.onSuccess(shuffleId, shuffleMergeId, reduceId,
new MergedBlockMeta(numChunks, buffer));
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(shuffleId, shuffleMergeId, reduceId, e);
}
});
} catch (Exception e) {
listener.onFailure(shuffleId, shuffleMergeId, reduceId, e);
}
}
@Override
public boolean removeShuffleMerge(String host, int port, int shuffleId, int shuffleMergeId) {
checkInit();
try {
TransportClient client = clientFactory.createClient(host, port);
client.send(
new RemoveShuffleMerge(appId, comparableAppAttemptId, shuffleId, shuffleMergeId)
.toByteBuffer());
// TODO(SPARK-42025): Add some error logs for RemoveShuffleMerge RPC
} catch (Exception e) {
logger.debug("Exception while sending RemoveShuffleMerge request to {}:{}",
host, port, e);
return false;
}
return true;
}
@Override
public MetricSet shuffleMetrics() {
checkInit();
return clientFactory.getAllMetrics();
}
/**
* Registers this executor with an external shuffle server. This registration is required to
* inform the shuffle server about where and how we store our shuffle files.
*
* @param host Host of shuffle server.
* @param port Port of shuffle server.
* @param execId This Executor's id.
* @param executorInfo Contains all info necessary for the service to find our shuffle files.
*/
public void registerWithShuffleServer(
String host,
int port,
String execId,
ExecutorShuffleInfo executorInfo) throws IOException, InterruptedException {
checkInit();
try (TransportClient client = clientFactory.createClient(host, port)) {
ByteBuffer registerMessage = new RegisterExecutor(appId, execId, executorInfo).toByteBuffer();
client.sendRpcSync(registerMessage, registrationTimeoutMs);
}
}
public Future<Integer> removeBlocks(
String host,
int port,
String execId,
String[] blockIds) throws IOException, InterruptedException {
checkInit();
CompletableFuture<Integer> numRemovedBlocksFuture = new CompletableFuture<>();
ByteBuffer removeBlocksMessage = new RemoveBlocks(appId, execId, blockIds).toByteBuffer();
final TransportClient client = clientFactory.createClient(host, port);
client.sendRpc(removeBlocksMessage, new RpcResponseCallback() {
@Override
public void onSuccess(ByteBuffer response) {
try {
BlockTransferMessage msgObj = BlockTransferMessage.Decoder.fromByteBuffer(response);
numRemovedBlocksFuture.complete(((BlocksRemoved) msgObj).numRemovedBlocks);
} catch (Throwable t) {
logger.warn("Error trying to remove blocks {} via external shuffle service from " +
"executor: {}", t,
MDC.of(LogKeys.BLOCK_IDS, Arrays.toString(blockIds)),
MDC.of(LogKeys.EXECUTOR_ID, execId));
numRemovedBlocksFuture.complete(0);
}
}
@Override
public void onFailure(Throwable e) {
logger.warn("Error trying to remove blocks {} via external shuffle service from " +
"executor: {}", e, MDC.of(LogKeys.BLOCK_IDS, Arrays.toString(blockIds)),
MDC.of(LogKeys.EXECUTOR_ID, execId));
numRemovedBlocksFuture.complete(0);
}
});
return numRemovedBlocksFuture;
}
@Override
public void close() {
checkInit();
if (clientFactory != null) {
clientFactory.close();
clientFactory = null;
}
}
}
| ExternalBlockStoreClient |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/google/SortedMultisetTestSuiteBuilder.java | {
"start": 2944,
"end": 3889
} | class ____
@Override
protected List<Class<? extends AbstractTester>> getTesters() {
List<Class<? extends AbstractTester>> testers = copyToList(super.getTesters());
testers.add(MultisetNavigationTester.class);
return testers;
}
@Override
TestSuite createElementSetTestSuite(
FeatureSpecificTestSuiteBuilder<?, ? extends OneSizeTestContainerGenerator<Collection<E>, E>>
parentBuilder) {
// TODO(lowasser): make a SortedElementSetGenerator
return SetTestSuiteBuilder.using(
new ElementSetGenerator<E>(parentBuilder.getSubjectGenerator()))
.named(getName() + ".elementSet")
.withFeatures(computeElementSetFeatures(parentBuilder.getFeatures()))
.suppressing(parentBuilder.getSuppressedTests())
.createTestSuite();
}
/**
* To avoid infinite recursion, test suites with these marker features won't have derived suites
* created for them.
*/
| literals |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java | {
"start": 1773,
"end": 12917
} | enum ____ implements DateTimeField {
MILLENNIUM(dt -> {
int year = dt.getYear();
int firstYearOfMillennium = year - (year % 1000);
return dt.with(ChronoField.YEAR, firstYearOfMillennium)
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate()
.atStartOfDay(dt.getZone());
}, idt -> new IntervalDayTime(Duration.ZERO, idt.dataType()), iym -> {
Period period = iym.interval();
int year = period.getYears();
int firstYearOfMillennium = year - (year % 1000);
return new IntervalYearMonth(Period.ZERO.plusYears(firstYearOfMillennium), iym.dataType());
}, "millennia"),
CENTURY(dt -> {
int year = dt.getYear();
int firstYearOfCentury = year - (year % 100);
return dt.with(ChronoField.YEAR, firstYearOfCentury)
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate()
.atStartOfDay(dt.getZone());
}, idt -> new IntervalDayTime(Duration.ZERO, idt.dataType()), iym -> {
Period period = iym.interval();
int year = period.getYears();
int firstYearOfCentury = year - (year % 100);
return new IntervalYearMonth(Period.ZERO.plusYears(firstYearOfCentury), iym.dataType());
}, "centuries"),
DECADE(dt -> {
int year = dt.getYear();
int firstYearOfDecade = year - (year % 10);
return dt.with(ChronoField.YEAR, firstYearOfDecade)
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate()
.atStartOfDay(dt.getZone());
}, idt -> new IntervalDayTime(Duration.ZERO, idt.dataType()), iym -> {
Period period = iym.interval();
int year = period.getYears();
int firstYearOfDecade = year - (year % 10);
return new IntervalYearMonth(Period.ZERO.plusYears(firstYearOfDecade), iym.dataType());
}, "decades"),
YEAR(dt -> {
return dt.with(ChronoField.MONTH_OF_YEAR, 1).with(ChronoField.DAY_OF_MONTH, 1).toLocalDate().atStartOfDay(dt.getZone());
}, idt -> new IntervalDayTime(Duration.ZERO, idt.dataType()), iym -> {
Period period = iym.interval();
int year = period.getYears();
return new IntervalYearMonth(Period.ZERO.plusYears(year), iym.dataType());
}, "years", "yy", "yyyy"),
QUARTER(dt -> {
int month = dt.getMonthValue();
int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1;
return dt.with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate()
.atStartOfDay(dt.getZone());
}, idt -> new IntervalDayTime(Duration.ZERO, (idt.dataType())), iym -> {
Period period = iym.interval();
int month = period.getMonths();
int year = period.getYears();
int firstMonthOfQuarter = (month / 3) * 3;
return new IntervalYearMonth(Period.ZERO.plusYears(year).plusMonths(firstMonthOfQuarter), iym.dataType());
}, "quarters", "qq", "q"),
MONTH(
dt -> { return dt.with(ChronoField.DAY_OF_MONTH, 1).toLocalDate().atStartOfDay(dt.getZone()); },
idt -> new IntervalDayTime(Duration.ZERO, idt.dataType()),
iym -> iym,
"months",
"mm",
"m"
),
WEEK(dt -> {
return dt.with(ChronoField.DAY_OF_WEEK, 1).toLocalDate().atStartOfDay(dt.getZone());
}, idt -> new IntervalDayTime(Duration.ZERO, idt.dataType()), iym -> iym, "weeks", "wk", "ww"),
DAY(
dt -> dt.toLocalDate().atStartOfDay(dt.getZone()),
idt -> truncateIntervalSmallerThanWeek(idt, ChronoUnit.DAYS),
iym -> iym,
"days",
"dd",
"d"
),
HOUR(dt -> {
int hour = dt.getHour();
return dt.toLocalDate().atStartOfDay(dt.getZone()).with(ChronoField.HOUR_OF_DAY, hour);
}, idt -> truncateIntervalSmallerThanWeek(idt, ChronoUnit.HOURS), iym -> iym, "hours", "hh"),
MINUTE(dt -> {
int hour = dt.getHour();
int minute = dt.getMinute();
return dt.toLocalDate().atStartOfDay(dt.getZone()).with(ChronoField.HOUR_OF_DAY, hour).with(ChronoField.MINUTE_OF_HOUR, minute);
}, idt -> truncateIntervalSmallerThanWeek(idt, ChronoUnit.MINUTES), iym -> iym, "minutes", "mi", "n"),
SECOND(
dt -> dt.with(ChronoField.NANO_OF_SECOND, 0),
idt -> truncateIntervalSmallerThanWeek(idt, ChronoUnit.SECONDS),
iym -> iym,
"seconds",
"ss",
"s"
),
MILLISECOND(dt -> {
int micros = dt.get(ChronoField.MICRO_OF_SECOND);
return dt.with(ChronoField.MILLI_OF_SECOND, (micros / 1000));
}, idt -> truncateIntervalSmallerThanWeek(idt, ChronoUnit.MILLIS), iym -> iym, "milliseconds", "ms"),
MICROSECOND(dt -> {
int nanos = dt.getNano();
return dt.with(ChronoField.MICRO_OF_SECOND, (nanos / 1000));
}, idt -> idt, iym -> iym, "microseconds", "mcs"),
NANOSECOND(dt -> dt, idt -> idt, iym -> iym, "nanoseconds", "ns");
private static final Map<String, Part> NAME_TO_PART;
private static final List<String> VALID_VALUES;
static {
NAME_TO_PART = DateTimeField.initializeResolutionMap(values());
VALID_VALUES = DateTimeField.initializeValidValues(values());
}
private UnaryOperator<IntervalYearMonth> truncateFunctionIntervalYearMonth;
private UnaryOperator<ZonedDateTime> truncateFunctionZonedDateTime;
private UnaryOperator<IntervalDayTime> truncateFunctionIntervalDayTime;
private Set<String> aliases;
Part(
UnaryOperator<ZonedDateTime> truncateFunctionZonedDateTime,
UnaryOperator<IntervalDayTime> truncateFunctionIntervalDayTime,
UnaryOperator<IntervalYearMonth> truncateFunctionIntervalYearMonth,
String... aliases
) {
this.truncateFunctionIntervalYearMonth = truncateFunctionIntervalYearMonth;
this.truncateFunctionZonedDateTime = truncateFunctionZonedDateTime;
this.truncateFunctionIntervalDayTime = truncateFunctionIntervalDayTime;
this.aliases = Set.of(aliases);
}
@Override
public Iterable<String> aliases() {
return aliases;
}
public static List<String> findSimilar(String match) {
return DateTimeField.findSimilar(NAME_TO_PART.keySet(), match);
}
public static Part resolve(String truncateTo) {
return DateTimeField.resolveMatch(NAME_TO_PART, truncateTo);
}
public ZonedDateTime truncate(ZonedDateTime dateTime) {
return truncateFunctionZonedDateTime.apply(dateTime);
}
public IntervalDayTime truncate(IntervalDayTime dateTime) {
return truncateFunctionIntervalDayTime.apply(dateTime);
}
public IntervalYearMonth truncate(IntervalYearMonth dateTime) {
return truncateFunctionIntervalYearMonth.apply(dateTime);
}
private static IntervalDayTime truncateIntervalSmallerThanWeek(IntervalDayTime r, ChronoUnit unit) {
Duration d = r.interval();
int isNegative = 1;
if (d.isNegative()) {
d = d.negated();
isNegative = -1;
}
long durationInSec = d.getSeconds();
long day = durationInSec / SECONDS_PER_DAY;
durationInSec = durationInSec % SECONDS_PER_DAY;
long hour = durationInSec / SECONDS_PER_HOUR;
durationInSec = durationInSec % SECONDS_PER_HOUR;
long min = durationInSec / SECONDS_PER_MINUTE;
durationInSec = durationInSec % SECONDS_PER_MINUTE;
long sec = durationInSec;
long miliseccond = TimeUnit.NANOSECONDS.toMillis(d.getNano());
Duration newDuration = Duration.ZERO;
if (unit.ordinal() <= ChronoUnit.DAYS.ordinal()) {
newDuration = newDuration.plusDays(day * isNegative);
}
if (unit.ordinal() <= ChronoUnit.HOURS.ordinal()) {
newDuration = newDuration.plusHours(hour * isNegative);
}
if (unit.ordinal() <= ChronoUnit.MINUTES.ordinal()) {
newDuration = newDuration.plusMinutes(min * isNegative);
}
if (unit.ordinal() <= ChronoUnit.SECONDS.ordinal()) {
newDuration = newDuration.plusSeconds(sec * isNegative);
}
if (unit.ordinal() <= ChronoUnit.MILLIS.ordinal()) {
newDuration = newDuration.plusMillis(miliseccond * isNegative);
}
return new IntervalDayTime(newDuration, r.dataType());
}
}
public DateTrunc(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) {
super(source, truncateTo, timestamp, zoneId);
}
@Override
public DataType dataType() {
if (isInterval(right().dataType())) {
return right().dataType();
}
return DataTypes.DATETIME;
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = super.resolveType();
if (resolution.unresolved()) {
return resolution;
}
resolution = isDateOrInterval(right(), sourceText(), SECOND);
if (resolution.unresolved()) {
return resolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
protected BinaryScalarFunction replaceChildren(Expression newTruncateTo, Expression newTimestamp) {
return new DateTrunc(source(), newTruncateTo, newTimestamp, zoneId());
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, DateTrunc::new, left(), right(), zoneId());
}
@Override
protected String scriptMethodName() {
return "dateTrunc";
}
@Override
public Object fold() {
return DateTruncProcessor.process(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe truncateTo, Pipe timestamp, ZoneId zoneId) {
return new DateTruncPipe(source(), this, truncateTo, timestamp, zoneId);
}
@Override
protected boolean resolveDateTimeField(String dateTimeField) {
return Part.resolve(dateTimeField) != null;
}
@Override
protected List<String> findSimilarDateTimeFields(String dateTimeField) {
return Part.findSimilar(dateTimeField);
}
@Override
protected List<String> validDateTimeFieldValues() {
return Part.VALID_VALUES;
}
}
| Part |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/entityNames/manyToManyAudited/ReadEntityWithAuditedManyToManyTest.java | {
"start": 999,
"end": 3979
} | class ____ {
private long id_car1;
private long id_car2;
private long id_pers1;
private Person person1;
private Car car1;
private Person person1_1;
private Car car1_2;
@Test
public void testGetEntityNameManyYoManyWithEntityName(SessionFactoryScope scope) {
scope.inSession( session -> {
Person pers1 = new Person( "Hernan", 28 );
Person pers2 = new Person( "Leandro", 29 );
Person pers3 = new Person( "Barba", 32 );
Person pers4 = new Person( "Camomo", 15 );
//REV 1
session.getTransaction().begin();
List<Person> owners = new ArrayList<Person>();
owners.add( pers1 );
owners.add( pers2 );
owners.add( pers3 );
Car car1 = new Car( 5, owners );
session.persist( car1 );
session.getTransaction().commit();
id_pers1 = pers1.getId();
id_car1 = car1.getId();
owners = new ArrayList<Person>();
owners.add( pers2 );
owners.add( pers3 );
owners.add( pers4 );
Car car2 = new Car( 27, owners );
//REV 2
session.getTransaction().begin();
Person person1 = (Person) session.get( "Personaje", id_pers1 );
person1.setName( "Hernan David" );
person1.setAge( 40 );
session.persist( car1 );
session.persist( car2 );
session.getTransaction().commit();
id_car2 = car2.getId();
final var auditReader = AuditReaderFactory.get( session );
loadDataOnSessionAndAuditReader( session, auditReader );
checkEntityNames( session, auditReader );
} );
}
private void loadDataOnSessionAndAuditReader(SessionImplementor session, AuditReader auditReader) {
car1_2 = auditReader.find( Car.class, id_car1, 2 );
Car car2_2 = auditReader.find( Car.class, id_car2, 2 );
// navigate through relations to load objects
for ( Person owner : car1_2.getOwners() ) {
for ( Car ownedCar : owner.getCars() ) {
ownedCar.getRegistrationNumber();
}
}
for ( Person owner : car2_2.getOwners() ) {
for ( Car ownedCar : owner.getCars() ) {
ownedCar.getRegistrationNumber();
}
}
car1 = (Car) session.get( Car.class, id_car1 );
person1 = (Person) session.get( "Personaje", id_pers1 );
person1_1 = auditReader.find( Person.class, "Personaje", id_pers1, 1 );
}
private void checkEntityNames(SessionImplementor session, AuditReader auditReader) {
String currPerson1EN = session.getEntityName( person1 );
String currCar1EN = session.getEntityName( car1 );
String person1_1EN = auditReader.getEntityName( id_pers1, 1, person1_1 );
assert (currPerson1EN.equals( person1_1EN ));
String car1_2EN = auditReader.getEntityName( id_car1, 2, car1_2 );
assert (currCar1EN.equals( car1_2EN ));
}
@Test
public void testGetEntityNameManyYoManyWithEntityNameInNewSession(SessionFactoryScope scope) {
scope.inSession( session -> {
//force new session and AR
final var auditReader = AuditReaderFactory.get( session );
loadDataOnSessionAndAuditReader( session, auditReader );
checkEntityNames( session, auditReader );
} );
}
}
| ReadEntityWithAuditedManyToManyTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/FlowableOperator.java | {
"start": 970,
"end": 1435
} | interface ____<@NonNull Downstream, @NonNull Upstream> {
/**
* Applies a function to the child {@link Subscriber} and returns a new parent {@code Subscriber}.
* @param subscriber the child {@code Subscriber} instance
* @return the parent {@code Subscriber} instance
* @throws Throwable on failure
*/
@NonNull
Subscriber<? super Upstream> apply(@NonNull Subscriber<? super Downstream> subscriber) throws Throwable;
}
| FlowableOperator |
java | google__dagger | javatests/dagger/android/support/functional/ComponentStructureFollowsControllerStructureApplication.java | {
"start": 7177,
"end": 7420
} | class ____ {
@Provides
@IntoSet
static Class<?> addToComponentHierarchy() {
return InnerActivitySubcomponent.class;
}
}
}
@Subcomponent(modules = ServiceModule.class)
| InnerActivityModule |
java | apache__rocketmq | filter/src/main/java/org/apache/rocketmq/filter/expression/LogicExpression.java | {
"start": 921,
"end": 1023
} | class ____ taken from ActiveMQ org.apache.activemq.filter.LogicExpression,
* </p>
*/
public abstract | was |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/simp/broker/AbstractSubscriptionRegistry.java | {
"start": 1411,
"end": 4579
} | class ____ implements SubscriptionRegistry {
private static final MultiValueMap<String, String> EMPTY_MAP =
CollectionUtils.unmodifiableMultiValueMap(new LinkedMultiValueMap<>());
protected final Log logger = SimpLogging.forLogName(getClass());
@Override
public final void registerSubscription(Message<?> message) {
MessageHeaders headers = message.getHeaders();
SimpMessageType messageType = SimpMessageHeaderAccessor.getMessageType(headers);
if (!SimpMessageType.SUBSCRIBE.equals(messageType)) {
throw new IllegalArgumentException("Expected SUBSCRIBE: " + message);
}
String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
if (sessionId == null) {
if (logger.isErrorEnabled()) {
logger.error("No sessionId in " + message);
}
return;
}
String subscriptionId = SimpMessageHeaderAccessor.getSubscriptionId(headers);
if (subscriptionId == null) {
if (logger.isErrorEnabled()) {
logger.error("No subscriptionId in " + message);
}
return;
}
String destination = SimpMessageHeaderAccessor.getDestination(headers);
if (destination == null) {
if (logger.isErrorEnabled()) {
logger.error("No destination in " + message);
}
return;
}
addSubscriptionInternal(sessionId, subscriptionId, destination, message);
}
@Override
public final void unregisterSubscription(Message<?> message) {
MessageHeaders headers = message.getHeaders();
SimpMessageType messageType = SimpMessageHeaderAccessor.getMessageType(headers);
if (!SimpMessageType.UNSUBSCRIBE.equals(messageType)) {
throw new IllegalArgumentException("Expected UNSUBSCRIBE: " + message);
}
String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
if (sessionId == null) {
if (logger.isErrorEnabled()) {
logger.error("No sessionId in " + message);
}
return;
}
String subscriptionId = SimpMessageHeaderAccessor.getSubscriptionId(headers);
if (subscriptionId == null) {
if (logger.isErrorEnabled()) {
logger.error("No subscriptionId " + message);
}
return;
}
removeSubscriptionInternal(sessionId, subscriptionId, message);
}
@Override
public final MultiValueMap<String, String> findSubscriptions(Message<?> message) {
MessageHeaders headers = message.getHeaders();
SimpMessageType type = SimpMessageHeaderAccessor.getMessageType(headers);
if (!SimpMessageType.MESSAGE.equals(type)) {
throw new IllegalArgumentException("Unexpected message type: " + type);
}
String destination = SimpMessageHeaderAccessor.getDestination(headers);
if (destination == null) {
if (logger.isErrorEnabled()) {
logger.error("No destination in " + message);
}
return EMPTY_MAP;
}
return findSubscriptionsInternal(destination, message);
}
protected abstract void addSubscriptionInternal(
String sessionId, String subscriptionId, String destination, Message<?> message);
protected abstract void removeSubscriptionInternal(
String sessionId, String subscriptionId, Message<?> message);
protected abstract MultiValueMap<String, String> findSubscriptionsInternal(
String destination, Message<?> message);
}
| AbstractSubscriptionRegistry |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/connector/sink2/SinkWriter.java | {
"start": 2158,
"end": 2468
} | interface ____ {
/** Returns the current event-time watermark. */
long currentWatermark();
/**
* Returns the timestamp of the current input record or {@code null} if the element does not
* have an assigned timestamp.
*/
Long timestamp();
}
}
| Context |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/WebSocketHandler.java | {
"start": 1386,
"end": 3349
} | interface ____ {
/**
* Invoked after WebSocket negotiation has succeeded and the WebSocket connection is
* opened and ready for use.
* @throws Exception this method can handle or propagate exceptions; see class-level
* Javadoc for details.
*/
void afterConnectionEstablished(WebSocketSession session) throws Exception;
/**
* Invoked when a new WebSocket message arrives.
* @throws Exception this method can handle or propagate exceptions; see class-level
* Javadoc for details.
*/
void handleMessage(WebSocketSession session, WebSocketMessage<?> message) throws Exception;
/**
* Handle an error from the underlying WebSocket message transport.
* @throws Exception this method can handle or propagate exceptions; see class-level
* Javadoc for details.
*/
void handleTransportError(WebSocketSession session, Throwable exception) throws Exception;
/**
* Invoked after the WebSocket connection has been closed by either side, or after a
* transport error has occurred. Although the session may technically still be open,
* depending on the underlying implementation, sending messages at this point is
* discouraged and most likely will not succeed.
* @throws Exception this method can handle or propagate exceptions; see class-level
* Javadoc for details.
*/
void afterConnectionClosed(WebSocketSession session, CloseStatus closeStatus) throws Exception;
/**
* Whether the WebSocketHandler handles partial messages. If this flag is set to
* {@code true} and the underlying WebSocket server supports partial messages,
* then a large WebSocket message, or one of an unknown size may be split and
* maybe received over multiple calls to
* {@link #handleMessage(WebSocketSession, WebSocketMessage)}. The flag
* {@link org.springframework.web.socket.WebSocketMessage#isLast()} indicates if
* the message is partial and whether it is the last part.
*/
boolean supportsPartialMessages();
}
| WebSocketHandler |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleMultiInsertStatement.java | {
"start": 2635,
"end": 3642
} | class ____ extends OracleSQLObjectImpl implements Entry {
private List<ConditionalInsertClauseItem> items = new ArrayList<ConditionalInsertClauseItem>();
private InsertIntoClause elseItem;
public InsertIntoClause getElseItem() {
return elseItem;
}
public void setElseItem(InsertIntoClause elseItem) {
this.elseItem = elseItem;
}
public List<ConditionalInsertClauseItem> getItems() {
return items;
}
public void addItem(ConditionalInsertClauseItem item) {
if (item != null) {
item.setParent(this);
}
this.items.add(item);
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, items);
acceptChild(visitor, elseItem);
}
visitor.endVisit(this);
}
}
public static | ConditionalInsertClause |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/illegal/InterceptorWithDisposerMethodTest.java | {
"start": 1053,
"end": 1685
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(MyInterceptor.class, MyInterceptorBinding.class)
.shouldFail()
.build();
@Test
public void trigger() {
Throwable error = container.getFailure();
assertNotNull(error);
assertInstanceOf(DefinitionException.class, error);
assertTrue(error.getMessage().contains("Interceptor declares a disposer method"));
}
@Target({ TYPE, METHOD, FIELD, PARAMETER })
@Retention(RUNTIME)
@InterceptorBinding
@ | InterceptorWithDisposerMethodTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/inference/CompletionExecSerializationTests.java | {
"start": 831,
"end": 2338
} | class ____ extends AbstractPhysicalPlanSerializationTests<CompletionExec> {
@Override
protected CompletionExec createTestInstance() {
return new CompletionExec(randomSource(), randomChild(0), randomInferenceId(), randomPrompt(), randomAttribute());
}
@Override
protected CompletionExec mutateInstance(CompletionExec instance) throws IOException {
PhysicalPlan child = instance.child();
Expression inferenceId = instance.inferenceId();
Expression prompt = instance.prompt();
Attribute targetField = instance.targetField();
switch (between(0, 3)) {
case 0 -> child = randomValueOtherThan(child, () -> randomChild(0));
case 1 -> inferenceId = randomValueOtherThan(inferenceId, this::randomInferenceId);
case 2 -> prompt = randomValueOtherThan(prompt, this::randomPrompt);
case 3 -> targetField = randomValueOtherThan(targetField, this::randomAttribute);
}
return new CompletionExec(instance.source(), child, inferenceId, prompt, targetField);
}
private Literal randomInferenceId() {
return Literal.keyword(Source.EMPTY, randomIdentifier());
}
private Expression randomPrompt() {
return randomBoolean() ? Literal.keyword(Source.EMPTY, randomIdentifier()) : randomAttribute();
}
private Attribute randomAttribute() {
return ReferenceAttributeTests.randomReferenceAttribute(randomBoolean());
}
}
| CompletionExecSerializationTests |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/entity/PanacheEntityResourceMethodListenerTest.java | {
"start": 646,
"end": 3416
} | class ____ {
public static final AtomicInteger ON_BEFORE_SAVE_COUNTER = new AtomicInteger(0);
public static final AtomicInteger ON_AFTER_SAVE_COUNTER = new AtomicInteger(0);
public static final AtomicInteger ON_BEFORE_UPDATE_COUNTER = new AtomicInteger(0);
public static final AtomicInteger ON_AFTER_UPDATE_COUNTER = new AtomicInteger(0);
public static final AtomicInteger ON_BEFORE_DELETE_COUNTER = new AtomicInteger(0);
public static final AtomicInteger ON_AFTER_DELETE_COUNTER = new AtomicInteger(0);
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Collection.class, CollectionsResource.class, AbstractEntity.class, AbstractItem.class,
Item.class, ItemsResource.class, ItemRestDataResourceMethodListener.class)
.addAsResource("application.properties")
.addAsResource("import.sql"));
@Order(1)
@Test
void shouldListenersBeCalledWhenCreatingEntities() {
whenCreateEntity();
assertEquals(1, ON_BEFORE_SAVE_COUNTER.get());
assertEquals(1, ON_AFTER_SAVE_COUNTER.get());
}
@Order(2)
@Test
void shouldListenersBeCalledWhenUpdatingEntities() {
whenUpdateEntity();
assertEquals(1, ON_BEFORE_UPDATE_COUNTER.get());
assertEquals(1, ON_AFTER_UPDATE_COUNTER.get());
}
@Order(3)
@Test
void shouldListenersBeCalledWhenDeletingEntities() {
whenDeleteEntity();
assertEquals(1, ON_BEFORE_DELETE_COUNTER.get());
assertEquals(1, ON_AFTER_DELETE_COUNTER.get());
}
private void whenCreateEntity() {
Response response = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-simple\", \"collection\": {\"id\": \"full\"}}")
.when().post("/items")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(201);
}
private void whenUpdateEntity() {
given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"id\": \"1\", \"name\": \"first-test\", \"collection\": {\"id\": \"full\"}}")
.when().put("/items/1")
.then().statusCode(204);
}
private void whenDeleteEntity() {
given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"id\": \"1\", \"name\": \"first-test\", \"collection\": {\"id\": \"full\"}}")
.when().delete("/items/1")
.then().statusCode(204);
}
}
| PanacheEntityResourceMethodListenerTest |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/event/EventUtilsTest.java | {
"start": 3314,
"end": 3500
} | class ____ {
public void addPropertyChangeListener(final PropertyChangeListener listener) {
throw new RuntimeException();
}
}
public | ExceptionEventSource |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FtpConsumerMoveExpressionIT.java | {
"start": 2577,
"end": 2676
} | class ____ {
public String guid() {
return "123";
}
}
}
| MyGuidGenerator |
java | spring-projects__spring-security | ldap/src/test/java/org/springframework/security/ldap/authentication/ad/ActiveDirectoryLdapAuthenticationProviderTests.java | {
"start": 2722,
"end": 17208
} | class ____ {
public static final String EXISTING_LDAP_PROVIDER = "ldap://192.168.1.200/";
public static final String NON_EXISTING_LDAP_PROVIDER = "ldap://192.168.1.201/";
ActiveDirectoryLdapAuthenticationProvider provider;
UsernamePasswordAuthenticationToken joe = UsernamePasswordAuthenticationToken.unauthenticated("joe", "password");
DirContext ctx;
@BeforeEach
public void setUp() throws NamingException {
this.provider = new ActiveDirectoryLdapAuthenticationProvider("mydomain.eu", "ldap://192.168.1.200/");
this.ctx = mock(DirContext.class);
given(this.ctx.getNameInNamespace()).willReturn("");
}
@Test
public void bindPrincipalIsCreatedCorrectly() {
assertThat(this.provider.createBindPrincipal("joe")).isEqualTo("joe@mydomain.eu");
assertThat(this.provider.createBindPrincipal("joe@mydomain.eu")).isEqualTo("joe@mydomain.eu");
}
@Test
public void successfulAuthenticationProducesExpectedAuthorities() throws Exception {
checkAuthentication("dc=mydomain,dc=eu", this.provider);
}
// SEC-1915
@Test
public void customSearchFilterIsUsedForSuccessfulAuthentication() throws Exception {
String customSearchFilter = "(&(objectClass=user)(sAMAccountName={0}))";
DirContextAdapter dca = new DirContextAdapter();
SearchResult sr = new SearchResult("CN=Joe Jannsen,CN=Users", dca, dca.getAttributes());
given(this.ctx.search(any(Name.class), eq(customSearchFilter), any(Object[].class), any(SearchControls.class)))
.willReturn(new MockNamingEnumeration(sr));
ActiveDirectoryLdapAuthenticationProvider customProvider = new ActiveDirectoryLdapAuthenticationProvider(
"mydomain.eu", "ldap://192.168.1.200/");
customProvider.contextFactory = createContextFactoryReturning(this.ctx);
customProvider.setSearchFilter(customSearchFilter);
Authentication result = customProvider.authenticate(this.joe);
assertThat(result.isAuthenticated()).isTrue();
}
@Test
public void defaultSearchFilter() throws Exception {
final String defaultSearchFilter = "(&(objectClass=user)(userPrincipalName={0}))";
DirContextAdapter dca = new DirContextAdapter();
SearchResult sr = new SearchResult("CN=Joe Jannsen,CN=Users", dca, dca.getAttributes());
given(this.ctx.search(any(Name.class), eq(defaultSearchFilter), any(Object[].class), any(SearchControls.class)))
.willReturn(new MockNamingEnumeration(sr));
ActiveDirectoryLdapAuthenticationProvider customProvider = new ActiveDirectoryLdapAuthenticationProvider(
"mydomain.eu", "ldap://192.168.1.200/");
customProvider.contextFactory = createContextFactoryReturning(this.ctx);
Authentication result = customProvider.authenticate(this.joe);
assertThat(result.isAuthenticated()).isTrue();
verify(this.ctx).search(any(Name.class), eq(defaultSearchFilter), any(Object[].class),
any(SearchControls.class));
}
// SEC-2897,SEC-2224
@Test
public void bindPrincipalAndUsernameUsed() throws Exception {
final String defaultSearchFilter = "(&(objectClass=user)(userPrincipalName={0}))";
ArgumentCaptor<Object[]> captor = ArgumentCaptor.forClass(Object[].class);
DirContextAdapter dca = new DirContextAdapter();
SearchResult sr = new SearchResult("CN=Joe Jannsen,CN=Users", dca, dca.getAttributes());
given(this.ctx.search(any(Name.class), eq(defaultSearchFilter), captor.capture(), any(SearchControls.class)))
.willReturn(new MockNamingEnumeration(sr));
ActiveDirectoryLdapAuthenticationProvider customProvider = new ActiveDirectoryLdapAuthenticationProvider(
"mydomain.eu", "ldap://192.168.1.200/");
customProvider.contextFactory = createContextFactoryReturning(this.ctx);
Authentication result = customProvider.authenticate(this.joe);
assertThat(captor.getValue()).containsExactly("joe@mydomain.eu", "joe");
assertThat(result.isAuthenticated()).isTrue();
}
@Test
public void setSearchFilterNull() {
assertThatIllegalArgumentException().isThrownBy(() -> this.provider.setSearchFilter(null));
}
@Test
public void setSearchFilterEmpty() {
assertThatIllegalArgumentException().isThrownBy(() -> this.provider.setSearchFilter(" "));
}
@Test
public void nullDomainIsSupportedIfAuthenticatingWithFullUserPrincipal() throws Exception {
this.provider = new ActiveDirectoryLdapAuthenticationProvider(null, "ldap://192.168.1.200/");
DirContextAdapter dca = new DirContextAdapter();
SearchResult sr = new SearchResult("CN=Joe Jannsen,CN=Users", dca, dca.getAttributes());
given(this.ctx.search(eq(LdapNameBuilder.newInstance("DC=mydomain,DC=eu").build()), any(String.class),
any(Object[].class), any(SearchControls.class)))
.willReturn(new MockNamingEnumeration(sr));
this.provider.contextFactory = createContextFactoryReturning(this.ctx);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
this.provider.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("joe@mydomain.eu", "password"));
}
@Test
public void failedUserSearchCausesBadCredentials() throws Exception {
given(this.ctx.search(any(Name.class), any(String.class), any(Object[].class), any(SearchControls.class)))
.willThrow(new NameNotFoundException());
this.provider.contextFactory = createContextFactoryReturning(this.ctx);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
// SEC-2017
@Test
public void noUserSearchCausesUsernameNotFound() throws Exception {
given(this.ctx.search(any(Name.class), any(String.class), any(Object[].class), any(SearchControls.class)))
.willReturn(new MockNamingEnumeration(null));
this.provider.contextFactory = createContextFactoryReturning(this.ctx);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
// SEC-2500
@Test
public void sec2500PreventAnonymousBind() {
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(
() -> this.provider.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("rwinch", "")));
}
@Test
@SuppressWarnings("unchecked")
public void duplicateUserSearchCausesError() throws Exception {
NamingEnumeration<SearchResult> searchResults = mock(NamingEnumeration.class);
given(searchResults.hasMore()).willReturn(true, true, false);
SearchResult searchResult = mock(SearchResult.class);
given(searchResult.getObject()).willReturn(new DirContextAdapter("ou=1"), new DirContextAdapter("ou=2"));
given(searchResults.next()).willReturn(searchResult);
given(this.ctx.search(any(Name.class), any(String.class), any(Object[].class), any(SearchControls.class)))
.willReturn(searchResults);
this.provider.contextFactory = createContextFactoryReturning(this.ctx);
assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class)
.isThrownBy(() -> this.provider.authenticate(this.joe));
}
static final String msg = "[LDAP: error code 49 - 80858585: LdapErr: DSID-DECAFF0, comment: AcceptSecurityContext error, data ";
@Test
public void userNotFoundIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "525, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void incorrectPasswordIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "52e, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void notPermittedIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "530, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void passwordNeedsResetIsCorrectlyMapped() {
final String dataCode = "773";
this.provider.contextFactory = createContextFactoryThrowing(
new AuthenticationException(msg + dataCode + ", xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe))
.withCauseInstanceOf(ActiveDirectoryAuthenticationException.class)
.satisfies((ex) -> assertThat(((ActiveDirectoryAuthenticationException) ex.getCause()).getDataCode())
.isEqualTo(dataCode));
}
@Test
public void expiredPasswordIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "532, xxxx]"));
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(CredentialsExpiredException.class)
.isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void accountDisabledIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "533, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(DisabledException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void accountExpiredIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "701, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(AccountExpiredException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void accountLockedIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "775, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(LockedException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void unknownErrorCodeIsCorrectlyMapped() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg + "999, xxxx]"));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void errorWithNoSubcodeIsHandledCleanly() {
this.provider.contextFactory = createContextFactoryThrowing(new AuthenticationException(msg));
this.provider.setConvertSubErrorCodesToExceptions(true);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.provider.authenticate(this.joe));
}
@Test
public void nonAuthenticationExceptionIsConvertedToSpringLdapException() throws Throwable {
assertThatExceptionOfType(InternalAuthenticationServiceException.class).isThrownBy(() -> {
this.provider.contextFactory = createContextFactoryThrowing(new CommunicationException(msg));
this.provider.authenticate(this.joe);
}).withCauseInstanceOf(org.springframework.ldap.CommunicationException.class);
}
@Test
public void connectionExceptionIsWrappedInInternalException() throws Exception {
ActiveDirectoryLdapAuthenticationProvider noneReachableProvider = new ActiveDirectoryLdapAuthenticationProvider(
"mydomain.eu", NON_EXISTING_LDAP_PROVIDER, "dc=ad,dc=eu,dc=mydomain");
noneReachableProvider
.setContextEnvironmentProperties(Collections.singletonMap("com.sun.jndi.ldap.connect.timeout", "5"));
assertThatExceptionOfType(
org.springframework.security.authentication.InternalAuthenticationServiceException.class)
.isThrownBy(() -> noneReachableProvider.doAuthentication(this.joe));
}
@Test
public void rootDnProvidedSeparatelyFromDomainAlsoWorks() throws Exception {
ActiveDirectoryLdapAuthenticationProvider provider = new ActiveDirectoryLdapAuthenticationProvider(
"mydomain.eu", EXISTING_LDAP_PROVIDER, "dc=ad,dc=eu,dc=mydomain");
checkAuthentication("dc=ad,dc=eu,dc=mydomain", provider);
}
@Test
public void setContextEnvironmentPropertiesNull() {
assertThatIllegalArgumentException().isThrownBy(() -> this.provider.setContextEnvironmentProperties(null));
}
@Test
public void setContextEnvironmentPropertiesEmpty() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.provider.setContextEnvironmentProperties(new Hashtable<>()));
}
@Test
public void contextEnvironmentPropertiesUsed() {
Hashtable<String, Object> env = new Hashtable<>();
env.put("java.naming.ldap.factory.socket", "unknown.package.NonExistingSocketFactory");
this.provider.setContextEnvironmentProperties(env);
assertThatExceptionOfType(InternalAuthenticationServiceException.class)
.isThrownBy(() -> this.provider.authenticate(this.joe))
.withCauseInstanceOf(org.springframework.ldap.CommunicationException.class)
.withRootCauseInstanceOf(ClassNotFoundException.class);
}
ContextFactory createContextFactoryThrowing(final NamingException ex) {
return new ContextFactory() {
@Override
DirContext createContext(Hashtable<?, ?> env) throws NamingException {
throw ex;
}
};
}
ContextFactory createContextFactoryReturning(final DirContext ctx) {
return new ContextFactory() {
@Override
DirContext createContext(Hashtable<?, ?> env) {
return ctx;
}
};
}
private void checkAuthentication(String rootDn, ActiveDirectoryLdapAuthenticationProvider provider)
throws NamingException {
DirContextAdapter dca = new DirContextAdapter();
SearchResult sr = new SearchResult("CN=Joe Jannsen,CN=Users", dca, dca.getAttributes());
@SuppressWarnings("deprecation")
Name searchBaseDn = LdapNameBuilder.newInstance(rootDn).build();
given(this.ctx.search(eq(searchBaseDn), any(String.class), any(Object[].class), any(SearchControls.class)))
.willReturn(new MockNamingEnumeration(sr))
.willReturn(new MockNamingEnumeration(sr));
provider.contextFactory = createContextFactoryReturning(this.ctx);
Authentication result = provider.authenticate(this.joe);
SecurityAssertions.assertThat(result).authorities().doesNotHaveToString("Admin");
dca.addAttributeValue("memberOf", "CN=Admin,CN=Users,DC=mydomain,DC=eu");
result = provider.authenticate(this.joe);
SecurityAssertions.assertThat(result).hasAuthority("Admin");
}
static | ActiveDirectoryLdapAuthenticationProviderTests |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/docker/DockerApiTests.java | {
"start": 26848,
"end": 35554
} | class ____ {
private ContainerApi api;
@Captor
@SuppressWarnings("NullAway.Init")
private ArgumentCaptor<IOConsumer<OutputStream>> writer;
@Mock
@SuppressWarnings("NullAway.Init")
private UpdateListener<LogUpdateEvent> logListener;
@BeforeEach
void setup() {
this.api = DockerApiTests.this.dockerApi.container();
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenConfigIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.api.create(null, null))
.withMessage("'config' must not be null");
}
@Test
void createCreatesContainer() throws Exception {
ImageReference imageReference = ImageReference.of("ubuntu:bionic");
ContainerConfig config = ContainerConfig.of(imageReference, (update) -> update.withCommand("/bin/bash"));
URI createUri = new URI(CONTAINERS_URL + "/create");
given(http().post(eq(createUri), eq("application/json"), any()))
.willReturn(responseOf("create-container-response.json"));
ContainerReference containerReference = this.api.create(config, null);
assertThat(containerReference).hasToString("e90e34656806");
then(http()).should().post(any(), any(), this.writer.capture());
ByteArrayOutputStream out = new ByteArrayOutputStream();
this.writer.getValue().accept(out);
assertThat(out.toByteArray()).hasSize(config.toString().length());
}
@Test
void createWhenHasContentContainerWithContent() throws Exception {
ImageReference imageReference = ImageReference.of("ubuntu:bionic");
ContainerConfig config = ContainerConfig.of(imageReference, (update) -> update.withCommand("/bin/bash"));
TarArchive archive = TarArchive.of((layout) -> {
layout.directory("/test", Owner.ROOT);
layout.file("/test/file", Owner.ROOT, Content.of("test"));
});
ContainerContent content = ContainerContent.of(archive);
URI createUri = new URI(CONTAINERS_URL + "/create");
given(http().post(eq(createUri), eq("application/json"), any()))
.willReturn(responseOf("create-container-response.json"));
URI uploadUri = new URI(CONTAINERS_URL + "/e90e34656806/archive?path=%2F");
given(http().put(eq(uploadUri), eq("application/x-tar"), any())).willReturn(emptyResponse());
ContainerReference containerReference = this.api.create(config, null, content);
assertThat(containerReference).hasToString("e90e34656806");
then(http()).should().post(any(), any(), this.writer.capture());
ByteArrayOutputStream out = new ByteArrayOutputStream();
this.writer.getValue().accept(out);
assertThat(out.toByteArray()).hasSize(config.toString().length());
then(http()).should().put(any(), any(), this.writer.capture());
this.writer.getValue().accept(out);
assertThat(out.toByteArray()).hasSizeGreaterThan(2000);
}
@Test
void createWithPlatformCreatesContainer() throws Exception {
ImageReference imageReference = ImageReference.of("ubuntu:bionic");
ContainerConfig config = ContainerConfig.of(imageReference, (update) -> update.withCommand("/bin/bash"));
ImagePlatform platform = ImagePlatform.of("linux/arm64/v1");
setVersion("1.41");
URI createUri = new URI("/v1.41/containers/create?platform=linux%2Farm64%2Fv1");
given(http().post(eq(createUri), eq("application/json"), any()))
.willReturn(responseOf("create-container-response.json"));
ContainerReference containerReference = this.api.create(config, platform);
assertThat(containerReference).hasToString("e90e34656806");
then(http()).should().post(any(), any(), this.writer.capture());
ByteArrayOutputStream out = new ByteArrayOutputStream();
this.writer.getValue().accept(out);
assertThat(out.toByteArray()).hasSize(config.toString().length());
}
@Test
void createWithPlatformAndUnknownApiVersionAttemptsCreate() throws Exception {
createWithPlatform(null);
}
private void createWithPlatform(@Nullable String apiVersion) throws IOException, URISyntaxException {
ImageReference imageReference = ImageReference.of("ubuntu:bionic");
ContainerConfig config = ContainerConfig.of(imageReference, (update) -> update.withCommand("/bin/bash"));
ImagePlatform platform = ImagePlatform.of("linux/arm64/v1");
URI createUri = new URI(CONTAINERS_URL + "/create?platform=linux%2Farm64%2Fv1");
given(http().post(eq(createUri), eq("application/json"), any()))
.willReturn(responseOf("create-container-response.json"));
ContainerReference containerReference = this.api.create(config, platform);
assertThat(containerReference).hasToString("e90e34656806");
then(http()).should().post(any(), any(), this.writer.capture());
ByteArrayOutputStream out = new ByteArrayOutputStream();
this.writer.getValue().accept(out);
assertThat(out.toByteArray()).hasSize(config.toString().length());
}
@Test
void createWithPlatformAndKnownInsufficientApiVersionThrowsException() throws Exception {
ImageReference imageReference = ImageReference.of("ubuntu:bionic");
ContainerConfig config = ContainerConfig.of(imageReference, (update) -> update.withCommand("/bin/bash"));
ImagePlatform platform = ImagePlatform.of("linux/arm64/v1");
setVersion("1.24");
assertThatIllegalStateException().isThrownBy(() -> this.api.create(config, platform))
.withMessageContaining("must be at least 1.41")
.withMessageContaining("current API version is 1.24");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void startWhenReferenceIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.api.start(null))
.withMessage("'reference' must not be null");
}
@Test
void startStartsContainer() throws Exception {
ContainerReference reference = ContainerReference.of("e90e34656806");
URI startContainerUri = new URI(CONTAINERS_URL + "/e90e34656806/start");
given(http().post(startContainerUri)).willReturn(emptyResponse());
this.api.start(reference);
then(http()).should().post(startContainerUri);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void logsWhenReferenceIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.api.logs(null, UpdateListener.none()))
.withMessage("'reference' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void logsWhenListenerIsNullThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.api.logs(ContainerReference.of("e90e34656806"), null))
.withMessage("'listener' must not be null");
}
@Test
void logsProducesEvents() throws Exception {
ContainerReference reference = ContainerReference.of("e90e34656806");
URI logsUri = new URI(CONTAINERS_URL + "/e90e34656806/logs?stdout=1&stderr=1&follow=1");
given(http().get(logsUri)).willReturn(responseOf("log-update-event.stream"));
this.api.logs(reference, this.logListener);
InOrder ordered = inOrder(this.logListener);
ordered.verify(this.logListener).onStart();
ordered.verify(this.logListener, times(7)).onUpdate(any());
ordered.verify(this.logListener).onFinish();
}
@Test
@SuppressWarnings("NullAway") // Test null check
void waitWhenReferenceIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.api.wait(null))
.withMessage("'reference' must not be null");
}
@Test
void waitReturnsStatus() throws Exception {
ContainerReference reference = ContainerReference.of("e90e34656806");
URI waitUri = new URI(CONTAINERS_URL + "/e90e34656806/wait");
given(http().post(waitUri)).willReturn(responseOf("container-wait-response.json"));
ContainerStatus status = this.api.wait(reference);
assertThat(status.getStatusCode()).isOne();
}
@Test
@SuppressWarnings("NullAway") // Test null check
void removeWhenReferenceIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.api.remove(null, true))
.withMessage("'reference' must not be null");
}
@Test
void removeRemovesContainer() throws Exception {
ContainerReference reference = ContainerReference.of("e90e34656806");
URI removeUri = new URI(CONTAINERS_URL + "/e90e34656806");
given(http().delete(removeUri)).willReturn(emptyResponse());
this.api.remove(reference, false);
then(http()).should().delete(removeUri);
}
@Test
void removeWhenForceIsTrueRemovesContainer() throws Exception {
ContainerReference reference = ContainerReference.of("e90e34656806");
URI removeUri = new URI(CONTAINERS_URL + "/e90e34656806?force=1");
given(http().delete(removeUri)).willReturn(emptyResponse());
this.api.remove(reference, true);
then(http()).should().delete(removeUri);
}
}
@Nested
| ContainerDockerApiTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RegisterNodeManagerRequestPBImpl.java | {
"start": 3104,
"end": 18157
} | class ____ extends RegisterNodeManagerRequest {
RegisterNodeManagerRequestProto proto = RegisterNodeManagerRequestProto.getDefaultInstance();
RegisterNodeManagerRequestProto.Builder builder = null;
boolean viaProto = false;
private Resource resource = null;
private NodeId nodeId = null;
private List<NMContainerStatus> containerStatuses = null;
private List<ApplicationId> runningApplications = null;
private Set<NodeLabel> labels = null;
private Set<NodeAttribute> attributes = null;
private List<LogAggregationReport> logAggregationReportsForApps = null;
/** Physical resources in the node. */
private Resource physicalResource = null;
private NodeStatus nodeStatus;
public RegisterNodeManagerRequestPBImpl() {
builder = RegisterNodeManagerRequestProto.newBuilder();
}
public RegisterNodeManagerRequestPBImpl(RegisterNodeManagerRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public synchronized RegisterNodeManagerRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private synchronized void mergeLocalToBuilder() {
if (this.containerStatuses != null) {
addNMContainerStatusesToProto();
}
if (this.runningApplications != null) {
addRunningApplicationsToProto();
}
if (this.resource != null) {
builder.setResource(convertToProtoFormat(this.resource));
}
if (this.nodeId != null) {
builder.setNodeId(convertToProtoFormat(this.nodeId));
}
if (this.labels != null) {
builder.clearNodeLabels();
Builder newBuilder = NodeLabelsProto.newBuilder();
for (NodeLabel label : labels) {
newBuilder.addNodeLabels(convertToProtoFormat(label));
}
builder.setNodeLabels(newBuilder.build());
}
if (this.attributes != null) {
builder.clearNodeAttributes();
NodeAttributesProto.Builder attributesBuilder =
NodeAttributesProto.newBuilder();
for (NodeAttribute attribute : attributes) {
attributesBuilder.addNodeAttributes(convertToProtoFormat(attribute));
}
builder.setNodeAttributes(attributesBuilder.build());
}
if (this.physicalResource != null) {
builder.setPhysicalResource(convertToProtoFormat(this.physicalResource));
}
if (this.logAggregationReportsForApps != null) {
addLogAggregationStatusForAppsToProto();
}
if (this.nodeStatus != null) {
builder.setNodeStatus(convertToProtoFormat(this.nodeStatus));
}
}
private void addLogAggregationStatusForAppsToProto() {
maybeInitBuilder();
builder.clearLogAggregationReportsForApps();
if (this.logAggregationReportsForApps == null) {
return;
}
Iterable<LogAggregationReportProto> it =
new Iterable<LogAggregationReportProto>() {
@Override
public Iterator<LogAggregationReportProto> iterator() {
return new Iterator<LogAggregationReportProto>() {
private Iterator<LogAggregationReport> iter =
logAggregationReportsForApps.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public LogAggregationReportProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllLogAggregationReportsForApps(it);
}
private LogAggregationReportProto convertToProtoFormat(
LogAggregationReport value) {
return ((LogAggregationReportPBImpl) value).getProto();
}
private synchronized void addNMContainerStatusesToProto() {
maybeInitBuilder();
builder.clearContainerStatuses();
List<NMContainerStatusProto> list =
new ArrayList<NMContainerStatusProto>();
for (NMContainerStatus status : this.containerStatuses) {
list.add(convertToProtoFormat(status));
}
builder.addAllContainerStatuses(list);
}
private synchronized void mergeLocalToProto() {
if (viaProto) {
maybeInitBuilder();
}
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private synchronized void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = RegisterNodeManagerRequestProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public synchronized Resource getResource() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.resource != null) {
return this.resource;
}
if (!p.hasResource()) {
return null;
}
this.resource = convertFromProtoFormat(p.getResource());
return this.resource;
}
@Override
public synchronized void setResource(Resource resource) {
maybeInitBuilder();
if (resource == null)
builder.clearResource();
this.resource = resource;
}
@Override
public synchronized NodeId getNodeId() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.nodeId != null) {
return this.nodeId;
}
if (!p.hasNodeId()) {
return null;
}
this.nodeId = convertFromProtoFormat(p.getNodeId());
return this.nodeId;
}
@Override
public synchronized void setNodeId(NodeId nodeId) {
maybeInitBuilder();
if (nodeId == null) {
builder.clearNodeId();
}
this.nodeId = nodeId;
}
@Override
public synchronized int getHttpPort() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasHttpPort()) {
return 0;
}
return (p.getHttpPort());
}
@Override
public synchronized void setHttpPort(int httpPort) {
maybeInitBuilder();
builder.setHttpPort(httpPort);
}
@Override
public synchronized List<ApplicationId> getRunningApplications() {
initRunningApplications();
return runningApplications;
}
private synchronized void initRunningApplications() {
if (this.runningApplications != null) {
return;
}
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
List<ApplicationIdProto> list = p.getRunningApplicationsList();
this.runningApplications = new ArrayList<ApplicationId>();
for (ApplicationIdProto c : list) {
this.runningApplications.add(convertFromProtoFormat(c));
}
}
@Override
public synchronized void setRunningApplications(List<ApplicationId> apps) {
if (apps == null) {
return;
}
initRunningApplications();
this.runningApplications.addAll(apps);
}
private synchronized void addRunningApplicationsToProto() {
maybeInitBuilder();
builder.clearRunningApplications();
if (runningApplications == null) {
return;
}
Iterable<ApplicationIdProto> it = new Iterable<ApplicationIdProto>() {
@Override
public Iterator<ApplicationIdProto> iterator() {
return new Iterator<ApplicationIdProto>() {
Iterator<ApplicationId> iter = runningApplications.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public ApplicationIdProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllRunningApplications(it);
}
@Override
public synchronized List<NMContainerStatus> getNMContainerStatuses() {
initContainerRecoveryReports();
return containerStatuses;
}
private synchronized void initContainerRecoveryReports() {
if (this.containerStatuses != null) {
return;
}
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
List<NMContainerStatusProto> list = p.getContainerStatusesList();
this.containerStatuses = new ArrayList<NMContainerStatus>();
for (NMContainerStatusProto c : list) {
this.containerStatuses.add(convertFromProtoFormat(c));
}
}
@Override
public synchronized void setContainerStatuses(
List<NMContainerStatus> containerReports) {
if (containerReports == null) {
return;
}
initContainerRecoveryReports();
this.containerStatuses.addAll(containerReports);
}
@Override
public synchronized Resource getPhysicalResource() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.physicalResource != null) {
return this.physicalResource;
}
if (!p.hasPhysicalResource()) {
return null;
}
this.physicalResource = convertFromProtoFormat(p.getPhysicalResource());
return this.physicalResource;
}
@Override
public synchronized void setPhysicalResource(Resource pPhysicalResource) {
maybeInitBuilder();
if (pPhysicalResource == null) {
builder.clearPhysicalResource();
}
this.physicalResource = pPhysicalResource;
}
@Override
public synchronized NodeStatus getNodeStatus() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.nodeStatus != null) {
return this.nodeStatus;
}
if (!p.hasNodeStatus()) {
return null;
}
this.nodeStatus = convertFromProtoFormat(p.getNodeStatus());
return this.nodeStatus;
}
@Override
public synchronized void setNodeStatus(NodeStatus pNodeStatus) {
maybeInitBuilder();
if (pNodeStatus == null) {
builder.clearNodeStatus();
}
this.nodeStatus = pNodeStatus;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public synchronized String getNMVersion() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasNmVersion()) {
return "";
}
return (p.getNmVersion());
}
@Override
public synchronized void setNMVersion(String version) {
maybeInitBuilder();
builder.setNmVersion(version);
}
@Override
public synchronized Set<NodeLabel> getNodeLabels() {
initNodeLabels();
return this.labels;
}
@Override
public synchronized void setNodeLabels(Set<NodeLabel> nodeLabels) {
maybeInitBuilder();
builder.clearNodeLabels();
this.labels = nodeLabels;
}
private synchronized void initNodeLabels() {
if (this.labels != null) {
return;
}
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasNodeLabels()) {
labels=null;
return;
}
NodeLabelsProto nodeLabels = p.getNodeLabels();
labels = new HashSet<NodeLabel>();
for(NodeLabelProto nlp : nodeLabels.getNodeLabelsList()) {
labels.add(convertFromProtoFormat(nlp));
}
}
@Override
public synchronized Set<NodeAttribute> getNodeAttributes() {
initNodeAttributes();
return this.attributes;
}
@Override
public synchronized void setNodeAttributes(
Set<NodeAttribute> nodeAttributes) {
maybeInitBuilder();
builder.clearNodeAttributes();
this.attributes = nodeAttributes;
}
private synchronized void initNodeAttributes() {
if (this.attributes != null) {
return;
}
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasNodeAttributes()) {
attributes=null;
return;
}
NodeAttributesProto nodeAttributes = p.getNodeAttributes();
attributes = new HashSet<>();
for(NodeAttributeProto nap : nodeAttributes.getNodeAttributesList()) {
attributes.add(convertFromProtoFormat(nap));
}
}
private static NodeLabelPBImpl convertFromProtoFormat(NodeLabelProto p) {
return new NodeLabelPBImpl(p);
}
private static NodeLabelProto convertToProtoFormat(NodeLabel t) {
return ((NodeLabelPBImpl)t).getProto();
}
private static NodeAttributePBImpl convertFromProtoFormat(
NodeAttributeProto p) {
return new NodeAttributePBImpl(p);
}
private static NodeAttributeProto convertToProtoFormat(NodeAttribute t) {
return ((NodeAttributePBImpl)t).getProto();
}
private static ApplicationIdPBImpl convertFromProtoFormat(
ApplicationIdProto p) {
return new ApplicationIdPBImpl(p);
}
private static ApplicationIdProto convertToProtoFormat(ApplicationId t) {
return ((ApplicationIdPBImpl)t).getProto();
}
private static NodeIdPBImpl convertFromProtoFormat(NodeIdProto p) {
return new NodeIdPBImpl(p);
}
private static NodeIdProto convertToProtoFormat(NodeId t) {
return ((NodeIdPBImpl)t).getProto();
}
private static ResourcePBImpl convertFromProtoFormat(ResourceProto p) {
return new ResourcePBImpl(p);
}
private static ResourceProto convertToProtoFormat(Resource t) {
return ProtoUtils.convertToProtoFormat(t);
}
private static NMContainerStatusPBImpl convertFromProtoFormat(
NMContainerStatusProto c) {
return new NMContainerStatusPBImpl(c);
}
private static NMContainerStatusProto convertToProtoFormat(
NMContainerStatus c) {
return ((NMContainerStatusPBImpl)c).getProto();
}
@Override
public synchronized List<LogAggregationReport>
getLogAggregationReportsForApps() {
if (this.logAggregationReportsForApps != null) {
return this.logAggregationReportsForApps;
}
initLogAggregationReportsForApps();
return logAggregationReportsForApps;
}
private void initLogAggregationReportsForApps() {
RegisterNodeManagerRequestProtoOrBuilder p = viaProto ? proto : builder;
List<LogAggregationReportProto> list =
p.getLogAggregationReportsForAppsList();
this.logAggregationReportsForApps = new ArrayList<LogAggregationReport>();
for (LogAggregationReportProto c : list) {
this.logAggregationReportsForApps.add(convertFromProtoFormat(c));
}
}
private LogAggregationReport convertFromProtoFormat(
LogAggregationReportProto logAggregationReport) {
return new LogAggregationReportPBImpl(logAggregationReport);
}
@Override
public synchronized void setLogAggregationReportsForApps(
List<LogAggregationReport> logAggregationStatusForApps) {
if(logAggregationStatusForApps == null) {
builder.clearLogAggregationReportsForApps();
}
this.logAggregationReportsForApps = logAggregationStatusForApps;
}
private NodeStatusPBImpl convertFromProtoFormat(NodeStatusProto s) {
return new NodeStatusPBImpl(s);
}
private NodeStatusProto convertToProtoFormat(NodeStatus s) {
return ((NodeStatusPBImpl)s).getProto();
}
} | RegisterNodeManagerRequestPBImpl |
java | redisson__redisson | redisson/src/test/java/org/redisson/RedissonPriorityBlockingDequeTest.java | {
"start": 281,
"end": 4559
} | class ____ extends RedisDockerTest {
@Test
@Timeout(3)
public void testShortPoll() {
RBlockingDeque<Integer> queue = redisson.getPriorityBlockingDeque("queue:pollany");
queue.pollLastAsync(500, TimeUnit.MILLISECONDS);
queue.pollFirstAsync(10, TimeUnit.MICROSECONDS);
}
@Test
public void testTakeFirst() throws InterruptedException {
RBlockingDeque<Integer> deque = redisson.getPriorityBlockingDeque("queue:take");
deque.add(1);
deque.add(2);
deque.add(3);
deque.add(4);
assertThat(deque.takeFirst()).isEqualTo(1);
assertThat(deque.takeFirst()).isEqualTo(2);
assertThat(deque.takeFirst()).isEqualTo(3);
assertThat(deque.takeFirst()).isEqualTo(4);
assertThat(deque.size()).isZero();
}
@Test
public void testTakeLast() throws InterruptedException {
RBlockingDeque<Integer> deque = redisson.getPriorityBlockingDeque("queue:take");
deque.add(1);
deque.add(2);
deque.add(3);
deque.add(4);
assertThat(deque.takeLast()).isEqualTo(4);
assertThat(deque.takeLast()).isEqualTo(3);
assertThat(deque.takeLast()).isEqualTo(2);
assertThat(deque.takeLast()).isEqualTo(1);
assertThat(deque.size()).isZero();
}
@Test
public void testTakeFirstAwait() throws InterruptedException {
RBlockingDeque<Integer> deque = redisson.getPriorityBlockingDeque("queue:take");
Executors.newSingleThreadScheduledExecutor().schedule(() -> {
RBlockingDeque<Integer> deque1 = redisson.getBlockingDeque("queue:take");
deque1.add(1);
deque1.add(2);
deque1.add(3);
deque1.add(4);
}, 10, TimeUnit.SECONDS);
long s = System.currentTimeMillis();
assertThat(deque.takeFirst()).isEqualTo(1);
assertThat(System.currentTimeMillis() - s).isGreaterThan(9000);
Thread.sleep(50);
assertThat(deque.takeFirst()).isEqualTo(2);
assertThat(deque.takeFirst()).isEqualTo(3);
assertThat(deque.takeFirst()).isEqualTo(4);
}
@Test
public void testTakeLastAwait() throws InterruptedException {
RBlockingDeque<Integer> deque = redisson.getPriorityBlockingDeque("queue:take");
Executors.newSingleThreadScheduledExecutor().schedule(() -> {
RBlockingDeque<Integer> deque1 = redisson.getBlockingDeque("queue:take");
deque1.add(1);
deque1.add(2);
deque1.add(3);
deque1.add(4);
}, 10, TimeUnit.SECONDS);
long s = System.currentTimeMillis();
assertThat(deque.takeLast()).isEqualTo(4);
assertThat(System.currentTimeMillis() - s).isGreaterThan(9000);
Thread.sleep(50);
assertThat(deque.takeLast()).isEqualTo(3);
assertThat(deque.takeLast()).isEqualTo(2);
assertThat(deque.takeLast()).isEqualTo(1);
}
@Test
public void testPollFirst() throws InterruptedException {
RBlockingDeque<Integer> queue1 = redisson.getPriorityBlockingDeque("queue1");
queue1.put(1);
queue1.put(2);
queue1.put(3);
assertThat(queue1.pollFirst(2, TimeUnit.SECONDS)).isEqualTo(1);
assertThat(queue1.pollFirst(2, TimeUnit.SECONDS)).isEqualTo(2);
assertThat(queue1.pollFirst(2, TimeUnit.SECONDS)).isEqualTo(3);
long s = System.currentTimeMillis();
assertThat(queue1.pollFirst(5, TimeUnit.SECONDS)).isNull();
assertThat(System.currentTimeMillis() - s).isGreaterThan(4900);
}
@Test
public void testPollLast() throws InterruptedException {
RBlockingDeque<Integer> queue1 = redisson.getPriorityBlockingDeque("queue1");
queue1.add(3);
queue1.add(1);
queue1.add(2);
assertThat(queue1.pollLast(2, TimeUnit.SECONDS)).isEqualTo(3);
assertThat(queue1.pollLast(2, TimeUnit.SECONDS)).isEqualTo(2);
assertThat(queue1.pollLast(2, TimeUnit.SECONDS)).isEqualTo(1);
long s = System.currentTimeMillis();
assertThat(queue1.pollLast(5, TimeUnit.SECONDS)).isNull();
assertThat(System.currentTimeMillis() - s).isGreaterThanOrEqualTo(5000);
}
}
| RedissonPriorityBlockingDequeTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/issues/DefaultMethodCalledFromSimpleExpressionTest.java | {
"start": 1935,
"end": 1967
} | interface ____ extends A {
}
}
| B |
java | apache__camel | components/camel-aws/camel-aws2-sns/src/main/java/org/apache/camel/component/aws2/sns/Sns2Endpoint.java | {
"start": 2492,
"end": 9515
} | class ____ extends DefaultEndpoint implements HeaderFilterStrategyAware, EndpointServiceLocation {
private static final Logger LOG = LoggerFactory.getLogger(Sns2Endpoint.class);
private SnsClient snsClient;
@UriPath(description = "Topic name or ARN")
@Metadata(required = true)
private String topicNameOrArn; // to support component docs
@UriParam
private final Sns2Configuration configuration;
@UriParam
private HeaderFilterStrategy headerFilterStrategy;
public Sns2Endpoint(String uri, Component component, Sns2Configuration configuration) {
super(uri, component);
this.configuration = configuration;
}
@Override
public HeaderFilterStrategy getHeaderFilterStrategy() {
return headerFilterStrategy;
}
/**
* To use a custom HeaderFilterStrategy to map headers to/from Camel.
*/
@Override
public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) {
this.headerFilterStrategy = strategy;
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("You cannot receive messages from this endpoint");
}
@Override
public Producer createProducer() throws Exception {
return new Sns2Producer(this);
}
@Override
public Sns2Component getComponent() {
return (Sns2Component) super.getComponent();
}
@Override
public void doInit() throws Exception {
super.doInit();
snsClient = configuration.getAmazonSNSClient() != null
? configuration.getAmazonSNSClient() : Sns2ClientFactory.getSnsClient(configuration).getSNSClient();
// check the setting the headerFilterStrategy
if (headerFilterStrategy == null) {
headerFilterStrategy = new Sns2HeaderFilterStrategy();
}
if (configuration.getTopicArn() == null) {
try {
String nextToken = null;
final String arnSuffix = ":" + configuration.getTopicName();
do {
ListTopicsRequest request = ListTopicsRequest.builder().nextToken(nextToken).build();
final ListTopicsResponse response = snsClient.listTopics(request);
nextToken = response.nextToken();
for (final Topic topic : response.topics()) {
if (topic.topicArn().endsWith(arnSuffix)) {
configuration.setTopicArn(topic.topicArn());
break;
}
}
} while (nextToken != null);
} catch (final AwsServiceException ase) {
LOG.trace("The list topics operation return the following error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
}
if (configuration.getTopicArn() == null && configuration.isAutoCreateTopic()) {
// creates a new topic, or returns the URL of an existing one
CreateTopicRequest.Builder builder = CreateTopicRequest.builder().name(configuration.getTopicName());
Map<String, String> attributes = new HashMap<>();
if (configuration.isServerSideEncryptionEnabled()) {
if (ObjectHelper.isNotEmpty(configuration.getKmsMasterKeyId())) {
attributes.put("KmsMasterKeyId", configuration.getKmsMasterKeyId());
builder.attributes(attributes);
}
}
if (configuration.isFifoTopic()) {
attributes.put("FifoTopic", "true");
builder.attributes(attributes);
}
LOG.trace("Creating topic [{}] with request [{}]...", configuration.getTopicName(), builder);
CreateTopicResponse result = snsClient.createTopic(builder.build());
configuration.setTopicArn(result.topicArn());
LOG.trace("Topic created with Amazon resource name: {}", configuration.getTopicArn());
}
if (ObjectHelper.isNotEmpty(configuration.getPolicy())) {
LOG.trace("Updating topic [{}] with policy [{}]", configuration.getTopicArn(), configuration.getPolicy());
try (InputStream s = ResourceHelper.resolveMandatoryResourceAsInputStream(this.getCamelContext(),
getConfiguration().getPolicy())) {
String policy = IOUtils.toString(s, Charset.defaultCharset());
snsClient.setTopicAttributes(SetTopicAttributesRequest.builder().topicArn(configuration.getTopicArn())
.attributeName("Policy").attributeValue(policy)
.build());
LOG.trace("Topic policy updated");
}
}
if (configuration.isSubscribeSNStoSQS()) {
if (ObjectHelper.isNotEmpty(ObjectHelper.isNotEmpty(configuration.getQueueArn()))) {
SubscribeResponse resp = snsClient.subscribe(SubscribeRequest.builder().topicArn(configuration.getTopicArn())
.protocol("sqs").endpoint(configuration.getQueueArn())
.returnSubscriptionArn(true).build());
LOG.trace("Subscription of SQS Queue to SNS Topic done with Amazon resource name: {}", resp.subscriptionArn());
} else {
throw new IllegalArgumentException(
"Using the SubscribeSNStoSQS option require both AmazonSQSClient and Queue URL options");
}
}
}
@Override
public void doStop() throws Exception {
if (ObjectHelper.isEmpty(configuration.getAmazonSNSClient())) {
if (snsClient != null) {
snsClient.close();
}
}
super.doStop();
}
public Sns2Configuration getConfiguration() {
return configuration;
}
public void setSNSClient(SnsClient snsClient) {
this.snsClient = snsClient;
}
public SnsClient getSNSClient() {
return snsClient;
}
@Override
public String getServiceUrl() {
if (!configuration.isOverrideEndpoint()) {
if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
return configuration.getRegion();
}
} else if (ObjectHelper.isNotEmpty(configuration.getUriEndpointOverride())) {
return configuration.getUriEndpointOverride();
}
return null;
}
@Override
public String getServiceProtocol() {
return "sns";
}
@Override
public Map<String, String> getServiceMetadata() {
HashMap<String, String> metadata = new HashMap<>();
if (configuration.getQueueArn() != null) {
metadata.put("queueArn", configuration.getQueueArn());
}
if (configuration.getTopicArn() != null) {
metadata.put("topicArn", configuration.getTopicArn());
}
return metadata;
}
}
| Sns2Endpoint |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/ComponentCreatorImplementationFactory.java | {
"start": 6267,
"end": 6931
} | class ____ being generated.
*/
abstract ImmutableMap<ComponentRequirement, RequirementStatus> userSettableRequirements();
/**
* Component requirements that are both settable by the creator and needed to construct the
* component.
*/
private Set<ComponentRequirement> neededUserSettableRequirements() {
return Sets.intersection(
userSettableRequirements().keySet(), componentConstructorRequirements());
}
private void setModifiers() {
visibility().ifPresent(classBuilder::addModifiers);
classBuilder.addModifiers(STATIC, FINAL);
}
/** Returns the visibility modifier the generated | currently |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/GeneratedTest.java | {
"start": 860,
"end": 2003
} | class ____ {
@Test
public void test(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::mapping-generated-Generated-persist-example[]
Person person = new Person();
person.setId(1L);
person.setFirstName("John");
person.setMiddleName1("Flávio");
person.setMiddleName2("André");
person.setMiddleName3("Frederico");
person.setMiddleName4("Rúben");
person.setMiddleName5("Artur");
person.setLastName("Doe");
entityManager.persist(person);
entityManager.flush();
assertEquals("John Flávio André Frederico Rúben Artur Doe", person.getFullName());
//end::mapping-generated-Generated-persist-example[]
});
scope.inTransaction( entityManager -> {
//tag::mapping-generated-Generated-update-example[]
Person person = entityManager.find(Person.class, 1L);
person.setLastName("Doe Jr");
entityManager.flush();
assertEquals("John Flávio André Frederico Rúben Artur Doe Jr", person.getFullName());
//end::mapping-generated-Generated-update-example[]
});
}
//tag::mapping-generated-provided-generated[]
@Entity(name = "Person")
public static | GeneratedTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java | {
"start": 3039,
"end": 13297
} | class ____ extends Realm implements CachingRealm {
public static final String KRB_METADATA_REALM_NAME_KEY = "kerberos_realm";
public static final String KRB_METADATA_UPN_KEY = "kerberos_user_principal_name";
private final Cache<String, User> userPrincipalNameToUserCache;
private final UserRoleMapper userRoleMapper;
private final KerberosTicketValidator kerberosTicketValidator;
private final ThreadPool threadPool;
private final Path keytabPath;
private final boolean enableKerberosDebug;
private final boolean removeRealmName;
private DelegatedAuthorizationSupport delegatedRealms;
public KerberosRealm(final RealmConfig config, final UserRoleMapper userRoleMapper, final ThreadPool threadPool) {
this(config, userRoleMapper, new KerberosTicketValidator(), threadPool, null);
}
// pkg scoped for testing
KerberosRealm(
final RealmConfig config,
final UserRoleMapper userRoleMapper,
final KerberosTicketValidator kerberosTicketValidator,
final ThreadPool threadPool,
final Cache<String, User> userPrincipalNameToUserCache
) {
super(config);
this.userRoleMapper = userRoleMapper;
this.userRoleMapper.clearRealmCacheOnChange(this);
final TimeValue ttl = config.getSetting(KerberosRealmSettings.CACHE_TTL_SETTING);
if (ttl.getNanos() > 0) {
this.userPrincipalNameToUserCache = (userPrincipalNameToUserCache == null)
? CacheBuilder.<String, User>builder()
.setExpireAfterWrite(config.getSetting(KerberosRealmSettings.CACHE_TTL_SETTING))
.setMaximumWeight(config.getSetting(KerberosRealmSettings.CACHE_MAX_USERS_SETTING))
.build()
: userPrincipalNameToUserCache;
} else {
this.userPrincipalNameToUserCache = null;
}
this.kerberosTicketValidator = kerberosTicketValidator;
this.threadPool = threadPool;
this.keytabPath = config.env().configDir().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH));
validateKeytab(this.keytabPath);
this.enableKerberosDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE);
this.removeRealmName = config.getSetting(KerberosRealmSettings.SETTING_REMOVE_REALM_NAME);
this.delegatedRealms = null;
}
private static void validateKeytab(Path keytabPath) {
boolean fileExists = AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> Files.exists(keytabPath));
if (fileExists == false) {
throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] does not exist");
}
boolean pathIsDir = AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> Files.isDirectory(keytabPath));
if (pathIsDir) {
throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] is a directory");
}
boolean isReadable = AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> Files.isReadable(keytabPath));
if (isReadable == false) {
throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] must have read permission");
}
}
@Override
public void initialize(Iterable<Realm> realms, XPackLicenseState licenseState) {
if (delegatedRealms != null) {
throw new IllegalStateException("Realm has already been initialized");
}
delegatedRealms = new DelegatedAuthorizationSupport(realms, config, licenseState);
}
@Override
public Map<String, List<String>> getAuthenticationFailureHeaders() {
return Collections.singletonMap(WWW_AUTHENTICATE, Collections.singletonList(NEGOTIATE_SCHEME_NAME));
}
@Override
public void expire(final String username) {
if (userPrincipalNameToUserCache != null) {
userPrincipalNameToUserCache.invalidate(username);
}
}
@Override
public void expireAll() {
if (userPrincipalNameToUserCache != null) {
userPrincipalNameToUserCache.invalidateAll();
}
}
@Override
public boolean supports(final AuthenticationToken token) {
return token instanceof KerberosAuthenticationToken;
}
@Override
public AuthenticationToken token(final ThreadContext context) {
return KerberosAuthenticationToken.extractToken(context.getHeader(AUTH_HEADER));
}
@Override
public void authenticate(final AuthenticationToken token, final ActionListener<AuthenticationResult<User>> listener) {
assert delegatedRealms != null : "Realm has not been initialized correctly";
assert token instanceof KerberosAuthenticationToken;
final KerberosAuthenticationToken kerbAuthnToken = (KerberosAuthenticationToken) token;
kerberosTicketValidator.validateTicket(
(byte[]) kerbAuthnToken.credentials(),
keytabPath,
enableKerberosDebug,
ActionListener.wrap(userPrincipalNameOutToken -> {
if (userPrincipalNameOutToken.v1() != null) {
resolveUser(userPrincipalNameOutToken.v1(), userPrincipalNameOutToken.v2(), listener);
} else {
/**
* This is when security context could not be established may be due to ongoing
* negotiation and requires token to be sent back to peer for continuing
* further. We are terminating the authentication process as this is spengo
* negotiation and no other realm can handle this. We can have only one Kerberos
* realm in the system so terminating with RestStatus Unauthorized (401) and
* with 'WWW-Authenticate' header populated with value with token in the form
* 'Negotiate oYH1MIHyoAMK...'
*/
String errorMessage = "failed to authenticate user, gss context negotiation not complete";
ElasticsearchSecurityException ese = unauthorized(errorMessage, null);
ese = unauthorizedWithOutputToken(ese, userPrincipalNameOutToken.v2());
listener.onResponse(AuthenticationResult.terminate(errorMessage, ese));
}
}, e -> handleException(e, listener))
);
}
private static String[] splitUserPrincipalName(final String userPrincipalName) {
return userPrincipalName.split("@");
}
private void handleException(Exception e, final ActionListener<AuthenticationResult<User>> listener) {
if (e instanceof LoginException) {
logger.debug("failed to authenticate user, service login failure", e);
listener.onResponse(
AuthenticationResult.terminate(
"failed to authenticate user, service login failure",
unauthorized(e.getLocalizedMessage(), e)
)
);
} else if (e instanceof GSSException) {
logger.debug("failed to authenticate user, gss context negotiation failure", e);
listener.onResponse(
AuthenticationResult.terminate(
"failed to authenticate user, gss context negotiation failure",
unauthorized(e.getLocalizedMessage(), e)
)
);
} else {
logger.debug("failed to authenticate user", e);
listener.onFailure(e);
}
}
private void resolveUser(
final String userPrincipalName,
final String outToken,
final ActionListener<AuthenticationResult<User>> listener
) {
// if outToken is present then it needs to be communicated with peer, add it to
// response header in thread context.
if (Strings.hasText(outToken)) {
threadPool.getThreadContext().addResponseHeader(WWW_AUTHENTICATE, NEGOTIATE_AUTH_HEADER_PREFIX + outToken);
}
final String[] userAndRealmName = splitUserPrincipalName(userPrincipalName);
/*
* Usually principal names are in the form 'user/instance@REALM'. If
* KerberosRealmSettings#SETTING_REMOVE_REALM_NAME is true then remove
* '@REALM' part from the user principal name to get username.
*/
final String username = (this.removeRealmName) ? userAndRealmName[0] : userPrincipalName;
if (delegatedRealms.hasDelegation()) {
delegatedRealms.resolve(username, listener);
} else {
final User user = (userPrincipalNameToUserCache != null) ? userPrincipalNameToUserCache.get(username) : null;
if (user != null) {
listener.onResponse(AuthenticationResult.success(user));
} else if (userAndRealmName.length > 1) {
final String realmName = userAndRealmName[1];
buildUser(username, Map.of(KRB_METADATA_REALM_NAME_KEY, realmName, KRB_METADATA_UPN_KEY, userPrincipalName), listener);
} else {
buildUser(username, Map.of(KRB_METADATA_UPN_KEY, userPrincipalName), listener);
}
}
}
private void buildUser(
final String username,
final Map<String, Object> metadata,
final ActionListener<AuthenticationResult<User>> listener
) {
final UserRoleMapper.UserData userData = new UserRoleMapper.UserData(username, null, Set.of(), metadata, this.config);
userRoleMapper.resolveRoles(userData, ActionListener.wrap(roles -> {
final User computedUser = new User(username, roles.toArray(new String[roles.size()]), null, null, userData.getMetadata(), true);
if (userPrincipalNameToUserCache != null) {
userPrincipalNameToUserCache.put(username, computedUser);
}
listener.onResponse(AuthenticationResult.success(computedUser));
}, listener::onFailure));
}
@Override
public void lookupUser(final String username, final ActionListener<User> listener) {
listener.onResponse(null);
}
}
| KerberosRealm |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/provider/FieldArgumentsProviderTests.java | {
"start": 20796,
"end": 20901
} | class ____ {
static List<String> strings = List.of("nested string1", "nested string2");
}
}
}
| Nested |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/StreamReadFeature.java | {
"start": 389,
"end": 6755
} | enum ____
implements JacksonFeature
{
// // // Low-level I/O handling features:
/**
* Feature that determines whether parser will automatically
* close underlying input source that is NOT owned by the
* parser. If disabled, calling application has to separately
* close the underlying {@link InputStream} and {@link Reader}
* instances used to create the parser. If enabled, parser
* will handle closing, as long as parser itself gets closed:
* this happens when end-of-input is encountered, or parser
* is closed by a call to {@link JsonParser#close}.
*<p>
* Feature is enabled by default.
*/
AUTO_CLOSE_SOURCE(true),
// // // Validity checks
/**
* Feature that determines what to do if the underlying data format requires knowledge
* of all properties to decode (usually via a Schema), and if no definition is
* found for a property that input content contains.
* Typically most textual data formats do NOT require schema information (although
* some do, such as CSV), whereas many binary data formats do require definitions
* (such as Avro, protobuf), although not all (Smile, CBOR, BSON and MessagePack do not).
* Further note that some formats that do require schema information will not be able
* to ignore undefined properties: for example, Avro is fully positional and there is
* no possibility of undefined data. This leaves formats like Protobuf that have identifiers
* that may or may not map; and as such Protobuf format does make use of this feature.
*<p>
* Note that support for this feature is implemented by individual data format
* module, if (and only if) it makes sense for the format in question. For JSON,
* for example, this feature has no effect as properties need not be pre-defined.
*<p>
* Feature is disabled by default, meaning that if the underlying data format
* requires knowledge of all properties to output, attempts to read an unknown
* property will result in an exception (typically of type
* {@link tools.jackson.core.exc.StreamReadException}).
*/
IGNORE_UNDEFINED(false),
/**
* Feature that determines whether {@link JsonParser} will explicitly
* check that no duplicate JSON Object Property names are encountered.
* If enabled, parser will check all names within context and report
* duplicates by throwing a {@link StreamReadException}; if disabled,
* parser will not do such checking. Assumption in latter case is
* that caller takes care of handling duplicates at a higher level:
* data-binding, for example, has features to specify detection to
* be done there.
*<p>
* Note that enabling this feature will incur performance overhead
* due to having to store and check additional information: this typically
* adds 20-30% to execution time for basic parsing.
*/
STRICT_DUPLICATE_DETECTION(false),
// // // Other
/**
* Feature that determines whether parser will clear "current token"
* (accessible via JsonParser#currentToken()) when it is closed (via
* {@link JsonParser#close()}).
*<p>
* Feature is enabled by default.
*/
CLEAR_CURRENT_TOKEN_ON_CLOSE(true),
/**
* Feature that determines whether {@link TokenStreamLocation} instances should be constructed
* with reference to source or not. If source reference is included, its type and contents
* are included when `toString()` method is called (most notably when printing out parse
* exception with that location information). If feature is disabled, no source reference
* is passed and source is only indicated as "REDACTED".
*<p>
* Most common reason for disabling this feature is to avoid leaking
* internal information; this may be done for security reasons.
* Note that even if source reference is included, only parts of contents are usually
* printed, and not the whole contents. Further, many source reference types cannot
* necessarily access contents (like streams), so only type is indicated, not contents.
*<p>
* Feature is disabled by default for security reason (to avoid leaking additional source
* information), meaning that "source reference" information is NOT passed
* and none of source content will be included in {@link TokenStreamLocation}
* constructed either when requested explicitly, or when needed for an exception.
*/
INCLUDE_SOURCE_IN_LOCATION(false),
/**
* Feature that determines whether to use the built-in JDK code for parsing
* <code>BigDecimal</code> and <code>BigIntegers</code> values (if {@code disabled})
* or {@code FastDoubleParser} implementation (if {@code enabled}).
*<p>
* This setting is enabled by default (since 3.0) so that {@code FastDoubleParser}
* implementation is used.
*/
USE_FAST_BIG_NUMBER_PARSER(true),
/**
* Feature that determines whether to use the built-in JDK {@link Double#parseDouble(String)}
* code to parse {@code double}s (if {@code disabled})
* or {@code FastDoubleParser} implementation (if {@code enabled}).
*<p>
* This setting is enabled by default (since 3.0) so that {@code FastDoubleParser}
* implementation is used.
*/
USE_FAST_DOUBLE_PARSER(true),
;
/**
* Whether feature is enabled or disabled by default.
*/
private final boolean _defaultState;
private final int _mask;
private StreamReadFeature(boolean defaultState) {
_mask = (1 << ordinal());
_defaultState = defaultState;
}
/**
* Method that calculates bit set (flags) of all features that
* are enabled by default.
*
* @return Bit mask of all features that are enabled by default
*/
public static int collectDefaults()
{
int flags = 0;
for (StreamReadFeature f : values()) {
if (f.enabledByDefault()) {
flags |= f.getMask();
}
}
return flags;
}
@Override
public boolean enabledByDefault() { return _defaultState; }
@Override
public boolean enabledIn(int flags) { return (flags & _mask) != 0; }
@Override
public int getMask() { return _mask; }
}
| StreamReadFeature |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/body/ConsumeStatsList.java | {
"start": 1068,
"end": 2122
} | class ____ extends RemotingSerializable {
private List<Map<String/*subscriptionGroupName*/, List<ConsumeStats>>> consumeStatsList = new ArrayList<>();
private String brokerAddr;
private long totalDiff;
private long totalInflightDiff;
public List<Map<String, List<ConsumeStats>>> getConsumeStatsList() {
return consumeStatsList;
}
public void setConsumeStatsList(List<Map<String, List<ConsumeStats>>> consumeStatsList) {
this.consumeStatsList = consumeStatsList;
}
public String getBrokerAddr() {
return brokerAddr;
}
public void setBrokerAddr(String brokerAddr) {
this.brokerAddr = brokerAddr;
}
public long getTotalDiff() {
return totalDiff;
}
public void setTotalDiff(long totalDiff) {
this.totalDiff = totalDiff;
}
public long getTotalInflightDiff() {
return totalInflightDiff;
}
public void setTotalInflightDiff(long totalInflightDiff) {
this.totalInflightDiff = totalInflightDiff;
}
}
| ConsumeStatsList |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BindableRuntimeHintsRegistrarTests.java | {
"start": 19068,
"end": 19301
} | class ____ {
@SuppressWarnings("unused")
private final @Nullable ImmutableRecursive recursive;
ImmutableRecursive(@Nullable ImmutableRecursive recursive) {
this.recursive = recursive;
}
}
public static | ImmutableRecursive |
java | spring-projects__spring-boot | module/spring-boot-security-saml2/src/main/java/org/springframework/boot/security/saml2/autoconfigure/RegistrationConfiguredCondition.java | {
"start": 1551,
"end": 2585
} | class ____ extends SpringBootCondition {
private static final String PROPERTY = "spring.security.saml2.relyingparty.registration";
private static final Bindable<Map<String, Registration>> STRING_REGISTRATION_MAP = Bindable.mapOf(String.class,
Registration.class);
@Override
public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
ConditionMessage.Builder message = ConditionMessage.forCondition("Relying Party Registration Condition");
Map<String, Registration> registrations = getRegistrations(context.getEnvironment());
if (registrations.isEmpty()) {
return ConditionOutcome.noMatch(message.didNotFind("any registrations").atAll());
}
return ConditionOutcome.match(message.found("registration", "registrations").items(registrations.keySet()));
}
private Map<String, Registration> getRegistrations(Environment environment) {
return Binder.get(environment).bind(PROPERTY, STRING_REGISTRATION_MAP).orElse(Collections.emptyMap());
}
}
| RegistrationConfiguredCondition |
java | quarkusio__quarkus | extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionBaselineOnMigrateNamedDataSourcesInactiveTest.java | {
"start": 430,
"end": 2488
} | class ____ {
@Inject
@FlywayDataSource("users")
Flyway flywayUsers;
@Inject
@FlywayDataSource("laptops")
Flyway flywayLaptops;
static final FlywayH2TestCustomizer customizerUsers = FlywayH2TestCustomizer
.withDbName("quarkus-flyway-baseline-on-named-ds-users")
.withPort(11302)
.withInitSqlFile("src/test/resources/h2-init-data.sql");
static final FlywayH2TestCustomizer customizerLaptops = FlywayH2TestCustomizer
.withDbName("quarkus-flyway-baseline-on-named-ds-laptops")
.withPort(11303)
.withInitSqlFile("src/test/resources/h2-init-data.sql");
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setBeforeAllCustomizer(new Runnable() {
@Override
public void run() {
customizerUsers.startH2();
customizerLaptops.startH2();
}
})
.setAfterAllCustomizer(new Runnable() {
@Override
public void run() {
customizerUsers.stopH2();
customizerLaptops.stopH2();
}
})
.withApplicationRoot((jar) -> jar
.addClass(FlywayH2TestCustomizer.class)
.addAsResource("baseline-on-migrate-named-datasources-inactive.properties", "application.properties"));
@Test
@DisplayName("Create history table correctly")
public void testFlywayInitialBaselineInfo() {
MigrationInfo baselineInfo = flywayUsers.info().applied()[0];
assertEquals("0.0.1", baselineInfo.getVersion().getVersion());
assertEquals("Initial description for test", baselineInfo.getDescription());
}
@Test
@DisplayName("History table not created if inactive")
public void testFlywayInitialBaselineInfoInactive() {
assertEquals(0, flywayLaptops.info().applied().length);
}
}
| FlywayExtensionBaselineOnMigrateNamedDataSourcesInactiveTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerQueue.java | {
"start": 1193,
"end": 1872
} | interface ____<T extends SchedulerQueue> extends Queue {
/**
* Get list of child queues.
* @return a list of child queues
*/
List<T> getChildQueues();
/**
* Get the parent queue.
* @return the parent queue
*/
T getParent();
/**
* Get current queue state.
* @return the queue state
*/
QueueState getState();
/**
* Update the queue state.
* @param state the queue state
*/
void updateQueueState(QueueState state);
/**
* Stop the queue.
*/
void stopQueue();
/**
* Activate the queue.
* @throws YarnException if the queue can not be activated.
*/
void activateQueue() throws YarnException;
}
| SchedulerQueue |
java | micronaut-projects__micronaut-core | websocket/src/main/java/io/micronaut/websocket/WebSocketClientRegistry.java | {
"start": 1169,
"end": 2538
} | interface ____<W extends WebSocketClient> {
/**
* Resolves a {@link WebSocketClient} for the given injection point.
*
* @param injectionPoint The injection point
* @param loadBalancer The load balancer to use (Optional)
* @param configuration The configuration (Optional)
* @param beanContext The bean context to use
* @return The Streaming HTTP Client
*/
@NonNull
W resolveWebSocketClient(@Nullable InjectionPoint<?> injectionPoint,
@Nullable LoadBalancer loadBalancer,
@Nullable HttpClientConfiguration configuration,
@NonNull BeanContext beanContext);
/**
* Return the client for the given annotation metadata.
*
* @param annotationMetadata The annotation metadata.
* @return The client
*/
@NonNull
W getWebSocketClient(@NonNull AnnotationMetadata annotationMetadata);
/**
* @return Return the default Websocket HTTP client.
*/
default W getDefaultWebSocketClient() {
return getWebSocketClient(AnnotationMetadata.EMPTY_METADATA);
}
/**
* Dispose of the client defined by the given metadata.
*
* @param annotationMetadata The annotation metadata
*/
void disposeClient(AnnotationMetadata annotationMetadata);
}
| WebSocketClientRegistry |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/builder/TestClassBuilder.java | {
"start": 1448,
"end": 2401
} | class ____.
* @return The new class.
*/
static Class<?> defineSimpleClass(final String name) {
final ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
classWriter.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, name, null, "java/lang/Object", new String[] {});
final MethodVisitor ctor = classWriter.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null);
ctor.visitCode();
ctor.visitVarInsn(Opcodes.ALOAD, 0);
ctor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
ctor.visitInsn(Opcodes.RETURN);
ctor.visitMaxs(1, 1);
return new DynamicClassLoader().defineClass(name.replace('/', '.'), classWriter.toByteArray());
}
static Class<?> defineSimpleClass(final String packageName, final int i) {
return defineSimpleClass(packageName.replace('.', '/') + "/C" + i);
}
}
| name |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesBeanRegistrarTests.java | {
"start": 6945,
"end": 7039
} | class ____ {
}
@ConfigurationProperties("valuecp")
static | NoAnnotationConfigurationProperties |
java | apache__camel | test-infra/camel-test-infra-common/src/test/java/org/apache/camel/test/infra/common/TestEntityNameGenerator.java | {
"start": 914,
"end": 1623
} | class ____ implements SharedNameGenerator {
private String testClass;
private String testName;
@Override
public void beforeAll(ExtensionContext context) throws Exception {
testClass = context.getTestClass().get().getSimpleName();
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
testName = context.getTestMethod().get().getName();
SharedNameRegistry.getInstance().setSharedNameGenerator(this);
}
public String getName() {
return testClass + testName;
}
public String getClazz() {
return testClass;
}
public String getMethod() {
return testName;
}
}
| TestEntityNameGenerator |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DownsampleActionTests.java | {
"start": 1499,
"end": 18241
} | class ____ extends AbstractActionTestCase<DownsampleAction> {
public static final TimeValue WAIT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES);
static DownsampleAction randomInstance() {
return new DownsampleAction(
ConfigTestHelpers.randomInterval(),
WAIT_TIMEOUT,
randomBoolean() ? null : randomBoolean(),
randomSamplingMethod()
);
}
@Override
protected DownsampleAction doParseInstance(XContentParser parser) {
return DownsampleAction.parse(parser);
}
@Override
protected DownsampleAction createTestInstance() {
return randomInstance();
}
@Override
protected DownsampleAction mutateInstance(DownsampleAction instance) {
var interval = instance.fixedInterval();
var waitTimeout = instance.waitTimeout();
var forceMerge = instance.forceMergeIndex();
var samplingMethod = instance.samplingMethod();
switch (between(0, 3)) {
case 0 -> interval = randomValueOtherThan(interval, ConfigTestHelpers::randomInterval);
case 1 -> waitTimeout = TimeValue.timeValueMillis(
randomValueOtherThan(waitTimeout.millis(), () -> randomLongBetween(1, 10000))
);
case 2 -> forceMerge = forceMerge == null ? randomBoolean() : forceMerge == false;
case 3 -> samplingMethod = randomValueOtherThan(samplingMethod, DownsampleActionTests::randomSamplingMethod);
}
return new DownsampleAction(interval, waitTimeout, forceMerge, samplingMethod);
}
@Override
protected Reader<DownsampleAction> instanceReader() {
return DownsampleAction::new;
}
@Override
public boolean isSafeAction() {
return false;
}
@Override
public void testToSteps() {
DownsampleAction action = new DownsampleAction(
ConfigTestHelpers.randomInterval(),
WAIT_TIMEOUT,
randomBoolean() ? null : true,
randomSamplingMethod()
);
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(17, steps.size());
assertTrue(steps.get(0) instanceof BranchingStep);
assertThat(steps.get(0).getKey().name(), equalTo(CONDITIONAL_TIME_SERIES_CHECK_KEY));
expectThrows(IllegalStateException.class, () -> steps.get(0).getNextStepKey());
assertThat(((BranchingStep) steps.get(0)).getNextStepKeyOnFalse(), equalTo(nextStepKey));
assertThat(((BranchingStep) steps.get(0)).getNextStepKeyOnTrue().name(), equalTo(CheckNotDataStreamWriteIndexStep.NAME));
assertTrue(steps.get(1) instanceof CheckNotDataStreamWriteIndexStep);
assertThat(steps.get(1).getKey().name(), equalTo(CheckNotDataStreamWriteIndexStep.NAME));
assertThat(steps.get(1).getNextStepKey().name(), equalTo(WaitForNoFollowersStep.NAME));
assertTrue(steps.get(2) instanceof WaitForNoFollowersStep);
assertThat(steps.get(2).getKey().name(), equalTo(WaitForNoFollowersStep.NAME));
assertThat(steps.get(2).getNextStepKey().name(), equalTo(WaitUntilTimeSeriesEndTimePassesStep.NAME));
assertTrue(steps.get(3) instanceof WaitUntilTimeSeriesEndTimePassesStep);
assertThat(steps.get(3).getKey().name(), equalTo(WaitUntilTimeSeriesEndTimePassesStep.NAME));
assertThat(steps.get(3).getNextStepKey().name(), equalTo(ReadOnlyStep.NAME));
assertTrue(steps.get(4) instanceof ReadOnlyStep);
assertThat(steps.get(4).getKey().name(), equalTo(ReadOnlyStep.NAME));
assertThat(steps.get(4).getNextStepKey().name(), equalTo(DownsamplePrepareLifeCycleStateStep.NAME));
assertTrue(steps.get(5) instanceof NoopStep);
assertThat(steps.get(5).getKey().name(), equalTo(DownsampleAction.BWC_CLEANUP_TARGET_INDEX_NAME));
assertThat(steps.get(5).getNextStepKey().name(), equalTo(DownsampleStep.NAME));
assertTrue(steps.get(6) instanceof DownsamplePrepareLifeCycleStateStep);
assertThat(steps.get(6).getKey().name(), equalTo(DownsamplePrepareLifeCycleStateStep.NAME));
assertThat(steps.get(6).getNextStepKey().name(), equalTo(DownsampleStep.NAME));
assertTrue(steps.get(7) instanceof DownsampleStep);
assertThat(steps.get(7).getKey().name(), equalTo(DownsampleStep.NAME));
assertThat(steps.get(7).getNextStepKey().name(), equalTo(WaitForIndexColorStep.NAME));
assertTrue(steps.get(8) instanceof ClusterStateWaitUntilThresholdStep);
assertThat(steps.get(8).getKey().name(), equalTo(WaitForIndexColorStep.NAME));
assertThat(steps.get(8).getNextStepKey().name(), equalTo(ForceMergeStep.NAME));
assertTrue(steps.get(9) instanceof ForceMergeStep);
assertThat(steps.get(9).getKey().name(), equalTo(ForceMergeStep.NAME));
assertThat(steps.get(9).getNextStepKey().name(), equalTo(SegmentCountStep.NAME));
assertTrue(steps.get(10) instanceof SegmentCountStep);
assertThat(steps.get(10).getKey().name(), equalTo(SegmentCountStep.NAME));
assertThat(steps.get(10).getNextStepKey().name(), equalTo(CopyExecutionStateStep.NAME));
assertTrue(steps.get(11) instanceof CopyExecutionStateStep);
assertThat(steps.get(11).getKey().name(), equalTo(CopyExecutionStateStep.NAME));
assertThat(steps.get(11).getNextStepKey().name(), equalTo(CopySettingsStep.NAME));
assertTrue(steps.get(12) instanceof CopySettingsStep);
assertThat(steps.get(12).getKey().name(), equalTo(CopySettingsStep.NAME));
assertThat(steps.get(12).getNextStepKey().name(), equalTo(CONDITIONAL_DATASTREAM_CHECK_KEY));
assertTrue(steps.get(13) instanceof BranchingStep);
assertThat(steps.get(13).getKey().name(), equalTo(CONDITIONAL_DATASTREAM_CHECK_KEY));
expectThrows(IllegalStateException.class, () -> steps.get(13).getNextStepKey());
assertThat(((BranchingStep) steps.get(13)).getNextStepKeyOnFalse().name(), equalTo(SwapAliasesAndDeleteSourceIndexStep.NAME));
assertThat(((BranchingStep) steps.get(13)).getNextStepKeyOnTrue().name(), equalTo(ReplaceDataStreamBackingIndexStep.NAME));
assertTrue(steps.get(14) instanceof ReplaceDataStreamBackingIndexStep);
assertThat(steps.get(14).getKey().name(), equalTo(ReplaceDataStreamBackingIndexStep.NAME));
assertThat(steps.get(14).getNextStepKey().name(), equalTo(DeleteStep.NAME));
assertTrue(steps.get(15) instanceof DeleteStep);
assertThat(steps.get(15).getKey().name(), equalTo(DeleteStep.NAME));
assertThat(steps.get(15).getNextStepKey(), equalTo(nextStepKey));
assertTrue(steps.get(16) instanceof SwapAliasesAndDeleteSourceIndexStep);
assertThat(steps.get(16).getKey().name(), equalTo(SwapAliasesAndDeleteSourceIndexStep.NAME));
assertThat(steps.get(16).getNextStepKey(), equalTo(nextStepKey));
}
public void testToStepsWithoutForceMerge() {
DownsampleAction action = new DownsampleAction(ConfigTestHelpers.randomInterval(), WAIT_TIMEOUT, false, randomSamplingMethod());
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(15, steps.size());
assertTrue(steps.get(0) instanceof BranchingStep);
assertThat(steps.get(0).getKey().name(), equalTo(CONDITIONAL_TIME_SERIES_CHECK_KEY));
expectThrows(IllegalStateException.class, () -> steps.get(0).getNextStepKey());
assertThat(((BranchingStep) steps.get(0)).getNextStepKeyOnFalse(), equalTo(nextStepKey));
assertThat(((BranchingStep) steps.get(0)).getNextStepKeyOnTrue().name(), equalTo(CheckNotDataStreamWriteIndexStep.NAME));
assertTrue(steps.get(1) instanceof CheckNotDataStreamWriteIndexStep);
assertThat(steps.get(1).getKey().name(), equalTo(CheckNotDataStreamWriteIndexStep.NAME));
assertThat(steps.get(1).getNextStepKey().name(), equalTo(WaitForNoFollowersStep.NAME));
assertTrue(steps.get(2) instanceof WaitForNoFollowersStep);
assertThat(steps.get(2).getKey().name(), equalTo(WaitForNoFollowersStep.NAME));
assertThat(steps.get(2).getNextStepKey().name(), equalTo(WaitUntilTimeSeriesEndTimePassesStep.NAME));
assertTrue(steps.get(3) instanceof WaitUntilTimeSeriesEndTimePassesStep);
assertThat(steps.get(3).getKey().name(), equalTo(WaitUntilTimeSeriesEndTimePassesStep.NAME));
assertThat(steps.get(3).getNextStepKey().name(), equalTo(ReadOnlyStep.NAME));
assertTrue(steps.get(4) instanceof ReadOnlyStep);
assertThat(steps.get(4).getKey().name(), equalTo(ReadOnlyStep.NAME));
assertThat(steps.get(4).getNextStepKey().name(), equalTo(DownsamplePrepareLifeCycleStateStep.NAME));
assertTrue(steps.get(5) instanceof NoopStep);
assertThat(steps.get(5).getKey().name(), equalTo(DownsampleAction.BWC_CLEANUP_TARGET_INDEX_NAME));
assertThat(steps.get(5).getNextStepKey().name(), equalTo(DownsampleStep.NAME));
assertTrue(steps.get(6) instanceof DownsamplePrepareLifeCycleStateStep);
assertThat(steps.get(6).getKey().name(), equalTo(DownsamplePrepareLifeCycleStateStep.NAME));
assertThat(steps.get(6).getNextStepKey().name(), equalTo(DownsampleStep.NAME));
assertTrue(steps.get(7) instanceof DownsampleStep);
assertThat(steps.get(7).getKey().name(), equalTo(DownsampleStep.NAME));
assertThat(steps.get(7).getNextStepKey().name(), equalTo(WaitForIndexColorStep.NAME));
assertTrue(steps.get(8) instanceof ClusterStateWaitUntilThresholdStep);
assertThat(steps.get(8).getKey().name(), equalTo(WaitForIndexColorStep.NAME));
assertThat(steps.get(8).getNextStepKey().name(), equalTo(CopyExecutionStateStep.NAME));
assertTrue(steps.get(9) instanceof CopyExecutionStateStep);
assertThat(steps.get(9).getKey().name(), equalTo(CopyExecutionStateStep.NAME));
assertThat(steps.get(9).getNextStepKey().name(), equalTo(CopySettingsStep.NAME));
assertTrue(steps.get(10) instanceof CopySettingsStep);
assertThat(steps.get(10).getKey().name(), equalTo(CopySettingsStep.NAME));
assertThat(steps.get(10).getNextStepKey().name(), equalTo(CONDITIONAL_DATASTREAM_CHECK_KEY));
assertTrue(steps.get(11) instanceof BranchingStep);
assertThat(steps.get(11).getKey().name(), equalTo(CONDITIONAL_DATASTREAM_CHECK_KEY));
expectThrows(IllegalStateException.class, () -> steps.get(11).getNextStepKey());
assertThat(((BranchingStep) steps.get(11)).getNextStepKeyOnFalse().name(), equalTo(SwapAliasesAndDeleteSourceIndexStep.NAME));
assertThat(((BranchingStep) steps.get(11)).getNextStepKeyOnTrue().name(), equalTo(ReplaceDataStreamBackingIndexStep.NAME));
assertTrue(steps.get(12) instanceof ReplaceDataStreamBackingIndexStep);
assertThat(steps.get(12).getKey().name(), equalTo(ReplaceDataStreamBackingIndexStep.NAME));
assertThat(steps.get(12).getNextStepKey().name(), equalTo(DeleteStep.NAME));
assertTrue(steps.get(13) instanceof DeleteStep);
assertThat(steps.get(13).getKey().name(), equalTo(DeleteStep.NAME));
assertThat(steps.get(13).getNextStepKey(), equalTo(nextStepKey));
assertTrue(steps.get(14) instanceof SwapAliasesAndDeleteSourceIndexStep);
assertThat(steps.get(14).getKey().name(), equalTo(SwapAliasesAndDeleteSourceIndexStep.NAME));
assertThat(steps.get(14).getNextStepKey(), equalTo(nextStepKey));
}
public void testDownsamplingPrerequisitesStep() {
DateHistogramInterval fixedInterval = ConfigTestHelpers.randomInterval();
boolean withForceMerge = randomBoolean();
DownsampleAction action = new DownsampleAction(fixedInterval, WAIT_TIMEOUT, withForceMerge, randomSamplingMethod());
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
{
// non time series indices skip the action
BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey, withForceMerge);
IndexMetadata indexMetadata = newIndexMeta("test", Settings.EMPTY);
ProjectState state = projectStateFromProject(ProjectMetadata.builder(randomProjectIdOrDefault()).put(indexMetadata, true));
branchingStep.performAction(indexMetadata.getIndex(), state);
assertThat(branchingStep.getNextStepKey(), is(nextStepKey));
}
{
// time series indices execute the action
BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey, withForceMerge);
Settings settings = Settings.builder()
.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES)
.put("index.routing_path", "uid")
.build();
IndexMetadata indexMetadata = newIndexMeta("test", settings);
ProjectState state = projectStateFromProject(ProjectMetadata.builder(randomProjectIdOrDefault()).put(indexMetadata, true));
branchingStep.performAction(indexMetadata.getIndex(), state);
assertThat(branchingStep.getNextStepKey().name(), is(CheckNotDataStreamWriteIndexStep.NAME));
}
{
// already downsampled indices for the interval skip the action
BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey, withForceMerge);
Settings settings = Settings.builder()
.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES)
.put("index.routing_path", "uid")
.put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS_KEY, IndexMetadata.DownsampleTaskStatus.SUCCESS)
.put(IndexMetadata.INDEX_DOWNSAMPLE_ORIGIN_NAME.getKey(), "test")
.build();
String indexName = DOWNSAMPLED_INDEX_PREFIX + fixedInterval + "-test";
IndexMetadata indexMetadata = newIndexMeta(indexName, settings);
ProjectState state = projectStateFromProject(ProjectMetadata.builder(randomProjectIdOrDefault()).put(indexMetadata, true));
branchingStep.performAction(indexMetadata.getIndex(), state);
assertThat(branchingStep.getNextStepKey(), is(nextStepKey));
}
{
// indices with the same name as the target downsample index that are NOT downsample indices skip the action
BranchingStep branchingStep = getFirstBranchingStep(action, phase, nextStepKey, withForceMerge);
String indexName = DOWNSAMPLED_INDEX_PREFIX + fixedInterval + "-test";
IndexMetadata indexMetadata = newIndexMeta(indexName, Settings.EMPTY);
ProjectState state = projectStateFromProject(ProjectMetadata.builder(randomProjectIdOrDefault()).put(indexMetadata, true));
branchingStep.performAction(indexMetadata.getIndex(), state);
assertThat(branchingStep.getNextStepKey(), is(nextStepKey));
}
}
private static BranchingStep getFirstBranchingStep(DownsampleAction action, String phase, StepKey nextStepKey, boolean withForceMerge) {
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(withForceMerge ? 17 : 15, steps.size());
assertTrue(steps.get(0) instanceof BranchingStep);
assertThat(steps.get(0).getKey().name(), equalTo(CONDITIONAL_TIME_SERIES_CHECK_KEY));
return (BranchingStep) steps.get(0);
}
public static IndexMetadata newIndexMeta(String name, Settings indexSettings) {
return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 1).put(indexSettings)).build();
}
public static DownsampleConfig.SamplingMethod randomSamplingMethod() {
if (between(0, DownsampleConfig.SamplingMethod.values().length) == 0) {
return null;
} else {
return randomFrom(DownsampleConfig.SamplingMethod.values());
}
}
}
| DownsampleActionTests |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/security/AbfsDelegationTokenIdentifier.java | {
"start": 1333,
"end": 2662
} | class ____ extends DelegationTokenIdentifier {
/**
* The token kind of these tokens: ""ABFS delegation".
*/
public static final Text TOKEN_KIND = new Text("ABFS delegation");
/** Creates an {@code AbfsDelegationTokenIdentifier} with the default ABFS token kind. */
public AbfsDelegationTokenIdentifier() {
super(TOKEN_KIND);
}
/**
* Creates an {@code AbfsDelegationTokenIdentifier} with the specified token kind.
*
* @param kind the token kind to use
*/
public AbfsDelegationTokenIdentifier(Text kind) {
super(kind);
}
/**
* Creates an {@code AbfsDelegationTokenIdentifier} with the specified details.
*
* @param kind the token kind
* @param owner the token owner
* @param renewer the token renewer
* @param realUser the real user on whose behalf the token was issued
*/
public AbfsDelegationTokenIdentifier(Text kind, Text owner, Text renewer, Text realUser) {
super(kind, owner, renewer, realUser);
}
/**
* Get the token kind.
* Returns {@link #TOKEN_KIND} always.
* If a subclass does not want its renew/cancel process to be managed
* by {@link AbfsDelegationTokenManager}, this must be overridden.
* @return the kind of the token.
*/
@Override
public Text getKind() {
return TOKEN_KIND;
}
}
| AbfsDelegationTokenIdentifier |
java | quarkusio__quarkus | extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionCallback2.java | {
"start": 287,
"end": 1135
} | class ____ implements Callback {
public static List<Event> DEFAULT_EVENTS = Arrays.asList(Event.AFTER_MIGRATE);
@Override
public boolean supports(Event event, Context context) {
return DEFAULT_EVENTS.contains(event);
}
@Override
public boolean canHandleInTransaction(Event event, Context context) {
return true;
}
@Override
public void handle(Event event, Context context) {
try (Statement stmt = context.getConnection().createStatement()) {
stmt.executeUpdate("INSERT INTO quarked_callback(name) VALUES('" + event.getId() + "')");
} catch (SQLException exception) {
throw new IllegalStateException(exception);
}
}
@Override
public String getCallbackName() {
return "Quarked Flyway Callback 2";
}
}
| FlywayExtensionCallback2 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/sql/NativeQueryEagerAssociationTest.java | {
"start": 4778,
"end": 5902
} | class ____ {
@Id
private Long id;
private String description;
@ManyToOne( fetch = FetchType.EAGER )
@NotFound( action = NotFoundAction.EXCEPTION )
@JoinColumn( name = "building_id" )
private Building building;
@ManyToOne( fetch = FetchType.EAGER )
@JoinColumn( name = "secondary_id" )
private Building secondaryBuilding;
@OneToMany( fetch = FetchType.EAGER )
private List<Building> adjacentBuildings;
public Classroom() {
}
public Classroom(
Long id,
String description,
Building building,
Building secondaryBuilding,
List<Building> adjacentBuildings) {
this.id = id;
this.description = description;
this.building = building;
this.secondaryBuilding = secondaryBuilding;
this.adjacentBuildings = adjacentBuildings;
}
public Long getId() {
return id;
}
public String getDescription() {
return description;
}
public Building getBuilding() {
return building;
}
public Building getSecondaryBuilding() {
return secondaryBuilding;
}
public List<Building> getAdjacentBuildings() {
return adjacentBuildings;
}
}
}
| Classroom |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java | {
"start": 1178,
"end": 3932
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CastIntToUnsignedLongEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator v;
private final DriverContext driverContext;
private Warnings warnings;
public CastIntToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v,
DriverContext driverContext) {
this.source = source;
this.v = v;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (IntBlock vBlock = (IntBlock) v.eval(page)) {
IntVector vVector = vBlock.asVector();
if (vVector == null) {
return eval(page.getPositionCount(), vBlock);
}
return eval(page.getPositionCount(), vVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += v.baseRamBytesUsed();
return baseRamBytesUsed;
}
public LongBlock eval(int positionCount, IntBlock vBlock) {
try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (vBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
int v = vBlock.getInt(vBlock.getFirstValueIndex(p));
result.appendLong(Cast.castIntToUnsignedLong(v));
}
return result.build();
}
}
public LongVector eval(int positionCount, IntVector vVector) {
try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
int v = vVector.getInt(p);
result.appendLong(p, Cast.castIntToUnsignedLong(v));
}
return result.build();
}
}
@Override
public String toString() {
return "CastIntToUnsignedLongEvaluator[" + "v=" + v + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(v);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | CastIntToUnsignedLongEvaluator |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java | {
"start": 1677,
"end": 7606
} | class ____ extends FileSystemContractBaseTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestRawLocalFileSystemContract.class);
private final static Path TEST_BASE_DIR =
new Path(GenericTestUtils.getRandomizedTestDir().getAbsolutePath());
// These are the string values that DF sees as "Filesystem" for a
// Docker container accessing a Mac or Windows host's filesystem.
private static final String FS_TYPE_MAC = "osxfs";
private static boolean looksLikeMac(String filesys) {
return filesys.toLowerCase().contains(FS_TYPE_MAC.toLowerCase());
}
private static final Pattern HAS_DRIVE_LETTER_SPECIFIER =
Pattern.compile("^/?[a-zA-Z]:");
private static boolean looksLikeWindows(String filesys) {
return HAS_DRIVE_LETTER_SPECIFIER.matcher(filesys).find();
}
@BeforeEach
public void setUp() throws Exception {
Configuration conf = new Configuration();
fs = FileSystem.getLocal(conf).getRawFileSystem();
}
/**
* Actually rename is supported in RawLocalFileSystem but
* it works different as the other filesystems. Short term we do not test it.
* Please check HADOOP-13082.
* @return true if rename supported so rename related tests will run
*/
@Override
protected boolean renameSupported() {
return false;
}
/**
* Disabling testing root operation.
*
* Writing to root directory on the local file system may get permission
* denied exception, or even worse, delete/overwrite files accidentally.
*/
@Override
protected boolean rootDirTestEnabled() {
return false;
}
@Override
public String getDefaultWorkingDirectory() {
return fs.getWorkingDirectory().toUri().getPath();
}
@Override
protected Path getTestBaseDir() {
return TEST_BASE_DIR;
}
@Override
protected boolean filesystemIsCaseSensitive() {
if (Shell.WINDOWS || Shell.MAC) {
return false;
}
// osType is linux or unix-like, but it might be in a container mounting a
// Mac or Windows volume. Use DF to try to determine if this is the case.
String rfsPathStr = "uninitialized";
String rfsType;
try {
RawLocalFileSystem rfs = new RawLocalFileSystem();
Configuration conf = new Configuration();
rfs.initialize(rfs.getUri(), conf);
rfsPathStr = Path.getPathWithoutSchemeAndAuthority(
rfs.getWorkingDirectory()).toString();
File rfsPath = new File(rfsPathStr);
// DF.getFilesystem() only provides indirect info about FS type, but it's
// the best we have. `df -T` would be better, but isn't cross-platform.
rfsType = (new DF(rfsPath, conf)).getFilesystem();
LOG.info("DF.Filesystem is {} for path {}", rfsType, rfsPath);
} catch (IOException ex) {
LOG.error("DF failed on path {}", rfsPathStr);
rfsType = Shell.osType.toString();
}
return !(looksLikeMac(rfsType) || looksLikeWindows(rfsType));
}
// cross-check getPermission using both native/non-native
@Test
@SuppressWarnings("deprecation")
public void testPermission() throws Exception {
assumeTrue(NativeCodeLoader.isNativeCodeLoaded(),
"No native library");
Path testDir = getTestBaseDir();
String testFilename = "teststat2File";
Path path = new Path(testDir, testFilename);
RawLocalFileSystem rfs = new RawLocalFileSystem();
Configuration conf = new Configuration();
rfs.initialize(rfs.getUri(), conf);
rfs.createNewFile(path);
File file = rfs.pathToFile(path);
long defaultBlockSize = rfs.getDefaultBlockSize(path);
//
// test initial permission
//
RawLocalFileSystem.DeprecatedRawLocalFileStatus fsNIO =
new RawLocalFileSystem.DeprecatedRawLocalFileStatus(
file, defaultBlockSize, rfs);
fsNIO.loadPermissionInfoByNativeIO();
RawLocalFileSystem.DeprecatedRawLocalFileStatus fsnonNIO =
new RawLocalFileSystem.DeprecatedRawLocalFileStatus(
file, defaultBlockSize, rfs);
fsnonNIO.loadPermissionInfoByNonNativeIO();
assertEquals(fsNIO.getOwner(), fsnonNIO.getOwner());
assertEquals(fsNIO.getGroup(), fsnonNIO.getGroup());
assertEquals(fsNIO.getPermission(), fsnonNIO.getPermission());
LOG.info("owner: {}, group: {}, permission: {}, isSticky: {}",
fsNIO.getOwner(), fsNIO.getGroup(), fsNIO.getPermission(),
fsNIO.getPermission().getStickyBit());
//
// test normal chmod - no sticky bit
//
StatUtils.setPermissionFromProcess("644", file.getPath());
fsNIO.loadPermissionInfoByNativeIO();
fsnonNIO.loadPermissionInfoByNonNativeIO();
assertEquals(fsNIO.getPermission(), fsnonNIO.getPermission());
assertEquals(644, fsNIO.getPermission().toOctal());
assertFalse(fsNIO.getPermission().getStickyBit());
assertFalse(fsnonNIO.getPermission().getStickyBit());
//
// test sticky bit
// unfortunately, cannot be done in Windows environments
//
if (!Shell.WINDOWS) {
//
// add sticky bit
//
StatUtils.setPermissionFromProcess("1644", file.getPath());
fsNIO.loadPermissionInfoByNativeIO();
fsnonNIO.loadPermissionInfoByNonNativeIO();
assertEquals(fsNIO.getPermission(), fsnonNIO.getPermission());
assertEquals(1644, fsNIO.getPermission().toOctal());
assertEquals(true, fsNIO.getPermission().getStickyBit());
assertEquals(true, fsnonNIO.getPermission().getStickyBit());
//
// remove sticky bit
//
StatUtils.setPermissionFromProcess("-t", file.getPath());
fsNIO.loadPermissionInfoByNativeIO();
fsnonNIO.loadPermissionInfoByNonNativeIO();
assertEquals(fsNIO.getPermission(), fsnonNIO.getPermission());
assertEquals(644, fsNIO.getPermission().toOctal());
assertEquals(false, fsNIO.getPermission().getStickyBit());
assertEquals(false, fsnonNIO.getPermission().getStickyBit());
}
}
}
| TestRawLocalFileSystemContract |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ObserveableOp.java | {
"start": 1159,
"end": 1242
} | class ____ wish to monitor starting and
* ending events must implement.
*/
| that |
java | apache__camel | components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/NotificationEndpointConfiguration.java | {
"start": 1151,
"end": 1853
} | class ____ extends TwilioConfiguration {
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "reader")})
private String pathAccountSid;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "fetcher")})
private String pathSid;
public String getPathAccountSid() {
return pathAccountSid;
}
public void setPathAccountSid(String pathAccountSid) {
this.pathAccountSid = pathAccountSid;
}
public String getPathSid() {
return pathSid;
}
public void setPathSid(String pathSid) {
this.pathSid = pathSid;
}
}
| NotificationEndpointConfiguration |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/objectid/TestObjectIdDeserialization.java | {
"start": 1629,
"end": 1912
} | class ____
{
@JsonIdentityInfo(generator=ObjectIdGenerators.IntSequenceGenerator.class, property="@id")
public ValueNode node;
public IdWrapper() { }
public IdWrapper(int v) {
node = new ValueNode(v);
}
}
static | IdWrapper |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/simple/SimpleTypeProperties.java | {
"start": 924,
"end": 4096
} | class ____ {
private String myString;
private Byte myByte;
private byte myPrimitiveByte;
private Character myChar;
private char myPrimitiveChar;
private Boolean myBoolean;
private boolean myPrimitiveBoolean;
private Short myShort;
private short myPrimitiveShort;
private Integer myInteger;
private int myPrimitiveInteger;
private Long myLong;
private long myPrimitiveLong;
private Double myDouble;
private double myPrimitiveDouble;
private Float myFloat;
private float myPrimitiveFloat;
public String getMyString() {
return this.myString;
}
public void setMyString(String myString) {
this.myString = myString;
}
public Byte getMyByte() {
return this.myByte;
}
public void setMyByte(Byte myByte) {
this.myByte = myByte;
}
public byte getMyPrimitiveByte() {
return this.myPrimitiveByte;
}
public void setMyPrimitiveByte(byte myPrimitiveByte) {
this.myPrimitiveByte = myPrimitiveByte;
}
public Character getMyChar() {
return this.myChar;
}
public void setMyChar(Character myChar) {
this.myChar = myChar;
}
public char getMyPrimitiveChar() {
return this.myPrimitiveChar;
}
public void setMyPrimitiveChar(char myPrimitiveChar) {
this.myPrimitiveChar = myPrimitiveChar;
}
public Boolean getMyBoolean() {
return this.myBoolean;
}
public void setMyBoolean(Boolean myBoolean) {
this.myBoolean = myBoolean;
}
public boolean isMyPrimitiveBoolean() {
return this.myPrimitiveBoolean;
}
public void setMyPrimitiveBoolean(boolean myPrimitiveBoolean) {
this.myPrimitiveBoolean = myPrimitiveBoolean;
}
public Short getMyShort() {
return this.myShort;
}
public void setMyShort(Short myShort) {
this.myShort = myShort;
}
public short getMyPrimitiveShort() {
return this.myPrimitiveShort;
}
public void setMyPrimitiveShort(short myPrimitiveShort) {
this.myPrimitiveShort = myPrimitiveShort;
}
public Integer getMyInteger() {
return this.myInteger;
}
public void setMyInteger(Integer myInteger) {
this.myInteger = myInteger;
}
public int getMyPrimitiveInteger() {
return this.myPrimitiveInteger;
}
public void setMyPrimitiveInteger(int myPrimitiveInteger) {
this.myPrimitiveInteger = myPrimitiveInteger;
}
public Long getMyLong() {
return this.myLong;
}
public void setMyLong(Long myLong) {
this.myLong = myLong;
}
public long getMyPrimitiveLong() {
return this.myPrimitiveLong;
}
public void setMyPrimitiveLong(long myPrimitiveLong) {
this.myPrimitiveLong = myPrimitiveLong;
}
public Double getMyDouble() {
return this.myDouble;
}
public void setMyDouble(Double myDouble) {
this.myDouble = myDouble;
}
public double getMyPrimitiveDouble() {
return this.myPrimitiveDouble;
}
public void setMyPrimitiveDouble(double myPrimitiveDouble) {
this.myPrimitiveDouble = myPrimitiveDouble;
}
public Float getMyFloat() {
return this.myFloat;
}
public void setMyFloat(Float myFloat) {
this.myFloat = myFloat;
}
public float getMyPrimitiveFloat() {
return this.myPrimitiveFloat;
}
public void setMyPrimitiveFloat(float myPrimitiveFloat) {
this.myPrimitiveFloat = myPrimitiveFloat;
}
}
| SimpleTypeProperties |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/R2dbcReactiveOAuth2AuthorizedClientService.java | {
"start": 2384,
"end": 10337
} | class ____ implements ReactiveOAuth2AuthorizedClientService {
// @formatter:off
private static final String COLUMN_NAMES =
"client_registration_id, " +
"principal_name, " +
"access_token_type, " +
"access_token_value, " +
"access_token_issued_at, " +
"access_token_expires_at, " +
"access_token_scopes, " +
"refresh_token_value, " +
"refresh_token_issued_at";
// @formatter:on
private static final String TABLE_NAME = "oauth2_authorized_client";
private static final String PK_FILTER = "client_registration_id = :clientRegistrationId AND principal_name = :principalName";
// @formatter:off
private static final String LOAD_AUTHORIZED_CLIENT_SQL = "SELECT " + COLUMN_NAMES + " FROM " + TABLE_NAME
+ " WHERE " + PK_FILTER;
// @formatter:on
// @formatter:off
private static final String SAVE_AUTHORIZED_CLIENT_SQL = "INSERT INTO " + TABLE_NAME + " (" + COLUMN_NAMES + ")" +
"VALUES (:clientRegistrationId, :principalName, :accessTokenType, :accessTokenValue," +
" :accessTokenIssuedAt, :accessTokenExpiresAt, :accessTokenScopes, :refreshTokenValue," +
" :refreshTokenIssuedAt)";
// @formatter:on
private static final String REMOVE_AUTHORIZED_CLIENT_SQL = "DELETE FROM " + TABLE_NAME + " WHERE " + PK_FILTER;
// @formatter:off
private static final String UPDATE_AUTHORIZED_CLIENT_SQL = "UPDATE " + TABLE_NAME +
" SET access_token_type = :accessTokenType, " +
" access_token_value = :accessTokenValue, " +
" access_token_issued_at = :accessTokenIssuedAt," +
" access_token_expires_at = :accessTokenExpiresAt, " +
" access_token_scopes = :accessTokenScopes," +
" refresh_token_value = :refreshTokenValue, " +
" refresh_token_issued_at = :refreshTokenIssuedAt" +
" WHERE " +
PK_FILTER;
// @formatter:on
protected final DatabaseClient databaseClient;
protected final ReactiveClientRegistrationRepository clientRegistrationRepository;
protected Function<OAuth2AuthorizedClientHolder, Map<String, Parameter>> authorizedClientParametersMapper;
protected BiFunction<Row, RowMetadata, OAuth2AuthorizedClientHolder> authorizedClientRowMapper;
/**
* Constructs a {@code R2dbcReactiveOAuth2AuthorizedClientService} using the provided
* parameters.
* @param databaseClient the database client
* @param clientRegistrationRepository the repository of client registrations
*/
public R2dbcReactiveOAuth2AuthorizedClientService(DatabaseClient databaseClient,
ReactiveClientRegistrationRepository clientRegistrationRepository) {
Assert.notNull(databaseClient, "databaseClient cannot be null");
Assert.notNull(clientRegistrationRepository, "clientRegistrationRepository cannot be null");
this.databaseClient = databaseClient;
this.clientRegistrationRepository = clientRegistrationRepository;
this.authorizedClientParametersMapper = new OAuth2AuthorizedClientParametersMapper();
this.authorizedClientRowMapper = new OAuth2AuthorizedClientRowMapper();
}
@Override
@SuppressWarnings("unchecked")
public <T extends OAuth2AuthorizedClient> Mono<T> loadAuthorizedClient(String clientRegistrationId,
String principalName) {
Assert.hasText(clientRegistrationId, "clientRegistrationId cannot be empty");
Assert.hasText(principalName, "principalName cannot be empty");
return (Mono<T>) this.databaseClient.sql(LOAD_AUTHORIZED_CLIENT_SQL)
.bind("clientRegistrationId", clientRegistrationId)
.bind("principalName", principalName)
.map(this.authorizedClientRowMapper)
.first()
.flatMap(this::getAuthorizedClient);
}
private Mono<OAuth2AuthorizedClient> getAuthorizedClient(OAuth2AuthorizedClientHolder authorizedClientHolder) {
return this.clientRegistrationRepository.findByRegistrationId(authorizedClientHolder.getClientRegistrationId())
.switchIfEmpty(Mono.error(dataRetrievalFailureException(authorizedClientHolder.getClientRegistrationId())))
.map((clientRegistration) -> new OAuth2AuthorizedClient(clientRegistration,
authorizedClientHolder.getPrincipalName(), authorizedClientHolder.getAccessToken(),
authorizedClientHolder.getRefreshToken()));
}
private static Throwable dataRetrievalFailureException(String clientRegistrationId) {
return new DataRetrievalFailureException("The ClientRegistration with id '" + clientRegistrationId
+ "' exists in the data source, however, it was not found in the ReactiveClientRegistrationRepository.");
}
@Override
public Mono<Void> saveAuthorizedClient(OAuth2AuthorizedClient authorizedClient, Authentication principal) {
Assert.notNull(authorizedClient, "authorizedClient cannot be null");
Assert.notNull(principal, "principal cannot be null");
return this
.loadAuthorizedClient(authorizedClient.getClientRegistration().getRegistrationId(), principal.getName())
.flatMap((dbAuthorizedClient) -> updateAuthorizedClient(authorizedClient, principal))
.switchIfEmpty(Mono.defer(() -> insertAuthorizedClient(authorizedClient, principal)))
.then();
}
private Mono<Long> updateAuthorizedClient(OAuth2AuthorizedClient authorizedClient, Authentication principal) {
GenericExecuteSpec executeSpec = this.databaseClient.sql(UPDATE_AUTHORIZED_CLIENT_SQL);
for (Entry<String, Parameter> entry : this.authorizedClientParametersMapper
.apply(new OAuth2AuthorizedClientHolder(authorizedClient, principal))
.entrySet()) {
executeSpec = executeSpec.bind(entry.getKey(), entry.getValue());
}
return executeSpec.fetch().rowsUpdated();
}
private Mono<Long> insertAuthorizedClient(OAuth2AuthorizedClient authorizedClient, Authentication principal) {
GenericExecuteSpec executeSpec = this.databaseClient.sql(SAVE_AUTHORIZED_CLIENT_SQL);
for (Entry<String, Parameter> entry : this.authorizedClientParametersMapper
.apply(new OAuth2AuthorizedClientHolder(authorizedClient, principal))
.entrySet()) {
executeSpec = executeSpec.bind(entry.getKey(), entry.getValue());
}
return executeSpec.fetch().rowsUpdated();
}
@Override
public Mono<Void> removeAuthorizedClient(String clientRegistrationId, String principalName) {
Assert.hasText(clientRegistrationId, "clientRegistrationId cannot be empty");
Assert.hasText(principalName, "principalName cannot be empty");
return this.databaseClient.sql(REMOVE_AUTHORIZED_CLIENT_SQL)
.bind("clientRegistrationId", clientRegistrationId)
.bind("principalName", principalName)
.then();
}
/**
* Sets the {@code Function} used for mapping {@link OAuth2AuthorizedClientHolder} to
* a {@code Map} of {@link String} and {@link Parameter}. The default is
* {@link OAuth2AuthorizedClientParametersMapper}.
* @param authorizedClientParametersMapper the {@code Function} used for mapping
* {@link OAuth2AuthorizedClientHolder} to a {@code Map} of {@link String} and
* {@link Parameter}
*/
public final void setAuthorizedClientParametersMapper(
Function<OAuth2AuthorizedClientHolder, Map<String, Parameter>> authorizedClientParametersMapper) {
Assert.notNull(authorizedClientParametersMapper, "authorizedClientParametersMapper cannot be null");
this.authorizedClientParametersMapper = authorizedClientParametersMapper;
}
/**
* Sets the {@link BiFunction} used for mapping the current {@code io.r2dbc.spi.Row}
* to {@link OAuth2AuthorizedClientHolder}. The default is
* {@link OAuth2AuthorizedClientRowMapper}.
* @param authorizedClientRowMapper the {@link BiFunction} used for mapping the
* current {@code io.r2dbc.spi.Row} to {@link OAuth2AuthorizedClientHolder}
*/
public final void setAuthorizedClientRowMapper(
BiFunction<Row, RowMetadata, OAuth2AuthorizedClientHolder> authorizedClientRowMapper) {
Assert.notNull(authorizedClientRowMapper, "authorizedClientRowMapper cannot be null");
this.authorizedClientRowMapper = authorizedClientRowMapper;
}
/**
* A holder for {@link OAuth2AuthorizedClient} data and End-User
* {@link Authentication} (Resource Owner).
*/
public static final | R2dbcReactiveOAuth2AuthorizedClientService |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java | {
"start": 61780,
"end": 62043
} | class ____ {
@Bean
BasicProperties basicProperties() {
BasicProperties test = new BasicProperties();
test.setName("bar");
return test;
}
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties
static | DefaultsInJavaConfiguration |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/table/SqlCreateTableLikeConverter.java | {
"start": 1933,
"end": 6938
} | class ____ extends AbstractCreateTableConverter<SqlCreateTableLike> {
@Override
public Operation convertSqlNode(SqlCreateTableLike sqlCreateTableLike, ConvertContext context) {
ResolvedCatalogTable catalogTable =
// no schema definition to merge for CREATE TABLE ... LIKE, schema from source table
// will be merged
getResolvedCatalogTable(sqlCreateTableLike, context, null);
final ObjectIdentifier identifier = getIdentifier(sqlCreateTableLike, context);
return getCreateTableOperation(identifier, catalogTable, sqlCreateTableLike);
}
private CatalogTable lookupLikeSourceTable(
SqlTableLike sqlTableLike, CatalogManager catalogManager) {
UnresolvedIdentifier unresolvedIdentifier =
UnresolvedIdentifier.of(sqlTableLike.getSourceTable().names);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
ContextResolvedTable lookupResult =
catalogManager
.getTable(identifier)
.orElseThrow(
() ->
new ValidationException(
String.format(
"Source table '%s' of the LIKE clause not found in the catalog, at %s",
identifier,
sqlTableLike
.getSourceTable()
.getParserPosition())));
if (!(lookupResult.getResolvedTable() instanceof CatalogTable)) {
throw new ValidationException(
String.format(
"Source table '%s' of the LIKE clause can not be a VIEW, at %s",
identifier, sqlTableLike.getSourceTable().getParserPosition()));
}
return lookupResult.getResolvedTable();
}
@Override
protected MergeContext getMergeContext(
SqlCreateTableLike sqlCreateTableLike, ConvertContext context) {
return new MergeContext() {
private final MergeTableLikeUtil mergeTableLikeUtil = new MergeTableLikeUtil(context);
private final SqlTableLike sqlTableLike = sqlCreateTableLike.getTableLike();
private final CatalogTable table =
lookupLikeSourceTable(sqlTableLike, context.getCatalogManager());
private final Map<SqlTableLike.FeatureOption, SqlTableLike.MergingStrategy>
mergingStrategies =
mergeTableLikeUtil.computeMergingStrategies(sqlTableLike.getOptions());
@Override
public Schema getMergedSchema(ResolvedSchema schemaToMerge) {
final Optional<SqlTableConstraint> tableConstraint =
sqlCreateTableLike.getFullConstraints().stream()
.filter(SqlTableConstraint::isPrimaryKey)
.findAny();
return mergeTableLikeUtil.mergeTables(
mergingStrategies,
table.getUnresolvedSchema(),
sqlCreateTableLike.getColumnList().getList(),
sqlCreateTableLike
.getWatermark()
.map(Collections::singletonList)
.orElseGet(Collections::emptyList),
tableConstraint.orElse(null));
}
@Override
public Map<String, String> getMergedTableOptions() {
final Map<String, String> derivedTableOptions = sqlCreateTableLike.getProperties();
return mergeTableLikeUtil.mergeOptions(
mergingStrategies.get(SqlTableLike.FeatureOption.OPTIONS),
table.getOptions(),
derivedTableOptions);
}
@Override
public List<String> getMergedPartitionKeys() {
return mergeTableLikeUtil.mergePartitions(
mergingStrategies.get(SqlTableLike.FeatureOption.PARTITIONS),
table.getPartitionKeys(),
sqlCreateTableLike.getPartitionKeyList());
}
@Override
public Optional<TableDistribution> getMergedTableDistribution() {
return mergeTableLikeUtil.mergeDistribution(
mergingStrategies.get(SqlTableLike.FeatureOption.DISTRIBUTION),
table.getDistribution(),
SqlCreateTableLikeConverter.this.getDerivedTableDistribution(
sqlCreateTableLike));
}
};
}
}
| SqlCreateTableLikeConverter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/stat/CacheRegionStatistics.java | {
"start": 248,
"end": 2358
} | interface ____ extends Serializable {
/**
* The value returned from {@link #getElementCountInMemory},
* {@link #getElementCountOnDisk()} and {@link #getSizeInMemory()}
* for cache providers that do not support such "extended" statistics.
*/
long NO_EXTENDED_STAT_SUPPORT_RETURN = Long.MIN_VALUE;
String getRegionName();
/**
* The number of cache puts into the region since the last Statistics
* clearing
*/
long getPutCount();
/**
* The number of successful cache look-ups against the region since the
* last Statistics clearing
*/
long getHitCount();
/**
* The number of unsuccessful cache look-ups against the region since the
* last Statistics clearing
*/
long getMissCount();
/**
* The number of removals since the last Statistics clearing
*/
long getRemoveCount();
/**
* The number of elements currently in memory within the cache provider.
* <p>
* This is an optional value contingent upon the underlying cache provider
* providing extended stats support via
* {@link org.hibernate.cache.spi.ExtendedStatisticsSupport}. If the provider
* does not support extended stats, {@link #NO_EXTENDED_STAT_SUPPORT_RETURN}
* is returned instead.
*/
long getElementCountInMemory();
/**
* The number of elements currently stored to disk within the cache provider.
* <p>
* This is an optional value contingent upon the underlying cache provider
* providing extended stats support via
* {@link org.hibernate.cache.spi.ExtendedStatisticsSupport}. If the provider
* does not support extended stats, {@link #NO_EXTENDED_STAT_SUPPORT_RETURN}
* is returned instead.
*/
long getElementCountOnDisk();
/**
* The size that the in-memory elements take up within the cache provider.
* <p>
* This is an optional value contingent upon the underlying cache provider
* providing extended stats support via
* {@link org.hibernate.cache.spi.ExtendedStatisticsSupport}. If the provider
* does not support extended stats, {@link #NO_EXTENDED_STAT_SUPPORT_RETURN}
* is returned instead.
*/
long getSizeInMemory();
}
| CacheRegionStatistics |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/OracleAggregateSupport.java | {
"start": 31621,
"end": 33476
} | class ____ implements JsonWriteExpression {
private final boolean colonSyntax;
private final SelectableMapping selectableMapping;
private final String customWriteExpressionStart;
private final String customWriteExpressionEnd;
BasicJsonWriteExpression(SelectableMapping selectableMapping, String customWriteExpression, boolean colonSyntax) {
this.selectableMapping = selectableMapping;
this.colonSyntax = colonSyntax;
if ( customWriteExpression.equals( "?" ) ) {
this.customWriteExpressionStart = "";
this.customWriteExpressionEnd = "";
}
else {
final String[] parts = StringHelper.split( "?", customWriteExpression );
assert parts.length == 2;
this.customWriteExpressionStart = parts[0];
this.customWriteExpressionEnd = parts[1];
}
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( '\'' );
sb.append( selectableMapping.getSelectableName() );
if ( colonSyntax ) {
sb.append( "':" );
}
else {
sb.append( "' value " );
}
sb.append( customWriteExpressionStart );
// We use NO_UNTYPED here so that expressions which require type inference are casted explicitly,
// since we don't know how the custom write expression looks like where this is embedded,
// so we have to be pessimistic and avoid ambiguities
final Expression valueExpression = expression.getValueExpression( selectableMapping );
if ( valueExpression instanceof Literal literal && literal.getLiteralValue() == null ) {
// Except for the null literal. That is just rendered as-is
sb.append( "null" );
}
else {
translator.render( valueExpression, SqlAstNodeRenderingMode.NO_UNTYPED );
}
sb.append( customWriteExpressionEnd );
}
}
| BasicJsonWriteExpression |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest26.java | {
"start": 911,
"end": 1393
} | class ____ extends TestCase {
public void test_alter_add_key() throws Exception {
String sql = "alter table xxxx UNION = (t1, t2);";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE xxxx"
+ "\n\tUNION = (t1, t2);", output);
}
}
| MySqlAlterTableTest26 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.