language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/CreateAclsOptions.java | {
"start": 939,
"end": 1372
} | class ____ extends AbstractOptions<CreateAclsOptions> {
/**
* Set the timeout in milliseconds for this operation or {@code null} if the default api timeout for the
* AdminClient should be used.
*
*/
// This method is retained to keep binary compatibility with 0.11
public CreateAclsOptions timeoutMs(Integer timeoutMs) {
this.timeoutMs = timeoutMs;
return this;
}
}
| CreateAclsOptions |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/StreamGraphTranslator.java | {
"start": 1418,
"end": 2596
} | class ____ implements FlinkPipelineTranslator {
private static final Logger LOG = LoggerFactory.getLogger(StreamGraphTranslator.class);
private final ClassLoader userClassloader;
public StreamGraphTranslator(ClassLoader userClassloader) {
this.userClassloader = userClassloader;
}
@Override
public JobGraph translateToJobGraph(
Pipeline pipeline, Configuration optimizerConfiguration, int defaultParallelism) {
checkArgument(
pipeline instanceof StreamGraph, "Given pipeline is not a DataStream StreamGraph.");
StreamGraph streamGraph = (StreamGraph) pipeline;
return streamGraph.getJobGraph(userClassloader, null);
}
@Override
public String translateToJSONExecutionPlan(Pipeline pipeline) {
checkArgument(
pipeline instanceof StreamGraph, "Given pipeline is not a DataStream StreamGraph.");
StreamGraph streamGraph = (StreamGraph) pipeline;
return streamGraph.getStreamingPlanAsJSON();
}
@Override
public boolean canTranslate(Pipeline pipeline) {
return pipeline instanceof StreamGraph;
}
}
| StreamGraphTranslator |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/CustomizersSupport.java | {
"start": 2461,
"end": 3054
} | class ____ implements CamelContextAware {
private CamelContext camelContext;
@Override
public CamelContext getCamelContext() {
return this.camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
}
/**
* A {@link ComponentCustomizer.Policy} that uses a hierarchical lists of properties to determine if customization
* is enabled for the given {@link org.apache.camel.Component}.
*/
public static final | CamelContextAwarePolicy |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineReaderClientImpl.java | {
"start": 5270,
"end": 7066
} | class ____ extends TimelineReaderClientImpl {
@Override
protected Response doGetUri(URI base, String path,
MultivaluedMap<String, String> params) throws IOException {
Response mockClientResponse = mock(Response.class);
if (path.contains(YARN_CONTAINER.toString()) && !params.containsKey("infofilters")) {
when(mockClientResponse.readEntity(TimelineEntity.class)).thenReturn(
createTimelineEntity("mockContainer1"));
when(mockClientResponse.readEntity(
new GenericType<Set<TimelineEntity>>(){})).thenReturn(
createTimelineEntities("mockContainer1", "mockContainer2"));
} else if (path.contains(YARN_CONTAINER.toString()) && params.containsKey("infofilters")) {
assertEquals(encodeValue(appAttemptInfoFilter), params.get("infofilters").get(0));
when(mockClientResponse.readEntity(
new GenericType<Set<TimelineEntity>>(){})).thenReturn(
createTimelineEntities("mockContainer3", "mockContainer4"));
} else if (path.contains(YARN_APPLICATION_ATTEMPT.toString())) {
when(mockClientResponse.readEntity(TimelineEntity.class)).thenReturn(
createTimelineEntity("mockAppAttempt1"));
when(mockClientResponse.readEntity(
new GenericType<Set<TimelineEntity>>(){})).thenReturn(
createTimelineEntities("mockAppAttempt1", "mockAppAttempt2"));
} else {
when(mockClientResponse.readEntity(TimelineEntity.class)).thenReturn(
createTimelineEntity("mockApp1"));
when(mockClientResponse.readEntity(
new GenericType<Set<TimelineEntity>>(){})).thenReturn(
createTimelineEntities("mockApp1", "mockApp2"));
}
return mockClientResponse;
}
}
}
| MockTimelineReaderClient |
java | google__dagger | hilt-android-testing/main/java/dagger/hilt/android/testing/BindElementsIntoSet.java | {
"start": 1114,
"end": 1495
} | class ____{
* ...
* {@literal @}BindElementsIntoSet Set<String> bindedSet = ImmutableSet.of("bar", "baz");
* ...
* }
* </code></pre>
*
* Here, bindedSet will be accessible to the entire application for your test. This is functionally
* equivalent to installing the following module in your test:
*
* <pre><code>
* {@literal @}Module
* {@literal @}InstallIn
* | FooTest |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/reactive/AbstractReactiveTransactionManager.java | {
"start": 3126,
"end": 3407
} | class ____ serializable, to allow for serializing the
* transaction strategy along with proxies that carry a transaction interceptor.
* It is up to subclasses if they wish to make their state to be serializable too.
* They should implement the {@code java.io.Serializable} marker | is |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SampleOperatorStatusTests.java | {
"start": 552,
"end": 2851
} | class ____ extends AbstractWireSerializingTestCase<SampleOperator.Status> {
public static SampleOperator.Status simple() {
return new SampleOperator.Status(500012, 200012, 123, 111, 222);
}
public static String simpleToJson() {
return """
{
"collect_nanos" : 500012,
"collect_time" : "500micros",
"emit_nanos" : 200012,
"emit_time" : "200micros",
"pages_processed" : 123,
"rows_received" : 111,
"rows_emitted" : 222
}""";
}
public void testToXContent() {
assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson()));
}
@Override
protected Writeable.Reader<SampleOperator.Status> instanceReader() {
return SampleOperator.Status::new;
}
@Override
public SampleOperator.Status createTestInstance() {
return new SampleOperator.Status(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeInt(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
}
@Override
protected SampleOperator.Status mutateInstance(SampleOperator.Status instance) {
long collectNanos = instance.collectNanos();
long emitNanos = instance.emitNanos();
int pagesProcessed = instance.pagesProcessed();
long rowsReceived = instance.rowsReceived();
long rowsEmitted = instance.rowsEmitted();
switch (between(0, 4)) {
case 0 -> collectNanos = randomValueOtherThan(collectNanos, ESTestCase::randomNonNegativeLong);
case 1 -> emitNanos = randomValueOtherThan(emitNanos, ESTestCase::randomNonNegativeLong);
case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
default -> throw new UnsupportedOperationException();
}
return new SampleOperator.Status(collectNanos, emitNanos, pagesProcessed, rowsReceived, rowsEmitted);
}
}
| SampleOperatorStatusTests |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/inheritance/classdenyall/ClassDenyAllParentResourceWithoutPath_SecurityOnBase.java | {
"start": 759,
"end": 1660
} | class ____
implements ClassDenyAllInterfaceWithoutPath_SecurityOnBase {
@Path(CLASS_PATH_ON_RESOURCE + IMPL_ON_BASE + PARENT_METHOD_WITH_PATH + CLASS_DENY_ALL_PATH)
@POST
public abstract String test_ClassPathOnResource_ImplOnBase_ParentMethodWithPath_ClassDenyAll(JsonObject array);
@Path(CLASS_PATH_ON_RESOURCE + IMPL_ON_BASE + PARENT_METHOD_WITH_PATH + CLASS_DENY_ALL_METHOD_PERMIT_ALL_PATH)
@POST
public abstract String test_ClassPathOnResource_ImplOnBase_ParentMethodWithPath_ClassDenyAllMethodPermitAll(
JsonObject array);
@Path(CLASS_PATH_ON_RESOURCE + IMPL_ON_BASE + PARENT_METHOD_WITH_PATH + CLASS_DENY_ALL_METHOD_ROLES_ALLOWED_PATH)
@POST
public abstract String test_ClassPathOnResource_ImplOnBase_ParentMethodWithPath_ClassDenyAllMethodRolesAllowed(
JsonObject array);
}
| ClassDenyAllParentResourceWithoutPath_SecurityOnBase |
java | google__error-prone | core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java | {
"start": 15272,
"end": 15739
} | class ____ {
java.util.Map.Entry<String, Integer> f() {
// BUG: Diagnostic contains: return (Map.Entry<String,Integer>) null;
return null;
}
}
""")
.doTest();
}
@Test
public void fullQualifiedName_typeVariable() {
CompilationTestHelper.newInstance(CastReturnFullType.class, getClass())
.addSourceLines(
"Test.java",
"""
| Test |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/main/java/io/quarkus/resteasy/reactive/jackson/deployment/processor/JacksonDeserializerFactory.java | {
"start": 2265,
"end": 2518
} | class ____ needs to be deserialized from json.
* In this way the deserialization process can be performed through the ad-hoc generate deserializer and then without
* any use of reflection. For instance for a pojo like this
*
* <pre>{@code
* public | that |
java | alibaba__nacos | persistence/src/test/java/com/alibaba/nacos/persistence/utils/ConnectionCheckUtilTest.java | {
"start": 1061,
"end": 1876
} | class ____ {
@Test
void testCheckConnectionThrowException() throws SQLException {
assertThrows(RuntimeException.class, () -> {
HikariDataSource ds = mock(HikariDataSource.class);
when(ds.getConnection()).thenThrow(new RuntimeException());
ConnectionCheckUtil.checkDataSourceConnection(ds);
verify(ds).getConnection();
});
}
@Test
void testCheckConnectionNormal() throws SQLException {
HikariDataSource ds = mock(HikariDataSource.class);
Connection connection = mock(Connection.class);
when(ds.getConnection()).thenReturn(connection);
ConnectionCheckUtil.checkDataSourceConnection(ds);
verify(ds).getConnection();
verify(connection).close();
}
}
| ConnectionCheckUtilTest |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/util/JavacElementUtilsDecorator.java | {
"start": 300,
"end": 650
} | class ____ extends AbstractElementUtilsDecorator {
JavacElementUtilsDecorator(ProcessingEnvironment processingEnv, TypeElement mapperElement) {
super( processingEnv, mapperElement );
}
@Override
protected TypeElement replaceTypeElementIfNecessary(TypeElement element) {
return element;
}
}
| JavacElementUtilsDecorator |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/service/QuarkusRuntimeInitDialectFactoryInitiator.java | {
"start": 575,
"end": 2782
} | class ____ implements StandardServiceInitiator<DialectFactory> {
private final String persistenceUnitName;
private final boolean isFromPersistenceXml;
private final Dialect dialect;
private final Optional<String> datasourceName;
private final DatabaseVersion buildTimeDbVersion;
private final boolean versionCheckEnabled;
public QuarkusRuntimeInitDialectFactoryInitiator(String persistenceUnitName,
boolean isFromPersistenceXml, Dialect dialect,
RecordedConfig recordedConfig,
HibernateOrmRuntimeConfigPersistenceUnit runtimePuConfig) {
this.persistenceUnitName = persistenceUnitName;
this.isFromPersistenceXml = isFromPersistenceXml;
this.dialect = dialect;
this.datasourceName = recordedConfig.getDataSource();
// We set the version from the dialect since if it wasn't provided explicitly through the `recordedConfig.getDbVersion()`
// then the version from `DialectVersions.Defaults` will be used:
this.buildTimeDbVersion = dialect.getVersion();
HibernateOrmRuntimeConfigPersistenceUnit.HibernateOrmConfigPersistenceUnitDatabase database = runtimePuConfig
.database();
if (database.startOffline() && database.versionCheckEnabled().filter(v -> v.booleanValue()).isPresent()) {
throw new PersistenceException(
"When using offline mode `quarkus.hibernate-orm.database.start-offline=true`, version check `quarkus.hibernate-orm.database.version-check.enabled` must be unset or set to `false`");
}
this.versionCheckEnabled = runtimePuConfig.database().versionCheckEnabled()
.orElse(!database.startOffline());
}
@Override
public Class<DialectFactory> getServiceInitiated() {
return DialectFactory.class;
}
@Override
public DialectFactory initiateService(Map<String, Object> configurationValues, ServiceRegistryImplementor registry) {
return new QuarkusRuntimeInitDialectFactory(persistenceUnitName, isFromPersistenceXml, dialect, datasourceName,
buildTimeDbVersion, versionCheckEnabled);
}
}
| QuarkusRuntimeInitDialectFactoryInitiator |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 149751,
"end": 150084
} | class ____ {
@RequestMapping(value = "/", method = RequestMethod.GET)
public String home() {
return "home";
}
@RequestMapping(value = "/", method = RequestMethod.GET, headers="Accept=application/json")
@ResponseBody
public String homeJson() {
return "homeJson";
}
}
@Controller
static | HeadersConditionController |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/xcontent/XContentUtilsTests.java | {
"start": 1557,
"end": 7514
} | class ____ extends ESTestCase {
public void testAddAuthorizationInfoWithNoAuthHeader() throws IOException {
String json = generateJson(null);
assertThat(json, equalTo("{}"));
json = generateJson(Map.of());
assertThat(json, equalTo("{}"));
json = generateJson(Map.of(randomAlphaOfLengthBetween(10, 20), randomAlphaOfLengthBetween(20, 30)));
assertThat(json, equalTo("{}"));
}
public void testAddAuthorizationInfoWithRoles() throws IOException {
String[] roles = generateRandomStringArray(4, randomIntBetween(5, 15), false, false);
User user = new User(randomAlphaOfLengthBetween(5, 15), roles);
AuthenticationTestBuilder builder = AuthenticationTestHelper.builder().realm().user(user);
Authentication authentication = builder.build();
String json = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
assertThat(
json,
equalTo("{\"authorization\":{\"roles\":" + Arrays.stream(roles).collect(Collectors.joining("\",\"", "[\"", "\"]")) + "}}")
);
}
public void testAddAuthorizationInfoWithApiKey() throws IOException {
String apiKeyId = randomAlphaOfLength(20);
String apiKeyName = randomAlphaOfLengthBetween(1, 16);
AuthenticationTestBuilder builder = AuthenticationTestHelper.builder()
.apiKey(apiKeyId)
.metadata(Map.of(API_KEY_NAME_KEY, apiKeyName));
Authentication authentication = builder.build();
String json = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
assertThat(json, equalTo("{\"authorization\":{\"api_key\":{\"id\":\"" + apiKeyId + "\",\"name\":\"" + apiKeyName + "\"}}}"));
}
public void testAddAuthorizationInfoWithCloudApiKey() throws IOException {
User user = AuthenticationTestHelper.randomCloudApiKeyUser();
Authentication authentication = AuthenticationTestHelper.randomCloudApiKeyAuthentication(user);
String json = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
assertThat(json, containsString("{\"authorization\":{\"cloud_api_key\":{\"id\":\"" + user.principal()));
assertThat(json, containsString("\"internal\":" + user.metadata().getOrDefault(API_KEY_INTERNAL_KEY, null)));
if (user.metadata().containsKey(API_KEY_NAME_KEY)) {
assertThat(json, containsString("\"name\":\"" + user.metadata().getOrDefault(API_KEY_NAME_KEY, null) + "\""));
}
for (String role : user.roles()) {
assertThat(json, containsString(role));
}
}
public void testAddAuthorizationInfoWithServiceAccount() throws IOException {
String account = "elastic/" + randomFrom("kibana", "fleet-server");
User user = new User(account);
AuthenticationTestBuilder builder = AuthenticationTestHelper.builder().serviceAccount(user);
Authentication authentication = builder.build();
String json = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
assertThat(json, equalTo("{\"authorization\":{\"service_account\":\"" + account + "\"}}"));
}
public void testAddAuthorizationInfoWithCrossClusterAccess() throws IOException {
final Authentication authentication = AuthenticationTestHelper.builder().crossClusterAccess().build();
final var apiKeyName = (String) authentication.getAuthenticatingSubject().getMetadata().get(API_KEY_NAME_KEY);
final var innerAuthentication = (Authentication) authentication.getAuthenticatingSubject()
.getMetadata()
.get(CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY);
// Rely on the target function itself to generate the json string for inner authentication.
// This is OK because other subject variants are tested elsewhere. We are only interested in the cross cluster variant here.
String innerAuthenticationString = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, innerAuthentication.encode()));
innerAuthenticationString = innerAuthenticationString.replace("{\"authorization\":", "");
innerAuthenticationString = innerAuthenticationString.substring(0, innerAuthenticationString.length() - 1);
String json = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
assertThat(
json,
equalTo(
XContentHelper.stripWhitespace(
Strings.format(
"""
{
"authorization": {
"cross_cluster_access": {
"api_key": {
"id": "%s"%s
},
"remote_authorization": %s
}
}
}""",
authentication.getAuthenticatingSubject().getMetadata().get(API_KEY_ID_KEY),
apiKeyName == null ? "" : ",\"name\":\"" + apiKeyName + "\"",
innerAuthenticationString
)
)
)
);
}
public void testAddAuthorizationInfoWithCorruptData() throws IOException {
String json = generateJson(Map.of(AuthenticationField.AUTHENTICATION_KEY, "corrupt"));
assertThat(json, equalTo("{}"));
}
private String generateJson(Map<String, String> headers) throws IOException {
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.startObject();
XContentUtils.addAuthorizationInfo(builder, headers);
builder.endObject();
return Strings.toString(builder);
}
}
}
| XContentUtilsTests |
java | spring-projects__spring-boot | module/spring-boot-cloudfoundry/src/test/java/org/springframework/boot/cloudfoundry/autoconfigure/actuate/endpoint/CloudFoundryWebEndpointDiscovererTests.java | {
"start": 6325,
"end": 6495
} | class ____ {
@ReadOperation
@Nullable Object getAll() {
return null;
}
}
@EndpointWebExtension(endpoint = HealthEndpoint.class)
static | TestEndpointWebExtension |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/ContextLoaderUtilsContextHierarchyTests.java | {
"start": 20242,
"end": 20353
} | class ____ {
}
@ContextConfiguration("two.xml")
private static | TestClass1WithBareContextConfigurationInSubclass |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/classrealm/ClassRealmRequest.java | {
"start": 2461,
"end": 2540
} | class ____.
*
* @return The modifiable list of constituents for the | realm |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestOutputBuffer.java | {
"start": 962,
"end": 1784
} | class ____ {
@Test
public void testOutputBuffer() {
final int size = 100;
final OutputBuffer output1 = new OutputBuffer(BufferType.DIRECT_BUFFER, size);
assertThat(output1.getType()).isEqualTo(BufferType.DIRECT_BUFFER);
assertThat(output1.length()).isZero();
assertThat(output1.limit()).isEqualTo(size);
final OutputBuffer output2 = new OutputBuffer(BufferType.HEAP_BUFFER, size);
assertThat(output2.getType()).isEqualTo(BufferType.HEAP_BUFFER);
assertThat(output2.length()).isZero();
assertThat(output2.limit()).isEqualTo(size);
final OutputBuffer output3 = new OutputBuffer(new byte[size]);
assertThat(output3.getType()).isEqualTo(BufferType.HEAP_BUFFER);
assertThat(output3.length()).isZero();
assertThat(output3.limit()).isEqualTo(size);
}
}
| TestOutputBuffer |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/server/filters/TraceFilter.java | {
"start": 1144,
"end": 1823
} | class ____ implements HttpServerFilter { // <2>
private final TraceService traceService;
public TraceFilter(TraceService traceService) { // <3>
this.traceService = traceService;
}
@Override
public Publisher<MutableHttpResponse<?>> doFilter(HttpRequest<?> request,
ServerFilterChain chain) {
return Flux.from(traceService
.trace(request)) // <4>
.switchMap(aBoolean -> chain.proceed(request)) // <5>
.doOnNext(res ->
res.getHeaders().add("X-Trace-Enabled", "true") // <6>
);
}
}
// end::clazz[]
| TraceFilter |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/interceptor/TransactionalClientDataSourceAsyncTest.java | {
"start": 1473,
"end": 4427
} | class ____ extends TransactionalClientDataSourceTest {
@Override
@Test
public void testTransactionRollback() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:error");
mock.expectedMessageCount(1);
try {
template.sendBody("direct:fail", "Hello World");
fail("Should have thrown exception");
} catch (RuntimeCamelException e) {
// expected as we fail
assertIsInstanceOf(RuntimeCamelException.class, e.getCause());
assertTrue(e.getCause().getCause() instanceof IllegalArgumentException);
assertEquals("We don't have Donkeys, only Camels", e.getCause().getCause().getMessage());
}
assertMockEndpointsSatisfied();
int count = jdbc.queryForObject("select count(*) from books", Integer.class);
assertEquals(1, count, "Number of books");
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() throws Exception {
context.addComponent("async", new MyAsyncComponent());
// use required as transaction policy
SpringTransactionPolicy required
= context.getRegistry().lookupByNameAndType("PROPAGATION_REQUIRED", SpringTransactionPolicy.class);
// configure to use transaction error handler and pass on the required as it will fetch
// the transaction manager from it that it needs
// The API is deprecated, we can remove warnings safely as the tests will disappear when removing this component.
@SuppressWarnings("deprecation")
LegacyTransactionErrorHandlerBuilder teh = new LegacyTransactionErrorHandlerBuilder();
teh.setSpringTransactionPolicy(required);
errorHandler(teh);
// on exception is also supported
onException(IllegalArgumentException.class).handled(false).to("mock:error");
from("direct:okay")
.policy(required)
.setBody(constant("Tiger in Action")).bean("bookService")
.log("Before thread ${threadName}")
.to("async:bye:camel")
.log("After thread ${threadName}")
.setBody(constant("Elephant in Action")).bean("bookService");
from("direct:fail")
.policy(required)
.setBody(constant("Tiger in Action")).bean("bookService")
.log("Before thread ${threadName}")
.to("async:bye:camel")
.log("After thread ${threadName}")
.setBody(constant("Donkey in Action")).bean("bookService");
}
};
}
}
| TransactionalClientDataSourceAsyncTest |
java | google__auto | common/src/test/java/com/google/auto/common/AnnotationMirrorsTest.java | {
"start": 2654,
"end": 2725
} | class ____ {}
@DefaultingOuter(FOO)
static | TestWithDefaultingOuterBlah |
java | netty__netty | codec-marshalling/src/main/java/io/netty/handler/codec/marshalling/CompatibleMarshallingDecoder.java | {
"start": 1224,
"end": 4195
} | class ____ extends ReplayingDecoder<Void> {
protected final UnmarshallerProvider provider;
protected final int maxObjectSize;
private boolean discardingTooLongFrame;
/**
* Create a new instance of {@link CompatibleMarshallingDecoder}.
*
* @param provider
* the {@link UnmarshallerProvider} which is used to obtain the {@link Unmarshaller}
* for the {@link Channel}
* @param maxObjectSize
* the maximal size (in bytes) of the {@link Object} to unmarshal. Once the size is
* exceeded the {@link Channel} will get closed. Use a maxObjectSize of
* {@link Integer#MAX_VALUE} to disable this. You should only do this if you are sure
* that the received Objects will never be big and the sending side are trusted, as this
* opens the possibility for a DOS-Attack due an {@link OutOfMemoryError}.
*/
public CompatibleMarshallingDecoder(UnmarshallerProvider provider, int maxObjectSize) {
this.provider = provider;
this.maxObjectSize = maxObjectSize;
}
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception {
if (discardingTooLongFrame) {
buffer.skipBytes(actualReadableBytes());
checkpoint();
return;
}
Unmarshaller unmarshaller = provider.getUnmarshaller(ctx);
ByteInput input = new ChannelBufferByteInput(buffer);
if (maxObjectSize != Integer.MAX_VALUE) {
input = new LimitingByteInput(input, maxObjectSize);
}
try {
unmarshaller.start(input);
Object obj = unmarshaller.readObject();
unmarshaller.finish();
out.add(obj);
} catch (LimitingByteInput.TooBigObjectException ignored) {
discardingTooLongFrame = true;
throw new TooLongFrameException();
} finally {
// Call close in a finally block as the ReplayingDecoder will throw an Error if not enough bytes are
// readable. This helps to be sure that we do not leak resource
unmarshaller.close();
}
}
@Override
protected void decodeLast(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception {
switch (buffer.readableBytes()) {
case 0:
return;
case 1:
// Ignore the last TC_RESET
if (buffer.getByte(buffer.readerIndex()) == ObjectStreamConstants.TC_RESET) {
buffer.skipBytes(1);
return;
}
}
decode(ctx, buffer, out);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if (cause instanceof TooLongFrameException) {
ctx.close();
} else {
super.exceptionCaught(ctx, cause);
}
}
}
| CompatibleMarshallingDecoder |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoCacheInvalidateWhen.java | {
"start": 4956,
"end": 6066
} | class ____<T> implements InnerConsumer<T>, State<T> {
final MonoCacheInvalidateWhen<T> main;
@SuppressWarnings("NotNullFieldNotInitialized") // initialized in onSubscribe
Subscription subscription;
volatile CacheMonoSubscriber<T>[] subscribers;
@SuppressWarnings("rawtypes")
static final AtomicReferenceFieldUpdater<CoordinatorSubscriber, CacheMonoSubscriber[]> SUBSCRIBERS =
AtomicReferenceFieldUpdater.newUpdater(CoordinatorSubscriber.class, CacheMonoSubscriber[].class, "subscribers");
@SuppressWarnings("rawtypes")
private static final CacheMonoSubscriber[] COORDINATOR_DONE = new CacheMonoSubscriber[0];
@SuppressWarnings("rawtypes")
private static final CacheMonoSubscriber[] COORDINATOR_INIT = new CacheMonoSubscriber[0];
CoordinatorSubscriber(MonoCacheInvalidateWhen<T> main) {
this.main = main;
@SuppressWarnings("unchecked")
CacheMonoSubscriber<T>[] init = (CacheMonoSubscriber<T>[]) COORDINATOR_INIT;
this.subscribers = init;
}
/**
* unused in this context as the {@link State} | CoordinatorSubscriber |
java | elastic__elasticsearch | plugins/examples/stable-analysis/src/main/java/org/elasticsearch/example/analysis/package-info.java | {
"start": 711,
"end": 1025
} | interface ____ an argument. See the javadoc for the</li>
* </ul>
* ExampleAnalysisSettings for more details:
* <ul>
* <li>a no/noarg constructor is also possible</li>
* <li>a methods from stable analysis api are to be implemented with Apache Lucene</li>
* </ul>
*/
package org.elasticsearch.example.analysis;
| as |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/manytoone/Car.java | {
"start": 517,
"end": 1075
} | class ____ {
private Integer id;
private Color bodyColor;
private Parent owner;
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(foreignKey = @ForeignKey(name="BODY_COLOR_FK"))
public Color getBodyColor() {
return bodyColor;
}
public void setBodyColor(Color bodyColor) {
this.bodyColor = bodyColor;
}
@ManyToOne
public Parent getOwner() {
return owner;
}
public void setOwner(Parent owner) {
this.owner = owner;
}
}
| Car |
java | grpc__grpc-java | rls/src/test/java/io/grpc/rls/CachingRlsLbClientTest.java | {
"start": 49787,
"end": 50416
} | class ____ implements Throttler {
int numUnthrottled;
int numThrottled;
private boolean nextResult = false;
@Override
public boolean shouldThrottle() {
return nextResult;
}
@Override
public void registerBackendResponse(boolean throttled) {
if (throttled) {
numThrottled++;
} else {
numUnthrottled++;
}
}
public int getNumUnthrottled() {
return numUnthrottled;
}
public int getNumThrottled() {
return numThrottled;
}
public void resetCounts() {
numThrottled = 0;
numUnthrottled = 0;
}
}
}
| FakeThrottler |
java | apache__camel | components/camel-aws/camel-aws2-kms/src/test/java/org/apache/camel/component/aws2/kms/localstack/KmsScheduleDeleteKeyIT.java | {
"start": 1470,
"end": 3741
} | class ____ extends Aws2KmsBase {
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendIn() throws Exception {
result.expectedMessageCount(1);
Exchange ex = template.send("direct:createKey", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(KMS2Constants.OPERATION, "createKey");
}
});
String keyId = ex.getMessage().getBody(CreateKeyResponse.class).keyMetadata().keyId();
template.send("direct:scheduleDelete", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(KMS2Constants.OPERATION, "scheduleKeyDeletion");
exchange.getIn().setHeader(KMS2Constants.KEY_ID, keyId);
}
});
template.send("direct:describeKey", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(KMS2Constants.OPERATION, "describeKey");
exchange.getIn().setHeader(KMS2Constants.KEY_ID, keyId);
}
});
MockEndpoint.assertIsSatisfied(context);
assertEquals(1, result.getExchanges().size());
assertEquals(KeyState.PENDING_DELETION,
result.getExchanges().get(0).getIn().getBody(DescribeKeyResponse.class).keyMetadata().keyState());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint
= "aws2-kms://default?operation=createKey";
String describeKey
= "aws2-kms://default?operation=describeKey";
String scheduleDelete
= "aws2-kms://default?operation=describeKey";
from("direct:createKey").to(awsEndpoint);
from("direct:scheduleDelete").to(scheduleDelete);
from("direct:describeKey").to(describeKey).to("mock:result");
}
};
}
}
| KmsScheduleDeleteKeyIT |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerSchemasTests.java | {
"start": 1488,
"end": 6002
} | class ____ extends ESTestCase {
public static SageMakerSchemas mockSchemas() {
SageMakerSchemas schemas = mock();
var schema = mockSchema();
when(schemas.schemaFor(any(TaskType.class), anyString())).thenReturn(schema);
return schemas;
}
public static SageMakerSchema mockSchema() {
SageMakerSchema schema = mock();
when(schema.apiServiceSettings(anyMap(), any())).thenReturn(SageMakerStoredServiceSchema.NO_OP);
when(schema.apiTaskSettings(anyMap(), any())).thenReturn(SageMakerStoredTaskSchema.NO_OP);
return schema;
}
private static final SageMakerSchemas schemas = new SageMakerSchemas();
public void testSupportedTaskTypes() {
assertThat(
schemas.supportedTaskTypes(),
containsInAnyOrder(
TaskType.TEXT_EMBEDDING,
TaskType.COMPLETION,
TaskType.CHAT_COMPLETION,
TaskType.SPARSE_EMBEDDING,
TaskType.RERANK
)
);
}
public void testSupportedStreamingTasks() {
assertThat(schemas.supportedStreamingTasks(), containsInAnyOrder(TaskType.COMPLETION, TaskType.CHAT_COMPLETION));
}
public void testSchemaFor() {
var payloads = Stream.of(new OpenAiTextEmbeddingPayload(), new OpenAiCompletionPayload());
payloads.forEach(payload -> {
payload.supportedTasks().forEach(taskType -> {
var model = mockModel(taskType, payload.api());
assertNotNull(schemas.schemaFor(model));
});
});
}
public void testStreamSchemaFor() {
var payloads = Stream.<SageMakerStreamSchemaPayload>of(new OpenAiCompletionPayload());
payloads.forEach(payload -> {
payload.supportedTasks().forEach(taskType -> {
var model = mockModel(taskType, payload.api());
assertNotNull(schemas.streamSchemaFor(model));
});
});
}
private SageMakerModel mockModel(TaskType taskType, String api) {
SageMakerModel model = mock();
when(model.getTaskType()).thenReturn(taskType);
when(model.api()).thenReturn(api);
return model;
}
    // Known api (openai) but a task it does not support must be rejected.
    // NOTE(review): with JUnit 4.13 assertThrows(String, Class, ThrowingRunnable) the String is the
    // failure message reported when NO exception is thrown — it is NOT matched against the thrown
    // exception's message. Confirm that is the intent here and in the two tests below.
    public void testMissingTaskTypeThrowsException() {
        var knownPayload = new OpenAiTextEmbeddingPayload();
        var unknownTaskType = TaskType.RERANK;
        var knownModel = mockModel(unknownTaskType, knownPayload.api());
        assertThrows(
            "Task [rerank] is not compatible for service [sagemaker] and api [openai]. "
                + "Supported tasks: [text_embedding, completion, chat_completion]",
            ElasticsearchStatusException.class,
            () -> schemas.schemaFor(knownModel)
        );
    }
    // Completely unknown api must be rejected.
    public void testMissingSchemaThrowsException() {
        var unknownModel = mockModel(TaskType.ANY, "blah");
        assertThrows(
            "Task [any] is not compatible for service [sagemaker] and api [blah]. Supported tasks: []",
            ElasticsearchStatusException.class,
            () -> schemas.schemaFor(unknownModel)
        );
    }
    // Unknown api must also be rejected on the streaming lookup path.
    public void testMissingStreamSchemaThrowsException() {
        var unknownModel = mockModel(TaskType.ANY, "blah");
        assertThrows(
            "Streaming is not allowed for service [sagemaker], api [blah], and task [any]. Supported streaming tasks: []",
            ElasticsearchStatusException.class,
            () -> schemas.streamSchemaFor(unknownModel)
        );
    }
    // The registry's registered named-writeable names must be exactly the deduplicated union of
    // every payload's named writeables plus the two NO_OP schema entries — nothing missing, nothing extra.
    public void testNamedWriteables() {
        var namedWriteables = Stream.of(
            new OpenAiTextEmbeddingPayload().namedWriteables(),
            new OpenAiCompletionPayload().namedWriteables(),
            new ElasticCompletionPayload().namedWriteables(),
            new ElasticSparseEmbeddingPayload().namedWriteables(),
            new ElasticTextEmbeddingPayload().namedWriteables(),
            new ElasticRerankPayload().namedWriteables()
        );
        // distinct() because multiple payloads may contribute the same entry name.
        var expectedNamedWriteables = Stream.concat(
            namedWriteables.flatMap(names -> names.map(entry -> entry.name)),
            Stream.of(SageMakerStoredServiceSchema.NO_OP.getWriteableName(), SageMakerStoredTaskSchema.NO_OP.getWriteableName())
        ).distinct().toArray();
        var actualRegisteredNames = SageMakerSchemas.namedWriteables().stream().map(entry -> entry.name).toList();
        assertThat(actualRegisteredNames, containsInAnyOrder(expectedNamedWriteables));
    }
}
| SageMakerSchemasTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/util/StringHelperTest.java | {
"start": 777,
"end": 5629
} | class ____ {
	// Package prefix and FQN fixtures shared by the qualification/collapsing tests below.
	private static final String BASE_PACKAGE = "org.hibernate";
	private static final String STRING_HELPER_FQN = "org.hibernate.internal.util.StringHelper";
	private static final String STRING_HELPER_NAME = StringHelper.unqualify( STRING_HELPER_FQN );
	@Test
	public void testNameCollapsing() {
		// collapse() abbreviates each package segment to its initial, keeping the simple class name;
		// null passes through, and an already-simple name is returned unchanged.
		assertNull( StringHelper.collapse( null ) );
		assertEquals( STRING_HELPER_NAME, StringHelper.collapse( STRING_HELPER_NAME ) );
		assertEquals( "o.h.i.u.StringHelper", StringHelper.collapse( STRING_HELPER_FQN ) );
	}
	@Test
	public void testPartialNameUnqualification() {
		// partiallyUnqualify() strips the given package prefix; null and non-matching input pass through.
		assertNull( StringHelper.partiallyUnqualify( null, BASE_PACKAGE ) );
		assertEquals( STRING_HELPER_NAME, StringHelper.partiallyUnqualify( STRING_HELPER_NAME, BASE_PACKAGE ) );
		assertEquals( "internal.util.StringHelper", StringHelper.partiallyUnqualify( STRING_HELPER_FQN, BASE_PACKAGE ) );
	}
@Test
public void testIsBlank() {
assertFalse( StringHelper.isBlank( "A" ) );
assertFalse( StringHelper.isBlank( " a" ) );
assertFalse( StringHelper.isBlank( "a " ) );
assertFalse( StringHelper.isBlank( "a\t" ) );
assertTrue( StringHelper.isBlank( "\t\n" ) );
assertTrue( StringHelper.isBlank( null ) );
assertTrue( StringHelper.isBlank( "" ) );
assertTrue( StringHelper.isBlank( " " ) );
}
	@Test
	public void testBasePackageCollapsing() {
		// Only the segments of the given base package are collapsed to initials; the rest stays intact.
		assertNull( StringHelper.collapseQualifierBase( null, BASE_PACKAGE ) );
		assertEquals( STRING_HELPER_NAME, StringHelper.collapseQualifierBase( STRING_HELPER_NAME, BASE_PACKAGE ) );
		assertEquals( "o.h.internal.util.StringHelper", StringHelper.collapseQualifierBase( STRING_HELPER_FQN, BASE_PACKAGE ) );
	}
	@Test
	public void testFindIdentifierWord() {
		// indexOfIdentifierWord() must match whole identifier tokens only — "?1" must not match inside "?13".
		assertEquals( -1, StringHelper.indexOfIdentifierWord( "", "word" ) );
		assertEquals( -1, StringHelper.indexOfIdentifierWord( null, "word" ) );
		assertEquals( -1, StringHelper.indexOfIdentifierWord( "sentence", null ) );
		assertEquals( 31, StringHelper.indexOfIdentifierWord( "where name=?13 and description=?1", "?1" ) );
		assertEquals( 31, StringHelper.indexOfIdentifierWord( "where name=?13 and description=?1 and category_id=?4", "?1" ) );
		assertEquals( 0, StringHelper.indexOfIdentifierWord( "?1", "?1" ) );
		assertEquals( -1, StringHelper.indexOfIdentifierWord( "no identifier here", "?1" ) );
		assertEquals( 10, StringHelper.indexOfIdentifierWord( "some text ?", "?" ) );
	}
	// H2 quotes identifiers with backticks and double quotes; used by the unquoting tests.
	private static final H2Dialect DIALECT = new H2Dialect();
	@Test
	public void testArrayUnquoting() {
		assertNull( StringHelper.unquote( (String[]) null, DIALECT ) );
		//This to verify that the string array isn't being copied unnecessarily:
		unchanged( new String [0] );
		unchanged( new String[] { "a" } );
		unchanged( new String[] { "a", "b" } );
		helperEquals( new String[] { "a", "b", "c" }, new String[] { "a", "b", "`c`" } );
		helperEquals( new String[] { "a", "b", "c" }, new String[] { "a", "\"b\"", "c" } );
	}
	// Asserts unquote() returns the SAME array instance when no element needs unquoting.
	private static void unchanged(String[] input) {
		final String[] output = StringHelper.unquote( input, DIALECT );
		assertSame( input, output );
	}
	// Asserts unquote() produces element-wise the expected unquoted values.
	private static void helperEquals(String[] expectation, String[] input) {
		final String[] output = StringHelper.unquote( input, DIALECT );
		assertArrayEquals( expectation, output );
	}
	@Test
	public void testIsQuotedWithDialect() {
		assertFalse( StringHelper.isQuoted( "a", DIALECT ) );
		assertTrue( StringHelper.isQuoted( "`a`", DIALECT ) );
		//This dialect has a different "open" than "close" quoting symbol:
		final SQLServerDialect sqlServerDialect = new SQLServerDialect();
		assertTrue( StringHelper.isQuoted( "[a]", sqlServerDialect ) );
		// Mismatched open/close pairs must not be considered quoted.
		assertFalse( StringHelper.isQuoted( "`a]", sqlServerDialect ) );
		assertFalse( StringHelper.isQuoted( "[a`", sqlServerDialect ) );
		assertFalse( StringHelper.isQuoted( "\"a`", sqlServerDialect ) );
		assertFalse( StringHelper.isQuoted( "`a\"", sqlServerDialect ) );
		assertFalse( StringHelper.isQuoted( "a", sqlServerDialect ) );
	}
	// Regression test: replace() on a query with ~17k ordinal parameters and two whole-word
	// occurrences of "?1" must not recurse into a StackOverflowError, and must leave the
	// longer placeholders (?3..?19999) untouched.
	@Test
	public void replaceRepeatingPlaceholdersWithoutStackOverflow() {
		String ordinalParameters = generateOrdinalParameters( 3, 19999 );
		String result = StringHelper.replace(
				"select * from books where category in (?1) and id in(" + ordinalParameters + ") and parent_category in (?1) and id in(" + ordinalParameters + ")",
				"?1", "?1, ?2", true, true );
		assertEquals( "select * from books where category in (?1, ?2) and id in(" + ordinalParameters + ") and parent_category in (?1, ?2) and id in(" + ordinalParameters + ")", result );
	}
private String generateOrdinalParameters(int startPosition, int endPosition) {
StringBuilder builder = new StringBuilder();
for ( int i = startPosition; i <= endPosition; i++ ) {
builder.append( '?' ).append( i );
if ( i < endPosition ) {
builder.append( ", " );
}
}
return builder.toString();
}
}
| StringHelperTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/AssertExtensions.java | {
"start": 1197,
"end": 1758
} | class ____ {
  private static final Logger LOG =
      LoggerFactory.getLogger(AssertExtensions.class);
  // Utility class: prevent instantiation.
  private AssertExtensions() {
  }
  /**
   * A description for AssertJ "describedAs" clauses which evaluates the
   * lambda-expression only on failure. That must return a string
   * or null/"" to be skipped.
   * @param eval lambda expression to invoke
   * @return a description for AssertJ
   */
  public static Description dynamicDescription(Callable<String> eval) {
    return new DynamicDescription(eval);
  }
private static final | AssertExtensions |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/StoreInferenceEndpointsAction.java | {
"start": 1070,
"end": 1453
} | class ____ extends ActionType<StoreInferenceEndpointsAction.Response> {
    // Stateless singleton; action types carry no per-request state.
    public static final StoreInferenceEndpointsAction INSTANCE = new StoreInferenceEndpointsAction();
    // "cluster:internal/..." prefix marks this as a cluster-internal action name.
    public static final String NAME = "cluster:internal/xpack/inference/create_endpoints";
    public StoreInferenceEndpointsAction() {
        super(NAME);
    }
public static | StoreInferenceEndpointsAction |
java | redisson__redisson | redisson/src/main/java/org/redisson/jcache/JCacheManager.java | {
"start": 1572,
"end": 13395
} | class ____ implements CacheManager {
    // Shared no-op bean returned by getStatBean() when statistics are not enabled for a cache.
    private static final EmptyStatisticsMXBean EMPTY_INSTANCE = new EmptyStatisticsMXBean();
    private static final MBeanServer MBEAN_SERVER = ManagementFactory.getPlatformMBeanServer();
    private final ClassLoader classLoader;
    private final CachingProvider cacheProvider;
    private final Properties properties;
    private final URI uri;
    // Live caches by name; source of truth for getCache()/getCacheNames().
    private final ConcurrentMap<String, Cache<?, ?>> caches = new ConcurrentHashMap<>();
    // Per-cache JMX beans, (un)registered as statistics/management are toggled.
    private final ConcurrentMap<Cache<?, ?>, JCacheStatisticsMXBean> statBeans = new ConcurrentHashMap<>();
    private final ConcurrentMap<Cache<?, ?>, JCacheManagementMXBean> managementBeans = new ConcurrentHashMap<>();
    // Flipped exactly once by close(); checked by every public operation.
    private final AtomicBoolean closed = new AtomicBoolean();
    // Manager-level Redisson instance; may be null when each cache brings its own configuration.
    private final Redisson redisson;
JCacheManager(Redisson redisson, ClassLoader classLoader, CachingProvider cacheProvider, Properties properties, URI uri) {
super();
this.classLoader = classLoader;
this.cacheProvider = cacheProvider;
this.properties = properties;
this.uri = uri;
this.redisson = redisson;
}
    /** @return the provider that created this manager */
    @Override
    public CachingProvider getCachingProvider() {
        return cacheProvider;
    }
    /** @return the URI identifying this manager */
    @Override
    public URI getURI() {
        return uri;
    }
    /** @return the class loader this manager was created with */
    @Override
    public ClassLoader getClassLoader() {
        return classLoader;
    }
    /** @return the properties this manager was created with */
    @Override
    public Properties getProperties() {
        return properties;
    }
    // Guard invoked by every public operation: closed managers must reject calls with IllegalStateException.
    private void checkNotClosed() {
        if (closed.get()) {
            throw new IllegalStateException();
        }
    }
    /**
     * Creates and registers a named cache.
     * Redisson resolution: a RedissonConfiguration carrying its own Config gets a dedicated
     * Redisson instance owned by the cache; one carrying a Redisson instance shares it;
     * otherwise the manager-level default is used.
     *
     * @throws NullPointerException if cacheName or configuration is null
     * @throws IllegalStateException if there is no default Redisson and none is supplied
     * @throws CacheException if a cache with the same name already exists
     */
    @Override
    public <K, V, C extends Configuration<K, V>> Cache<K, V> createCache(String cacheName, C configuration)
            throws IllegalArgumentException {
        checkNotClosed();
        Redisson cacheRedisson = redisson;
        if (cacheName == null) {
            throw new NullPointerException();
        }
        if (configuration == null) {
            throw new NullPointerException();
        }
        if (cacheRedisson == null && !(configuration instanceof RedissonConfiguration)) {
            throw new IllegalStateException("Default configuration hasn't been specified!");
        }
        boolean hasOwnRedisson = false;
        if (configuration instanceof RedissonConfiguration) {
            RedissonConfiguration<K, V> rc = (RedissonConfiguration<K, V>) configuration;
            if (rc.getConfig() != null) {
                // Dedicated instance: the cache becomes responsible for shutting it down on close.
                cacheRedisson = (Redisson) Redisson.create(rc.getConfig());
                hasOwnRedisson = true;
            } else {
                cacheRedisson = (Redisson) rc.getRedisson();
            }
        }
        JCacheConfiguration<K, V> cfg = new JCacheConfiguration<K, V>(configuration);
        JCache<K, V> cache = new JCache<>(this, cacheRedisson, cacheName, cfg, hasOwnRedisson);
        Cache<?, ?> oldCache = caches.putIfAbsent(cacheName, cache);
        if (oldCache != null) {
            throw new CacheException("Cache " + cacheName + " already exists");
        }
        // Honor the statistics/management flags requested via the configuration.
        if (cfg.isStatisticsEnabled()) {
            enableStatistics(cacheName, true);
        }
        if (cfg.isManagementEnabled()) {
            enableManagement(cacheName, true);
        }
        return cache;
    }
@Override
public <K, V> Cache<K, V> getCache(String cacheName, Class<K> keyType, Class<V> valueType) {
checkNotClosed();
if (cacheName == null) {
throw new NullPointerException();
}
if (keyType == null) {
throw new NullPointerException();
}
if (valueType == null) {
throw new NullPointerException();
}
Cache<?, ?> cache = caches.get(cacheName);
if (cache == null) {
return null;
}
if (!keyType.isAssignableFrom(cache.getConfiguration(CompleteConfiguration.class).getKeyType())) {
throw new ClassCastException("Wrong type of key for " + cacheName);
}
if (!valueType.isAssignableFrom(cache.getConfiguration(CompleteConfiguration.class).getValueType())) {
throw new ClassCastException("Wrong type of value for " + cacheName);
}
return (Cache<K, V>) cache;
}
    /**
     * Untyped accessor: returns the named cache typed as Object/Object.
     * NOTE(review): the assignability checks are inverted relative to the three-arg overload —
     * configuredType.isAssignableFrom(Object.class) only holds when the configured type IS Object,
     * so this accessor rejects caches created with narrower types; failures raise
     * IllegalArgumentException rather than ClassCastException. Confirm both are intentional
     * (they appear to follow the JSR-107 rules for the untyped getCache).
     */
    @Override
    public <K, V> Cache<K, V> getCache(String cacheName) {
        checkNotClosed();
        Cache<K, V> cache = (Cache<K, V>) getCache(cacheName, Object.class, Object.class);
        if (cache != null) {
            if (!cache.getConfiguration(CompleteConfiguration.class).getKeyType().isAssignableFrom(Object.class)) {
                throw new IllegalArgumentException("Wrong type of key for " + cacheName);
            }
            if (!cache.getConfiguration(CompleteConfiguration.class).getValueType().isAssignableFrom(Object.class)) {
                throw new IllegalArgumentException("Wrong type of value for " + cacheName);
            }
        }
        return cache;
    }
    /** Returns a defensive, unmodifiable snapshot of the currently registered cache names. */
    @Override
    public Iterable<String> getCacheNames() {
        return Collections.unmodifiableSet(new HashSet<>(caches.keySet()));
    }
    /** Clears and closes the named cache; a no-op if no such cache is registered. */
    @Override
    public void destroyCache(String cacheName) {
        checkNotClosed();
        if (cacheName == null) {
            throw new NullPointerException();
        }
        Cache<?, ?> cache = caches.get(cacheName);
        if (cache != null) {
            cache.clear();
            // cache.close() calls back into closeCache() below to deregister it from this manager.
            cache.close();
        }
    }
    // Callback from JCache.close(): removes the cache and tears down its JMX beans.
    public void closeCache(JCache<?, ?> cache) {
        caches.remove(cache.getName());
        unregisterStatisticsBean(cache);
        unregisterManagementBean(cache);
    }
@Override
public void enableManagement(String cacheName, boolean enabled) {
checkNotClosed();
if (cacheName == null) {
throw new NullPointerException();
}
Cache<?, ?> cache = caches.get(cacheName);
if (cache == null) {
throw new NullPointerException();
}
if (enabled) {
JCacheManagementMXBean statBean = managementBeans.get(cache);
if (statBean == null) {
statBean = new JCacheManagementMXBean(cache);
JCacheManagementMXBean oldBean = managementBeans.putIfAbsent(cache, statBean);
if (oldBean != null) {
statBean = oldBean;
}
}
try {
ObjectName objectName = queryNames("Configuration", cache);
if (MBEAN_SERVER.queryNames(objectName, null).isEmpty()) {
MBEAN_SERVER.registerMBean(statBean, objectName);
}
} catch (MalformedObjectNameException e) {
throw new CacheException(e);
} catch (InstanceAlreadyExistsException e) {
throw new CacheException(e);
} catch (MBeanRegistrationException e) {
throw new CacheException(e);
} catch (NotCompliantMBeanException e) {
throw new CacheException(e);
}
} else {
unregisterManagementBean(cache);
}
cache.getConfiguration(JCacheConfiguration.class).setManagementEnabled(enabled);
}
    // Builds the JMX ObjectName for the cache's "Configuration" or "Statistics" MBean.
    private ObjectName queryNames(String baseName, Cache<?, ?> cache) throws MalformedObjectNameException {
        String name = getName(baseName, cache);
        return new ObjectName(name);
    }
private void unregisterManagementBean(Cache<?, ?> cache) {
JCacheManagementMXBean statBean = managementBeans.remove(cache);
if (statBean != null) {
try {
ObjectName name = queryNames("Configuration", cache);
for (ObjectName objectName : MBEAN_SERVER.queryNames(name, null)) {
MBEAN_SERVER.unregisterMBean(objectName);
}
} catch (MalformedObjectNameException e) {
throw new CacheException(e);
} catch (MBeanRegistrationException e) {
throw new CacheException(e);
} catch (InstanceNotFoundException e) {
throw new CacheException(e);
}
}
}
    /** Returns the live statistics bean for the cache, or the shared no-op bean when statistics are disabled. */
    public JCacheStatisticsMXBean getStatBean(JCache<?, ?> cache) {
        JCacheStatisticsMXBean bean = statBeans.get(cache);
        if (bean != null) {
            return bean;
        }
        return EMPTY_INSTANCE;
    }
    // JMX-safe object name: characters reserved by ObjectName (',' ':' '=' and newlines) are replaced by '.'.
    private String getName(String name, Cache<?, ?> cache) {
        return "javax.cache:type=Cache" + name + ",CacheManager="
                + getURI().toString().replaceAll(",|:|=|\n", ".")
                + ",Cache=" + cache.getName().replaceAll(",|:|=|\n", ".");
    }
@Override
public void enableStatistics(String cacheName, boolean enabled) {
checkNotClosed();
if (cacheName == null) {
throw new NullPointerException();
}
Cache<?, ?> cache = caches.get(cacheName);
if (cache == null) {
throw new NullPointerException();
}
if (enabled) {
JCacheStatisticsMXBean statBean = statBeans.get(cache);
if (statBean == null) {
statBean = new JCacheStatisticsMXBean();
JCacheStatisticsMXBean oldBean = statBeans.putIfAbsent(cache, statBean);
if (oldBean != null) {
statBean = oldBean;
}
}
try {
ObjectName objectName = queryNames("Statistics", cache);
if (!MBEAN_SERVER.isRegistered(objectName)) {
MBEAN_SERVER.registerMBean(statBean, objectName);
}
} catch (MalformedObjectNameException e) {
throw new CacheException(e);
} catch (InstanceAlreadyExistsException e) {
throw new CacheException(e);
} catch (MBeanRegistrationException e) {
throw new CacheException(e);
} catch (NotCompliantMBeanException e) {
throw new CacheException(e);
}
} else {
unregisterStatisticsBean(cache);
}
cache.getConfiguration(JCacheConfiguration.class).setStatisticsEnabled(enabled);
}
private void unregisterStatisticsBean(Cache<?, ?> cache) {
JCacheStatisticsMXBean statBean = statBeans.remove(cache);
if (statBean != null) {
try {
ObjectName name = queryNames("Statistics", cache);
for (ObjectName objectName : MBEAN_SERVER.queryNames(name, null)) {
MBEAN_SERVER.unregisterMBean(objectName);
}
} catch (MalformedObjectNameException e) {
throw new CacheException(e);
} catch (MBeanRegistrationException e) {
throw new CacheException(e);
} catch (InstanceNotFoundException e) {
throw new CacheException(e);
}
}
}
@Override
public void close() {
if (closed.compareAndSet(false, true)) {
if (cacheProvider != null) {
cacheProvider.close(uri, classLoader);
}
for (Cache<?, ?> cache : caches.values()) {
try {
cache.close();
} catch (Exception e) {
// skip
}
}
if (redisson != null) {
redisson.shutdown();
}
}
}
@Override
public boolean isClosed() {
return closed.get();
}
@Override
public <T> T unwrap(Class<T> clazz) {
if (clazz.isAssignableFrom(getClass())) {
return clazz.cast(this);
}
throw new IllegalArgumentException();
}
}
| JCacheManager |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/BanClassLoaderTest.java | {
"start": 3357,
"end": 3707
} | class ____ extends SecureClassLoader {}
/** OK to call loadClass if it's not on RMIClassLoader */
public final Class<?> overrideClassLoader() throws ClassNotFoundException {
SecureClassLoader loader = new AnotherSecureClassLoader();
return loader.loadClass("BadClass");
}
/** OK to define loadClass */
private | AnotherSecureClassLoader |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/WebTargetImpl.java | {
"start": 809,
"end": 18174
} | class ____ implements WebTarget {
protected UriBuilder uriBuilder;
private final HttpClient client;
private final ConfigurationImpl configuration;
private boolean chunked = false;
private final ClientImpl restClient;
final HandlerChain handlerChain;
final ThreadSetupAction requestContext;
// an additional handler that is passed to the handlerChain
// used to support observability features
private ClientRestHandler preClientSendHandler = null;
private List<ParamConverterProvider> paramConverterProviders = Collections.emptyList();
    /**
     * @param restClient owning client; used for closed-state checks and proxy creation
     * @param client underlying HTTP client used to execute requests
     * @param uriBuilder target URI (template); cloned on every derivation
     * @param configuration per-target configuration
     * @param handlerChain REST handler pipeline shared with derived targets
     * @param requestContext thread-setup action applied around request execution
     */
    public WebTargetImpl(ClientImpl restClient, HttpClient client, UriBuilder uriBuilder,
            ConfigurationImpl configuration,
            HandlerChain handlerChain,
            ThreadSetupAction requestContext) {
        this.restClient = restClient;
        this.client = client;
        this.uriBuilder = uriBuilder;
        this.configuration = configuration;
        this.handlerChain = handlerChain;
        this.requestContext = requestContext;
    }
/**
* Get a new UriBuilder explicitly using RESTEasy implementation
* (instead of running UriBuilder.fromUri(uri) which relies on
* current registered JAX-RS implementation)
*
* @param uri
* @return
*/
private static UriBuilder uriBuilderFromUri(URI uri) {
return new UriBuilderImpl().uri(uri);
}
private static UriBuilder uriBuilderFromUri(String uri) {
return new UriBuilderImpl().uri(uri);
}
@Override
public WebTargetImpl clone() {
abortIfClosed();
UriBuilder copy = uriBuilder.clone();
return newInstance(client, copy, configuration);
}
@Override
public URI getUri() {
abortIfClosed();
return uriBuilder.build();
}
@Override
public UriBuilder getUriBuilder() {
abortIfClosed();
return uriBuilder.clone();
}
public UriBuilderImpl getUriBuilderUnsafe() {
return (UriBuilderImpl) uriBuilder;
}
@Override
public ConfigurationImpl getConfiguration() {
abortIfClosed();
return configuration;
}
@Override
public WebTargetImpl path(String path) throws NullPointerException {
abortIfClosed();
if (path == null)
throw new NullPointerException("Param was null");
UriBuilder copy = uriBuilder.clone().path(path);
return newInstance(client, copy, configuration);
}
@Override
public WebTargetImpl resolveTemplate(String name, Object value) throws NullPointerException {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
if (value == null)
throw new NullPointerException("Param was null");
String val = configuration.toString(value);
UriBuilder copy = uriBuilder.clone().resolveTemplate(name, val);
WebTargetImpl target = newInstance(client, copy, configuration);
return target;
}
@Override
public WebTargetImpl resolveTemplates(Map<String, Object> templateValues) throws NullPointerException {
abortIfClosed();
if (templateValues == null)
throw new NullPointerException("Param was null");
if (templateValues.isEmpty())
return this;
Map<String, Object> vals = new HashMap<String, Object>();
for (Map.Entry<String, Object> entry : templateValues.entrySet()) {
if (entry.getKey() == null || entry.getValue() == null)
throw new NullPointerException("Param was null");
String val = configuration.toString(entry.getValue());
vals.put(entry.getKey(), val);
}
UriBuilder copy = uriBuilder.clone().resolveTemplates(vals);
WebTargetImpl target = newInstance(client, copy, configuration);
return target;
}
@Override
public WebTargetImpl resolveTemplate(String name, Object value, boolean encodeSlashInPath)
throws NullPointerException {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
if (value == null)
throw new NullPointerException("Param was null");
String val = configuration.toString(value);
UriBuilder copy = uriBuilder.clone().resolveTemplate(name, val, encodeSlashInPath);
WebTargetImpl target = newInstance(client, copy, configuration);
return target;
}
@Override
public WebTargetImpl resolveTemplateFromEncoded(String name, Object value) throws NullPointerException {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
if (value == null)
throw new NullPointerException("Param was null");
String val = configuration.toString(value);
UriBuilder copy = uriBuilder.clone().resolveTemplateFromEncoded(name, val);
WebTargetImpl target = newInstance(client, copy, configuration);
return target;
}
@Override
public WebTargetImpl resolveTemplatesFromEncoded(Map<String, Object> templateValues) throws NullPointerException {
abortIfClosed();
if (templateValues == null)
throw new NullPointerException("Param was null");
if (templateValues.isEmpty())
return this;
Map<String, Object> vals = new HashMap<String, Object>();
for (Map.Entry<String, Object> entry : templateValues.entrySet()) {
if (entry.getKey() == null || entry.getValue() == null)
throw new NullPointerException("Param was null");
String val = configuration.toString(entry.getValue());
vals.put(entry.getKey(), val);
}
UriBuilder copy = uriBuilder.clone().resolveTemplatesFromEncoded(vals);
WebTargetImpl target = newInstance(client, copy, configuration);
return target;
}
@Override
public WebTargetImpl resolveTemplates(Map<String, Object> templateValues, boolean encodeSlashInPath)
throws NullPointerException {
abortIfClosed();
if (templateValues == null)
throw new NullPointerException("Param was null");
if (templateValues.isEmpty())
return this;
Map<String, Object> vals = new HashMap<String, Object>();
for (Map.Entry<String, Object> entry : templateValues.entrySet()) {
if (entry.getKey() == null || entry.getValue() == null)
throw new NullPointerException("Param was null");
String val = configuration.toString(entry.getValue());
vals.put(entry.getKey(), val);
}
UriBuilder copy = uriBuilder.clone().resolveTemplates(vals, encodeSlashInPath);
WebTargetImpl target = newInstance(client, copy, configuration);
return target;
}
@Override
public WebTargetImpl matrixParam(String name, Object... values) throws NullPointerException {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
UriBuilder copy = uriBuilder.clone();
if (values.length == 1 && values[0] == null) {
copy.replaceMatrixParam(name, (Object[]) null);
} else {
String[] stringValues = toStringValues(values);
copy.matrixParam(name, (Object[]) stringValues);
}
return newInstance(client, copy, configuration);
}
private String[] toStringValues(Object[] values) {
String[] stringValues = new String[values.length];
for (int i = 0; i < stringValues.length; i++) {
stringValues[i] = configuration.toString(values[i]);
}
return stringValues;
}
@SuppressWarnings("unused")
public WebTargetImpl queryParam(String name, Collection<String> values) throws NullPointerException {
return queryParam(name, values.toArray(new Object[0]));
}
@Override
public WebTargetImpl queryParam(String name, Object... values) throws NullPointerException {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
UriBuilder copy = uriBuilder.clone();
if (copy instanceof UriBuilderImpl) {
var impl = (UriBuilderImpl) copy;
if (values == null || values.length == 0 || (values.length == 1 && values[0] == null)) {
impl.replaceQueryParam(name, (Object[]) null);
} else {
String[] stringValues = toStringValues(values);
impl.clientQueryParam(name, (Object[]) stringValues);
}
} else {
if (values == null || values.length == 0 || (values.length == 1 && values[0] == null)) {
copy.replaceQueryParam(name, (Object[]) null);
} else {
String[] stringValues = toStringValues(values);
copy.queryParam(name, (Object[]) stringValues);
}
}
return newInstance(client, copy, configuration);
}
@SuppressWarnings("unused") // this is used in the REST Client to support @BaseUrl
public WebTargetImpl withNewUri(URI uri) {
return newInstance(client, UriBuilder.fromUri(uri), configuration);
}
@SuppressWarnings("unused") // this is used in the REST Client to support @BaseUrl and observability is enabled
public WebTargetImpl withNewUri(URI uri, ClientRestHandler preClientSendHandler) {
return newInstance(client, UriBuilder.fromUri(uri), configuration, preClientSendHandler);
}
@SuppressWarnings("unused")
public WebTargetImpl queryParams(MultivaluedMap<String, Object> parameters)
throws IllegalArgumentException, NullPointerException {
abortIfClosed();
if (parameters == null)
throw new NullPointerException("Param was null");
UriBuilder copy = uriBuilder.clone();
for (Map.Entry<String, List<Object>> entry : parameters.entrySet()) {
String[] stringValues = toStringValues(entry.getValue().toArray());
copy.queryParam(entry.getKey(), (Object[]) stringValues);
}
return newInstance(client, copy, configuration);
}
public WebTargetImpl queryParamNoTemplate(String name, Object... values) throws NullPointerException {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
//The whole array can be represented as one object, so we need to cast it to array of objects
if (values.length == 1 && values[0].getClass().isArray() && !values[0].getClass().getComponentType().isPrimitive()) {
values = (Object[]) values[0];
}
String[] stringValues = toStringValues(values);
UriBuilderImpl copy;
if (uriBuilder instanceof UriBuilderImpl) {
copy = (UriBuilderImpl) uriBuilder.clone();
} else {
copy = UriBuilderImpl.fromTemplate(uriBuilder.toTemplate());
}
copy.clientQueryParam(name, (Object[]) stringValues);
return newInstance(client, copy, configuration);
}
protected WebTargetImpl newInstance(HttpClient client, UriBuilder uriBuilder,
ConfigurationImpl configuration) {
return newInstance(client, uriBuilder, configuration, preClientSendHandler);
}
protected WebTargetImpl newInstance(HttpClient client, UriBuilder uriBuilder,
ConfigurationImpl configuration,
ClientRestHandler preClientSendHandler) {
WebTargetImpl result = new WebTargetImpl(restClient, client, uriBuilder, configuration,
handlerChain.setPreClientSendHandler(preClientSendHandler),
requestContext);
result.setPreClientSendHandler(preClientSendHandler);
return result;
}
@Override
public Invocation.Builder request() {
abortIfClosed();
InvocationBuilderImpl builder = createQuarkusRestInvocationBuilder(client, uriBuilder, configuration);
builder.setChunked(chunked);
return builder;
}
@Override
public Invocation.Builder request(String... acceptedResponseTypes) {
abortIfClosed();
InvocationBuilderImpl builder = createQuarkusRestInvocationBuilder(client, uriBuilder, configuration);
builder.getHeaders().accept(acceptedResponseTypes);
builder.setChunked(chunked);
return builder;
}
@Override
public Invocation.Builder request(MediaType... acceptedResponseTypes) {
abortIfClosed();
InvocationBuilderImpl builder = createQuarkusRestInvocationBuilder(client, uriBuilder, configuration);
builder.getHeaders().accept(acceptedResponseTypes);
builder.setChunked(chunked);
return builder;
}
private void abortIfClosed() {
restClient.abortIfClosed();
}
protected InvocationBuilderImpl createQuarkusRestInvocationBuilder(HttpClient client, UriBuilder uri,
ConfigurationImpl configuration) {
URI actualUri = uri.build();
registerStorkFilterIfNeeded(configuration, actualUri);
return new InvocationBuilderImpl(actualUri, restClient, client, this, configuration,
handlerChain.setPreClientSendHandler(preClientSendHandler), requestContext);
}
/**
* If the URI starts with stork:// or storks://, then register the StorkClientRequestFilter automatically.
*
* @param configuration the configuration
* @param actualUri the uri
*/
private static void registerStorkFilterIfNeeded(ConfigurationImpl configuration, URI actualUri) {
if (actualUri.getScheme() != null && actualUri.getScheme().startsWith("stork")
&& !isStorkAlreadyRegistered(configuration)) {
configuration.register(StorkClientRequestFilter.class);
}
}
/**
* Checks if the Stork request filter is already registered.
* We cannot use configuration.isRegistered, as the user registration uses a subclass, and so fail the equality
* expectation.
* <p>
* This method prevents having the stork filter registered twice: once because the uri starts with stork:// and,
* once from the user.
*
* @param configuration the configuration
* @return {@code true} if stork is already registered.
*/
private static boolean isStorkAlreadyRegistered(ConfigurationImpl configuration) {
for (Class<?> clazz : configuration.getClasses()) {
if (clazz.getName().startsWith(StorkClientRequestFilter.class.getName())) {
return true;
}
}
return false;
}
@Override
public WebTargetImpl property(String name, Object value) {
abortIfClosed();
if (name == null)
throw new NullPointerException("Param was null");
configuration.property(name, value);
return this;
}
@Override
public WebTargetImpl register(Class<?> componentClass) {
abortIfClosed();
configuration.register(componentClass);
return this;
}
@Override
public WebTargetImpl register(Class<?> componentClass, int priority) {
abortIfClosed();
configuration.register(componentClass, priority);
return this;
}
@Override
public WebTargetImpl register(Class<?> componentClass, Class<?>... contracts) {
abortIfClosed();
configuration.register(componentClass, contracts);
return this;
}
@Override
public WebTargetImpl register(Class<?> componentClass, Map<Class<?>, Integer> contracts) {
abortIfClosed();
configuration.register(componentClass, contracts);
return this;
}
@Override
public WebTargetImpl register(Object component) {
abortIfClosed();
configuration.register(component);
return this;
}
@Override
public WebTargetImpl register(Object component, int priority) {
abortIfClosed();
configuration.register(component, priority);
return this;
}
@Override
public WebTargetImpl register(Object component, Class<?>... contracts) {
abortIfClosed();
configuration.register(component, contracts);
return this;
}
@Override
public WebTargetImpl register(Object component, Map<Class<?>, Integer> contracts) {
abortIfClosed();
configuration.register(component, contracts);
return this;
}
public WebTargetImpl setChunked(boolean chunked) {
this.chunked = chunked;
return this;
}
public WebTargetImpl setParamConverterProviders(List<ParamConverterProvider> providers) {
this.paramConverterProviders = providers;
return this;
}
public <T> T proxy(Class<?> clazz) {
return restClient.getClientContext().getClientProxies().get(clazz, this, paramConverterProviders);
}
public ClientImpl getRestClient() {
return restClient;
}
@SuppressWarnings("unused")
public void setPreClientSendHandler(ClientRestHandler preClientSendHandler) {
this.preClientSendHandler = preClientSendHandler;
}
Serialisers getSerialisers() {
return restClient.getClientContext().getSerialisers();
}
}
| WebTargetImpl |
java | apache__camel | components/camel-ai/camel-weaviate/src/test/java/org/apache/camel/component/weaviate/WeaviateTestSupport.java | {
"start": 1245,
"end": 1805
} | class ____ extends CamelTestSupport {
@RegisterExtension
static WeaviateService WEAVIATE = WeaviateServiceFactory.createSingletonService();
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
WeaviateVectorDbComponent component = context.getComponent("weaviate", WeaviateVectorDbComponent.class);
component.getConfiguration().setHost(WEAVIATE.getWeaviateHost() + ":" + WEAVIATE.getWeaviatePort());
return context;
}
}
| WeaviateTestSupport |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/validation/AbstractNormalScopedFinalTest.java | {
"start": 1834,
"end": 1882
} | class ____ {
}
@Vetoed
static | FinalFoo |
java | apache__camel | components/camel-iso8583/src/main/java/org/apache/camel/dataformat/iso8583/Iso8583Constants.java | {
"start": 901,
"end": 1060
} | class ____ {
@Metadata(description = "The ISO-Type", javaType = "String")
public static final String ISO_TYPE = "CamelIso8583IsoType";
}
| Iso8583Constants |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java | {
"start": 1224,
"end": 13566
} | class ____ implements GroupingAggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("min", ElementType.BYTES_REF),
new IntermediateStateDesc("seen", ElementType.BOOLEAN) );
private final MinBytesRefAggregator.GroupingState state;
private final List<Integer> channels;
private final DriverContext driverContext;
public MinBytesRefGroupingAggregatorFunction(List<Integer> channels,
MinBytesRefAggregator.GroupingState state, DriverContext driverContext) {
this.channels = channels;
this.state = state;
this.driverContext = driverContext;
}
public static MinBytesRefGroupingAggregatorFunction create(List<Integer> channels,
DriverContext driverContext) {
return new MinBytesRefGroupingAggregatorFunction(channels, MinBytesRefAggregator.initGrouping(driverContext), driverContext);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
Page page) {
BytesRefBlock valueBlock = page.getBlock(channels.get(0));
BytesRefVector valueVector = valueBlock.asVector();
if (valueVector == null) {
maybeEnableGroupIdTracking(seenGroupIds, valueBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, valueBlock);
}
@Override
public void close() {
}
};
}
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueVector);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueVector);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, valueVector);
}
@Override
public void close() {
}
};
}
private void addRawInput(int positionOffset, IntArrayBlock groups, BytesRefBlock valueBlock) {
BytesRef valueScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (valueBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valueStart = valueBlock.getFirstValueIndex(valuesPosition);
int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition);
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
BytesRef valueValue = valueBlock.getBytesRef(valueOffset, valueScratch);
MinBytesRefAggregator.combine(state, groupId, valueValue);
}
}
}
}
private void addRawInput(int positionOffset, IntArrayBlock groups, BytesRefVector valueVector) {
BytesRef valueScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
BytesRef valueValue = valueVector.getBytesRef(valuesPosition, valueScratch);
MinBytesRefAggregator.combine(state, groupId, valueValue);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block minUncast = page.getBlock(channels.get(0));
if (minUncast.areAllValuesNull()) {
return;
}
BytesRefVector min = ((BytesRefBlock) minUncast).asVector();
Block seenUncast = page.getBlock(channels.get(1));
if (seenUncast.areAllValuesNull()) {
return;
}
BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
assert min.getPositionCount() == seen.getPositionCount();
BytesRef minScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
MinBytesRefAggregator.combineIntermediate(state, groupId, min.getBytesRef(valuesPosition, minScratch), seen.getBoolean(valuesPosition));
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups, BytesRefBlock valueBlock) {
BytesRef valueScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (valueBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valueStart = valueBlock.getFirstValueIndex(valuesPosition);
int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition);
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
BytesRef valueValue = valueBlock.getBytesRef(valueOffset, valueScratch);
MinBytesRefAggregator.combine(state, groupId, valueValue);
}
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups,
BytesRefVector valueVector) {
BytesRef valueScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
BytesRef valueValue = valueVector.getBytesRef(valuesPosition, valueScratch);
MinBytesRefAggregator.combine(state, groupId, valueValue);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block minUncast = page.getBlock(channels.get(0));
if (minUncast.areAllValuesNull()) {
return;
}
BytesRefVector min = ((BytesRefBlock) minUncast).asVector();
Block seenUncast = page.getBlock(channels.get(1));
if (seenUncast.areAllValuesNull()) {
return;
}
BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
assert min.getPositionCount() == seen.getPositionCount();
BytesRef minScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
MinBytesRefAggregator.combineIntermediate(state, groupId, min.getBytesRef(valuesPosition, minScratch), seen.getBoolean(valuesPosition));
}
}
}
private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock valueBlock) {
BytesRef valueScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
if (valueBlock.isNull(valuesPosition)) {
continue;
}
int groupId = groups.getInt(groupPosition);
int valueStart = valueBlock.getFirstValueIndex(valuesPosition);
int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition);
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
BytesRef valueValue = valueBlock.getBytesRef(valueOffset, valueScratch);
MinBytesRefAggregator.combine(state, groupId, valueValue);
}
}
}
private void addRawInput(int positionOffset, IntVector groups, BytesRefVector valueVector) {
BytesRef valueScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
int groupId = groups.getInt(groupPosition);
BytesRef valueValue = valueVector.getBytesRef(valuesPosition, valueScratch);
MinBytesRefAggregator.combine(state, groupId, valueValue);
}
}
@Override
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block minUncast = page.getBlock(channels.get(0));
if (minUncast.areAllValuesNull()) {
return;
}
BytesRefVector min = ((BytesRefBlock) minUncast).asVector();
Block seenUncast = page.getBlock(channels.get(1));
if (seenUncast.areAllValuesNull()) {
return;
}
BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
assert min.getPositionCount() == seen.getPositionCount();
BytesRef minScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int groupId = groups.getInt(groupPosition);
int valuesPosition = groupPosition + positionOffset;
MinBytesRefAggregator.combineIntermediate(state, groupId, min.getBytesRef(valuesPosition, minScratch), seen.getBoolean(valuesPosition));
}
}
private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, BytesRefBlock valueBlock) {
if (valueBlock.mayHaveNulls()) {
state.enableGroupIdTracking(seenGroupIds);
}
}
@Override
public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
state.enableGroupIdTracking(seenGroupIds);
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
state.toIntermediate(blocks, offset, selected, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
GroupingAggregatorEvaluationContext ctx) {
blocks[offset] = MinBytesRefAggregator.evaluateFinal(state, selected, ctx);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| MinBytesRefGroupingAggregatorFunction |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/util/reflection/FieldInitializationReport.java | {
"start": 216,
"end": 1525
} | class ____ {
private final Object fieldInstance;
private final boolean wasInitialized;
private final boolean wasInitializedUsingConstructorArgs;
public FieldInitializationReport(
Object fieldInstance,
boolean wasInitialized,
boolean wasInitializedUsingConstructorArgs) {
this.fieldInstance = fieldInstance;
this.wasInitialized = wasInitialized;
this.wasInitializedUsingConstructorArgs = wasInitializedUsingConstructorArgs;
}
/**
* Returns the actual field instance.
*
* @return the actual instance
*/
public Object fieldInstance() {
return fieldInstance;
}
/**
* Indicate whether the field was created during the process or not.
*
* @return <code>true</code> if created, <code>false</code> if the field did already hold an instance.
*/
public boolean fieldWasInitialized() {
return wasInitialized;
}
/**
* Indicate whether the field was created using constructor args.
*
* @return <code>true</code> if field was created using constructor parameters.
*/
public boolean fieldWasInitializedUsingContructorArgs() {
return wasInitializedUsingConstructorArgs;
}
/**
* Returns the | FieldInitializationReport |
java | elastic__elasticsearch | x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java | {
"start": 1272,
"end": 4920
} | class ____ extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return List.of(LocalStateEnrich.class);
}
public void testExecute() throws Exception {
XContentBuilder source = XContentBuilder.builder(XContentType.SMILE.xContent());
source.startObject();
source.field("key1", "value1");
source.field("key2", "value2");
source.endObject();
String indexName = EnrichPolicy.ENRICH_INDEX_NAME_BASE + "1";
IndexRequest indexRequest = new IndexRequest(indexName);
indexRequest.source(source);
client().index(indexRequest).actionGet();
client().admin().indices().refresh(new RefreshRequest(indexName)).actionGet();
int numSearches = randomIntBetween(2, 32);
MultiSearchRequest request = new MultiSearchRequest();
for (int i = 0; i < numSearches; i++) {
SearchRequest searchRequest = new SearchRequest(indexName);
searchRequest.source().from(0);
searchRequest.source().size(1);
searchRequest.source().query(new MatchAllQueryBuilder());
searchRequest.source().fetchSource("key1", null);
request.add(searchRequest);
}
assertResponse(
client().execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(request)),
response -> {
assertThat(response.getResponses().length, equalTo(numSearches));
for (int i = 0; i < numSearches; i++) {
assertThat(response.getResponses()[i].isFailure(), is(false));
assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L));
Map<String, Object> sourceAsMap = response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap();
assertThat(sourceAsMap.size(), equalTo(1));
assertThat(sourceAsMap.get("key1"), equalTo("value1"));
}
}
);
}
public void testNonEnrichIndex() throws Exception {
createIndex("index");
MultiSearchRequest request = new MultiSearchRequest();
request.add(new SearchRequest("index"));
Exception e = expectThrows(
ActionRequestValidationException.class,
client().execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(request))
);
assertThat(e.getMessage(), equalTo("Validation Failed: 1: index [index] is not an enrich index;"));
}
public void testMultipleShards() throws Exception {
String indexName = EnrichPolicy.ENRICH_INDEX_NAME_BASE + "1";
createIndex(indexName, Settings.builder().put("index.number_of_shards", 2).build());
MultiSearchRequest request = new MultiSearchRequest();
request.add(new SearchRequest(indexName));
Exception e = expectThrows(
IllegalStateException.class,
client().execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(request))
);
assertThat(e.getMessage(), equalTo("index [.enrich-1] should have 1 shard, but has 2 shards"));
}
public void testMultipleIndices() throws Exception {
MultiSearchRequest request = new MultiSearchRequest();
request.add(new SearchRequest("index1"));
request.add(new SearchRequest("index2"));
expectThrows(AssertionError.class, () -> new EnrichShardMultiSearchAction.Request(request));
}
}
| EnrichShardMultiSearchActionTests |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 889147,
"end": 889969
} | class ____ the input data. Append a to the end of the name if you want the input to be an array type.", displayName = "Out Type"),
@YamlProperty(name = "produces", type = "string", description = "To define the content type what the REST service produces (uses for output), such as application/xml or application/json", displayName = "Produces"),
@YamlProperty(name = "skipBindingOnErrorCode", type = "boolean", defaultValue = "false", description = "Whether to skip binding on output if there is a custom HTTP error code header. This allows to build custom error messages that do not bind to json / xml etc, as success messages otherwise will do.", displayName = "Skip Binding On Error Code"),
@YamlProperty(name = "type", type = "string", description = "Sets the | of |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StaticQualifiedUsingExpressionTest.java | {
"start": 10495,
"end": 10812
} | class ____ {
void test() {
new b.Lib();
int x = Lib.CONST + b.Lib.CONST;
}
}
""")
.doTest();
}
@Test
public void expr() {
refactoringHelper
.addInputLines(
"I.java",
"""
| Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/unionsubclass2/UnionSubclassTest.java | {
"start": 1123,
"end": 10801
} | class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testUnionSubclass(SessionFactoryScope scope) {
scope.inTransaction(
s -> {
Employee mark = new Employee();
mark.setName( "Mark" );
mark.setTitle( "internal sales" );
mark.setSex( 'M' );
mark.setAddress( "buckhead" );
mark.setZip( "30305" );
mark.setCountry( "USA" );
Customer joe = new Customer();
joe.setName( "Joe" );
joe.setAddress( "San Francisco" );
joe.setZip( "XXXXX" );
joe.setCountry( "USA" );
joe.setComments( "Very demanding" );
joe.setSex( 'M' );
joe.setSalesperson( mark );
Person yomomma = new Person();
yomomma.setName( "mum" );
yomomma.setSex( 'F' );
s.persist( yomomma );
s.persist( mark );
s.persist( joe );
try {
assertEquals( s.createQuery( "from java.io.Serializable" ).list().size(), 0 );
fail( "IllegalArgumentException expected" );
}
catch (Exception e) {
assertThat( e, instanceOf( IllegalArgumentException.class ) );
}
assertEquals( s.createQuery( "from Person" ).list().size(), 3 );
assertEquals( s.createQuery( "from Person p where p.class = Customer" ).list().size(), 1 );
assertEquals( s.createQuery( "from Person p where p.class = Person" ).list().size(), 1 );
assertEquals( s.createQuery( "from Person p where type(p) in :who" ).setParameter(
"who",
Customer.class
).list().size(), 1 );
assertEquals( s.createQuery( "from Person p where type(p) in :who" ).setParameterList(
"who",
new Class[] {
Customer.class,
Person.class
}
).list().size(), 2 );
s.clear();
List customers = s.createQuery( "from Customer c left join fetch c.salesperson" ).list();
for ( Object customer : customers ) {
Customer c = (Customer) customer;
assertTrue( Hibernate.isInitialized( c.getSalesperson() ) );
assertEquals( c.getSalesperson().getName(), "Mark" );
}
assertEquals( customers.size(), 1 );
s.clear();
customers = s.createQuery( "from Customer" ).list();
for ( Object customer : customers ) {
Customer c = (Customer) customer;
assertFalse( Hibernate.isInitialized( c.getSalesperson() ) );
assertEquals( c.getSalesperson().getName(), "Mark" );
}
assertEquals( customers.size(), 1 );
s.clear();
mark = s.get( Employee.class, Long.valueOf( mark.getId() ) );
joe = s.get( Customer.class, Long.valueOf( joe.getId() ) );
mark.setZip( "30306" );
assertEquals( s.createQuery( "from Person p where p.address.zip = '30306'" ).list().size(), 1 );
CriteriaBuilder criteriaBuilder = s.getCriteriaBuilder();
CriteriaQuery<Person> criteria = criteriaBuilder.createQuery( Person.class );
Root<Person> root = criteria.from( Person.class );
CriteriaBuilder.In<Object> addresses = criteriaBuilder.in( root.get( "address" ) );
addresses.value( mark.getAddress() );
addresses.value( joe.getAddress() );
criteria.where( addresses );
s.createQuery( criteria ).list();
// s.createCriteria( Person.class ).add(
// Restrictions.in( "address", new Address[] { mark.getAddress(), joe.getAddress()} ) ).list();
s.remove( mark );
s.remove( joe );
s.remove( yomomma );
assertTrue( s.createQuery( "from Person" ).list().isEmpty() );
}
);
}
@Test
public void testQuerySubclassAttribute(SessionFactoryScope scope) {
scope.inTransaction(
s -> {
Person p = new Person();
p.setName( "Emmanuel" );
p.setSex( 'M' );
s.persist( p );
Employee q = new Employee();
q.setName( "Steve" );
q.setSex( 'M' );
q.setTitle( "Mr" );
q.setSalary( new BigDecimal( 1000 ) );
s.persist( q );
List result = s.createQuery( "from Person where salary > 100" ).list();
assertEquals( result.size(), 1 );
assertSame( result.get( 0 ), q );
result = s.createQuery( "from Person where salary > 100 or name like 'E%'" ).list();
assertEquals( result.size(), 2 );
CriteriaBuilder criteriaBuilder = s.getCriteriaBuilder();
CriteriaQuery<Person> criteria = criteriaBuilder.createQuery( Person.class );
Root<Person> root = criteria.from( Person.class );
criteria.where( criteriaBuilder.gt( criteriaBuilder.treat( root, Employee.class ).get( "salary" ), new BigDecimal( 100 ) ) );
result = s.createQuery( criteria ).list();
// result = s.createCriteria( Person.class )
// .add( Property.forName( "salary" ).gt( new BigDecimal( 100 ) ) )
// .list();
assertEquals( result.size(), 1 );
assertSame( result.get( 0 ), q );
result = s.createQuery( "select salary from Person where salary > 100" ).list();
assertEquals( result.size(), 1 );
assertEquals( ( (BigDecimal) result.get( 0 ) ).intValue(), 1000 );
s.remove( p );
s.remove( q );
}
);
}
@Test
public void testCustomColumnReadAndWrite(SessionFactoryScope scope) {
scope.inTransaction(
s -> {
final double HEIGHT_INCHES = 73;
final double HEIGHT_CENTIMETERS = HEIGHT_INCHES * 2.54d;
Person p = new Person();
p.setName( "Emmanuel" );
p.setSex( 'M' );
p.setHeightInches( HEIGHT_INCHES );
s.persist( p );
final double PASSWORD_EXPIRY_WEEKS = 4;
final double PASSWORD_EXPIRY_DAYS = PASSWORD_EXPIRY_WEEKS * 7d;
Employee e = new Employee();
e.setName( "Steve" );
e.setSex( 'M' );
e.setTitle( "Mr" );
e.setPasswordExpiryDays( PASSWORD_EXPIRY_DAYS );
s.persist( e );
s.flush();
// Test value conversion during insert
// Value returned by Oracle native query is a Types.NUMERIC, which is mapped to a BigDecimalType;
// Cast returned value to Number then call Number.doubleValue() so it works on all dialects.
Double heightViaSql =
( (Number) s.createNativeQuery(
"select height_centimeters from UPerson where name='Emmanuel'" )
.uniqueResult() ).doubleValue();
assertEquals( HEIGHT_CENTIMETERS, heightViaSql, 0.01d );
Double expiryViaSql =
( (Number) s.createNativeQuery( "select pwd_expiry_weeks from UEmployee where person_id=?" )
.setParameter( 1, e.getId() )
.uniqueResult()
).doubleValue();
assertEquals( PASSWORD_EXPIRY_WEEKS, expiryViaSql, 0.01d );
// Test projection
Double heightViaHql = (Double) s.createQuery(
"select p.heightInches from Person p where p.name = 'Emmanuel'" )
.uniqueResult();
assertEquals( HEIGHT_INCHES, heightViaHql, 0.01d );
Double expiryViaHql = (Double) s.createQuery(
"select e.passwordExpiryDays from Employee e where e.name = 'Steve'" ).uniqueResult();
assertEquals( PASSWORD_EXPIRY_DAYS, expiryViaHql, 0.01d );
// Test restriction and entity load via criteria
CriteriaBuilder criteriaBuilder = s.getCriteriaBuilder();
CriteriaQuery<Person> personCriteria = criteriaBuilder.createQuery( Person.class );
Root<Person> personRoot = personCriteria.from( Person.class );
personCriteria.where( criteriaBuilder.between(
personRoot.get( "heightInches" ),
HEIGHT_INCHES - 0.01d,
HEIGHT_INCHES + 0.01d
) );
p = s.createQuery( personCriteria ).uniqueResult();
// p = (Person) s.createCriteria( Person.class )
// .add( Restrictions.between( "heightInches", HEIGHT_INCHES - 0.01d, HEIGHT_INCHES + 0.01d ) )
// .uniqueResult();
assertEquals( HEIGHT_INCHES, p.getHeightInches(), 0.01d );
CriteriaQuery<Employee> employeeCriteria = criteriaBuilder.createQuery( Employee.class );
Root<Employee> employeeRoot = employeeCriteria.from( Employee.class );
employeeCriteria.where( criteriaBuilder.between(
employeeRoot.get( "passwordExpiryDays" ),
PASSWORD_EXPIRY_DAYS - 0.01d,
PASSWORD_EXPIRY_DAYS + 0.01d
) );
e = s.createQuery( employeeCriteria ).uniqueResult();
// e = (Employee) s.createCriteria( Employee.class )
// .add( Restrictions.between(
// "passwordExpiryDays",
// PASSWORD_EXPIRY_DAYS - 0.01d,
// PASSWORD_EXPIRY_DAYS - 0.01d,
// ) )
// .uniqueResult();
assertEquals( PASSWORD_EXPIRY_DAYS, e.getPasswordExpiryDays(), 0.01d );
// Test predicate and entity load via HQL
p = (Person) s.createQuery( "from Person p where p.heightInches between ?1 and ?2" )
.setParameter( 1, HEIGHT_INCHES - 0.01d )
.setParameter( 2, HEIGHT_INCHES + 0.01d )
.uniqueResult();
assertEquals( HEIGHT_INCHES, p.getHeightInches(), 0.01d );
e = (Employee) s.createQuery( "from Employee e where e.passwordExpiryDays between ?1 and ?2" )
.setParameter( 1, PASSWORD_EXPIRY_DAYS - 0.01d )
.setParameter( 2, PASSWORD_EXPIRY_DAYS + 0.01d )
.uniqueResult();
assertEquals( PASSWORD_EXPIRY_DAYS, e.getPasswordExpiryDays(), 0.01d );
// Test update
p.setHeightInches( 1 );
e.setPasswordExpiryDays( 7 );
s.flush();
heightViaSql =
( (Number) s.createNativeQuery(
"select height_centimeters from UPerson where name='Emmanuel'" )
.uniqueResult() )
.doubleValue();
assertEquals( 2.54d, heightViaSql, 0.01d );
expiryViaSql =
( (Number) s.createNativeQuery( "select pwd_expiry_weeks from UEmployee where person_id=?" )
.setParameter( 1, e.getId() )
.uniqueResult()
).doubleValue();
assertEquals( 1d, expiryViaSql, 0.01d );
s.remove( p );
s.remove( e );
}
);
}
}
| UnionSubclassTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/hive/HiveCreateTableTest_28_struct.java | {
"start": 917,
"end": 5092
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"CREATE EXTERNAL TABLE `json_table_1`(\n" +
" `docid` string COMMENT 'from deserializer', \n" +
" `user_1` struct<id:int,username:string,name:string,shippingaddress:struct<address1:string,address2:string,city:string,state:string>,orders:array<struct<itemid:int,orderdate:string>>> COMMENT 'from deserializer')\n" +
"ROW FORMAT SERDE \n" +
" 'org.apache.hive.hcatalog.data.JsonSerDe' \n" +
"STORED AS INPUTFORMAT \n" +
" 'org.apache.hadoop.mapred.TextInputFormat' \n" +
"OUTPUTFORMAT \n" +
" 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" +
"LOCATION\n" +
" 'oss://acs:ram::1013022312866336:role&aliyunopenanalyticsaccessingossrole@oss-cn-beijing-for-openanalytics-test/datasets/test/json/hcatalog_serde/table_1'\n" +
"TBLPROPERTIES (\n" +
" 'COLUMN_STATS_ACCURATE'='false', \n" +
" 'numFiles'='1', \n" +
" 'numRows'='-1', \n" +
" 'rawDataSize'='-1', \n" +
" 'totalSize'='347', \n" +
" 'transient_lastDdlTime'='1530879306')";
List<SQLStatement> statementList = SQLUtils.toStatementList(sql, JdbcConstants.HIVE);
SQLStatement stmt = statementList.get(0);
System.out.println(stmt.toString());
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.HIVE);
stmt.accept(visitor);
{
String text = SQLUtils.toSQLString(stmt, JdbcConstants.HIVE);
assertEquals("CREATE EXTERNAL TABLE `json_table_1` (\n" +
"\t`docid` string COMMENT 'from deserializer',\n" +
"\t`user_1` STRUCT<\n" +
"\t\tid:int,\n" +
"\t\tusername:string,\n" +
"\t\tname:string,\n" +
"\t\tshippingaddress:STRUCT<address1:string, address2:string, city:string, state:string>,\n" +
"\t\torders:ARRAY<STRUCT<itemid:int, orderdate:string>>\n" +
"\t> COMMENT 'from deserializer'\n" +
")\n" +
"ROW FORMAT\n" +
"\tSERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
"STORED AS\n" +
"\tINPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'\n" +
"\tOUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" +
"LOCATION 'oss://acs:ram::1013022312866336:role&aliyunopenanalyticsaccessingossrole@oss-cn-beijing-for-openanalytics-test/datasets/test/json/hcatalog_serde/table_1'\n" +
"TBLPROPERTIES (\n" +
"\t'COLUMN_STATS_ACCURATE' = 'false',\n" +
"\t'numFiles' = '1',\n" +
"\t'numRows' = '-1',\n" +
"\t'rawDataSize' = '-1',\n" +
"\t'totalSize' = '347',\n" +
"\t'transient_lastDdlTime' = '1530879306'\n" +
")", text);
}
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(2, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertEquals(0, visitor.getRelationships().size());
assertEquals(0, visitor.getOrderByColumns().size());
assertTrue(visitor.containsTable("json_table_1"));
}
}
| HiveCreateTableTest_28_struct |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/config/OptimizerConfigOptions.java | {
"start": 1464,
"end": 1577
} | class ____ configuration constants used by Flink's table planner module.
*
* <p>NOTE: All option keys in this | holds |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/remote/ability/ServerRemoteAbility.java | {
"start": 882,
"end": 2200
} | class ____ implements Serializable {
private static final long serialVersionUID = -3069795759506428390L;
/**
* if support remote connection.
*/
private boolean supportRemoteConnection;
/**
* if support grpc report.
*/
private boolean grpcReportEnabled = true;
public boolean isSupportRemoteConnection() {
return this.supportRemoteConnection;
}
public void setSupportRemoteConnection(boolean supportRemoteConnection) {
this.supportRemoteConnection = supportRemoteConnection;
}
public boolean isGrpcReportEnabled() {
return grpcReportEnabled;
}
public void setGrpcReportEnabled(boolean grpcReportEnabled) {
this.grpcReportEnabled = grpcReportEnabled;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ServerRemoteAbility that = (ServerRemoteAbility) o;
return supportRemoteConnection == that.supportRemoteConnection && grpcReportEnabled == that.grpcReportEnabled;
}
@Override
public int hashCode() {
return Objects.hash(supportRemoteConnection, grpcReportEnabled);
}
}
| ServerRemoteAbility |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/web/OAuth2TokenRevocationEndpointFilter.java | {
"start": 3090,
"end": 8842
} | class ____ extends OncePerRequestFilter {
/**
* The default endpoint {@code URI} for token revocation requests.
*/
private static final String DEFAULT_TOKEN_REVOCATION_ENDPOINT_URI = "/oauth2/revoke";
private final AuthenticationManager authenticationManager;
private final RequestMatcher tokenRevocationEndpointMatcher;
private AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource = new WebAuthenticationDetailsSource();
private AuthenticationConverter authenticationConverter;
private AuthenticationSuccessHandler authenticationSuccessHandler = this::sendRevocationSuccessResponse;
private AuthenticationFailureHandler authenticationFailureHandler = new OAuth2ErrorAuthenticationFailureHandler();
/**
* Constructs an {@code OAuth2TokenRevocationEndpointFilter} using the provided
* parameters.
* @param authenticationManager the authentication manager
*/
public OAuth2TokenRevocationEndpointFilter(AuthenticationManager authenticationManager) {
this(authenticationManager, DEFAULT_TOKEN_REVOCATION_ENDPOINT_URI);
}
/**
* Constructs an {@code OAuth2TokenRevocationEndpointFilter} using the provided
* parameters.
* @param authenticationManager the authentication manager
* @param tokenRevocationEndpointUri the endpoint {@code URI} for token revocation
* requests
*/
public OAuth2TokenRevocationEndpointFilter(AuthenticationManager authenticationManager,
String tokenRevocationEndpointUri) {
Assert.notNull(authenticationManager, "authenticationManager cannot be null");
Assert.hasText(tokenRevocationEndpointUri, "tokenRevocationEndpointUri cannot be empty");
this.authenticationManager = authenticationManager;
this.tokenRevocationEndpointMatcher = PathPatternRequestMatcher.withDefaults()
.matcher(HttpMethod.POST, tokenRevocationEndpointUri);
this.authenticationConverter = new OAuth2TokenRevocationAuthenticationConverter();
}
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException {
if (!this.tokenRevocationEndpointMatcher.matches(request)) {
filterChain.doFilter(request, response);
return;
}
try {
Authentication tokenRevocationAuthentication = this.authenticationConverter.convert(request);
if (tokenRevocationAuthentication instanceof AbstractAuthenticationToken authenticationToken) {
authenticationToken.setDetails(this.authenticationDetailsSource.buildDetails(request));
}
Authentication tokenRevocationAuthenticationResult = this.authenticationManager
.authenticate(tokenRevocationAuthentication);
this.authenticationSuccessHandler.onAuthenticationSuccess(request, response,
tokenRevocationAuthenticationResult);
}
catch (OAuth2AuthenticationException ex) {
SecurityContextHolder.clearContext();
if (this.logger.isTraceEnabled()) {
this.logger.trace(LogMessage.format("Token revocation request failed: %s", ex.getError()), ex);
}
this.authenticationFailureHandler.onAuthenticationFailure(request, response, ex);
}
}
/**
* Sets the {@link AuthenticationDetailsSource} used for building an authentication
* details instance from {@link HttpServletRequest}.
* @param authenticationDetailsSource the {@link AuthenticationDetailsSource} used for
* building an authentication details instance from {@link HttpServletRequest}
*/
public void setAuthenticationDetailsSource(
AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource) {
Assert.notNull(authenticationDetailsSource, "authenticationDetailsSource cannot be null");
this.authenticationDetailsSource = authenticationDetailsSource;
}
/**
* Sets the {@link AuthenticationConverter} used when attempting to extract a Revoke
* Token Request from {@link HttpServletRequest} to an instance of
* {@link OAuth2TokenRevocationAuthenticationToken} used for authenticating the
* request.
* @param authenticationConverter the {@link AuthenticationConverter} used when
* attempting to extract a Revoke Token Request from {@link HttpServletRequest}
*/
public void setAuthenticationConverter(AuthenticationConverter authenticationConverter) {
Assert.notNull(authenticationConverter, "authenticationConverter cannot be null");
this.authenticationConverter = authenticationConverter;
}
/**
* Sets the {@link AuthenticationSuccessHandler} used for handling an
* {@link OAuth2TokenRevocationAuthenticationToken}.
* @param authenticationSuccessHandler the {@link AuthenticationSuccessHandler} used
* for handling an {@link OAuth2TokenRevocationAuthenticationToken}
*/
public void setAuthenticationSuccessHandler(AuthenticationSuccessHandler authenticationSuccessHandler) {
Assert.notNull(authenticationSuccessHandler, "authenticationSuccessHandler cannot be null");
this.authenticationSuccessHandler = authenticationSuccessHandler;
}
/**
* Sets the {@link AuthenticationFailureHandler} used for handling an
* {@link OAuth2AuthenticationException} and returning the {@link OAuth2Error Error
* Response}.
* @param authenticationFailureHandler the {@link AuthenticationFailureHandler} used
* for handling an {@link OAuth2AuthenticationException}
*/
public void setAuthenticationFailureHandler(AuthenticationFailureHandler authenticationFailureHandler) {
Assert.notNull(authenticationFailureHandler, "authenticationFailureHandler cannot be null");
this.authenticationFailureHandler = authenticationFailureHandler;
}
private void sendRevocationSuccessResponse(HttpServletRequest request, HttpServletResponse response,
Authentication authentication) {
response.setStatus(HttpStatus.OK.value());
}
}
| OAuth2TokenRevocationEndpointFilter |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractBeanFactory.java | {
"start": 4127,
"end": 4298
} | class ____ bean factory implementations which obtain bean definitions
* from some backend resource (where bean definition access is an expensive operation).
*
* <p>This | for |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/aot/TestContextAotGeneratorTests.java | {
"start": 3714,
"end": 3754
} | class ____ {
// no beans
}
}
}
| Config |
java | spring-projects__spring-security | acl/src/main/java/org/springframework/security/acls/domain/GrantedAuthoritySid.java | {
"start": 1213,
"end": 2422
} | class ____ implements Sid {
private final String grantedAuthority;
public GrantedAuthoritySid(String grantedAuthority) {
Assert.hasText(grantedAuthority, "GrantedAuthority required");
this.grantedAuthority = grantedAuthority;
}
public GrantedAuthoritySid(GrantedAuthority grantedAuthority) {
Assert.notNull(grantedAuthority, "GrantedAuthority required");
Assert.notNull(grantedAuthority.getAuthority(),
"This Sid is only compatible with GrantedAuthoritys that provide a non-null getAuthority()");
this.grantedAuthority = grantedAuthority.getAuthority();
}
@Override
public boolean equals(Object object) {
if ((object == null) || !(object instanceof GrantedAuthoritySid)) {
return false;
}
// Delegate to getGrantedAuthority() to perform actual comparison (both should be
// identical)
return ((GrantedAuthoritySid) object).getGrantedAuthority().equals(this.getGrantedAuthority());
}
@Override
public int hashCode() {
return this.getGrantedAuthority().hashCode();
}
public String getGrantedAuthority() {
return this.grantedAuthority;
}
@Override
public String toString() {
return "GrantedAuthoritySid[" + this.grantedAuthority + "]";
}
}
| GrantedAuthoritySid |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/coordination/OperatorEventGateway.java | {
"start": 1974,
"end": 2253
} | interface ____ {
/**
* Sends the given event to the coordinator, where it will be handled by the {@link
* OperatorCoordinator#handleEventFromOperator(int, int, OperatorEvent)} method.
*/
void sendEventToCoordinator(OperatorEvent event);
}
| OperatorEventGateway |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/XsltComponentBuilderFactory.java | {
"start": 1816,
"end": 8189
} | interface ____ extends ComponentBuilder<XsltComponent> {
/**
* Whether to allow to use resource template from header or not (default
* false). Enabling this allows to specify dynamic templates via message
* header. However this can be seen as a potential security
* vulnerability if the header is coming from a malicious user, so use
* this with care.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param allowTemplateFromHeader the value to set
* @return the dsl builder
*/
default XsltComponentBuilder allowTemplateFromHeader(boolean allowTemplateFromHeader) {
doSetProperty("allowTemplateFromHeader", allowTemplateFromHeader);
return this;
}
/**
* Cache for the resource content (the stylesheet file) when it is
* loaded. If set to false Camel will reload the stylesheet file on each
* message processing. This is good for development. A cached stylesheet
* can be forced to reload at runtime via JMX using the
* clearCachedStylesheet operation.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param contentCache the value to set
* @return the dsl builder
*/
default XsltComponentBuilder contentCache(boolean contentCache) {
doSetProperty("contentCache", contentCache);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default XsltComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default XsltComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To use a custom XSLT transformer factory, specified as a FQN class
* name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*
* @param transformerFactoryClass the value to set
* @return the dsl builder
*/
default XsltComponentBuilder transformerFactoryClass(java.lang.String transformerFactoryClass) {
doSetProperty("transformerFactoryClass", transformerFactoryClass);
return this;
}
/**
* A configuration strategy to apply on freshly created instances of
* TransformerFactory.
*
* The option is a:
* <code>org.apache.camel.component.xslt.TransformerFactoryConfigurationStrategy</code> type.
*
* Group: advanced
*
* @param transformerFactoryConfigurationStrategy the value to set
* @return the dsl builder
*/
default XsltComponentBuilder transformerFactoryConfigurationStrategy(org.apache.camel.component.xslt.TransformerFactoryConfigurationStrategy transformerFactoryConfigurationStrategy) {
doSetProperty("transformerFactoryConfigurationStrategy", transformerFactoryConfigurationStrategy);
return this;
}
/**
* To use a custom UriResolver. Should not be used together with the
* option 'uriResolverFactory'.
*
* The option is a:
* <code>javax.xml.transform.URIResolver</code> type.
*
* Group: advanced
*
* @param uriResolver the value to set
* @return the dsl builder
*/
default XsltComponentBuilder uriResolver(javax.xml.transform.URIResolver uriResolver) {
doSetProperty("uriResolver", uriResolver);
return this;
}
/**
* To use a custom UriResolver which depends on a dynamic endpoint
* resource URI. Should not be used together with the option
* 'uriResolver'.
*
* The option is a:
* <code>org.apache.camel.component.xslt.XsltUriResolverFactory</code> type.
*
* Group: advanced
*
* @param uriResolverFactory the value to set
* @return the dsl builder
*/
default XsltComponentBuilder uriResolverFactory(org.apache.camel.component.xslt.XsltUriResolverFactory uriResolverFactory) {
doSetProperty("uriResolverFactory", uriResolverFactory);
return this;
}
}
| XsltComponentBuilder |
java | google__auto | common/src/test/java/com/google/auto/common/BasicAnnotationProcessorTest.java | {
"start": 9026,
"end": 9072
} | interface ____ {}
/** Generates a | GeneratesCode |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeFromSupplierTest.java | {
"start": 1344,
"end": 6996
} | class ____ extends RxJavaTest {
@Test
public void fromSupplier() {
final AtomicInteger atomicInteger = new AtomicInteger();
Maybe.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
atomicInteger.incrementAndGet();
return null;
}
})
.test()
.assertResult();
assertEquals(1, atomicInteger.get());
}
@Test
public void fromSupplierTwice() {
final AtomicInteger atomicInteger = new AtomicInteger();
Supplier<Object> supplier = new Supplier<Object>() {
@Override
public Object get() throws Exception {
atomicInteger.incrementAndGet();
return null;
}
};
Maybe.fromSupplier(supplier)
.test()
.assertResult();
assertEquals(1, atomicInteger.get());
Maybe.fromSupplier(supplier)
.test()
.assertResult();
assertEquals(2, atomicInteger.get());
}
@Test
public void fromSupplierInvokesLazy() {
final AtomicInteger atomicInteger = new AtomicInteger();
Maybe<Object> completable = Maybe.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
atomicInteger.incrementAndGet();
return null;
}
});
assertEquals(0, atomicInteger.get());
completable
.test()
.assertResult();
assertEquals(1, atomicInteger.get());
}
@Test
public void fromSupplierThrows() {
Maybe.fromSupplier(new Supplier<Object>() {
@Override
public Object get() throws Exception {
throw new UnsupportedOperationException();
}
})
.test()
.assertFailure(UnsupportedOperationException.class);
}
@SuppressWarnings("unchecked")
@Test
public void supplier() throws Throwable {
final int[] counter = { 0 };
Maybe<Integer> m = Maybe.fromSupplier(new Supplier<Integer>() {
@Override
public Integer get() throws Exception {
counter[0]++;
return 0;
}
});
assertTrue(m.getClass().toString(), m instanceof Supplier);
assertEquals(0, ((Supplier<Void>)m).get());
assertEquals(1, counter[0]);
}
@Test
public void noErrorLoss() throws Exception {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final CountDownLatch cdl1 = new CountDownLatch(1);
final CountDownLatch cdl2 = new CountDownLatch(1);
TestObserver<Integer> to = Maybe.fromSupplier(new Supplier<Integer>() {
@Override
public Integer get() throws Exception {
cdl1.countDown();
cdl2.await(5, TimeUnit.SECONDS);
return 1;
}
}).subscribeOn(Schedulers.single()).test();
assertTrue(cdl1.await(5, TimeUnit.SECONDS));
to.dispose();
int timeout = 10;
while (timeout-- > 0 && errors.isEmpty()) {
Thread.sleep(100);
}
TestHelper.assertUndeliverable(errors, 0, InterruptedException.class);
} finally {
RxJavaPlugins.reset();
}
}
@SuppressWarnings("unchecked")
@Test
public void shouldNotDeliverResultIfSubscriberUnsubscribedBeforeEmission() throws Throwable {
Supplier<String> func = mock(Supplier.class);
final CountDownLatch funcLatch = new CountDownLatch(1);
final CountDownLatch observerLatch = new CountDownLatch(1);
when(func.get()).thenAnswer(new Answer<String>() {
@Override
public String answer(InvocationOnMock invocation) throws Throwable {
observerLatch.countDown();
try {
funcLatch.await();
} catch (InterruptedException e) {
// It's okay, unsubscription causes Thread interruption
// Restoring interruption status of the Thread
Thread.currentThread().interrupt();
}
return "should_not_be_delivered";
}
});
Maybe<String> fromSupplierObservable = Maybe.fromSupplier(func);
Observer<Object> observer = TestHelper.mockObserver();
TestObserver<String> outer = new TestObserver<>(observer);
fromSupplierObservable
.subscribeOn(Schedulers.computation())
.subscribe(outer);
// Wait until func will be invoked
observerLatch.await();
// Unsubscribing before emission
outer.dispose();
// Emitting result
funcLatch.countDown();
// func must be invoked
verify(func).get();
// Observer must not be notified at all
verify(observer).onSubscribe(any(Disposable.class));
verifyNoMoreInteractions(observer);
}
@Test
public void success() {
Maybe.fromSupplier(() -> 1)
.test()
.assertResult(1);
}
@Test
public void disposeUpfront() throws Throwable {
@SuppressWarnings("unchecked")
Supplier<Integer> supplier = mock(Supplier.class);
Maybe.fromSupplier(supplier)
.test(true)
.assertEmpty();
verify(supplier, never()).get();
}
}
| MaybeFromSupplierTest |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/OptionalEquality.java | {
"start": 1193,
"end": 1733
} | class ____ extends AbstractReferenceEquality {
private static final ImmutableSet<String> OPTIONAL_CLASSES =
ImmutableSet.of(com.google.common.base.Optional.class.getName(), "java.util.Optional");
@Override
protected boolean matchArgument(ExpressionTree tree, VisitorState state) {
Type type = ASTHelpers.getType(tree);
for (String className : OPTIONAL_CLASSES) {
if (ASTHelpers.isSameType(type, state.getTypeFromString(className), state)) {
return true;
}
}
return false;
}
}
| OptionalEquality |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/GraphQLMutationTest.java | {
"start": 2204,
"end": 2635
} | class ____ {
@Query
public String ping() {
return "pong";
}
@Mutation
public String hello(@DefaultValue("Phillip") String name) {
return "Hello " + name;
}
@Mutation
public String error(@DefaultValue("Phillip") String name) throws BusinessError {
throw new BusinessError("Some error");
}
}
public static | MutationApi |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerPresentEmbeddableTest.java | {
"start": 11257,
"end": 11776
} | class ____ {
private String text;
protected MappedSuperclassForEmbeddable() {
// For Hibernate ORM only - it will change the property value through reflection
}
public MappedSuperclassForEmbeddable(String text) {
this.text = text;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}
@Embeddable
public static | MappedSuperclassForEmbeddable |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/sql/BaseSqmToSqlAstConverter.java | {
"start": 70309,
"end": 230980
} | class ____ extends AbstractJdbcParameter {
private final BeforeExecutionGenerator generator;
public IdGeneratorParameter(BasicEntityIdentifierMapping identifierMapping, BeforeExecutionGenerator generator) {
super( identifierMapping.getJdbcMapping() );
this.generator = generator;
}
@Override
public void bindParameterValue(
PreparedStatement statement,
int startPosition,
JdbcParameterBindings jdbcParamBindings,
ExecutionContext executionContext) throws SQLException {
getJdbcMapping().getJdbcValueBinder().bind(
statement,
generator.generate( executionContext.getSession(), null, null, INSERT ),
startPosition,
executionContext.getSession()
);
}
}
@Override
public Values visitValues(SqmValues sqmValues) {
final List<SqmPath<?>> insertionTargetPaths =
currentSqmStatement instanceof SqmInsertStatement<?> insertStatement
? insertStatement.getInsertionTargetPaths() : null;
final List<SqmExpression<?>> expressions = sqmValues.getExpressions();
final ArrayList<Expression> valuesExpressions = new ArrayList<>( expressions.size() );
for ( int i = 0; i < expressions.size(); i++ ) {
// todo: add WriteExpression handling
valuesExpressions.add(
insertionTargetPaths == null
? (Expression) expressions.get( i ).accept( this )
: visitWithInferredType( expressions.get( i ), insertionTargetPaths.get( i ) )
);
}
return new Values( valuesExpressions );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Select statement
@Override
public SelectStatement visitSelectStatement(SqmSelectStatement<?> statement) {
final CteContainer oldCteContainer = cteContainer;
final CteContainer cteContainer = this.visitCteContainer( statement );
final SqmStatement<?> oldSqmStatement = this.currentSqmStatement;
this.currentSqmStatement = statement;
final QueryPart queryPart = visitQueryPart( statement.getQueryPart() );
final List<DomainResult<?>> domainResults = queryPart.isRoot() ? this.domainResults : emptyList();
try {
return new SelectStatement( cteContainer, queryPart, domainResults );
}
finally {
this.currentSqmStatement = oldSqmStatement;
this.cteContainer = oldCteContainer;
rootPathsForLockingCollector = null;
}
}
@Override
public DynamicInstantiation<?> visitDynamicInstantiation(SqmDynamicInstantiation<?> sqmDynamicInstantiation) {
final SqmDynamicInstantiationTarget<?> instantiationTarget = sqmDynamicInstantiation.getInstantiationTarget();
final DynamicInstantiationNature instantiationNature = instantiationTarget.getNature();
final JavaType<?> targetTypeDescriptor = interpretInstantiationTarget( instantiationTarget );
final DynamicInstantiation<?> dynamicInstantiation =
new DynamicInstantiation<>( instantiationNature, targetTypeDescriptor );
for ( SqmDynamicInstantiationArgument<?> sqmArgument : sqmDynamicInstantiation.getArguments() ) {
if ( sqmArgument.getSelectableNode() instanceof SqmPath<?> sqmPath ) {
prepareForSelection( sqmPath );
}
final DomainResultProducer<?> argumentResultProducer = (DomainResultProducer<?>) sqmArgument.accept( this );
dynamicInstantiation.addArgument( sqmArgument.getAlias(), argumentResultProducer, this );
}
dynamicInstantiation.complete();
return dynamicInstantiation;
}
private <X> JavaType<X> interpretInstantiationTarget(SqmDynamicInstantiationTarget<X> instantiationTarget) {
return getCreationContext().getTypeConfiguration().getJavaTypeRegistry()
.getDescriptor( switch ( instantiationTarget.getNature() ) {
case LIST -> List.class;
case MAP -> Map.class;
default -> instantiationTarget.getJavaType();
} );
}
@Override
public CteStatement visitCteStatement(SqmCteStatement<?> sqmCteStatement) {
final SqmCteTable<?> sqmCteTable = sqmCteStatement.getCteTable();
final String cteName = getCteName( sqmCteTable );
final SqmSelectQuery<?> selectStatement = sqmCteStatement.getCteDefinition();
final SqmQueryPart<?> queryPart = selectStatement.getQueryPart();
final Literal cycleLiteral = getLiteral( sqmCteStatement.getCycleLiteral() );
final Literal noCycleLiteral = getLiteral( sqmCteStatement.getNoCycleLiteral() );
final JdbcMapping cycleMarkType = cycleLiteral == null ? null : cycleLiteral.getJdbcMapping();
final BasicType<String> stringType =
creationContext.getTypeConfiguration()
.getBasicTypeForJavaType( String.class );
if ( queryPart instanceof SqmQueryGroup<?> queryGroup
&& queryPart.getSortSpecifications().isEmpty()
&& queryPart.getFetchExpression() == null
&& queryPart.getOffsetExpression() == null ) {
switch ( queryGroup.getSetOperator() ) {
case UNION:
case UNION_ALL:
if ( queryGroup.getQueryParts().size() == 2 ) {
// This could potentially be a recursive CTE,
// for which we need to visit the non-recursive part first
// and register a CteStatement before visiting the recursive part.
// This is important, because the recursive part will refer to the CteStatement,
// hence we require that it is registered already
final CteContainer oldCteContainer = cteContainer;
final CteContainer subCteContainer = this.visitCteContainer( selectStatement );
// Note that the following is a trimmed down version of what visitQueryGroup does
try {
final SqmQueryPart<?> firstPart = queryGroup.getQueryParts().get( 0 );
final SqmQueryPart<?> secondPart = queryGroup.getQueryParts().get( 1 );
final List<QueryPart> newQueryParts = new ArrayList<>( 2 );
final QueryGroup group = new QueryGroup(
getProcessingStateStack().isEmpty(),
queryGroup.getSetOperator(),
newQueryParts
);
final SqlAstQueryPartProcessingStateImpl processingState = new SqlAstQueryPartProcessingStateImpl(
group,
getCurrentProcessingState(),
this,
DelegatingSqmAliasedNodeCollector::new,
currentClauseStack::getCurrent,
deduplicateSelectionItems
);
final DelegatingSqmAliasedNodeCollector collector =
(DelegatingSqmAliasedNodeCollector) processingState.getSqlExpressionResolver();
sqmQueryPartStack.push( queryGroup );
pushProcessingState( processingState );
try {
newQueryParts.add( visitQueryPart( firstPart ) );
collector.setSqmAliasedNodeCollector(
(SqmAliasedNodeCollector) lastPoppedProcessingState.getSqlExpressionResolver()
);
// Before visiting the second query part, setup the CteStatement and register it
final CteTable cteTable = new CteTable(
cteName,
sqmCteTable.resolveTableGroupProducer(
cteName,
newQueryParts.get( 0 )
.getFirstQuerySpec()
.getSelectClause()
.getSqlSelections(),
lastPoppedFromClauseIndex
)
);
final CteStatement cteStatement = new CteStatement(
cteTable,
new SelectStatement( subCteContainer, group, emptyList() ),
sqmCteStatement.getMaterialization(),
sqmCteStatement.getSearchClauseKind(),
visitSearchBySpecifications( cteTable, sqmCteStatement.getSearchBySpecifications() ),
createCteColumn( sqmCteStatement.getSearchAttributeName(), stringType ),
visitCycleColumns( cteTable, sqmCteStatement.getCycleAttributes() ),
createCteColumn( sqmCteStatement.getCycleMarkAttributeName(), cycleMarkType ),
createCteColumn( sqmCteStatement.getCyclePathAttributeName(), stringType ),
cycleLiteral,
noCycleLiteral
);
oldCteContainer.addCteStatement( cteStatement );
// Finally, visit the second part, which is potentially the recursive part
newQueryParts.add( visitQueryPart( secondPart ) );
return cteStatement;
}
finally {
popProcessingStateStack();
sqmQueryPartStack.pop();
}
}
finally {
this.cteContainer = oldCteContainer;
}
}
break;
}
}
final SelectStatement statement;
if ( selectStatement instanceof SqmSubQuery<?> subquery ) {
statement = visitSubQueryExpression( subquery );
}
else if ( selectStatement instanceof SqmSelectStatement<?> select ) {
statement = visitSelectStatement( select );
}
else {
throw new AssertionFailure( "Unrecognized select statemengt type" );
}
final CteTable cteTable = new CteTable(
cteName,
sqmCteTable.resolveTableGroupProducer(
cteName,
statement.getQuerySpec().getSelectClause().getSqlSelections(),
lastPoppedFromClauseIndex
)
);
final CteStatement cteStatement = new CteStatement(
cteTable,
statement,
sqmCteStatement.getMaterialization(),
sqmCteStatement.getSearchClauseKind(),
visitSearchBySpecifications( cteTable, sqmCteStatement.getSearchBySpecifications() ),
createCteColumn( sqmCteStatement.getSearchAttributeName(), stringType ),
visitCycleColumns( cteTable, sqmCteStatement.getCycleAttributes() ),
createCteColumn( sqmCteStatement.getCycleMarkAttributeName(), cycleMarkType ),
createCteColumn( sqmCteStatement.getCyclePathAttributeName(), stringType ),
cycleLiteral,
noCycleLiteral
);
cteContainer.addCteStatement( cteStatement );
return cteStatement;
}
private String getCteName(SqmCteTable<?> sqmCteTable) {
final String name = sqmCteTable.getName();
if ( cteNameMapping == null ) {
cteNameMapping = new HashMap<>();
}
final String key = sqmCteTable.getCteName();
final String generatedCteName = cteNameMapping.get( key );
if ( generatedCteName != null ) {
return generatedCteName;
}
final String cteName = name != null
? generateCteName( name )
: generateCteName( "cte" + cteNameMapping.size() );
cteNameMapping.put( key, cteName );
return cteName;
}
private String generateCteName(String baseName) {
String name = baseName;
int maxTries = 5;
for ( int i = 0; i < maxTries; i++ ) {
if ( !cteNameMapping.containsKey( name ) ) {
return name;
}
name = baseName + "_" + i;
}
throw new InterpretationException(
String.format(
"Couldn't generate CTE name for base name [%s] after %d tries",
baseName,
maxTries
)
);
}
private Literal getLiteral(SqmLiteral<?> value) {
return value == null ? null : (Literal) visitLiteral( value );
}
protected List<SearchClauseSpecification> visitSearchBySpecifications(
CteTable cteTable,
List<JpaSearchOrder> searchBySpecifications) {
if ( searchBySpecifications == null || searchBySpecifications.isEmpty() ) {
return null;
}
final int size = searchBySpecifications.size();
final List<SearchClauseSpecification> searchClauseSpecifications = new ArrayList<>( size );
for ( int i = 0; i < size; i++ ) {
final JpaSearchOrder specification = searchBySpecifications.get( i );
forEachCteColumn(
cteTable,
(SqmCteTableColumn) specification.getAttribute(),
cteColumn -> searchClauseSpecifications.add(
new SearchClauseSpecification(
cteColumn,
specification.getSortOrder(),
specification.getNullPrecedence()
)
)
);
}
return searchClauseSpecifications;
}
protected CteColumn createCteColumn(String cteColumn, JdbcMapping jdbcMapping) {
return cteColumn == null ? null : new CteColumn( cteColumn, jdbcMapping );
}
protected void forEachCteColumn(CteTable cteTable, SqmCteTableColumn cteColumn, Consumer<CteColumn> consumer) {
final List<CteColumn> cteColumns = cteTable.getCteColumns();
final int size = cteColumns.size();
for ( int i = 0; i < size; i++ ) {
final CteColumn column = cteColumns.get( i );
final String columnName = column.getColumnExpression();
final String sqmName = cteColumn.getName();
if ( columnName.regionMatches( 0, sqmName, 0, sqmName.length() )
&& ( columnName.length() == sqmName.length()
|| columnName.charAt( sqmName.length() ) == '_' ) ) {
consumer.accept( column );
}
}
}
protected List<CteColumn> visitCycleColumns(CteTable cteTable, List<JpaCteCriteriaAttribute> cycleColumns) {
if ( cycleColumns == null || cycleColumns.isEmpty() ) {
return null;
}
else {
final int size = cycleColumns.size();
final List<CteColumn> columns = new ArrayList<>( size );
for ( int i = 0; i < size; i++ ) {
forEachCteColumn(
cteTable,
(SqmCteTableColumn) cycleColumns.get( i ),
columns::add
);
}
return columns;
}
}
@Override
public CteContainer visitCteContainer(SqmCteContainer consumer) {
final Collection<SqmCteStatement<?>> sqmCteStatements = consumer.getCteStatements();
cteContainer = new CteContainerImpl( cteContainer );
if ( !sqmCteStatements.isEmpty() ) {
final boolean originalDeduplicateSelectionItems = deduplicateSelectionItems;
deduplicateSelectionItems = false;
currentClauseStack.push( Clause.WITH );
for ( SqmCteStatement<?> sqmCteStatement : sqmCteStatements ) {
visitCteStatement( sqmCteStatement );
}
currentClauseStack.pop();
deduplicateSelectionItems = originalDeduplicateSelectionItems;
// Avoid leaking the processing state from CTEs to upper levels
lastPoppedFromClauseIndex = null;
lastPoppedProcessingState = null;
}
return cteContainer;
}
@Override
public QueryPart visitQueryPart(SqmQueryPart<?> queryPart) {
return (QueryPart) super.visitQueryPart( queryPart );
}
	/**
	 * Translates a SQM query group (set operation such as UNION/INTERSECT/EXCEPT)
	 * into a SQL AST {@link QueryGroup}. Positional sort items that refer to
	 * selections are tracked so they can later be resolved against the SqlSelections
	 * of the first (left-most) query part.
	 */
	@Override
	public QueryGroup visitQueryGroup(SqmQueryGroup<?> queryGroup) {
		final List<? extends SqmQueryPart<?>> queryParts = queryGroup.getQueryParts();
		final int size = queryParts.size();
		final List<QueryPart> newQueryParts = new ArrayList<>( size );
		final QueryGroup group = new QueryGroup(
				getProcessingStateStack().isEmpty(),
				queryGroup.getSetOperator(),
				newQueryParts
		);
		// Save the field so it can be restored once the first query part has been visited
		final Map<NavigablePath, Map.Entry<Integer, List<SqlSelection>>> originalTrackedFetchSelectionsForGroup = this.trackedFetchSelectionsForGroup;
		if ( queryGroup.getOrderByClause() != null && queryGroup.getOrderByClause().hasPositionalSortItem() ) {
			trackSelectionsForGroup = true;
			// Find the order by elements which refer to attributes of the selections
			// and register the navigable paths so that a list of SqlSelection is tracked for the fetch
			Map<NavigablePath, Map.Entry<Integer, List<SqlSelection>>> trackedFetchSelectionsForGroup = null;
			for ( SqmSortSpecification sortSpecification : queryGroup.getOrderByClause().getSortSpecifications() ) {
				if ( sortSpecification.getExpression() instanceof SqmAliasedNodeRef nodeRef ) {
					if ( nodeRef.getNavigablePath() != null ) {
						if ( trackedFetchSelectionsForGroup == null ) {
							trackedFetchSelectionsForGroup = new HashMap<>();
						}
						// 1-based SQM position is stored 0-based together with an empty selection list to fill
						trackedFetchSelectionsForGroup.put( nodeRef.getNavigablePath(), new AbstractMap.SimpleEntry<>( nodeRef.getPosition() - 1, new ArrayList<>() ) );
					}
				}
			}
			this.trackedFetchSelectionsForGroup = trackedFetchSelectionsForGroup == null
					? Collections.emptyMap()
					: trackedFetchSelectionsForGroup;
		}
		final SqlAstQueryPartProcessingStateImpl processingState = new SqlAstQueryPartProcessingStateImpl(
				group,
				getCurrentProcessingState(),
				this,
				DelegatingSqmAliasedNodeCollector::new,
				currentClauseStack::getCurrent,
				deduplicateSelectionItems
		);
		final DelegatingSqmAliasedNodeCollector collector = (DelegatingSqmAliasedNodeCollector) processingState
				.getSqlExpressionResolver();
		sqmQueryPartStack.push( queryGroup );
		pushProcessingState( processingState );
		FromClauseIndex firstQueryPartIndex = null;
		SqlAstProcessingState firstPoppedProcessingState = null;
		try {
			// The first (left-most) query part defines the selections the group's
			// order-by positions resolve against
			newQueryParts.add( visitQueryPart( queryParts.get( 0 ) ) );
			firstQueryPartIndex = lastPoppedFromClauseIndex;
			firstPoppedProcessingState = lastPoppedProcessingState;
			collector.setSqmAliasedNodeCollector(
					(SqmAliasedNodeCollector) lastPoppedProcessingState.getSqlExpressionResolver()
			);
			visitOrderByOffsetAndFetch( queryGroup, group );
			// Tracking is only needed for the first query part; restore before the rest
			trackSelectionsForGroup = false;
			trackedFetchSelectionsForGroup = originalTrackedFetchSelectionsForGroup;
			for ( int i = 1; i < size; i++ ) {
				newQueryParts.add( visitQueryPart( queryParts.get( i ) ) );
			}
			return group;
		}
		finally {
			popProcessingStateStack();
			sqmQueryPartStack.pop();
			// Expose the first query part's state to the caller (e.g. for derived tables)
			lastPoppedFromClauseIndex = firstQueryPartIndex;
			lastPoppedProcessingState = firstPoppedProcessingState;
		}
	}
	/**
	 * Translates a single SQM query spec into a SQL AST {@link QuerySpec}, managing
	 * the processing-state, query-part and transformer stacks around the actual
	 * clause translation performed by {@link #querySpec}.
	 */
	@Override
	public QuerySpec visitQuerySpec(SqmQuerySpec<?> sqmQuerySpec) {
		final boolean topLevel = getProcessingStateStack().isEmpty();
		final QuerySpec sqlQuerySpec = new QuerySpec( topLevel, sqmQuerySpec.getFromClause().getNumberOfRoots() );
		// Restrictions collected while translating (e.g. correlation predicates) are
		// applied to this query spec and must not leak to the outer level
		final Predicate originalAdditionalRestrictions = additionalRestrictions;
		additionalRestrictions = null;
		final boolean oldInNestedContext = inNestedContext;
		inNestedContext = false;
		// When positional references must be resolved, wrap the expression resolver in
		// a tracker that records the SqlSelections per aliased-node position
		final SqlAstQueryPartProcessingStateImpl processingState;
		if ( trackAliasedNodePositions( sqmQuerySpec ) ) {
			processingState = new SqlAstQueryPartProcessingStateImpl(
					sqlQuerySpec,
					getCurrentProcessingState(),
					this,
					resolver -> new SqmAliasedNodePositionTracker( resolver, sqmQuerySpec.getSelectClause().getSelections() ),
					currentClauseStack::getCurrent,
					deduplicateSelectionItems
			);
		}
		else {
			processingState = new SqlAstQueryPartProcessingStateImpl(
					sqlQuerySpec,
					getCurrentProcessingState(),
					this,
					currentClauseStack::getCurrent,
					deduplicateSelectionItems
			);
		}
		final boolean originalDeduplicateSelectionItems = deduplicateSelectionItems;
		sqmQueryPartStack.push( sqmQuerySpec );
		// In sub-queries, we can never deduplicate the selection items as that might change semantics
		deduplicateSelectionItems = false;
		pushProcessingState( processingState );
		queryTransformers.push( new ArrayList<>() );
		try {
			return querySpec( sqmQuerySpec, sqlQuerySpec, topLevel, processingState );
		}
		finally {
			if ( additionalRestrictions != null ) {
				sqlQuerySpec.applyPredicate( additionalRestrictions );
			}
			additionalRestrictions = originalAdditionalRestrictions;
			inNestedContext = oldInNestedContext;
			popProcessingStateStack();
			queryTransformers.pop();
			sqmQueryPartStack.pop();
			deduplicateSelectionItems = originalDeduplicateSelectionItems;
		}
	}
	/**
	 * Performs the clause-by-clause translation of {@code sqmQuerySpec} into
	 * {@code sqlQuerySpec}: from, select, where, group by, having, then
	 * order-by/offset/fetch, and finally applies any registered query transformers.
	 *
	 * @return the (possibly transformed) SQL query spec
	 */
	private QuerySpec querySpec(
			SqmQuerySpec<?> sqmQuerySpec,
			QuerySpec sqlQuerySpec,
			boolean topLevel,
			SqlAstQueryPartProcessingStateImpl processingState) {
		// we want to visit the from-clause first
		visitFromClause( sqmQuerySpec.getFromClause() );
		visitSelectClause( sqmQuerySpec.getSelectClause() );
		final SqmWhereClause whereClause = sqmQuerySpec.getWhereClause();
		if ( whereClause != null ) {
			sqlQuerySpec.applyPredicate( visitWhereClause( whereClause.getPredicate() ) );
		}
		sqlQuerySpec.setGroupByClauseExpressions( visitGroupByClause( sqmQuerySpec.getGroupByClauseExpressions() ) );
		final SqmPredicate havingClausePredicate = sqmQuerySpec.getHavingClausePredicate();
		if ( havingClausePredicate != null ) {
			sqlQuerySpec.setHavingClauseRestrictions( visitHavingClause( havingClausePredicate ) );
		}
		visitOrderByOffsetAndFetch( sqmQuerySpec, sqlQuerySpec );
		if ( topLevel && statement instanceof SqmSelectStatement<?> ) {
			// Externally supplied order-by fragments apply only to the top-level select
			if ( orderByFragments != null ) {
				orderByFragments.forEach( entry -> entry.getKey().apply( sqlQuerySpec, entry.getValue(), this ) );
				orderByFragments = null;
			}
		}
		downgradeTreatUses( processingState );
		return applyTransformers( sqlQuerySpec );
	}
/**
* Look for treated {@code SqmFrom} registrations that have uses of the untreated {@code SqmFrom}.
* These {@code SqmFrom} nodes are then not treat-joined but rather treated only in expressions.
* Consider the following two queries. The latter also uses the untreated {@code SqmFrom}, and
* hence has different semantics i.e. the treat is not filtering, but just applies where it's used.
*
* <pre>select a.id from Root r join treat(r.attribute as Subtype) a where a.id = 1</pre>
*
* <pre>select a.id from Root r join r.attribute a where treat(a as Subtype).id = 1</pre>
*/
private void downgradeTreatUses(SqlAstQueryPartProcessingStateImpl processingState) {
processingState.getFromRegistrations().forEach( (key, value) -> {
if ( value != null && value ) {
downgradeTreatUses( getFromClauseIndex().getTableGroup( key.getNavigablePath() ) );
}
} );
}
private QuerySpec applyTransformers(QuerySpec sqlQuerySpec) {
QuerySpec finalQuerySpec = sqlQuerySpec;
@SuppressWarnings("unchecked")
final List<QueryTransformer> transformers = queryTransformers.getCurrent();
for ( QueryTransformer transformer : transformers ) {
finalQuerySpec = transformer.transform( cteContainer, finalQuerySpec, this );
}
return finalQuerySpec;
}
private boolean trackAliasedNodePositions(SqmQuerySpec<?> sqmQuerySpec) {
return trackSelectionsForGroup
|| sqmQuerySpec.getOrderByClause() != null && sqmQuerySpec.getOrderByClause().hasPositionalSortItem()
|| sqmQuerySpec.hasPositionalGroupItem()
// Since JPA Criteria queries can use the same expression object in order or group by items,
// we need to track the positions to be able to replace the expression in the items with alias references
// Also see #resolveGroupOrOrderByExpression for more details
|| statement.getQuerySource() == SqmQuerySource.CRITERIA
&& ( sqmQuerySpec.getOrderByClause() != null || !sqmQuerySpec.getGroupByClauseExpressions().isEmpty() );
}
private void downgradeTreatUses(TableGroup tableGroup) {
final Map<String, EntityNameUse> entityNameUses = tableGroupEntityNameUses.get( tableGroup );
if ( entityNameUses != null ) {
for ( Map.Entry<String, EntityNameUse> entry : entityNameUses.entrySet() ) {
if ( entry.getValue().getKind() == EntityNameUse.UseKind.TREAT ) {
entry.setValue( EntityNameUse.EXPRESSION );
}
}
}
}
	/**
	 * Translates the order-by clause and the offset/fetch expressions of the given
	 * SQM query part onto the SQL query part. Offset/fetch are skipped for the root
	 * query part when the query contains collection fetches, to retain cardinality.
	 */
	protected void visitOrderByOffsetAndFetch(SqmQueryPart<?> sqmQueryPart, QueryPart sqlQueryPart) {
		if ( sqmQueryPart.getOrderByClause() != null ) {
			currentClauseStack.push( Clause.ORDER );
			inferrableTypeAccessStack.push( () -> null );
			try {
				for ( SqmSortSpecification sortSpecification :
						sqmQueryPart.getOrderByClause().getSortSpecifications() ) {
					final SortSpecification specification = visitSortSpecification( sortSpecification );
					// null means the sort expression resolved to nothing renderable
					if ( specification != null ) {
						sqlQueryPart.addSortSpecification( specification );
					}
				}
			}
			finally {
				inferrableTypeAccessStack.pop();
				currentClauseStack.pop();
			}
		}
		// An empty clause stack means we are at the root query part
		if ( !containsCollectionFetches || !currentClauseStack.isEmpty() ) {
			// Strip off the root offset and limit expressions in case the query contains collection fetches to retain
			// the proper cardinality. We could implement pagination for single select statements differently in this
			// case by using a subquery e.g. `... where alias in (select subAlias from ... limit ...)`
			// or use window functions e.g. `select ... from (select ..., dense_rank() over(order by ..., id) rn from ...) tmp where tmp.rn between ...`
			// but these transformations/translations are non-trivial and can be done later
			inferrableTypeAccessStack.push( () -> getTypeConfiguration().getBasicTypeForJavaType( Integer.class ) );
			sqlQueryPart.setOffsetClauseExpression( visitOffsetExpression( sqmQueryPart.getOffsetExpression() ) );
			// Percent-based fetch clauses are typed as Double rather than Integer
			if ( sqmQueryPart.getFetchClauseType() == FetchClauseType.PERCENT_ONLY
					|| sqmQueryPart.getFetchClauseType() == FetchClauseType.PERCENT_WITH_TIES ) {
				inferrableTypeAccessStack.pop();
				inferrableTypeAccessStack.push( () -> getTypeConfiguration().getBasicTypeForJavaType( Double.class ) );
			}
			sqlQueryPart.setFetchClauseExpression(
					visitFetchExpression( sqmQueryPart.getFetchExpression() ),
					sqmQueryPart.getFetchClauseType()
			);
			inferrableTypeAccessStack.pop();
		}
	}
private TableGroup findTableGroupByPath(NavigablePath navigablePath) {
return getFromClauseAccess().getTableGroup( navigablePath );
}
	// Collects root navigable paths of the select clause for locking purposes.
	// Non-null only while visiting the root query-spec's select clause of a
	// top-level select statement (set and cleared in visitSelectClause).
	private Consumer<NavigablePath> rootPathsForLockingCollector;
	/**
	 * Translates the SQM select clause into the current SQL query spec's
	 * {@link SelectClause}. An empty SQM select clause is replaced by an implicit
	 * selection of the first from-clause root.
	 */
	@Override
	public SelectClause visitSelectClause(SqmSelectClause selectClause) {
		currentClauseStack.push( Clause.SELECT );
		try {
			final SelectClause sqlSelectClause = currentQuerySpec().getSelectClause();
			if ( sqmQueryPartStack.depth() == 1 && currentClauseStack.depth() == 1 ) {
				// these 2 conditions combined *should* indicate we have the
				// root query-spec of a top-level select statement
				rootPathsForLockingCollector = (path) ->
						currentQuerySpec().applyRootPathForLocking( path );
			}
			if ( selectClause.getSelections().isEmpty() ) {
				// no explicit selections: synthesize one from the (implicit) root
				final SqmFrom<?, ?> implicitSelection = determineImplicitSelection( (SqmQuerySpec<?>) getCurrentSqmQueryPart() );
				visitSelection( 0, new SqmSelection<>( implicitSelection, implicitSelection.nodeBuilder() ) );
			}
			else {
				final List<SqmSelection<?>> selections = selectClause.getSelections();
				for ( int i = 0; i < selections.size(); i++ ) {
					visitSelection( i, selections.get( i ) );
				}
				sqlSelectClause.makeDistinct( selectClause.isDistinct() );
			}
			return sqlSelectClause;
		}
		finally {
			// the collector must never outlive the select clause it was created for
			rootPathsForLockingCollector = null;
			currentClauseStack.pop();
		}
	}
protected SqmFrom<?, ?> determineImplicitSelection(SqmQuerySpec<?> querySpec) {
// Note that this is different from org.hibernate.query.hql.internal.SemanticQueryBuilder.buildInferredSelectClause
return querySpec.getFromClause().getRoots().get( 0 );
}
@Override
public Void visitSelection(SqmSelection<?> sqmSelection) {
visitSelection(
getCurrentSqmQueryPart().getFirstQuerySpec().getSelectClause().getSelections().indexOf( sqmSelection ),
sqmSelection
);
return null;
}
	/**
	 * Processes a single selection item: collects lock root paths, infers the
	 * insertion target type (for insert-select statements), and invokes the
	 * result producers.
	 */
	private void visitSelection(int index, SqmSelection<?> sqmSelection) {
		collectRootPathsForLocking( sqmSelection );
		// may push onto inferrableTypeAccessStack for insert-select statements...
		inferTargetPath( index );
		callResultProducers( resultProducers( sqmSelection ) );
		// ...which is balanced by this pop under the same condition as the push
		if ( statement instanceof SqmInsertSelectStatement<?>
				&& contributesToTopLevelSelectClause() ) {
			inferrableTypeAccessStack.pop();
		}
	}
private void collectRootPathsForLocking(SqmSelection<?> sqmSelection) {
if ( rootPathsForLockingCollector == null ) {
return;
}
collectRootPathsForLocking( sqmSelection.getSelectableNode() );
}
private void collectRootPathsForLocking(SqmSelectableNode<?> selectableNode) {
// roughly speaking we only care about 2 cases here:
// 1) entity path - the entity will be locked
// 2) scalar path - the entity from which the path originates will be locked
//
// note, however, that we need to account for both cases as the argument to a dynamic instantiation
if ( selectableNode instanceof SqmPath<?> selectedPath ) {
collectRootPathsForLocking( selectedPath );
}
else if ( selectableNode instanceof SqmDynamicInstantiation<?> dynamicInstantiation ) {
collectRootPathsForLocking( dynamicInstantiation );
}
}
private void collectRootPathsForLocking(SqmPath<?> selectedPath) {
assert rootPathsForLockingCollector != null;
if ( selectedPath == null ) {
// typically this comes from paths rooted in a CTE.
// regardless, without a path we cannot evaluate so just return.
return;
}
if ( selectedPath.getNodeType() instanceof EntityTypeImpl ) {
rootPathsForLockingCollector.accept( selectedPath.getNavigablePath() );
}
else {
collectRootPathsForLocking( selectedPath.getLhs() );
}
}
private void collectRootPathsForLocking(SqmDynamicInstantiation<?> dynamicInstantiation) {
dynamicInstantiation.getArguments().forEach( ( argument ) -> {
collectRootPathsForLocking( argument.getSelectableNode() );
} );
}
	/**
	 * For insert-select statements, pushes the value mapping of the insertion
	 * target path at {@code index} onto the inferrable-type stack so the select
	 * item is typed after its target column(s). The matching pop happens in
	 * {@code visitSelection(int, SqmSelection)} under the same condition.
	 */
	private void inferTargetPath(int index) {
		// Only infer the type on the "top level" select clauses
		// todo: add WriteExpression handling
		if ( statement instanceof SqmInsertSelectStatement<?> insertSelectStatement
				&& contributesToTopLevelSelectClause() ) {
			// we infer the target path
			final SqmPath<?> path = insertSelectStatement.getInsertionTargetPaths().get( index );
			inferrableTypeAccessStack.push( () -> determineValueMapping( path ) );
		}
	}
private boolean contributesToTopLevelSelectClause() {
return currentClauseStack.depth() == 1
&& currentClauseStack.getCurrent() == Clause.SELECT;
}
private void callResultProducers(List<Map.Entry<String, DomainResultProducer<?>>> resultProducers) {
final boolean needsDomainResults = domainResults != null && contributesToTopLevelSelectClause();
final boolean collectDomainResults = collectDomainResults( needsDomainResults );
resultProducers.forEach(
entry -> {
// this currentSqlSelectionCollector().next() is meant solely for resolving
// literal reference to a selection-item in the order-by or group-by clause.
// in the case of `DynamicInstantiation`, that ordering should ignore that
// level here. Visiting the dynamic-instantiation will manage this for its
// arguments
final DomainResultProducer<?> domainResultProducer = entry.getValue();
if ( !( domainResultProducer instanceof DynamicInstantiation<?> ) ) {
currentSqlSelectionCollector().next();
}
if ( collectDomainResults ) {
domainResults.add( domainResultProducer.createDomainResult( entry.getKey(), this ) );
}
else if ( needsDomainResults ) {
// We just create domain results for the purpose of creating selections
// This is necessary for top-level query specs within query groups to avoid cycles
domainResultProducer.createDomainResult( entry.getKey(), this );
}
else {
domainResultProducer.applySqlSelections( this );
}
}
);
}
private boolean collectDomainResults(boolean needsDomainResults) {
final Stack<SqlAstProcessingState> processingStateStack = getProcessingStateStack();
if ( processingStateStack.depth() == 1 ) {
return needsDomainResults;
}
else {
final SqlAstProcessingState current = processingStateStack.getCurrent();
// Since we only want to create domain results for the first/left-most query spec within query groups,
// we have to check if the current query spec is the left-most.
// This is the case when all upper level in-flight query groups are still empty
return needsDomainResults
&& processingStateStack.findCurrentFirstWithParameter( current, BaseSqmToSqlAstConverter::stackMatchHelper ) == null;
}
}
private List<Map.Entry<String, DomainResultProducer<?>>> resultProducers(SqmSelection<?> sqmSelection) {
final SqmSelectableNode<?> selectionNode = sqmSelection.getSelectableNode();
if ( selectionNode instanceof SqmJpaCompoundSelection<?> selectableNode ) {
final List<Map.Entry<String, DomainResultProducer<?>>> resultProducers =
new ArrayList<>( selectableNode.getSelectionItems().size() );
for ( SqmSelectableNode<?> selectionItem : selectableNode.getSelectionItems() ) {
if ( selectionItem instanceof SqmPath<?> path ) {
prepareForSelection( path );
}
resultProducers.add(
new AbstractMap.SimpleEntry<>(
selectionItem.getAlias(),
(DomainResultProducer<?>) selectionItem.accept( this )
)
);
}
return resultProducers;
}
else {
if ( selectionNode instanceof SqmPath<?> path ) {
prepareForSelection( path );
}
return singletonList(
new AbstractMap.SimpleEntry<>(
sqmSelection.getAlias(),
(DomainResultProducer<?>) selectionNode.accept( this )
)
);
}
}
	/**
	 * Resolves a group-by or order-by expression, translating positional
	 * references (and, for criteria queries, reused selection expression objects)
	 * into references to the already-resolved SqlSelections; any other expression
	 * is translated normally.
	 */
	protected Expression resolveGroupOrOrderByExpression(SqmExpression<?> groupByClauseExpression) {
		final int sqmPosition;
		final NavigablePath path;
		if ( groupByClauseExpression instanceof SqmAliasedNodeRef aliasedNodeRef ) {
			// explicit positional reference: SQM positions are 1-based
			sqmPosition = aliasedNodeRef.getPosition() - 1;
			path = aliasedNodeRef.getNavigablePath();
		}
		else if ( statement.getQuerySource() == SqmQuerySource.CRITERIA && currentClauseStack.getCurrent() != Clause.OVER ) {
			// In JPA Criteria we could be using the same expression object for the group/order by and select item
			// We try to find the select item position for this expression here which is not necessarily just an optimization.
			// This is vital to enable the support for parameters in these expressions.
			// Databases usually don't know if a parameter marker will have the same value as another parameter marker
			// and due to that, a database usually complains when seeing something like
			// `select ?, count(*) from dual group by ?` saying that there is a missing group by for the first `?`
			// To avoid this issue, we determine the position and let the SqlAstTranslator handle the rest.
			// Usually it will render `select ?, count(*) from dual group by 1` if supported
			// or force rendering the parameter as literal instead so that the database can see the grouping is fine
			final SqmQuerySpec<?> querySpec = getCurrentSqmQueryPart().getFirstQuerySpec();
			sqmPosition = indexOfExpression( querySpec.getSelectClause().getSelections(), groupByClauseExpression );
			path = null;
		}
		else {
			// not positional: translate the expression itself below
			sqmPosition = -1;
			path = null;
		}
		return sqmPosition != -1
				? selectionExpressions( path, sqmPosition )
				: (Expression) groupByClauseExpression.accept( this );
	}
private Expression selectionExpressions(NavigablePath path, int sqmPosition) {
final List<SqlSelection> selections =
path == null
? currentSqlSelectionCollector().getSelections( sqmPosition )
: trackedFetchSelectionsForGroup.get( path ).getValue();
assert selections != null
: String.format( Locale.ROOT, "No SqlSelections for SQM position `%s`", sqmPosition );
final List<Expression> expressions = new ArrayList<>( selections.size() );
for ( int i = 0; i < selections.size(); i++ ) {
final SqlSelectionExpression selectionExpression = selectionExpression( selections, i );
if ( selectionExpression != null ) {
expressions.add( selectionExpression );
}
}
return expressions.size() == 1
? expressions.get( 0 )
: new SqlTuple( expressions, null );
}
private SqlSelectionExpression selectionExpression(List<SqlSelection> selections, int i) {
final SqlSelection selection = selections.get( i );
// We skip duplicate selections which can occur when grouping/ordering by an entity alias.
// Duplication happens because the primary key of an entity usually acts as FK target of collections
// which is, just like the identifier itself, also registered as selection
for ( int j = 0; j < i; j++ ) {
if ( selections.get( j ) == selection ) {
return null;
}
}
return getCurrentSqmQueryPart() instanceof SqmQueryGroup<?>
// Reusing the SqlSelection for query groups would be wrong because the aliases do no exist
// So we have to use a literal expression in a new SqlSelection instance to refer to the position
? sqlSelectionExpression( selection )
: new SqlSelectionExpression( selection );
}
private SqlSelectionExpression sqlSelectionExpression(SqlSelection selection) {
return new SqlSelectionExpression(
new SqlSelectionImpl(
selection.getJdbcResultSetIndex(),
selection.getValuesArrayPosition(),
new QueryLiteral<>(
selection.getValuesArrayPosition(),
basicType( Integer.class )
),
false
)
);
}
private int indexOfExpression(List<? extends SqmAliasedNode<?>> selections, SqmExpression<?> node) {
final int result = indexOfExpression( 0, selections, node );
return result < 0 ? -1 : result;
}
	/**
	 * Recursive search for {@code node} within {@code selections}, descending into
	 * dynamic instantiations and compound selections. A non-negative return value
	 * is the flat position of the node; a negative value encodes "not found" as
	 * the negated total number of items scanned (see comment below).
	 */
	private int indexOfExpression(int offset, List<? extends SqmAliasedNode<?>> selections, SqmExpression<?> node) {
		// The idea of this method is that we return the negated index of the position at which we found the node
		// and if we didn't find the node, we return the offset + size to allow for recursive invocation
		// Encoding this into the integer allows us to avoid some kind of mutable state to handle size/offset
		for ( int i = 0; i < selections.size(); i++ ) {
			final SqmSelectableNode<?> selectableNode = selections.get( i ).getSelectableNode();
			if ( selectableNode instanceof SqmDynamicInstantiation<?> dynamicInstantiation ) {
				// search the instantiation arguments; a non-negative result is a hit
				final int subResult = indexOfExpression(
						offset + i,
						dynamicInstantiation.getArguments(),
						node
				);
				if ( subResult >= 0 ) {
					return subResult;
				}
				// decode the scanned-item count from the negative result to advance the offset
				offset = -subResult - i;
			}
			else if ( selectableNode instanceof SqmJpaCompoundSelection<?> compoundSelection ) {
				final List<? extends SqmSelectableNode<?>> selectionItems = compoundSelection.getSelectionItems();
				for ( int j = 0; j < selectionItems.size(); j++ ) {
					if ( selectionItems.get( j ) == node ) {
						return offset + i + j;
					}
				}
				offset += selectionItems.size();
			}
			else {
				// plain selection: identity comparison against the sought node
				if ( selectableNode == node ) {
					return offset + i;
				}
			}
		}
		return -( offset + selections.size() );
	}
@Override
public List<Expression> visitGroupByClause(List<SqmExpression<?>> groupByClauseExpressions) {
if ( !groupByClauseExpressions.isEmpty() ) {
currentClauseStack.push( Clause.GROUP );
inferrableTypeAccessStack.push( () -> null );
try {
final List<Expression> expressions = new ArrayList<>( groupByClauseExpressions.size() );
for ( SqmExpression<?> groupByClauseExpression : groupByClauseExpressions ) {
expressions.add( resolveGroupOrOrderByExpression( groupByClauseExpression ) );
}
return expressions;
}
finally {
inferrableTypeAccessStack.pop();
currentClauseStack.pop();
}
}
return emptyList();
}
@Override
public Predicate visitWhereClause(@Nullable SqmWhereClause whereClause) {
if ( whereClause == null ) {
return null;
}
return visitWhereClause( whereClause.getPredicate() );
}
private Predicate visitWhereClause(SqmPredicate sqmPredicate) {
currentClauseStack.push( Clause.WHERE );
inferrableTypeAccessStack.push( () -> null );
try {
return combinePredicates(
sqmPredicate != null ? (Predicate) sqmPredicate.accept( this ) : null,
consumeConjunctTreatTypeRestrictions()
);
}
finally {
inferrableTypeAccessStack.pop();
currentClauseStack.pop();
}
}
@Override
public Predicate visitHavingClause(SqmPredicate sqmPredicate) {
currentClauseStack.push( Clause.HAVING );
inferrableTypeAccessStack.push( () -> null );
try {
return combinePredicates(
sqmPredicate != null ? (Predicate) sqmPredicate.accept( this ) : null,
consumeConjunctTreatTypeRestrictions()
);
}
finally {
inferrableTypeAccessStack.pop();
currentClauseStack.pop();
}
}
	/**
	 * Visits the order-by clause via the default traversal; the individual sort
	 * specifications are handled by {@code visitSortSpecification}.
	 */
	@Override
	public Void visitOrderByClause(SqmOrderByClause orderByClause) {
		super.visitOrderByClause( orderByClause );
		return null;
	}
@Override
public SortSpecification visitSortSpecification(SqmSortSpecification sortSpecification) {
final Expression expression = resolveGroupOrOrderByExpression( sortSpecification.getSortExpression() );
if ( expression == null ) {
return null;
}
return new SortSpecification(
expression,
sortSpecification.getSortDirection(),
sortSpecification.getNullPrecedence(),
sortSpecification.isIgnoreCase()
);
}
@Override
public Expression visitOffsetExpression(SqmExpression<?> expression) {
if ( expression == null ) {
return null;
}
currentClauseStack.push( Clause.OFFSET );
try {
return (Expression) expression.accept( this );
}
finally {
currentClauseStack.pop();
}
}
@Override
public Expression visitFetchExpression(SqmExpression<?> expression) {
if ( expression == null ) {
return null;
}
currentClauseStack.push( Clause.FETCH );
try {
return (Expression) expression.accept( this );
}
finally {
currentClauseStack.pop();
}
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// FROM clause
@Override
public Void visitFromClause(SqmFromClause sqmFromClause) {
currentClauseStack.push( Clause.FROM );
try {
// First, consume correlated roots, because these table groups can be used in join predicates of other from nodes
sqmFromClause.visitRoots( this::consumeFromClauseCorrelatedRoot );
sqmFromClause.visitRoots( this::consumeFromClauseRoot );
}
finally {
currentClauseStack.pop();
}
return null;
}
	/**
	 * Resolves a correlated SQM root to a {@link TableGroup}. For inner-join-only
	 * correlations the parent's table group is wrapped in a {@link CorrelatedTableGroup}
	 * (rendered as plain column references); otherwise a fresh root table group is
	 * created and tied to the parent with an identifier-equality correlation predicate.
	 * Non-correlated roots are ignored here and handled by {@code consumeFromClauseRoot}.
	 */
	protected void consumeFromClauseCorrelatedRoot(SqmRoot<?> sqmRoot) {
//		LOG.tracef( "Resolving SqmRoot [%s] to TableGroup", sqmRoot );
		final FromClauseIndex fromClauseIndex = getFromClauseIndex();
//		if ( fromClauseIndex.isResolved( sqmRoot ) ) {
//			LOG.tracef( "Already resolved SqmRoot [%s] to TableGroup", sqmRoot );
//		}
		final TableGroup tableGroup;
		if ( !sqmRoot.isCorrelated() ) {
			return;
		}
		final QuerySpec currentQuerySpec = currentQuerySpec();
		final SessionFactoryImplementor sessionFactory = creationContext.getSessionFactory();
		if ( sqmRoot.containsOnlyInnerJoins() ) {
			// If we have just inner joins against a correlated root, we can render the joins as references
			final SqmFrom<?, ?> from;
			// If we correlate a join, we have to create a special SqmRoot shell called SqmCorrelatedRootJoin.
			// The only purpose of that is to serve as SqmRoot, which is needed for the FROM clause.
			// It will always contain just a single correlated join though, which is what is actually correlated
			if ( sqmRoot instanceof SqmCorrelatedRootJoin<?> ) {
				assert sqmRoot.getSqmJoins().size() == 1;
				assert sqmRoot.getSqmJoins().get( 0 ).isCorrelated();
				from = sqmRoot.getSqmJoins().get( 0 );
			}
			else {
				from = sqmRoot;
			}
			final TableGroup parentTableGroup = fromClauseIndex.findTableGroup(
					from.getCorrelationParent().getNavigablePath()
			);
			if ( parentTableGroup == null ) {
				throw new InterpretationException( "Access to from node '" + from.getCorrelationParent() + "' is not possible in from-clause subqueries, unless the 'lateral' keyword is used for the subquery!" );
			}
			final SqlAliasBase sqlAliasBase = sqlAliasBaseManager.createSqlAliasBase( parentTableGroup.getGroupAlias() );
			if ( parentTableGroup instanceof PluralTableGroup pluralTableGroup ) {
				// Plural correlations wrap the element/index table groups individually
				final CorrelatedPluralTableGroup correlatedPluralTableGroup = new CorrelatedPluralTableGroup(
						parentTableGroup,
						sqlAliasBase,
						currentQuerySpec,
						predicate -> additionalRestrictions = combinePredicates( additionalRestrictions, predicate ),
						sessionFactory
				);
				final TableGroup elementTableGroup = pluralTableGroup.getElementTableGroup();
				if ( elementTableGroup != null ) {
					final TableGroup correlatedElementTableGroup = new CorrelatedTableGroup(
							elementTableGroup,
							sqlAliasBase,
							currentQuerySpec,
							predicate -> additionalRestrictions = combinePredicates( additionalRestrictions, predicate ),
							sessionFactory
					);
					final TableGroupJoin tableGroupJoin = new TableGroupJoin(
							elementTableGroup.getNavigablePath(),
							SqlAstJoinType.INNER,
							correlatedElementTableGroup
					);
					correlatedPluralTableGroup.registerElementTableGroup( tableGroupJoin );
				}
				final TableGroup indexTableGroup = pluralTableGroup.getIndexTableGroup();
				if ( indexTableGroup != null ) {
					final TableGroup correlatedIndexTableGroup = new CorrelatedTableGroup(
							indexTableGroup,
							sqlAliasBase,
							currentQuerySpec,
							predicate -> additionalRestrictions = combinePredicates( additionalRestrictions, predicate ),
							sessionFactory
					);
					final TableGroupJoin tableGroupJoin = new TableGroupJoin(
							indexTableGroup.getNavigablePath(),
							SqlAstJoinType.INNER,
							correlatedIndexTableGroup
					);
					correlatedPluralTableGroup.registerIndexTableGroup( tableGroupJoin );
				}
				tableGroup = correlatedPluralTableGroup;
			}
			else {
				tableGroup = new CorrelatedTableGroup(
						parentTableGroup,
						sqlAliasBase,
						currentQuerySpec,
						predicate -> additionalRestrictions = combinePredicates( additionalRestrictions, predicate ),
						sessionFactory
				);
			}
			fromClauseIndex.register( from, tableGroup );
			registerPluralTableGroupParts( tableGroup );
			// Note that we do not need to register the correlated table group to the from clause
			// because that is never "rendered" in the subquery anyway.
			// Any table group joins added to the correlated table group are added to the query spec
			// as roots anyway, so nothing to worry about
//			LOG.tracef( "Resolved SqmRoot [%s] to correlated TableGroup [%s]", sqmRoot, tableGroup );
			if ( from instanceof SqmRoot<?> correlatedRoot ) {
				consumeJoins( correlatedRoot, fromClauseIndex, tableGroup );
			}
			else {
				consumeExplicitJoins( from, tableGroup );
			}
			return;
		}
		else {
			final EntityPersister entityDescriptor = resolveEntityPersister( sqmRoot.getModel() );
			final TableGroup parentTableGroup = fromClauseIndex.findTableGroup(
					sqmRoot.getCorrelationParent().getNavigablePath()
			);
			// If we have non-inner joins against a correlated root, we must render the root with a correlation predicate
			tableGroup = entityDescriptor.createRootTableGroup(
					true,
					sqmRoot.getNavigablePath(),
					sqmRoot.getExplicitAlias(),
					null,
					() -> predicate -> {},
					this
			);
			// Correlate via identifier equality: parent.id = root.id
			final EntityIdentifierMapping identifierMapping = entityDescriptor.getIdentifierMapping();
			final NavigablePath navigablePath = sqmRoot.getNavigablePath().append( identifierMapping.getNavigableRole().getNavigableName() );
			final int jdbcTypeCount = identifierMapping.getJdbcTypeCount();
			if ( jdbcTypeCount == 1 ) {
				// simple (single-column) identifier: a single comparison predicate
				identifierMapping.forEachSelectable(
						(index, selectable) -> additionalRestrictions = combinePredicates(
								additionalRestrictions,
								new ComparisonPredicate(
										new ColumnReference(
												parentTableGroup.resolveTableReference( navigablePath, selectable.getContainingTableExpression() ),
												selectable
										),
										ComparisonOperator.EQUAL,
										new ColumnReference(
												tableGroup.resolveTableReference( navigablePath, selectable.getContainingTableExpression() ),
												selectable
										)
								)
						)
				);
			}
			else {
				// composite identifier: compare the columns as SQL tuples
				final List<Expression> lhs = new ArrayList<>( jdbcTypeCount );
				final List<Expression> rhs = new ArrayList<>( jdbcTypeCount );
				identifierMapping.forEachSelectable(
						(index, selectable) -> {
							lhs.add(
									new ColumnReference(
											parentTableGroup.resolveTableReference( navigablePath, selectable.getContainingTableExpression() ),
											selectable
									)
							);
							rhs.add(
									new ColumnReference(
											tableGroup.resolveTableReference( navigablePath, selectable.getContainingTableExpression() ),
											selectable
									)
							);
						}
				);
				additionalRestrictions = combinePredicates(
						additionalRestrictions,
						new ComparisonPredicate(
								new SqlTuple( lhs, identifierMapping ),
								ComparisonOperator.EQUAL,
								new SqlTuple( rhs, identifierMapping )
						)
				);
			}
		}
//		LOG.tracef( "Resolved SqmRoot [%s] to new TableGroup [%s]", sqmRoot, tableGroup );
		fromClauseIndex.register( sqmRoot, tableGroup );
		currentQuerySpec.getFromClause().addRoot( tableGroup );
		consumeJoins( sqmRoot, fromClauseIndex, tableGroup );
	}
	/**
	 * Translates a single {@code from}-clause root into a {@link TableGroup}, registers it
	 * with the from-clause index and the current query node's from-clause, and then processes
	 * the root's joins. Correlated roots are skipped here because their table group is
	 * resolved against the outer query instead.
	 */
	protected void consumeFromClauseRoot(SqmRoot<?> sqmRoot) {
//		LOG.tracef( "Resolving SqmRoot [%s] to TableGroup", sqmRoot );
		final FromClauseIndex fromClauseIndex = getFromClauseIndex();
//		if ( fromClauseIndex.isResolved( sqmRoot ) ) {
//			LOG.tracef( "Already resolved SqmRoot [%s] to TableGroup", sqmRoot );
//		}
		if ( sqmRoot.isCorrelated() ) {
			// Correlated roots reuse the table group of the outer query
			return;
		}
		final SqlAstQueryNodeProcessingState currentQueryNodeProcessingState = currentQueryNodeProcessingState();
		final TableGroup tableGroup;
		if ( sqmRoot instanceof SqmDerivedRoot<?> derivedRoot ) {
			// A root defined by a subquery, e.g. `from (select ...) alias`
			// Temporarily push an empty FromClauseIndex to disallow access to aliases from the top query
			// Only lateral subqueries are allowed to see the aliases
			fromClauseIndexStack.push( new FromClauseIndex( null ) );
			final SelectStatement statement = (SelectStatement) derivedRoot.getQueryPart().accept( this );
			fromClauseIndexStack.pop();
			final AnonymousTupleType<?> tupleType = (AnonymousTupleType<?>) sqmRoot.getNodeType();
			final List<SqlSelection> sqlSelections =
					statement.getQueryPart().getFirstQuerySpec().getSelectClause().getSqlSelections();
			final AnonymousTupleTableGroupProducer tableGroupProducer = tupleType.resolveTableGroupProducer(
					derivedRoot.getExplicitAlias(),
					sqlSelections,
					lastPoppedFromClauseIndex
			);
			final List<String> columnNames = tableGroupProducer.getColumnNames();
			final SqlAliasBase sqlAliasBase = getSqlAliasBaseGenerator().createSqlAliasBase(
					derivedRoot.getExplicitAlias() == null ? "derived" : derivedRoot.getExplicitAlias()
			);
			final String identifierVariable = sqlAliasBase.generateNewAlias();
			tableGroup = new QueryPartTableGroup(
					derivedRoot.getNavigablePath(),
					tableGroupProducer,
					statement,
					identifierVariable,
					columnNames,
					tableGroupProducer.getCompatibleTableExpressions(),
					false,
					true,
					creationContext.getSessionFactory()
			);
		}
		else if ( sqmRoot instanceof SqmFunctionRoot<?> functionRoot ) {
			// A root defined by a set-returning function
			tableGroup = createFunctionTableGroup(
					functionRoot.getFunction(),
					functionRoot.getNavigablePath(),
					functionRoot.getExplicitAlias(),
					false,
					true,
					functionRoot.getReusablePath( CollectionPart.Nature.INDEX.getName() ) != null
			);
		}
		else if ( sqmRoot instanceof SqmCteRoot<?> cteRoot ) {
			// A root referring to a CTE declared in a `with` clause
			tableGroup = createCteTableGroup(
					getCteName( cteRoot.getCte().getCteTable() ),
					cteRoot.getNavigablePath(),
					cteRoot.getExplicitAlias(),
					true
			);
		}
		else {
			// A plain entity root
			final EntityPersister entityDescriptor = resolveEntityPersister( sqmRoot.getModel() );
			tableGroup = entityDescriptor.createRootTableGroup(
					true,
					sqmRoot.getNavigablePath(),
					sqmRoot.getExplicitAlias(),
					null,
					() -> predicate -> additionalRestrictions = combinePredicates( additionalRestrictions, predicate ),
					this
			);
			// Apply enabled filters and other base restrictions declared for the entity
			entityDescriptor.applyBaseRestrictions(
					currentQueryNodeProcessingState::applyPredicate,
					tableGroup,
					true,
					getLoadQueryInfluencers().getEnabledFilters(),
					false,
					null,
					this
			);
		}
//		LOG.tracef( "Resolved SqmRoot [%s] to new TableGroup [%s]", sqmRoot, tableGroup );
		registerSqmFromTableGroup( sqmRoot, tableGroup );
		currentQueryNodeProcessingState.getFromClause().addRoot( tableGroup );
		consumeJoins( sqmRoot, fromClauseIndex, tableGroup );
	}
private void registerSqmFromTableGroup(SqmFrom<?, ?> sqmFrom, TableGroup tableGroup) {
getFromClauseIndex().register( sqmFrom, tableGroup );
// We also need to register the table group for the treats
for ( SqmFrom<?, ?> sqmTreat : sqmFrom.getSqmTreats() ) {
getFromClauseAccess().registerTableGroup( sqmTreat.getNavigablePath(), tableGroup );
}
}
	/**
	 * Creates a table group for a set-returning function source.
	 *
	 * @param function the set-returning function to translate
	 * @param navigablePath path of the SQM node backing the table group
	 * @param explicitAlias explicit alias, or {@code null} to derive one from "derived"
	 * @param lateral whether the function may see aliases of the enclosing from-clause
	 * @param canUseInnerJoins whether inner joins may be used against the table group
	 * @param withOrdinality whether an ordinality column was requested
	 */
	private TableGroup createFunctionTableGroup(
			SqmSetReturningFunction<?> function,
			NavigablePath navigablePath,
			String explicitAlias,
			boolean lateral,
			boolean canUseInnerJoins,
			boolean withOrdinality) {
		if ( !lateral ) {
			// Temporarily push an empty FromClauseIndex to disallow access to aliases from the top query
			// Only lateral subqueries are allowed to see the aliases
			fromClauseIndexStack.push( new FromClauseIndex( null ) );
		}
		final boolean oldInNestedContext = inNestedContext;
		inNestedContext = true;
		// Capture the current inferable type for the function and push a no-op access,
		// so translating the function arguments starts from a clean inference context
		final Supplier<MappingModelExpressible<?>> oldFunctionImpliedResultTypeAccess = functionImpliedResultTypeAccess;
		functionImpliedResultTypeAccess = inferrableTypeAccessStack.getCurrent();
		inferrableTypeAccessStack.push( () -> null );
		try {
			final SqlAliasBase sqlAliasBase = getSqlAliasBaseGenerator().createSqlAliasBase(
					explicitAlias == null ? "derived" : explicitAlias
			);
			final String identifierVariable = sqlAliasBase.generateNewAlias();
			return function.convertToSqlAst( navigablePath, identifierVariable, lateral, canUseInnerJoins, withOrdinality, this );
		}
		finally {
			// Restore the previous translation state in reverse order of mutation
			inferrableTypeAccessStack.pop();
			functionImpliedResultTypeAccess = oldFunctionImpliedResultTypeAccess;
			inNestedContext = oldInNestedContext;
			if ( !lateral ) {
				fromClauseIndexStack.pop();
			}
		}
	}
	/**
	 * Creates a {@link CteTableGroup} referring to the CTE registered under {@code cteName}.
	 * Also marks the CTE statement as recursive if its query part is one that is currently
	 * being processed.
	 *
	 * @throws InterpretationException if no CTE with the given name was registered
	 */
	private TableGroup createCteTableGroup(
			String cteName,
			NavigablePath navigablePath,
			String explicitAlias,
			boolean canUseInnerJoins) {
		final SqlAliasBase sqlAliasBase = getSqlAliasBaseGenerator().createSqlAliasBase(
				explicitAlias == null ? cteName : explicitAlias
		);
		final String identifierVariable = sqlAliasBase.generateNewAlias();
		final CteStatement cteStatement = cteContainer.getCteStatement( cteName );
		if ( cteStatement == null ) {
			throw new InterpretationException( "Could not find CTE for name '" + cteName + "'!" );
		}
		final QueryPart cteQueryPart = ( (SelectStatement) cteStatement.getCteDefinition() ).getQueryPart();
		// If the query part of the CTE is one which we are currently processing, then this is a recursive CTE
		if ( cteQueryPart instanceof QueryGroup && Boolean.TRUE == processingStateStack.findCurrentFirstWithParameter( cteQueryPart, BaseSqmToSqlAstConverter::matchSqlAstWithQueryPart ) ) {
			cteStatement.setRecursive();
		}
		final AnonymousTupleTableGroupProducer tableGroupProducer = cteStatement.getCteTable().getTableGroupProducer();
		return new CteTableGroup(
				canUseInnerJoins,
				navigablePath,
				sqlAliasBase,
				tableGroupProducer,
				new NamedTableReference( cteName, identifierVariable ),
				tableGroupProducer.getCompatibleTableExpressions()
		);
	}
	/**
	 * Processes the joins of a from-clause root. When the root carries an explicit join
	 * ordering, the joins are translated in exactly that order — each join's owner table
	 * group is looked up first and the result of each translation becomes the left-hand
	 * side of the next. Otherwise the joins are visited recursively.
	 */
	private void consumeJoins(SqmRoot<?> sqmRoot, FromClauseIndex fromClauseIndex, TableGroup tableGroup) {
		if ( sqmRoot.getOrderedJoins() == null ) {
			consumeExplicitJoins( sqmRoot, tableGroup );
		}
		else {
//			if ( LOG.isTraceEnabled() ) {
//				LOG.tracef( "Visiting explicit joins for '%s'", sqmRoot.getNavigablePath() );
//			}
			TableGroup lastTableGroup = tableGroup;
			for ( SqmJoin<?, ?> join : sqmRoot.getOrderedJoins() ) {
				final TableGroup ownerTableGroup;
				if ( join.getLhs() == null ) {
					// Joins without a left-hand side are owned by the root itself
					ownerTableGroup = tableGroup;
				}
				else {
					if ( join.getLhs() instanceof SqmCorrelation<?, ?> ) {
						// Correlations resolve against the table group of the correlated root
						ownerTableGroup = fromClauseIndex.findTableGroup(
								( (SqmCorrelation<?, ?>) join.getLhs() ).getCorrelatedRoot().getNavigablePath()
						);
					}
					else {
						ownerTableGroup = fromClauseIndex.findTableGroup( join.getLhs().getNavigablePath() );
					}
				}
				assert ownerTableGroup != null;
				final TableGroup actualTableGroup = getActualTableGroup( ownerTableGroup, join );
				lastTableGroup = consumeExplicitJoin( join, lastTableGroup, actualTableGroup, false );
			}
		}
	}
private EntityPersister resolveEntityPersister(EntityDomainType<?> entityDomainType) {
return creationContext.getMappingMetamodel()
.getEntityDescriptor( entityDomainType.getHibernateEntityName() );
}
private SqlAstQueryPartProcessingState getSqlAstQueryPartProcessingState() {
return (SqlAstQueryPartProcessingState) getCurrentProcessingState();
}
	/**
	 * Registers {@link EntityNameUse#PROJECTION} entity name uses for all entity valued path subtypes.
	 * If the path is a treat, registers {@link EntityNameUse#TREAT} for all treated subtypes instead.
	 */
	private void registerEntityNameProjectionUsage(SqmPath<?> projectedPath, TableGroup tableGroup) {
		final ManagedDomainType<?> treatedType;
		if ( projectedPath instanceof SqmTreatedPath<?, ?> sqmTreatedPath ) {
			// Projecting a treated path e.g. `select treat(alias as Subtype)`
			treatedType = sqmTreatedPath.getTreatTarget();
			registerEntityNameUsage( tableGroup, EntityNameUse.TREAT, treatedType.getTypeName(), true );
			if ( projectedPath instanceof SqmFrom<?, ?> ) {
				// Register that the TREAT uses for the SqmFrom node may not be downgraded
				getSqlAstQueryPartProcessingState().registerFromUsage(
						(SqmFrom<?, ?>) ( (SqmTreatedPath<?, ?>) projectedPath ).getWrappedPath(),
						false
				);
			}
		}
		else if ( projectedPath.getReferencedPathSource().getPathType() instanceof EntityDomainType<?> entityDomainType ) {
			// Projecting an entity valued path e.g. `select alias`
			treatedType = entityDomainType;
			registerEntityNameUsage( tableGroup, EntityNameUse.PROJECTION, treatedType.getTypeName(), true );
			if ( projectedPath instanceof SqmFrom<?, ?> sqmFrom ) {
				// Register that the TREAT uses for the SqmFrom node may not be downgraded
				getSqlAstQueryPartProcessingState().registerFromUsage( sqmFrom, true );
			}
		}
	}
	/**
	 * If the {@link SqmPath} has a {@link PersistentAttribute} as {@link SqmPathSource},
	 * this method determines the declaring entity type of the attribute and register a {@link EntityNameUse#EXPRESSION}
	 * for the given table group. If the parent path is a treat e.g. {@code treat(alias as Subtype).attribute},
	 * it will instead register a {@link EntityNameUse#TREAT} for the treated type.
	 */
	private void registerPathAttributeEntityNameUsage(SqmPath<?> sqmPath, TableGroup tableGroup) {
		final SqmPath<?> parentPath = sqmPath.getLhs();
		final SqlAstProcessingState processingState = getCurrentProcessingState();
		if ( processingState instanceof SqlAstQueryPartProcessingState sqlAstQueryPartProcessingState ) {
			// Record that both the path and its parent are actually used in this query part
			if ( parentPath instanceof SqmFrom<?, ?> sqmFrom ) {
				sqlAstQueryPartProcessingState.registerFromUsage( sqmFrom, true );
			}
			if ( sqmPath instanceof SqmFrom<?, ?> ) {
				sqlAstQueryPartProcessingState.registerFromUsage( (SqmFrom<?, ?>) sqmPath, true );
			}
		}
		if ( !( sqmPath instanceof SqmTreatedPath<?, ?> )
				&& tableGroup.getModelPart().getPartMappingType() instanceof EntityMappingType entityType
				&& sqmPath.getResolvedModel() instanceof PersistentAttribute<?, ?> ) {
			final String attributeName = sqmPath.getResolvedModel().getPathName();
			final EntityMappingType parentType;
			if ( parentPath instanceof SqmTreatedPath<?, ?> treatedPath ) {
				// A treated attribute usage i.e. `treat(alias as Subtype).attribute = 1`
				final ManagedDomainType<?> treatTarget = treatedPath.getTreatTarget();
				if ( treatTarget.getPersistenceType() == ENTITY ) {
					parentType = creationContext.getMappingMetamodel().getEntityDescriptor( treatTarget.getTypeName() );
					// The following is an optimization to avoid rendering treat conditions into predicates.
					// Imagine an HQL predicate like `treat(alias as Subtype).attribute is null or alias.name = '...'`.
					// If the `attribute` is basic, we will render a case wrapper around the column expression
					// and hence we can safely skip adding the `type(alias) = Subtype and ...` condition to the SQL.
					final ModelPart subPart = parentType.findSubPart( attributeName );
					final EntityNameUse entityNameUse =
							subPart.asBasicValuedModelPart() != null
									// We only apply this optimization for basic valued model parts for now
									? EntityNameUse.OPTIONAL_TREAT
									: EntityNameUse.BASE_TREAT;
					registerEntityNameUsage( tableGroup, entityNameUse, treatTarget.getTypeName() );
				}
				else {
					// Treat target is an embeddable; fall back to the entity type of the table group
					parentType = entityType;
				}
			}
			else {
				// A simple attribute usage e.g. `alias.attribute = 1`
				parentType = entityType;
			}
			final AttributeMapping attributeMapping = parentType.findAttributeMapping( attributeName );
			if ( attributeMapping == null ) {
				if ( attributeName.equals( parentType.getIdentifierMapping().getAttributeName() ) ) {
					if ( parentType.getIdentifierMapping() instanceof EmbeddableValuedModelPart ) {
						// Until HHH-16571 is fixed, we must also register an entity name use for the root entity descriptor name
						registerEntityNameUsage( tableGroup, EntityNameUse.EXPRESSION,
								parentType.getRootEntityDescriptor().getEntityName() );
					}
					final EntityDiscriminatorMapping discriminator = parentType.getDiscriminatorMapping();
					final String entityName;
					if ( discriminator != null && discriminator.hasPhysicalColumn() && !parentType.getSubMappingTypes().isEmpty() ) {
						// This is needed to preserve optimization for joined + discriminator inheritance
						// see JoinedSubclassEntityPersister#getIdentifierMappingForJoin
						entityName = parentType.getRootEntityDescriptor().getEntityName();
					}
					else {
						entityName = parentType.getEntityName();
					}
					registerEntityNameUsage( tableGroup, EntityNameUse.EXPRESSION, entityName );
				}
				else {
					// If the attribute mapping can't be found on the declaring type and it is not the identifier,
					// this signals that we are working with an arbitrarily chosen attribute from a subclass.
					// Register entity name usages for all subtypes that declare the attribute with the same name then
					for ( EntityMappingType subMappingType : parentType.getSubMappingTypes() ) {
						if ( subMappingType.findDeclaredAttributeMapping( attributeName ) != null ) {
							registerEntityNameUsage( tableGroup, EntityNameUse.EXPRESSION, subMappingType.getEntityName() );
						}
					}
				}
			}
			else {
				// Common case: register an EXPRESSION use for the entity that contains the attribute
				registerEntityNameUsage( tableGroup, EntityNameUse.EXPRESSION,
						attributeMapping.findContainingEntityMapping().getEntityName() );
			}
		}
	}
	/**
	 * This converter tracks entity name uses per table group, so it advertises support
	 * for {@link #registerEntityNameUsage(TableGroup, EntityNameUse, String)}.
	 */
	@Override
	public boolean supportsEntityNameUsage() {
		return true;
	}
@Override
public void registerEntityNameUsage(
TableGroup tableGroup,
EntityNameUse entityNameUse,
String treatTargetTypeName) {
registerEntityNameUsage(
tableGroup,
entityNameUse,
treatTargetTypeName,
entityNameUse.getKind() == EntityNameUse.UseKind.PROJECTION
);
}
private void registerEntityNameUsage(
TableGroup tableGroup,
EntityNameUse entityNameUse,
String treatTargetTypeName,
boolean projection) {
final EntityPersister persister;
if ( tableGroup.getModelPart() instanceof EmbeddableValuedModelPart ) {
persister = null;
final EmbeddableDomainType<?> embeddableDomainType =
creationContext.getJpaMetamodel().findEmbeddableType( treatTargetTypeName );
if ( embeddableDomainType == null || !embeddableDomainType.isPolymorphic() ) {
return;
}
}
else {
persister = creationContext.getMappingMetamodel().findEntityDescriptor( treatTargetTypeName );
if ( persister == null || !persister.isPolymorphic() ) {
return;
}
}
final TableGroup actualTableGroup;
final EntityNameUse finalEntityNameUse;
if ( tableGroup instanceof CorrelatedTableGroup ) {
actualTableGroup = ( (CorrelatedTableGroup) tableGroup ).getCorrelatedTableGroup();
// For correlated table groups we can't apply filters,
// as the context is in which the use happens may only affect the result of the subquery
finalEntityNameUse = entityNameUse == EntityNameUse.EXPRESSION ? entityNameUse : EntityNameUse.PROJECTION;
}
else {
if ( tableGroup instanceof PluralTableGroup ) {
actualTableGroup = ( (PluralTableGroup) tableGroup ).getElementTableGroup();
}
else {
actualTableGroup = tableGroup;
}
finalEntityNameUse = entityNameUse == EntityNameUse.EXPRESSION
|| entityNameUse == EntityNameUse.PROJECTION
|| contextAllowsTreatOrFilterEntityNameUse()
? entityNameUse
: EntityNameUse.EXPRESSION;
}
final Map<String, EntityNameUse> entityNameUses =
tableGroupEntityNameUses.computeIfAbsent( actualTableGroup,
group -> new HashMap<>( 1 ) );
entityNameUses.compute( treatTargetTypeName,
(s, existingUse) -> finalEntityNameUse.stronger( existingUse ) );
if ( persister == null ) {
// No need to do anything else for embeddables
return;
}
// Resolve the table reference for all types which we register an entity name use for.
// Also, force table group initialization for treats when needed to ensure correct cardinality
final EntityNameUse.UseKind useKind = finalEntityNameUse.getKind();
if ( actualTableGroup.isInitialized()
|| useKind == EntityNameUse.UseKind.TREAT
&& actualTableGroup.canUseInnerJoins()
&& !( (EntityMappingType) actualTableGroup.getModelPart().getPartMappingType() )
.isTypeOrSuperType( persister ) ) {
actualTableGroup.resolveTableReference( null, persister.getTableName() );
}
if ( projection ) {
EntityMappingType superMappingType = persister;
while ( ( superMappingType = superMappingType.getSuperMappingType() ) != null ) {
entityNameUses.putIfAbsent( superMappingType.getEntityName(), EntityNameUse.PROJECTION );
actualTableGroup.resolveTableReference( null,
superMappingType.getEntityPersister().getTableName() );
}
}
// If we encounter a treat or projection use, we also want register the use for all subtypes.
// We do this here to not have to expand entity name uses during pruning later on
if ( useKind == EntityNameUse.UseKind.TREAT ) {
for ( EntityMappingType subType : persister.getSubMappingTypes() ) {
entityNameUses.compute( subType.getEntityName(),
(s, existingUse) -> finalEntityNameUse.stronger( existingUse ) );
}
}
else if ( useKind == EntityNameUse.UseKind.PROJECTION ) {
for ( EntityMappingType subType : persister.getSubMappingTypes() ) {
entityNameUses.compute( subType.getEntityName(),
(s, existingUse) -> finalEntityNameUse.stronger( existingUse ) );
}
}
}
private boolean contextAllowsTreatOrFilterEntityNameUse() {
return switch ( getCurrentClauseStack().getCurrent() ) {
// A TREAT or FILTER EntityNameUse is only allowed in these clauses,
// but only if it's not in a nested context
case SET, FROM, GROUP, HAVING, WHERE -> !inNestedContext;
default -> false;
};
}
protected void registerTypeUsage(DiscriminatorSqmPath<?> path) {
registerTypeUsage( getFromClauseAccess().getTableGroup( path.getNavigablePath().getParent() ) );
}
	/**
	 * Handles a use of {@code type( alias )} for the given table group: either registers a
	 * projection entity name use, or resolves all subclass table references directly,
	 * depending on the clause and on whether the discriminator has a physical column.
	 */
	protected void registerTypeUsage(TableGroup tableGroup) {
		// When we encounter a discriminator path i.e. a use of `type( alias )`
		// we have to resolve all subclass tables, otherwise we might get wrong results
		// It might be worth deferring this process to the pruning phase when we start to prune subclass joins in more cases
		// The biggest optimization that we currently don't do yet is capturing how this discriminator path is restricted
		// If we could infer a list of treated entity names from the restrictions,
		// we could intersect that with the tableGroupTreatUsages and thus eliminate subclass joins.
		// The hard part about this is inferring the list though, because we must respect the predicate transitivity
		// i.e. for `a = 1 or type(..) = ...` means nothing can be inferred,
		// but for `a = 1 and type(..) = A or type(..) = B` we can infer `A, B`
		// The OR junction allows to create a union of entity name lists of all sub-predicates
		// The AND junction allows to create an intersection of entity name lists of all sub-predicates
		if ( tableGroup.getModelPart().getPartMappingType() instanceof EntityMappingType mappingType ) {
			final EntityPersister persister = mappingType.getEntityPersister();
			// Avoid resolving subclass tables for persisters with physical discriminators as we won't need them
			if ( !persister.getDiscriminatorMapping().hasPhysicalColumn() ) {
				if ( getCurrentClauseStack().getCurrent() != Clause.WHERE
						&& getCurrentClauseStack().getCurrent() != Clause.HAVING ) {
					// Where and having clauses are handled specially with EntityNameUse.FILTER and pruning
					registerEntityNameUsage( tableGroup, EntityNameUse.PROJECTION, persister.getEntityName(), true );
				}
				else {
					// Resolve every subclass table reference so the discriminator expression can be rendered
					final int subclassTableSpan = persister.getSubclassTableSpan();
					for ( int i = 0; i < subclassTableSpan; i++ ) {
						tableGroup.resolveTableReference( null, persister.getSubclassTableName( i ) );
					}
				}
			}
		}
	}
protected void pruneTableGroupJoins() {
for ( Map.Entry<TableGroup, Map<String, EntityNameUse>> entry : tableGroupEntityNameUses.entrySet() ) {
final TableGroup tableGroup = entry.getKey();
if ( tableGroup.isInitialized() ) {
final Map<String, EntityNameUse> entityNameUses = entry.getValue();
final ModelPartContainer modelPart = tableGroup.getModelPart();
final MappingType partMappingType =
modelPart instanceof PluralAttributeMapping pluralAttributeMapping
? pluralAttributeMapping.getElementDescriptor().getPartMappingType()
: modelPart.getPartMappingType();
if ( partMappingType instanceof EntityPersister entityPersister ) {
entityPersister.pruneForSubclasses( tableGroup, entityNameUses );
}
}
}
}
	/**
	 * Translates all explicit joins hanging off of the given SQM from-node, and then
	 * processes the node's treated variants, registering BASE_TREAT entity name uses
	 * for each treat target.
	 */
	protected void consumeExplicitJoins(SqmFrom<?, ?> sqmFrom, TableGroup lhsTableGroup) {
//		if ( LOG.isTraceEnabled() ) {
//			LOG.tracef( "Visiting explicit joins for '%s'", sqmFrom.getNavigablePath() );
//		}
		sqmFrom.visitSqmJoins(
				sqmJoin -> {
					final TableGroup actualTableGroup = getActualTableGroup( lhsTableGroup, sqmJoin );
					registerPathAttributeEntityNameUsage( sqmJoin, actualTableGroup );
					consumeExplicitJoin( sqmJoin, actualTableGroup, actualTableGroup, true );
				}
		);
		final List<SqmTreatedFrom<?,?,?>> sqmTreats = sqmFrom.getSqmTreats();
		if ( !sqmTreats.isEmpty() ) {
			final SqlAstQueryPartProcessingState queryPartProcessingState = getSqlAstQueryPartProcessingState();
			queryPartProcessingState.registerTreatedFrom( sqmFrom );
			// If a SqmFrom is used anywhere even though treats exists,
			// the treats are context dependent and hence we need to downgrade TREAT entity uses to EXPRESSION.
			// Treat expressions will be protected via predicates or case when expressions,
			// but we may not filter rows based on the TREAT entity uses.
			if ( lhsTableGroup.hasRealJoins() ) {//|| sqmFrom instanceof SqmRoot<?> ) {
				queryPartProcessingState.registerFromUsage( sqmFrom, true );
			}
			for ( SqmFrom<?, ?> sqmTreat : sqmTreats ) {
				final TableGroup actualTableGroup = getActualTableGroup( lhsTableGroup, sqmTreat );
				// We don't know the context yet in which a treat is used, so we have to register base treats and track the usage
				registerEntityNameUsage( actualTableGroup, EntityNameUse.BASE_TREAT,
						( (SqmTreatedPath<?, ?>) sqmTreat ).getTreatTarget().getTypeName() );
				consumeExplicitJoins( sqmTreat, actualTableGroup );
			}
		}
	}
protected TableGroup consumeExplicitJoin(
SqmJoin<?, ?> sqmJoin,
TableGroup lhsTableGroup,
TableGroup ownerTableGroup,
boolean transitive) {
if ( sqmJoin instanceof SqmAttributeJoin<?, ?> attributeJoin ) {
return consumeAttributeJoin( attributeJoin, lhsTableGroup, ownerTableGroup, transitive );
}
else if ( sqmJoin instanceof SqmCrossJoin<?> crossJoin ) {
return consumeCrossJoin( crossJoin, lhsTableGroup, transitive );
}
else if ( sqmJoin instanceof SqmEntityJoin<?,?> entityJoin ) {
return consumeEntityJoin( entityJoin, lhsTableGroup, transitive );
}
else if ( sqmJoin instanceof SqmDerivedJoin<?> derivedJoin ) {
return consumeDerivedJoin( derivedJoin, lhsTableGroup, transitive );
}
else if ( sqmJoin instanceof SqmFunctionJoin<?> functionJoin ) {
return consumeFunctionJoin( functionJoin, lhsTableGroup, transitive );
}
else if ( sqmJoin instanceof SqmCteJoin<?> cteJoin ) {
return consumeCteJoin( cteJoin, lhsTableGroup, transitive );
}
else if ( sqmJoin instanceof SqmPluralPartJoin<?, ?> pluralPartJoin ) {
return consumePluralPartJoin( pluralPartJoin, ownerTableGroup, transitive );
}
else {
throw new InterpretationException( "Could not resolve SqmJoin [" + sqmJoin.getNavigablePath() + "] to TableGroupJoin" );
}
}
private TableGroup getActualTableGroup(TableGroup lhsTableGroup, SqmPath<?> path) {
// The actual table group in case of PluralTableGroups usually is the element table group,
// but if the SqmPath is a SqmPluralPartJoin e.g. `join key(mapAlias) k`
// or the SqmPath is a simple path for the key e.g. `select key(mapAlias)`,
// then we want to return the PluralTableGroup instead
if ( lhsTableGroup instanceof PluralTableGroup pluralTableGroup
&& !( path instanceof SqmPluralPartJoin<?, ?> )
&& CollectionPart.Nature.fromNameExact( path.getNavigablePath().getLocalName() ) == null ) {
final TableGroup elementTableGroup = pluralTableGroup.getElementTableGroup();
// The element table group could be null for basic collections
if ( elementTableGroup != null ) {
return elementTableGroup;
}
}
return lhsTableGroup;
}
	/**
	 * Translates an attribute join (including any treated variants and join predicates)
	 * into a {@link TableGroupJoin} added to {@code lhsTableGroup}, registering entity
	 * name uses and collection-fetch state along the way.
	 *
	 * @param lhsTableGroup the table group the join is attached to
	 * @param ownerTableGroup the table group that owns the joined model part
	 * @param transitive whether nested joins of the join should be processed as well
	 */
	private TableGroup consumeAttributeJoin(
			SqmAttributeJoin<?, ?> sqmJoin,
			TableGroup lhsTableGroup,
			TableGroup ownerTableGroup,
			boolean transitive) {
		final SqmPathSource<?> pathSource = sqmJoin.getReferencedPathSource();
		final SqmJoinType sqmJoinType = sqmJoin.getSqmJoinType();
		final NavigablePath sqmJoinNavigablePath = sqmJoin.getNavigablePath();
		final ModelPart modelPart =
				ownerTableGroup.getModelPart()
						.findSubPart( pathSource.getPathName(),
								resolveExplicitTreatTarget( sqmJoin, this ) );
		// Collect the join predicate(s), combining or collecting treat predicates as needed
		final List<SqmTreatedFrom<?, ?, ?>> sqmTreats = sqmJoin.getSqmTreats();
		final SqmPredicate joinPredicate;
		final SqmPredicate[] treatPredicates;
		final boolean hasPredicate;
		if ( !sqmTreats.isEmpty() ) {
			if ( sqmTreats.size() == 1 ) {
				// If there is only a single treat, combine the predicates just as they are
				joinPredicate = SqmCreationHelper.combinePredicates(
						sqmJoin.getJoinPredicate(),
						( (SqmTreatedAttributeJoin<?, ?, ?>) sqmTreats.get( 0 ) ).getJoinPredicate()
				);
				treatPredicates = null;
				hasPredicate = joinPredicate != null;
			}
			else {
				// When there are multiple predicates, we have to apply type filters
				joinPredicate = sqmJoin.getJoinPredicate();
				treatPredicates = new SqmPredicate[sqmTreats.size()];
				boolean hasTreatPredicate = false;
				for ( int i = 0; i < sqmTreats.size(); i++ ) {
					final var p = ( (SqmTreatedAttributeJoin<?, ?, ?>) sqmTreats.get( i ) ).getJoinPredicate();
					treatPredicates[i] = p;
					hasTreatPredicate = hasTreatPredicate || p != null;
				}
				hasPredicate = joinPredicate != null || hasTreatPredicate;
			}
		}
		else {
			joinPredicate = sqmJoin.getJoinPredicate();
			treatPredicates = null;
			hasPredicate = joinPredicate != null;
		}
		final TableGroupJoin joinedTableGroupJoin =
				((TableGroupJoinProducer) modelPart)
						.createTableGroupJoin(
								sqmJoinNavigablePath,
								ownerTableGroup,
								sqmJoin.getExplicitAlias(),
								null,
								sqmJoinType.getCorrespondingSqlJoinType(),
								sqmJoin.isFetched(),
								hasPredicate,
								this
						);
		final TableGroup joinedTableGroup = joinedTableGroupJoin.getJoinedGroup();
		if ( pathSource instanceof PluralPersistentAttribute ) {
			assert modelPart instanceof PluralAttributeMapping;
			if ( sqmJoin.isFetched() ) {
				containsCollectionFetches = true;
			}
		}
		else {
			// Since this is an explicit join, we force the initialization of a possible lazy table group
			// to retain the cardinality, but only if this is a non-trivial attribute join.
			// Left or inner singular attribute joins without a predicate can be safely optimized away
			if ( hasPredicate || sqmJoinType != SqmJoinType.INNER && sqmJoinType != SqmJoinType.LEFT ) {
				joinedTableGroup.getPrimaryTableReference();
			}
		}
		lhsTableGroup.addTableGroupJoin( joinedTableGroupJoin );
		registerSqmFromTableGroup( sqmJoin, joinedTableGroup );
		registerPluralTableGroupParts( joinedTableGroup );
		if ( sqmJoin.isFetched() ) {
			// A fetch is like a projection usage, so register that properly
			registerEntityNameProjectionUsage( sqmJoin, getActualTableGroup( joinedTableGroup, sqmJoin ) );
		}
		registerPathAttributeEntityNameUsage( sqmJoin, ownerTableGroup );
		if ( !sqmJoin.hasTreats()
				&& sqmJoin.getReferencedPathSource().getPathType() instanceof EntityDomainType<?> entityDomainType ) {
			final TableGroup elementTableGroup =
					joinedTableGroup instanceof PluralTableGroup pluralTableGroup
							? pluralTableGroup.getElementTableGroup()
							: joinedTableGroup;
			final EntityValuedModelPart entityModelPart = (EntityValuedModelPart) elementTableGroup.getModelPart();
			final EntityPersister entityDescriptor = entityModelPart.getEntityMappingType().getEntityPersister();
			if ( entityDescriptor.getSuperMappingType() != null ) {
				// This is a non-treated join with an entity which is an inheritance subtype,
				// register a TREAT entity name use to filter only the entities of the correct type.
				registerEntityNameUsage( elementTableGroup, EntityNameUse.TREAT,
						entityDomainType.getHibernateEntityName() );
			}
		}
		// Implicit joins in the predicate might alter the nested table group joins,
		// so defer determination of the join for predicate until after the predicate was visited
		final TableGroupJoin joinForPredicate;
		// add any additional join restrictions
		if ( hasPredicate ) {
			if ( sqmJoin.isFetched() ) {
				QUERY_MESSAGE_LOGGER.debugf( "Join fetch [%s] is restricted", sqmJoinNavigablePath );
			}
			final SqmJoin<?, ?> oldJoin = currentlyProcessingJoin;
			currentlyProcessingJoin = sqmJoin;
			Predicate predicate = joinPredicate == null ? null : visitNestedTopLevelPredicate( joinPredicate );
			if ( treatPredicates != null ) {
				// Multiple treats: OR together one (type restriction AND treat predicate) per treat
				final Junction orPredicate = new Junction( Junction.Nature.DISJUNCTION );
				for ( int i = 0; i < treatPredicates.length; i++ ) {
					final var treatType = (EntityDomainType<?>) sqmTreats.get( i ).getTreatTarget();
					orPredicate.add( combinePredicates(
							createTreatTypeRestriction( sqmJoin, treatType ),
							treatPredicates[i] == null ? null : visitNestedTopLevelPredicate( treatPredicates[i] )
					) );
				}
				predicate = predicate != null ? combinePredicates( predicate, orPredicate ) : orPredicate;
			}
			joinForPredicate = determineJoinForPredicateApply( joinedTableGroupJoin );
			// If translating the join predicate didn't initialize the table group,
			// we can safely apply it on the collection table group instead
			if ( joinForPredicate.getJoinedGroup().isInitialized() ) {
				joinForPredicate.applyPredicate( predicate );
			}
			else {
				joinedTableGroupJoin.applyPredicate( predicate );
			}
			currentlyProcessingJoin = oldJoin;
		}
		else {
			joinForPredicate = determineJoinForPredicateApply( joinedTableGroupJoin );
		}
		// Since joins on treated paths will never cause table pruning, we need to add a join condition for the treat
		if ( sqmJoin.getLhs() instanceof SqmTreatedPath<?, ?> treatedPath ) {
			final ManagedDomainType<?> treatTarget = treatedPath.getTreatTarget();
			if ( treatTarget.getPersistenceType() == ENTITY ) {
				joinForPredicate.applyPredicate(
						createTreatTypeRestriction(
								treatedPath.getWrappedPath(),
								(EntityDomainType<?>) treatTarget
						)
				);
			}
		}
		if ( transitive ) {
			consumeExplicitJoins( sqmJoin, joinedTableGroup );
		}
		return joinedTableGroup;
	}
private TableGroup consumeCrossJoin(SqmCrossJoin<?> sqmJoin, TableGroup lhsTableGroup, boolean transitive) {
final EntityPersister entityDescriptor = resolveEntityPersister( sqmJoin.getReferencedPathSource() );
final TableGroup tableGroup = entityDescriptor.createRootTableGroup(
true,
sqmJoin.getNavigablePath(),
sqmJoin.getExplicitAlias(),
null,
() -> predicate -> additionalRestrictions = combinePredicates( additionalRestrictions, predicate ),
this
);
final TableGroupJoin tableGroupJoin = new TableGroupJoin(
sqmJoin.getNavigablePath(),
SqlAstJoinType.CROSS,
tableGroup
);
lhsTableGroup.addTableGroupJoin( tableGroupJoin );
registerSqmFromTableGroup( sqmJoin, tableGroup );
if ( transitive ) {
consumeExplicitJoins( sqmJoin, tableGroupJoin.getJoinedGroup() );
}
return tableGroup;
}
	/**
	 * Translates an explicit entity join ({@code join Entity e on ...}): creates a root
	 * table group for the entity, attaches it with the corresponding SQL join type, applies
	 * filter/soft-delete restrictions and the user's {@code on} predicate.
	 *
	 * @throws SemanticException if a non-cross entity join has no join condition
	 */
	private TableGroup consumeEntityJoin(SqmEntityJoin<?,?> sqmJoin, TableGroup lhsTableGroup, boolean transitive) {
		final EntityPersister entityDescriptor = resolveEntityPersister( sqmJoin.getReferencedPathSource() );
		final SqlAstJoinType correspondingSqlJoinType = sqmJoin.getSqmJoinType().getCorrespondingSqlJoinType();
		final TableGroup tableGroup = entityDescriptor.createRootTableGroup(
				correspondingSqlJoinType == SqlAstJoinType.INNER
						|| correspondingSqlJoinType == SqlAstJoinType.CROSS,
				sqmJoin.getNavigablePath(),
				sqmJoin.getExplicitAlias(),
				null,
				() -> p -> {},
				this
		);
		registerSqmFromTableGroup( sqmJoin, tableGroup );
		if ( entityDescriptor.isInherited() && !sqmJoin.hasTreats() ) {
			// Register new treat to apply the discriminator condition to the table reference itself, see #pruneTableGroupJoins
			registerEntityNameUsage( tableGroup, EntityNameUse.TREAT, entityDescriptor.getEntityName() );
		}
		final TableGroupJoin tableGroupJoin = new TableGroupJoin(
				sqmJoin.getNavigablePath(),
				correspondingSqlJoinType,
				tableGroup,
				null
		);
		lhsTableGroup.addTableGroupJoin( tableGroupJoin );
		// Apply enabled filters and other base restrictions declared for the entity
		entityDescriptor.applyBaseRestrictions(
				tableGroupJoin::applyPredicate,
				tableGroup,
				true,
				getLoadQueryInfluencers().getEnabledFilters(),
				false,
				null,
				this
		);
		// Soft-deleted rows must be excluded from the join
		final SoftDeleteMapping softDeleteMapping = entityDescriptor.getSoftDeleteMapping();
		if ( softDeleteMapping != null ) {
			final Predicate softDeleteRestriction = softDeleteMapping.createNonDeletedRestriction(
					tableGroup.resolveTableReference( softDeleteMapping.getTableName() )
			);
			tableGroupJoin.applyPredicate( softDeleteRestriction );
		}
		if ( sqmJoin.getJoinPredicate() != null ) {
			final SqmJoin<?, ?> oldJoin = currentlyProcessingJoin;
			currentlyProcessingJoin = sqmJoin;
			tableGroupJoin.applyPredicate( visitNestedTopLevelPredicate( sqmJoin.getJoinPredicate() ) );
			currentlyProcessingJoin = oldJoin;
		}
		else if ( correspondingSqlJoinType != SqlAstJoinType.CROSS ) {
			// TODO: should probably be a SyntaxException
			throw new SemanticException( "Entity join did not specify a join condition [" + sqmJoin + "]"
					+ " (specify a join condition with 'on' or use 'cross join')" );
		}
		if ( transitive ) {
			consumeExplicitJoins( sqmJoin, tableGroupJoin.getJoinedGroup() );
		}
		return tableGroup;
	}
	/**
	 * Translates a derived (subquery) join ({@code join (select ...) alias on ...}) into a
	 * {@link QueryPartTableGroup}. For non-lateral joins the subquery is translated against
	 * an empty from-clause index so it cannot see aliases of the enclosing query.
	 */
	private TableGroup consumeDerivedJoin(SqmDerivedJoin<?> sqmJoin, TableGroup parentTableGroup, boolean transitive) {
		if ( !sqmJoin.isLateral() ) {
			// Temporarily push an empty FromClauseIndex to disallow access to aliases from the top query
			// Only lateral subqueries are allowed to see the aliases
			fromClauseIndexStack.push( new FromClauseIndex( null ) );
		}
		final SelectStatement statement = (SelectStatement) sqmJoin.getQueryPart().accept( this );
		if ( !sqmJoin.isLateral() ) {
			fromClauseIndexStack.pop();
		}
		final AnonymousTupleType<?> tupleType = (AnonymousTupleType<?>) sqmJoin.getNodeType();
		final List<SqlSelection> sqlSelections =
				statement.getQueryPart().getFirstQuerySpec().getSelectClause().getSqlSelections();
		final AnonymousTupleTableGroupProducer tableGroupProducer = tupleType.resolveTableGroupProducer(
				sqmJoin.getExplicitAlias(),
				sqlSelections,
				lastPoppedFromClauseIndex
		);
		final List<String> columnNames = tableGroupProducer.getColumnNames();
		final SqlAliasBase sqlAliasBase = getSqlAliasBaseGenerator().createSqlAliasBase(
				sqmJoin.getExplicitAlias() == null ? "derived" : sqmJoin.getExplicitAlias()
		);
		final String identifierVariable = sqlAliasBase.generateNewAlias();
		final QueryPartTableGroup queryPartTableGroup = new QueryPartTableGroup(
				sqmJoin.getNavigablePath(),
				tableGroupProducer,
				statement,
				identifierVariable,
				columnNames,
				tableGroupProducer.getCompatibleTableExpressions(),
				sqmJoin.isLateral(),
				false,
				creationContext.getSessionFactory()
		);
		getFromClauseIndex().register( sqmJoin, queryPartTableGroup );
		final TableGroupJoin tableGroupJoin = new TableGroupJoin(
				queryPartTableGroup.getNavigablePath(),
				sqmJoin.getSqmJoinType().getCorrespondingSqlJoinType(),
				queryPartTableGroup,
				null
		);
		parentTableGroup.addTableGroupJoin( tableGroupJoin );
		// add any additional join restrictions
		if ( sqmJoin.getJoinPredicate() != null ) {
			final SqmJoin<?, ?> oldJoin = currentlyProcessingJoin;
			currentlyProcessingJoin = sqmJoin;
			tableGroupJoin.applyPredicate( visitNestedTopLevelPredicate( sqmJoin.getJoinPredicate() ) );
			currentlyProcessingJoin = oldJoin;
		}
		if ( transitive ) {
			consumeExplicitJoins( sqmJoin, queryPartTableGroup );
		}
		return queryPartTableGroup;
	}
	/**
	 * Translates a join against a set-returning function into a function table group
	 * and attaches it to {@code parentTableGroup}.
	 *
	 * @param sqmJoin the SQM function join to translate
	 * @param parentTableGroup the table group to which the join is attached
	 * @param transitive whether joins declared on {@code sqmJoin} itself are consumed as well
	 * @return the table group created for the function join
	 */
	private TableGroup consumeFunctionJoin(SqmFunctionJoin<?> sqmJoin, TableGroup parentTableGroup, boolean transitive) {
		final SqlAstJoinType correspondingSqlJoinType = sqmJoin.getSqmJoinType().getCorrespondingSqlJoinType();
		final TableGroup tableGroup = createFunctionTableGroup(
				sqmJoin.getFunction(),
				sqmJoin.getNavigablePath(),
				sqmJoin.getExplicitAlias(),
				sqmJoin.isLateral(),
				// inner/cross joins may not produce null rows, which allows some optimizations
				correspondingSqlJoinType == SqlAstJoinType.INNER
						|| correspondingSqlJoinType == SqlAstJoinType.CROSS,
				// whether an ordinality/index column is needed for this function
				sqmJoin.getReusablePath( CollectionPart.Nature.INDEX.getName() ) != null
		);
		getFromClauseIndex().register( sqmJoin, tableGroup );
		final TableGroupJoin tableGroupJoin = new TableGroupJoin(
				tableGroup.getNavigablePath(),
				correspondingSqlJoinType,
				tableGroup,
				null
		);
		parentTableGroup.addTableGroupJoin( tableGroupJoin );
		// add any additional join restrictions
		if ( sqmJoin.getJoinPredicate() != null ) {
			final SqmJoin<?, ?> oldJoin = currentlyProcessingJoin;
			currentlyProcessingJoin = sqmJoin;
			tableGroupJoin.applyPredicate( visitNestedTopLevelPredicate( sqmJoin.getJoinPredicate() ) );
			currentlyProcessingJoin = oldJoin;
		}
		if ( transitive ) {
			consumeExplicitJoins( sqmJoin, tableGroup );
		}
		return tableGroup;
	}
	/**
	 * Translates a join against a CTE (common table expression) into a CTE table group
	 * and attaches it to {@code parentTableGroup}.
	 *
	 * @param sqmJoin the SQM CTE join to translate
	 * @param parentTableGroup the table group to which the join is attached
	 * @param transitive whether joins declared on {@code sqmJoin} itself are consumed as well
	 * @return the table group created for the CTE join
	 */
	private TableGroup consumeCteJoin(SqmCteJoin<?> sqmJoin, TableGroup parentTableGroup, boolean transitive) {
		final SqlAstJoinType correspondingSqlJoinType = sqmJoin.getSqmJoinType().getCorrespondingSqlJoinType();
		final TableGroup tableGroup = createCteTableGroup(
				getCteName( sqmJoin.getCte().getCteTable() ),
				sqmJoin.getNavigablePath(),
				sqmJoin.getExplicitAlias(),
				// inner/cross joins may not produce null rows, which allows some optimizations
				correspondingSqlJoinType == SqlAstJoinType.INNER
						|| correspondingSqlJoinType == SqlAstJoinType.CROSS
		);
		getFromClauseIndex().register( sqmJoin, tableGroup );
		final TableGroupJoin tableGroupJoin = new TableGroupJoin(
				tableGroup.getNavigablePath(),
				correspondingSqlJoinType,
				tableGroup,
				null
		);
		parentTableGroup.addTableGroupJoin( tableGroupJoin );
		// add any additional join restrictions
		if ( sqmJoin.getJoinPredicate() != null ) {
			final SqmJoin<?, ?> oldJoin = currentlyProcessingJoin;
			currentlyProcessingJoin = sqmJoin;
			tableGroupJoin.applyPredicate( visitNestedTopLevelPredicate( sqmJoin.getJoinPredicate() ) );
			currentlyProcessingJoin = oldJoin;
		}
		if ( transitive ) {
			consumeExplicitJoins( sqmJoin, tableGroup );
		}
		return tableGroup;
	}
private TableGroup consumePluralPartJoin(SqmPluralPartJoin<?, ?> sqmJoin, TableGroup lhsTableGroup, boolean transitive) {
final PluralTableGroup pluralTableGroup = (PluralTableGroup) lhsTableGroup;
final TableGroup tableGroup = getPluralPartTableGroup( pluralTableGroup, sqmJoin.getReferencedPathSource() );
getFromClauseIndex().register( sqmJoin, tableGroup );
assert sqmJoin.getJoinPredicate() == null;
if ( transitive ) {
consumeExplicitJoins( sqmJoin, tableGroup );
}
return tableGroup;
}
private TableGroup getPluralPartTableGroup(PluralTableGroup pluralTableGroup, SqmPathSource<?> pathSource) {
final CollectionPart.Nature nature = CollectionPart.Nature.fromNameExact( pathSource.getPathName() );
if ( nature != null ) {
switch ( nature ) {
case INDEX:
return pluralTableGroup.getIndexTableGroup();
case ELEMENT:
return pluralTableGroup.getElementTableGroup();
}
}
throw new UnsupportedOperationException( "Unsupported plural part join nature: " + nature );
}
	/**
	 * Convenience overload of {@code prepareReusablePath} using the current
	 * {@link FromClauseIndex} and disallowing left joins.
	 */
	private <X> X prepareReusablePath(SqmPath<?> sqmPath, Supplier<X> supplier) {
		return prepareReusablePath( sqmPath, fromClauseIndexStack.getCurrent(), supplier, false );
	}
private <X> X prepareReusablePath(
SqmPath<?> sqmPath,
FromClauseIndex fromClauseIndex,
Supplier<X> supplier,
boolean allowLeftJoins) {
final Consumer<TableGroup> implicitJoinChecker;
implicitJoinChecker = tg -> {};
// if ( getCurrentClauseStack().getCurrent() != Clause.SET_EXPRESSION ) {
// implicitJoinChecker = tg -> {};
// }
// else {
// implicitJoinChecker = BaseSqmToSqlAstConverter::verifyManipulationImplicitJoin;
// }
prepareReusablePath( fromClauseIndex, sqmPath, implicitJoinChecker );
// Create the table group for every path that can potentially require one,
// as some paths require joining the target table i.e. inverse one-to-one
// Note that this will not necessarily create joins immediately, as table groups are lazy
if ( sqmPath instanceof SqmEntityValuedSimplePath<?>
|| sqmPath instanceof SqmEmbeddedValuedSimplePath<?>
|| sqmPath instanceof SqmAnyValuedSimplePath<?> ) {
final TableGroup existingTableGroup =
fromClauseIndex.findTableGroupForGetOrCreate( sqmPath.getNavigablePath(), allowLeftJoins );
if ( existingTableGroup == null ) {
final TableGroup createdTableGroup = createTableGroup(
getActualTableGroup(
fromClauseIndex.getTableGroup( sqmPath.getLhs().getNavigablePath() ),
sqmPath
),
sqmPath,
allowLeftJoins
);
if ( createdTableGroup != null && sqmPath instanceof SqmTreatedPath<?, ?> ) {
fromClauseIndex.register( sqmPath, createdTableGroup );
}
}
}
return supplier.get();
}
	/**
	 * Recursively prepares the table group chain for the given path, walking up to its
	 * root and creating or reusing a table group for each parent path part.
	 *
	 * @param fromClauseIndex the index in which table groups are looked up and registered
	 * @param path the path whose parent chain should be prepared
	 * @param implicitJoinChecker invoked for every newly created table group
	 * @return the table group for the path's parent, or {@code null} if the path has no parent
	 *         and is not a composite-valued function path
	 */
	private TableGroup prepareReusablePath(
			FromClauseIndex fromClauseIndex,
			JpaPath<?> path,
			Consumer<TableGroup> implicitJoinChecker) {
		final SqmPath<?> sqmPath = (SqmPath<?>) path;
		final SqmPath<?> parentPath;
		final boolean treated;
		// For a treat, prepare the wrapped path and remember to register the treat alias
		if ( sqmPath instanceof SqmTreatedPath<?, ?> treatedPath ) {
			parentPath = treatedPath.getWrappedPath();
			treated = true;
		}
		else {
			parentPath = sqmPath.getLhs();
			treated = false;
		}
		if ( parentPath == null ) {
			// Recursion anchor: only composite-valued function paths produce a table group here
			return sqmPath instanceof SqmFunctionPath<?> functionPath
					&& functionPath.getReferencedPathSource() instanceof CompositeSqmPathSource<?>
					? (TableGroup) visitFunctionPath( functionPath )
					: null;
		}
		final TableGroup parentTableGroup = getActualTableGroup(
				fromClauseIndex.findTableGroupForGetOrCreate( parentPath.getNavigablePath() ),
				sqmPath
		);
		if ( parentTableGroup == null ) {
			// Parent not yet resolved - recurse to create the whole parent chain first
			final TableGroup createdParentTableGroup =
					prepareReusablePath( fromClauseIndex, parentPath, implicitJoinChecker );
			if ( createdParentTableGroup == null ) {
				throw new SqlTreeCreationException( "Could not locate TableGroup - " + parentPath.getNavigablePath() );
			}
			final TableGroup newTableGroup;
			if ( parentPath instanceof SqmTreatedPath<?, ?> ) {
				fromClauseIndex.register( parentPath, createdParentTableGroup );
				newTableGroup = createdParentTableGroup;
			}
			else if ( createdParentTableGroup instanceof PluralTableGroup pluralTableGroup ) {
				// Plural paths resolve to the element/index part table group rather than a new join
				final var nature = CollectionPart.Nature.fromName( parentPath.getNavigablePath().getLocalName() );
				assert nature != null;
				newTableGroup = pluralTableGroup.getTableGroup( nature );
			}
			else {
				newTableGroup = getActualTableGroup(
						createTableGroup( createdParentTableGroup, parentPath, false ),
						sqmPath
				);
			}
			if ( newTableGroup != null ) {
				implicitJoinChecker.accept( newTableGroup );
				registerPathAttributeEntityNameUsage( sqmPath, newTableGroup );
				if ( treated ) {
					fromClauseIndex.register( sqmPath, newTableGroup );
				}
			}
			return newTableGroup;
		}
		else if ( treated ) {
			fromClauseIndex.register( sqmPath, parentTableGroup );
		}
		upgradeToInnerJoinIfNeeded( parentTableGroup, sqmPath, parentPath, fromClauseIndex );
		registerPathAttributeEntityNameUsage( sqmPath, parentTableGroup );
		return parentTableGroup;
	}
	/**
	 * Upgrades a previously created left join for a to-one association to an inner join
	 * when the association path is dereferenced outside the select clause, since usage
	 * outside the select clause implies the association must be non-null.
	 */
	private void upgradeToInnerJoinIfNeeded(
			TableGroup parentTableGroup,
			SqmPath<?> sqmPath,
			SqmPath<?> parentPath,
			FromClauseIndex fromClauseIndex) {
		if ( getCurrentClauseStack().getCurrent() != Clause.SELECT
				&& parentPath instanceof SqmSimplePath<?>
				&& CollectionPart.Nature.fromName( parentPath.getNavigablePath().getLocalName() ) == null
				&& parentPath.getParentPath() != null
				&& parentTableGroup.getModelPart() instanceof ToOneAttributeMapping toOneAttributeMapping ) {
			// we need to handle the case of an implicit path involving a to-one
			// association that path has been previously joined using left.
			// typically, this indicates that the to-one is being
			// fetched - the fetch would use a left-join. however, since the path is
			// used outside the select-clause also, we need to force the join to be inner
			final String partName = sqmPath.getResolvedModel().getPathName();
			// No upgrade needed when the dereferenced sub-part is part of the FK and
			// the FK optimization applies - the join result isn't actually needed then
			if ( !toOneAttributeMapping.isFkOptimizationAllowed()
					|| !( toOneAttributeMapping.findSubPart( partName ) instanceof ValuedModelPart pathPart )
					|| !toOneAttributeMapping.getForeignKeyDescriptor().isKeyPart( pathPart ) ) {
				final NavigablePath parentParentPath = parentPath.getParentPath().getNavigablePath();
				final TableGroup parentParentTableGroup = fromClauseIndex.findTableGroup( parentParentPath );
				final TableGroupJoin tableGroupJoin = parentParentTableGroup.findTableGroupJoin( parentTableGroup );
				// We might get null here if the parentParentTableGroup is correlated and tableGroup is from the outer query
				// In this case, we don't want to override the join type, though it is debatable if it's ok to reuse a join in this case
				if ( tableGroupJoin != null ) {
					tableGroupJoin.setJoinType( SqlAstJoinType.INNER );
				}
			}
		}
	}
	/**
	 * Prepares the table group for a path that appears in the select clause, creating
	 * one where necessary and registering the entity-name projection usage so that
	 * inheritance discriminators can be pruned correctly.
	 */
	private void prepareForSelection(SqmPath<?> selectionPath) {
		// Don't create joins for plural part paths as that will be handled
		// through a cardinality preserving mechanism in visitIndexAggregateFunction/visitElementAggregateFunction
		final SqmPath<?> path =
				selectionPath instanceof AbstractSqmSpecificPluralPartPath<?>
						? selectionPath.getLhs().getLhs()
						: selectionPath;
		final FromClauseIndex fromClauseIndex = getFromClauseIndex();
		final TableGroup tableGroup = fromClauseIndex.findTableGroupForGetOrCreate( path.getNavigablePath() );
		if ( tableGroup == null ) {
			prepareReusablePath( path, () -> null );
			if ( path.getLhs() != null && !( path instanceof SqmEntityValuedSimplePath<?>
					|| path instanceof SqmEmbeddedValuedSimplePath<?>
					|| path instanceof SqmAnyValuedSimplePath<?>
					|| path instanceof SqmTreatedPath<?, ?> ) ) {
				// Since this is a selection, we must create a table group for the path as a DomainResult will be created
				// But only create it for paths that are not handled by #prepareReusablePath anyway
				final TableGroup createdTableGroup = createTableGroup(
						getActualTableGroup( fromClauseIndex.getTableGroup( path.getLhs().getNavigablePath() ), path ),
						path,
						false
				);
				if ( createdTableGroup != null ) {
					registerEntityNameProjectionUsage( path, createdTableGroup );
				}
			}
			else {
				registerEntityNameProjectionUsage( path, fromClauseIndex.findTableGroup( path.getNavigablePath() ) );
			}
		}
		else {
			registerEntityNameProjectionUsage( path, tableGroup );
			if ( path instanceof SqmSimplePath<?> && CollectionPart.Nature.fromName( path.getNavigablePath().getLocalName() ) == null ) {
				// If a table group for a selection already exists, we must make sure that the join type is INNER
				fromClauseIndex.findTableGroup( path.getNavigablePath().getParent() )
						.findTableGroupJoin( tableGroup )
						.setJoinType( SqlAstJoinType.INNER );
			}
		}
	}
	/**
	 * Creates (or reuses) a table group for an implicit join of {@code joinedPath} against
	 * {@code parentTableGroup}, registering the result in the current {@link FromClauseIndex}.
	 *
	 * @param parentTableGroup the table group of the path's left-hand side
	 * @param joinedPath the path being dereferenced
	 * @param allowLeftJoins whether a left join may be used for the implicit join
	 * @return the created or reused table group, or {@code null} if the path's sub-part
	 *         does not produce a table group join
	 */
	private TableGroup createTableGroup(TableGroup parentTableGroup, SqmPath<?> joinedPath, boolean allowLeftJoins) {
		final SqmPath<?> lhsPath = joinedPath.getLhs();
		final FromClauseIndex fromClauseIndex = getFromClauseIndex();
		// For a treated LHS, resolve the sub-part against the treat-target's persister
		final ModelPart subPart = parentTableGroup.getModelPart().findSubPart(
				joinedPath.getReferencedPathSource().getPathName(),
				lhsPath instanceof SqmTreatedPath<?, ?> treatedPath && treatedPath.getTreatTarget().getPersistenceType() == ENTITY
						? resolveEntityPersister( (EntityDomainType<?>) treatedPath.getTreatTarget() )
						: null
		);
		final TableGroup tableGroup;
		if ( subPart instanceof TableGroupJoinProducer joinProducer ) {
			if ( fromClauseIndex.findTableGroupOnCurrentFromClause( parentTableGroup.getNavigablePath() ) == null
					&& !isRecursiveCte( parentTableGroup ) ) {
				final SqlAstQueryNodeProcessingState queryNodeProcessingState = currentQueryNodeProcessingState();
				// The parent table group is on a parent query, so we need a root table group
				tableGroup = joinProducer.createRootTableGroupJoin(
						joinedPath.getNavigablePath(),
						parentTableGroup,
						null,
						null,
						null,
						false,
						queryNodeProcessingState::applyPredicate,
						this
				);
				queryNodeProcessingState.getFromClause().addRoot( tableGroup );
			}
			else {
				// For the mappedBy/not-found side of a to-one, a compatible LEFT join may be reused
				final boolean mappedByOrNotFoundToOne = isMappedByOrNotFoundToOne( joinProducer );
				final TableGroupJoin compatibleLeftJoin =
						mappedByOrNotFoundToOne
								? parentTableGroup.findCompatibleJoin( joinProducer, SqlAstJoinType.LEFT )
								: null;
				final SqlAstJoinType sqlAstJoinType = mappedByOrNotFoundToOne ? SqlAstJoinType.LEFT : null;
				final TableGroup compatibleTableGroup =
						compatibleLeftJoin != null
								? compatibleLeftJoin.getJoinedGroup()
								: parentTableGroup.findCompatibleJoinedGroup( joinProducer, SqlAstJoinType.INNER );
				if ( compatibleTableGroup == null ) {
					final TableGroupJoin tableGroupJoin = joinProducer.createTableGroupJoin(
							joinedPath.getNavigablePath(),
							parentTableGroup,
							null,
							null,
							allowLeftJoins ? sqlAstJoinType : null,
							false,
							false,
							this
					);
					// Implicit joins in the ON clause need to be added as nested table group joins
					final boolean nested = currentlyProcessingJoin != null;
					if ( nested ) {
						parentTableGroup.addNestedTableGroupJoin( tableGroupJoin );
					}
					else {
						parentTableGroup.addTableGroupJoin( tableGroupJoin );
					}
					tableGroup = tableGroupJoin.getJoinedGroup();
				}
				else {
					tableGroup = compatibleTableGroup;
					// Also register the table group under its original navigable path, which possibly contains an alias
					// This is important, as otherwise we might create new joins in subqueries which are unnecessary
					fromClauseIndex.registerTableGroup( tableGroup.getNavigablePath(), tableGroup );
					// Upgrade the join type to inner if the context doesn't allow left joins
					if ( compatibleLeftJoin != null && !allowLeftJoins ) {
						compatibleLeftJoin.setJoinType( SqlAstJoinType.INNER );
					}
				}
			}
			fromClauseIndex.register( joinedPath, tableGroup );
			registerPluralTableGroupParts( joinedPath.getNavigablePath(), tableGroup );
		}
		else {
			tableGroup = null;
		}
		return tableGroup;
	}
private boolean isMappedByOrNotFoundToOne(TableGroupJoinProducer joinProducer) {
if ( joinProducer instanceof ToOneAttributeMapping toOne ) {
return toOne.hasNotFoundAction()
// ToOne( mappedBy = "..." ) always has a referenced property name and is the target side
|| toOne.getReferencedPropertyName() != null && toOne.getSideNature() == ForeignKeyDescriptor.Nature.TARGET;
}
return false;
}
private boolean isRecursiveCte(TableGroup tableGroup) {
return tableGroup instanceof CteTableGroup cteTableGroup
&& cteContainer.getCteStatement( cteTableGroup.getPrimaryTableReference().getTableId() ).isRecursive();
}
	/**
	 * Convenience overload registering the part table groups of a plural table group
	 * under their own navigable paths.
	 */
	private void registerPluralTableGroupParts(TableGroup tableGroup) {
		registerPluralTableGroupParts( null, tableGroup );
	}
private void registerPluralTableGroupParts(NavigablePath navigablePath, TableGroup tableGroup) {
if ( tableGroup instanceof PluralTableGroup pluralTableGroup ) {
if ( pluralTableGroup.getElementTableGroup() != null ) {
getFromClauseAccess().registerTableGroup(
navigablePath == null || navigablePath == tableGroup.getNavigablePath()
? pluralTableGroup.getElementTableGroup().getNavigablePath()
: navigablePath.append( CollectionPart.Nature.ELEMENT.getName() ),
pluralTableGroup.getElementTableGroup()
);
}
if ( pluralTableGroup.getIndexTableGroup() != null ) {
getFromClauseAccess().registerTableGroup(
navigablePath == null || navigablePath == tableGroup.getNavigablePath()
? pluralTableGroup.getIndexTableGroup().getNavigablePath()
: navigablePath.append( CollectionPart.Nature.INDEX.getName() ),
pluralTableGroup.getIndexTableGroup()
);
}
}
}
@Override
public @Nullable SqlAstJoinType getCurrentlyProcessingJoinType() {
return currentlyProcessingJoin == null
? null
: currentlyProcessingJoin.getSqmJoinType().getCorrespondingSqlJoinType();
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SqmPath handling
// - Note that SqmFrom references defined in the FROM-clause are already
// handled during `#visitFromClause`
@Override
public Expression visitRootPath(SqmRoot<?> sqmRoot) {
final TableGroup resolved = getFromClauseAccess().findTableGroup( sqmRoot.getNavigablePath() );
if ( resolved != null ) {
// LOG.tracef( "SqmRoot [%s] resolved to existing TableGroup [%s]", sqmRoot, resolved );
return visitTableGroup( resolved, sqmRoot );
}
throw new InterpretationException( "SqmRoot not yet resolved to TableGroup" );
}
@Override
public Object visitRootDerived(SqmDerivedRoot<?> sqmRoot) {
final TableGroup resolved = getFromClauseAccess().findTableGroup( sqmRoot.getNavigablePath() );
if ( resolved != null ) {
// LOG.tracef( "SqmDerivedRoot [%s] resolved to existing TableGroup [%s]", sqmRoot, resolved );
return visitTableGroup( resolved, sqmRoot );
}
throw new InterpretationException( "SqmDerivedRoot not yet resolved to TableGroup" );
}
@Override
public Object visitRootFunction(SqmFunctionRoot<?> sqmRoot) {
final TableGroup resolved = getFromClauseAccess().findTableGroup( sqmRoot.getNavigablePath() );
if ( resolved != null ) {
// LOG.tracef( "SqmFunctionRoot [%s] resolved to existing TableGroup [%s]", sqmRoot, resolved );
return visitTableGroup( resolved, sqmRoot );
}
throw new InterpretationException( "SqmFunctionRoot not yet resolved to TableGroup" );
}
@Override
public Object visitRootCte(SqmCteRoot<?> sqmRoot) {
final TableGroup resolved = getFromClauseAccess().findTableGroup( sqmRoot.getNavigablePath() );
if ( resolved != null ) {
// LOG.tracef( "SqmCteRoot [%s] resolved to existing TableGroup [%s]", sqmRoot, resolved );
return visitTableGroup( resolved, sqmRoot );
}
throw new InterpretationException( "SqmCteRoot not yet resolved to TableGroup" );
}
@Override
public Expression visitQualifiedAttributeJoin(SqmAttributeJoin<?, ?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmAttributeJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmAttributeJoin not yet resolved to TableGroup" );
}
@Override
public Expression visitQualifiedDerivedJoin(SqmDerivedJoin<?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmDerivedJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmDerivedJoin not yet resolved to TableGroup" );
}
@Override
public Object visitQualifiedFunctionJoin(SqmFunctionJoin<?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmFunctionJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmFunctionJoin not yet resolved to TableGroup" );
}
@Override
public Object visitQualifiedCteJoin(SqmCteJoin<?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmCteJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmCteJoin not yet resolved to TableGroup" );
}
@Override
public Expression visitCrossJoin(SqmCrossJoin<?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmCrossJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmCrossJoin not yet resolved to TableGroup" );
}
@Override
public Object visitPluralPartJoin(SqmPluralPartJoin<?, ?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmPluralPartJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmPluralPartJoin not yet resolved to TableGroup" );
}
@Override
public Expression visitQualifiedEntityJoin(SqmEntityJoin<?,?> sqmJoin) {
final TableGroup existing = getFromClauseAccess().findTableGroup( sqmJoin.getNavigablePath() );
if ( existing != null ) {
// LOG.tracef( "SqmEntityJoin [%s] resolved to existing TableGroup [%s]", sqmJoin, existing );
return visitTableGroup( existing, sqmJoin );
}
throw new InterpretationException( "SqmEntityJoin not yet resolved to TableGroup" );
}
private Expression visitTableGroup(TableGroup tableGroup, SqmPath<?> path) {
final ModelPartContainer tableGroupModelPart = tableGroup.getModelPart();
final ModelPart actualModelPart;
final NavigablePath navigablePath;
if ( tableGroupModelPart instanceof PluralAttributeMapping ) {
actualModelPart = ( (PluralAttributeMapping) tableGroupModelPart ).getElementDescriptor();
navigablePath = tableGroup.getNavigablePath().append( actualModelPart.getPartName() );
}
else {
actualModelPart = tableGroupModelPart;
navigablePath = tableGroup.getNavigablePath();
}
return createExpression( tableGroup, navigablePath, actualModelPart, path );
}
	/**
	 * Builds the SQL AST expression for a model part resolved from a path, taking the
	 * inferred (comparison-context) mapping into account for entity-valued parts so that
	 * foreign-key columns can be used instead of joining the target table where possible.
	 *
	 * @param tableGroup the table group the path resolved to
	 * @param navigablePath the navigable path of the expression
	 * @param actualModelPart the model part the expression is created for
	 * @param path the originating SQM path (used for treat restrictions)
	 * @return the expression, wrapped with a treat restriction if the path is treated
	 */
	private Expression createExpression(
			TableGroup tableGroup,
			NavigablePath navigablePath,
			ModelPart actualModelPart,
			SqmPath<?> path) {
		final Expression result;
		if ( actualModelPart instanceof EntityValuedModelPart entityValuedModelPart ) {
			final EntityValuedModelPart inferredEntityMapping = (EntityValuedModelPart) getInferredValueMapping();
			final ModelPart resultModelPart;
			final EntityValuedModelPart interpretationModelPart;
			final TableGroup tableGroupToUse;
			if ( inferredEntityMapping == null ) {
				// When the inferred mapping is null, we try to resolve to the FK by default, which is fine because
				// expansion to all target columns for select and group by clauses is handled in EntityValuedPathInterpretation
				if ( entityValuedModelPart instanceof EntityAssociationMapping associationMapping
						&& isFkOptimizationAllowed( path, associationMapping ) ) {
					// If the table group uses an association mapping that is not a one-to-many,
					// we make use of the FK model part - unless the path is a non-optimizable join,
					// for which we should always use the target's identifier to preserve semantics
					final ModelPart targetPart = associationMapping.getForeignKeyDescriptor().getPart(
							associationMapping.getSideNature()
					);
					if ( entityValuedModelPart.getPartMappingType() == associationMapping.getPartMappingType() ) {
						resultModelPart = targetPart;
					}
					else {
						// If the table group is for a different mapping type i.e. an inheritance subtype,
						// lookup the target part on that mapping type
						resultModelPart = entityValuedModelPart.findSubPart( targetPart.getPartName(), null );
					}
				}
				else if ( entityValuedModelPart instanceof AnonymousTupleEntityValuedModelPart ) {
					// The FK of AnonymousTupleEntityValuedModelParts always refers to the PK, so we can safely use the FK
					resultModelPart = ( (AnonymousTupleEntityValuedModelPart) entityValuedModelPart ).getForeignKeyPart();
				}
				else {
					// Otherwise, we use the identifier mapping of the target entity type
					resultModelPart = entityValuedModelPart.getEntityMappingType().getIdentifierMapping();
				}
				interpretationModelPart = entityValuedModelPart;
				tableGroupToUse = null;
			}
			else if ( inferredEntityMapping instanceof ToOneAttributeMapping toOneAttributeMapping ) {
				// If the inferred mapping is a to-one association,
				// we use the FK target part, which must be located on the entity mapping
				final ModelPart targetPart = toOneAttributeMapping.getForeignKeyDescriptor().getPart(
						toOneAttributeMapping.getSideNature().inverse()
				);
				if ( entityValuedModelPart.getPartMappingType() == toOneAttributeMapping.getPartMappingType() ) {
					resultModelPart = targetPart;
				}
				else {
					// If the table group is for a different mapping type i.e. an inheritance subtype,
					// lookup the target part on that mapping type
					resultModelPart = entityValuedModelPart.findSubPart( targetPart.getPartName(), null );
				}
				interpretationModelPart = toOneAttributeMapping;
				tableGroupToUse = null;
			}
			else if ( inferredEntityMapping instanceof EntityCollectionPart collectionPart ) {
				// If the inferred mapping is a collection part, we try to make use of the FK again to avoid joins
				if ( tableGroup.getModelPart() instanceof CollectionPart ) {
					// Use the collection's own table group rather than the part table group
					tableGroupToUse = findTableGroup( tableGroup.getNavigablePath().getParent() );
				}
				else {
					tableGroupToUse = tableGroup;
				}
				if ( collectionPart.getCardinality() == EntityCollectionPart.Cardinality.ONE_TO_MANY ) {
					resultModelPart = collectionPart.getAssociatedEntityMappingType().getIdentifierMapping();
				}
				else {
					assert collectionPart.getCardinality() == EntityCollectionPart.Cardinality.MANY_TO_MANY;
					final ManyToManyCollectionPart manyToManyPart = (ManyToManyCollectionPart) collectionPart;
					if ( entityValuedModelPart == collectionPart ) {
						// When we compare the same collection parts, we can just use the FK part
						resultModelPart = manyToManyPart.getForeignKeyDescriptor().getKeyPart();
					}
					else if ( entityValuedModelPart instanceof EntityAssociationMapping tableGroupAssociation ) {
						// If the table group model part is an association, we check if the FK targets are compatible
						final ModelPart pathTargetPart = tableGroupAssociation
								.getForeignKeyDescriptor()
								.getPart( tableGroupAssociation.getSideNature().inverse() );
						final ModelPart inferredTargetPart = manyToManyPart
								.getForeignKeyDescriptor()
								.getPart( ForeignKeyDescriptor.Nature.TARGET );
						// If the inferred association and table group association targets are the same,
						// or the table group association refers to the primary key, we can safely use the FK part
						if ( pathTargetPart == inferredTargetPart || tableGroupAssociation.isReferenceToPrimaryKey() ) {
							resultModelPart = tableGroupAssociation.getForeignKeyDescriptor().getKeyPart();
						}
						else {
							// Otherwise, we must force the use of the identifier mapping and possibly create a join,
							// because comparing by primary key is the only sensible thing to do in this case.
							// Note that EntityValuedPathInterpretation does the same
							resultModelPart = collectionPart.getAssociatedEntityMappingType().getIdentifierMapping();
						}
					}
					else if ( entityValuedModelPart instanceof AnonymousTupleEntityValuedModelPart ) {
						resultModelPart = ( (AnonymousTupleEntityValuedModelPart) entityValuedModelPart ).getForeignKeyPart();
					}
					else {
						// Since the table group model part is an EntityMappingType,
						// we can render the FK target model part of the inferred collection part,
						// which might be a UK, but usually a PK
						assert entityValuedModelPart instanceof EntityMappingType;
						if ( collectionPart.getCardinality() == EntityCollectionPart.Cardinality.ONE_TO_MANY ) {
							// When the inferred mapping is a one-to-many collection part,
							// we will render the entity identifier mapping for that collection part,
							// so we will have to do the same for the EntityMappingType side
							resultModelPart = collectionPart.getAssociatedEntityMappingType().getIdentifierMapping();
						}
						else {
							resultModelPart = manyToManyPart
									.getForeignKeyDescriptor()
									.getPart( manyToManyPart.getSideNature().inverse() );
						}
					}
				}
				interpretationModelPart = inferredEntityMapping;
			}
			else if ( entityValuedModelPart instanceof AnonymousTupleEntityValuedModelPart ) {
				resultModelPart = ( (AnonymousTupleEntityValuedModelPart) entityValuedModelPart ).getForeignKeyPart();
				interpretationModelPart = inferredEntityMapping;
				tableGroupToUse = null;
			}
			else {
				// Render the identifier mapping if the inferred mapping is an EntityMappingType
				assert inferredEntityMapping instanceof EntityMappingType;
				resultModelPart = ( (EntityMappingType) inferredEntityMapping ).getIdentifierMapping();
				interpretationModelPart = inferredEntityMapping;
				tableGroupToUse = null;
			}
			// For a treated path, interpret against the treat target's entity descriptor
			final EntityMappingType treatedMapping;
			if ( path instanceof SqmTreatedPath<?, ?> && ( (SqmTreatedPath<?, ?>) path ).getTreatTarget().getPersistenceType() == ENTITY ) {
				final ManagedDomainType<?> treatTarget = ( (SqmTreatedPath<?, ?>) path ).getTreatTarget();
				treatedMapping = creationContext.getMappingMetamodel()
						.findEntityDescriptor( treatTarget.getTypeName() );
			}
			else {
				treatedMapping = interpretationModelPart.getEntityMappingType();
			}
			result = EntityValuedPathInterpretation.from(
					navigablePath,
					tableGroupToUse == null ? tableGroup : tableGroupToUse,
					resultModelPart,
					interpretationModelPart,
					treatedMapping,
					this
			);
		}
		else if ( actualModelPart instanceof EmbeddableValuedModelPart mapping ) {
			result = new EmbeddableValuedPathInterpretation<>(
					mapping.toSqlExpression(
							findTableGroup( navigablePath.getParent() ),
							currentClauseStack.getCurrent(),
							this,
							getSqlAstCreationState()
					),
					navigablePath,
					mapping,
					tableGroup
			);
		}
		else {
			final BasicValuedModelPart mapping = actualModelPart.asBasicValuedModelPart();
			if ( mapping != null ) {
				final TableReference tableReference = tableGroup.resolveTableReference(
						navigablePath.append( actualModelPart.getPartName() ),
						mapping,
						mapping.getContainingTableExpression()
				);
				final Expression expression =
						getSqlExpressionResolver()
								.resolveSqlExpression( tableReference, mapping );
				final ColumnReference columnReference;
				if ( expression instanceof ColumnReference columnRef ) {
					columnReference = columnRef;
				}
				else if ( expression instanceof SqlSelectionExpression sqlSelectionExpression ) {
					columnReference = (ColumnReference) sqlSelectionExpression.getSelection().getExpression();
				}
				else {
					throw new UnsupportedOperationException( "Unsupported basic-valued path expression : " + expression );
				}
				result = new BasicValuedPathInterpretation<>(
						columnReference,
						navigablePath,
						mapping,
						tableGroup
				);
			}
			else if ( actualModelPart instanceof AnonymousTupleTableGroupProducer tableGroupProducer ) {
				// Derived roots: recurse into the tuple's element sub-part if one exists
				final ModelPart subPart = tableGroupProducer.findSubPart(
						CollectionPart.Nature.ELEMENT.getName(),
						null
				);
				if ( subPart != null ) {
					return createExpression( tableGroup, navigablePath, subPart, path );
				}
				else {
					throw new SemanticException(
							"The derived SqmFrom" + ( (AnonymousTupleType<?>) path.getReferencedPathSource() ).getComponentNames() + " can not be used in a context where the expression needs to " +
									"be expanded to identifying parts, because a derived model part does not have identifying parts. " +
									"Replace uses of the root with paths instead e.g. `derivedRoot.get(\"alias1\")` or `derivedRoot.alias1`"
					);
				}
			}
			else {
				throw new SemanticException(
						"The SqmFrom node [" + path + "] can not be used in a context where the expression needs to " +
								"be expanded to identifying parts, because the model part [" + actualModelPart +
								"] does not have identifying parts."
				);
			}
		}
		return withTreatRestriction( result, path );
	}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SqmPath
	/**
	 * Interprets a basic-valued path, additionally applying any in-progress duration
	 * arithmetic: scale propagation, timestamp adjustment, or 'by unit' conversion.
	 */
	@Override
	public Expression visitBasicValuedPath(SqmBasicValuedSimplePath<?> sqmPath) {
		// Resolve (and cache for re-use) the column-level interpretation of this path
		final BasicValuedPathInterpretation<?> path = prepareReusablePath(
				sqmPath,
				() -> BasicValuedPathInterpretation.from(
						sqmPath,
						this,
						jpaQueryComplianceEnabled
				)
		);
		Expression result = path;
		if ( isDuration( sqmPath.getNodeType() ) ) {
			// Durations are stored (at least by default)
			// in a NUMERIC column in seconds with fractional
			// seconds in the decimal places
			// which we need to convert to the given unit
			//
			// This does not work at all for a Duration
			// mapped to a VARCHAR column, in which case
			// we would need to parse the weird format
			// defined by java.time.Duration (a bit hard
			// to do without some custom function).
			// Nor does it work for databases which have
			// a well-defined INTERVAL type, but that is
			// something we could implement.

			//first let's apply the propagated scale
			Expression scaledExpression = applyScale( toSqlExpression( path ) );

			// we use SECOND, not NATIVE, as the unit
			// because that's how a Duration is persisted
			// in a database table column, and how it's
			// returned to a Java client

			if ( adjustedTimestamp != null ) {
				if ( appliedByUnit != null ) {
					// cannot both adjust a timestamp and apply a 'by unit' conversion
					throw new IllegalStateException();
				}
				// we're adding this variable duration to the
				// given date or timestamp, producing an
				// adjusted date or timestamp
				result = timestampadd().expression(
						(ReturnableType<?>) adjustedTimestampType,
						new DurationUnit( SECOND, basicType( Long.class ) ),
						scaledExpression,
						adjustedTimestamp
				);
			}
			else if ( appliedByUnit != null ) {
				// we're applying the 'by unit' operator,
				// producing a literal scalar value, so
				// we must convert this duration from
				// nanoseconds to the given unit
				result = unitConversion( scaledExpression );
			}
			else {
				// a "bare" Duration value in nanoseconds
				result = scaledExpression;
			}
		}
		return withTreatRestriction( result, sqmPath );
	}
private Expression unitConversion(Expression scaledExpression) {
final var durationType = (BasicValuedMapping) scaledExpression.getExpressionType();
final var scalarType = (BasicValuedMapping) appliedByUnit.getNodeType();
final var duration =
durationType.getSingleJdbcMapping().getJdbcType().isInterval()
// For interval types, we need to extract the epoch for integer arithmetic for the 'by unit' operator
? new Duration( extractEpoch( scaledExpression ), SECOND, durationType )
// Durations are stored as nanoseconds (see DurationJavaType)
: new Duration( scaledExpression, NANOSECOND, durationType );
return new Conversion( duration, appliedByUnit.getUnit().getUnit(), scalarType );
}
private Expression extractEpoch(Expression intervalExpression) {
final var intType = getTypeConfiguration().getBasicTypeForJavaType( Integer.class );
final var patternRenderer = new PatternRenderer( creationContext.getDialect().extractPattern( EPOCH ) );
return new SelfRenderingFunctionSqlAstExpression<>(
"extract",
(sqlAppender, sqlAstArguments, returnType, walker) ->
patternRenderer.render( sqlAppender, sqlAstArguments, walker ),
Arrays.asList( new ExtractUnit( EPOCH, intType ), intervalExpression ),
intType,
intType
);
}
@Override
public Expression visitEmbeddableValuedPath(SqmEmbeddedValuedSimplePath<?> sqmPath) {
return withTreatRestriction(
prepareReusablePath(
sqmPath,
() -> EmbeddableValuedPathInterpretation.from(
sqmPath,
this,
jpaQueryComplianceEnabled
)
),
sqmPath
);
}
@Override
public Expression visitAnyValuedValuedPath(SqmAnyValuedSimplePath<?> sqmPath) {
return withTreatRestriction(
prepareReusablePath( sqmPath, () -> DiscriminatedAssociationPathInterpretation.from( sqmPath, this ) ),
sqmPath
);
}
@Override
public Expression visitNonAggregatedCompositeValuedPath(NonAggregatedCompositeSimplePath<?> sqmPath) {
return withTreatRestriction(
prepareReusablePath(
sqmPath,
() -> NonAggregatedCompositeValuedPathInterpretation.from( sqmPath, this, this )
),
sqmPath
);
}
@Override
public Expression visitEntityValuedPath(SqmEntityValuedSimplePath<?> sqmPath) {
return withTreatRestriction(
prepareReusablePath( sqmPath, () -> EntityValuedPathInterpretation.from( sqmPath, getInferredValueMapping(), this ) ),
sqmPath
);
}
@Override
public Expression visitAnyDiscriminatorTypeExpression(AnyDiscriminatorSqmPath<?> sqmPath) {
return withTreatRestriction(
prepareReusablePath(
sqmPath,
() -> AnyDiscriminatorPathInterpretation.from( sqmPath, this )
),
sqmPath
);
}
@Override
public Expression visitPluralValuedPath(SqmPluralValuedSimplePath<?> sqmPath) {
return withTreatRestriction(
prepareReusablePath( sqmPath, () -> PluralValuedSimplePathInterpretation.from( sqmPath, this ) ),
sqmPath
);
}
	/**
	 * Interprets an {@code fk(...)} expression: resolves the foreign key column(s) of a
	 * to-one association directly from the key side, avoiding a join to the target table.
	 * Returns a single column expression for a simple FK, or a {@link SqlTuple} for a
	 * composite FK.
	 */
	@Override
	public Object visitFkExpression(SqmFkExpression<?> fkExpression) {
		final SqmPath<?> toOnePath = fkExpression.getLhs();
		final SqmPath<?> lhs = toOnePath.getLhs();
		// Make sure the owner of the to-one attribute has a TableGroup registered
		prepareReusablePath( lhs, () -> null );
		final TableGroup tableGroup = getFromClauseIndex().findTableGroup( lhs.getNavigablePath() );
		final ModelPart subPart = tableGroup.getModelPart()
				.findSubPart( toOnePath.getReferencedPathSource().getPathName(), null );
		assert subPart instanceof ToOneAttributeMapping;
		final ToOneAttributeMapping toOneMapping = (ToOneAttributeMapping) subPart;
		final ForeignKeyDescriptor fkDescriptor = toOneMapping.getForeignKeyDescriptor();
		final TableReference tableReference = tableGroup.resolveTableReference( toOneMapping.getContainingTableExpression() );
		// Use the FK part for the side on which the to-one attribute is mapped
		final ModelPart fkKeyPart = fkDescriptor.getPart( toOneMapping.getSideNature() );
		final BasicValuedModelPart basicFkPart = fkKeyPart.asBasicValuedModelPart();
		if ( basicFkPart != null ) {
			// Single-column FK: resolve the column expression directly
			return getSqlExpressionResolver().resolveSqlExpression(
					tableReference,
					basicFkPart
			);
		}
		else {
			// Composite FK: build a SQL tuple over all key columns
			assert fkKeyPart instanceof EmbeddableValuedModelPart;
			final EmbeddableValuedModelPart compositeFkPart = (EmbeddableValuedModelPart) fkKeyPart;
			final int count = compositeFkPart.getJdbcTypeCount();
			final ArrayList<Expression> tupleElements = new ArrayList<>( count );
			for ( int i = 0; i < count; i++ ) {
				tupleElements.add(
						getSqlExpressionResolver().resolveSqlExpression(
								tableReference,
								compositeFkPart.getSelectable( i )
						)
				);
			}
			return new SqlTuple( tupleElements, compositeFkPart );
		}
	}
@Override
public Object visitDiscriminatorPath(DiscriminatorSqmPath<?> sqmPath) {
return prepareReusablePath(
sqmPath,
() -> {
registerTypeUsage( sqmPath );
return DiscriminatorPathInterpretation.from( sqmPath, this );
}
);
}
protected Expression createMinOrMaxIndexOrElement(
AbstractSqmSpecificPluralPartPath<?> pluralPartPath,
boolean index,
String functionName) {
// Try to create a lateral sub-query join if possible which allows the re-use of the expression
if ( creationContext.getDialect().supportsLateral() ) {
return createLateralJoinExpression( pluralPartPath, index, functionName );
}
else {
return createCorrelatedAggregateSubQuery( pluralPartPath, index, functionName );
}
}
@Override
public Expression visitElementAggregateFunction(SqmElementAggregateFunction<?> path) {
return createMinOrMaxIndexOrElement( path, false, path.getFunctionName() );
}
@Override
public Expression visitIndexAggregateFunction(SqmIndexAggregateFunction<?> path) {
return createMinOrMaxIndexOrElement( path, true, path.getFunctionName() );
}
	/**
	 * Interprets a path de-referencing a function result. When the function returns an
	 * aggregate (struct-like) JDBC type, the result is exposed as a virtual table group
	 * so its members can be navigated; otherwise the plain function expression is returned.
	 */
	@Override
	public Expression visitFunctionPath(SqmFunctionPath<?> functionPath) {
		final NavigablePath navigablePath = functionPath.getNavigablePath();
		TableGroup tableGroup = getFromClauseAccess().findTableGroup( navigablePath );
		if ( tableGroup == null ) {
			final Expression functionExpression = (Expression) functionPath.getFunction().accept( this );
			final JdbcType jdbcType = functionExpression.getExpressionType().getSingleJdbcMapping().getJdbcType();
			if ( jdbcType instanceof AggregateJdbcType aggregateJdbcType ) {
				// Wrap the function expression in a virtual table group so sub-parts can be resolved
				tableGroup = new EmbeddableFunctionTableGroup(
						navigablePath,
						aggregateJdbcType.getEmbeddableMappingType(),
						functionExpression
				);
				getFromClauseAccess().registerTableGroup( navigablePath, tableGroup );
			}
			else {
				// Not an aggregate type - nothing to navigate into; return the expression itself
				return functionExpression;
			}
		}
		return tableGroup;
	}
@Override
public Expression visitCorrelation(SqmCorrelation<?, ?> correlation) {
final TableGroup resolved = getFromClauseAccess().findTableGroup( correlation.getNavigablePath() );
if ( resolved != null ) {
// LOG.tracef( "SqmCorrelation [%s] resolved to existing TableGroup [%s]", correlation, resolved );
return visitTableGroup( resolved, correlation );
}
throw new InterpretationException( "SqmCorrelation not yet resolved to TableGroup" );
}
@Override
public Expression visitTreatedPath(SqmTreatedPath<?, @Nullable ?> sqmTreatedPath) {
prepareReusablePath( sqmTreatedPath, () -> null );
final TableGroup resolved = getFromClauseAccess().findTableGroup( sqmTreatedPath.getNavigablePath() );
if ( resolved != null ) {
// LOG.tracef( "SqmTreatedPath [%s] resolved to existing TableGroup [%s]", sqmTreatedPath, resolved );
return visitTableGroup( resolved, sqmTreatedPath );
}
throw new InterpretationException( "SqmTreatedPath not yet resolved to TableGroup" );
}
	/**
	 * Interprets {@code size(collection)} as a correlated scalar sub-query of the form
	 * {@code (select count(*) from <collection table> where <key join predicate>)}.
	 */
	@Override
	public Expression visitPluralAttributeSizeFunction(SqmCollectionSize function) {
		final SqmPath<?> pluralPath = function.getPluralPath();

		// Make sure the collection owner's TableGroup is resolved
		prepareReusablePath( pluralPath, () -> null );
		final TableGroup parentTableGroup = getFromClauseAccess().getTableGroup( pluralPath.getNavigablePath().getParent() );
		assert parentTableGroup != null;

		final PluralAttributeMapping pluralAttributeMapping = (PluralAttributeMapping) parentTableGroup.getModelPart().findSubPart(
				pluralPath.getNavigablePath().getLocalName(),
				null
		);
		assert pluralAttributeMapping != null;

		// Build the count(*) sub-query in its own processing state
		final QuerySpec subQuerySpec = new QuerySpec( false );
		pushProcessingState(
				new SqlAstQueryPartProcessingStateImpl(
						subQuerySpec,
						getCurrentProcessingState(),
						this,
						currentClauseStack::getCurrent,
						false
				)
		);
		try {
			final TableGroup tableGroup = pluralAttributeMapping.createRootTableGroup(
					true,
					pluralPath.getNavigablePath(),
					null,
					null,
					() -> subQuerySpec::applyPredicate,
					this
			);

			// Apply @Where/@SQLRestriction style restrictions and enabled filters
			pluralAttributeMapping.applyBaseRestrictions(
					subQuerySpec::applyPredicate,
					tableGroup,
					true,
					getLoadQueryInfluencers().getEnabledFilters(),
					false,
					null,
					this
			);

			getFromClauseAccess().registerTableGroup( pluralPath.getNavigablePath(), tableGroup );
			registerPluralTableGroupParts( tableGroup );
			subQuerySpec.getFromClause().addRoot( tableGroup );

			// select count(*)
			final AbstractSqmSelfRenderingFunctionDescriptor functionDescriptor = resolveFunction( "count" );
			final BasicType<Integer> integerType = creationContext.getMappingMetamodel()
					.getTypeConfiguration()
					.getBasicTypeForJavaType( Integer.class );
			final Expression expression = new SelfRenderingAggregateFunctionSqlAstExpression<>(
					functionDescriptor.getName(),
					functionDescriptor,
					singletonList( Star.INSTANCE ),
					null,
					integerType,
					integerType
			);
			subQuerySpec.getSelectClause().addSqlSelection( new SqlSelectionImpl( expression ) );

			// Correlate the sub-query to the owner via the collection key
			subQuerySpec.applyPredicate(
					pluralAttributeMapping.getKeyDescriptor().generateJoinPredicate(
							parentTableGroup,
							tableGroup,
							this
					)
			);
		}
		finally {
			popProcessingStateStack();
		}
		return new SelectStatement( subQuerySpec );
	}
@Override
public Object visitIndexedPluralAccessPath(SqmIndexedCollectionAccessPath<?> path) {
// SemanticQueryBuilder applies the index expression to the generated join
return path.getLhs().accept( this );
}
	/**
	 * Interprets {@code entry(map)}: joins the map collection (if not already joined)
	 * and produces a {@link DomainResultProducer} that combines the index (key) and
	 * element (value) results into a {@link Map.Entry} domain result.
	 * Note: a map entry can only appear in the select clause, hence
	 * {@code applySqlSelections} is unsupported.
	 */
	@Override
	public Object visitMapEntryFunction(SqmMapEntryReference<?, ?> entryRef) {
		final SqmPath<?> mapPath = entryRef.getMapPath();
		prepareReusablePath( mapPath, () -> null );

		final NavigablePath mapNavigablePath = mapPath.getNavigablePath();
		// Resolve (or create) the table group for the map, inner-joining it to its owner
		final TableGroup tableGroup = getFromClauseAccess().resolveTableGroup(
				mapNavigablePath,
				(navigablePath) -> {
					final TableGroup parentTableGroup = getFromClauseAccess().getTableGroup( mapNavigablePath.getParent() );
					final PluralAttributeMapping mapAttribute = (PluralAttributeMapping) parentTableGroup.getModelPart().findSubPart( mapNavigablePath.getLocalName(), null );

					final TableGroupJoin tableGroupJoin = mapAttribute.createTableGroupJoin(
							mapNavigablePath,
							parentTableGroup,
							null,
							null,
							SqlAstJoinType.INNER,
							false,
							false,
							this
					);

					parentTableGroup.addTableGroupJoin( tableGroupJoin );
					return tableGroupJoin.getJoinedGroup();
				}
			);

		final PluralAttributeMapping mapDescriptor = (PluralAttributeMapping) tableGroup.getModelPart();

		// Domain result for the map key
		final CollectionPart indexDescriptor = mapDescriptor.getIndexDescriptor();
		final NavigablePath indexNavigablePath = mapNavigablePath.append( indexDescriptor.getPartName() );
		final DomainResult<Object> indexResult = indexDescriptor.createDomainResult(
				indexNavigablePath,
				tableGroup,
				null,
				this
		);
		registerProjectionUsageFromDescriptor( tableGroup, indexDescriptor );

		// Domain result for the map value
		final CollectionPart valueDescriptor = mapDescriptor.getElementDescriptor();
		final NavigablePath valueNavigablePath = mapNavigablePath.append( valueDescriptor.getPartName() );
		final DomainResult<Object> valueResult = valueDescriptor.createDomainResult(
				valueNavigablePath,
				tableGroup,
				null,
				this
		);
		registerProjectionUsageFromDescriptor( tableGroup, valueDescriptor );

		//noinspection rawtypes
		return new DomainResultProducer<Map.Entry>() {
			@Override
			public DomainResult<Map.Entry> createDomainResult(
					String resultVariable,
					DomainResultCreationState creationState) {
				final var mapEntryDescriptor =
						getTypeConfiguration().getJavaTypeRegistry()
								.resolveDescriptor( Map.Entry.class );
				return new SqmMapEntryResult<>( indexResult, valueResult, resultVariable, mapEntryDescriptor );
			}

			@Override
			public void applySqlSelections(DomainResultCreationState creationState) {
				// entry(map) is only valid as a domain result, never as a plain SQL selection
				throw new UnsupportedOperationException();
			}
		};
	}
private void registerProjectionUsageFromDescriptor(TableGroup tableGroup, CollectionPart descriptor) {
if ( descriptor instanceof EntityCollectionPart entityCollectionPart ) {
final EntityMappingType entityMappingType = entityCollectionPart.getEntityMappingType();
registerEntityNameUsage( tableGroup, EntityNameUse.PROJECTION, entityMappingType.getEntityName(), true );
}
}
	/**
	 * Builds a correlated scalar sub-query applying the given aggregate function
	 * (e.g. min/max) over the collection's index or element, correlated to the
	 * collection owner via the collection key. Used when the dialect does not
	 * support lateral joins (see {@code createMinOrMaxIndexOrElement}).
	 */
	protected Expression createCorrelatedAggregateSubQuery(
			AbstractSqmSpecificPluralPartPath<?> pluralPartPath,
			boolean index,
			String function) {
		prepareReusablePath( pluralPartPath.getLhs(), () -> null );

		final FromClauseAccess parentFromClauseAccess = getFromClauseAccess();
		final PluralAttributeMapping pluralAttributeMapping = (PluralAttributeMapping) determineValueMapping(
				pluralPartPath.getPluralDomainPath() );
		final QuerySpec subQuerySpec = new QuerySpec( false );
		pushProcessingState(
				new SqlAstQueryPartProcessingStateImpl(
						subQuerySpec,
						getCurrentProcessingState(),
						this,
						currentClauseStack::getCurrent,
						false
				)
		);
		try {
			final TableGroup tableGroup = pluralAttributeMapping.createRootTableGroup(
					true,
					pluralPartPath.getNavigablePath(),
					null,
					null,
					() -> subQuerySpec::applyPredicate,
					this
			);

			// Apply collection-level restrictions and enabled filters
			pluralAttributeMapping.applyBaseRestrictions(
					subQuerySpec::applyPredicate,
					tableGroup,
					true,
					getLoadQueryInfluencers().getEnabledFilters(),
					false,
					null,
					this
			);

			getFromClauseAccess().registerTableGroup( pluralPartPath.getNavigablePath(), tableGroup );
			registerPluralTableGroupParts( tableGroup );
			subQuerySpec.getFromClause().addRoot( tableGroup );

			final AbstractSqmSelfRenderingFunctionDescriptor functionDescriptor = resolveFunction( function );
			final CollectionPart collectionPart = index
					? pluralAttributeMapping.getIndexDescriptor()
					: pluralAttributeMapping.getElementDescriptor();
			// Determine the model part to aggregate over; entity-typed parts
			// aggregate over their key/identifier columns
			final ModelPart modelPart;
			if ( collectionPart instanceof OneToManyCollectionPart toManyPart ) {
				modelPart = toManyPart.getAssociatedEntityMappingType().getIdentifierMapping();
//				modelPart = pluralAttributeMapping.getKeyDescriptor().getTargetPart();
			}
			else if ( collectionPart instanceof ManyToManyCollectionPart ) {
				modelPart = ( (ManyToManyCollectionPart) collectionPart ).getKeyTargetMatchPart();
			}
			else {
				modelPart = collectionPart;
			}
			final List<Expression> arguments = new ArrayList<>( 1 );
			final NavigablePath navigablePath = pluralPartPath.getNavigablePath();
			final int jdbcTypeCount = modelPart.getJdbcTypeCount();
			// Multi-column parts are aggregated as a SQL tuple
			final List<Expression> tupleElements;
			if ( jdbcTypeCount == 1 ) {
				tupleElements = arguments;
			}
			else {
				tupleElements = new ArrayList<>( jdbcTypeCount );
			}
			modelPart.forEachSelectable(
					(selectionIndex, selectionMapping) -> tupleElements.add(
							new ColumnReference(
									tableGroup.resolveTableReference(
											navigablePath,
											(ValuedModelPart) modelPart,
											selectionMapping.getContainingTableExpression()
									),
									selectionMapping
							)
					)
			);
			if ( jdbcTypeCount != 1 ) {
				arguments.add( new SqlTuple( tupleElements, modelPart ) );
			}
			final Expression expression = new SelfRenderingAggregateFunctionSqlAstExpression<>(
					functionDescriptor.getName(),
					functionDescriptor,
					arguments,
					null,
					(ReturnableType<?>) functionDescriptor.getReturnTypeResolver()
							.resolveFunctionReturnType( () -> null, arguments )
							.getJdbcMapping(),
					modelPart
			);
			subQuerySpec.getSelectClause().addSqlSelection( new SqlSelectionImpl( expression ) );

			// Correlate the sub-query to the collection owner via the collection key
			NavigablePath parent = pluralPartPath.getPluralDomainPath().getNavigablePath().getParent();
			subQuerySpec.applyPredicate(
					pluralAttributeMapping.getKeyDescriptor().generateJoinPredicate(
							parentFromClauseAccess.findTableGroup( parent ),
							tableGroup,
							this
					)
			);
		}
		finally {
			popProcessingStateStack();
		}
		return new SelectStatement( subQuerySpec );
	}
private AbstractSqmSelfRenderingFunctionDescriptor resolveFunction(String function) {
return (AbstractSqmSelfRenderingFunctionDescriptor)
creationContext.getSqmFunctionRegistry()
.findFunctionDescriptor( function );
}
	/**
	 * Builds (or reuses) a lateral left-joined sub-query that computes the given
	 * aggregate function over the collection's index or element, and returns an
	 * expression referencing the lateral sub-query's result column(s). For min/max
	 * the sub-query uses order-by + fetch-first-1-row instead of an aggregate so
	 * that multi-column parts can be handled. The lateral table group is cached by
	 * navigable path so repeated uses of the same expression share one join.
	 */
	protected Expression createLateralJoinExpression(
			AbstractSqmSpecificPluralPartPath<?> pluralPartPath,
			boolean index,
			String functionName) {
		prepareReusablePath( pluralPartPath.getLhs(), () -> null );

		final PluralAttributeMapping pluralAttributeMapping = (PluralAttributeMapping) determineValueMapping(
				pluralPartPath.getPluralDomainPath() );
		final FromClauseAccess parentFromClauseAccess = getFromClauseAccess();
		final TableGroup parentTableGroup = parentFromClauseAccess.findTableGroup(
				pluralPartPath.getNavigablePath().getParent()
		);
		final CollectionPart collectionPart = index
				? pluralAttributeMapping.getIndexDescriptor()
				: pluralAttributeMapping.getElementDescriptor();
		// Determine the model part to aggregate over; entity-typed parts
		// aggregate over their key/identifier columns
		final ModelPart modelPart;
		if ( collectionPart instanceof OneToManyCollectionPart toManyPart ) {
			modelPart = toManyPart.getAssociatedEntityMappingType().getIdentifierMapping();
//			modelPart = pluralAttributeMapping.getKeyDescriptor().getTargetPart();
		}
		else if ( collectionPart instanceof ManyToManyCollectionPart ) {
			modelPart = ( (ManyToManyCollectionPart) collectionPart ).getKeyTargetMatchPart();
		}
		else {
			modelPart = collectionPart;
		}
		final int jdbcTypeCount = modelPart.getJdbcTypeCount();
		final String pathName = functionName + ( index ? "_index" : "_element" );
		final String identifierVariable = parentTableGroup.getPrimaryTableReference().getIdentificationVariable()
				+ "_" + pathName;
		final NavigablePath queryPath = new NavigablePath( parentTableGroup.getNavigablePath(), pathName, identifierVariable );
		// Reuse an existing lateral table group for the same path if one was already created
		TableGroup lateralTableGroup = parentFromClauseAccess.findTableGroup( queryPath );
		if ( lateralTableGroup == null ) {
			final QuerySpec subQuerySpec = new QuerySpec( false );
			pushProcessingState(
					new SqlAstQueryPartProcessingStateImpl(
							subQuerySpec,
							getCurrentProcessingState(),
							this,
							currentClauseStack::getCurrent,
							false
					)
			);
			try {
				final TableGroup tableGroup = pluralAttributeMapping.createRootTableGroup(
						true,
						pluralPartPath.getNavigablePath(),
						null,
						null,
						() -> subQuerySpec::applyPredicate,
						this
				);

				// Apply collection-level restrictions and enabled filters
				pluralAttributeMapping.applyBaseRestrictions(
						subQuerySpec::applyPredicate,
						tableGroup,
						true,
						getLoadQueryInfluencers().getEnabledFilters(),
						false,
						null,
						this
				);

				getFromClauseAccess().registerTableGroup( pluralPartPath.getNavigablePath(), tableGroup );
				registerPluralTableGroupParts( tableGroup );
				subQuerySpec.getFromClause().addRoot( tableGroup );
				final List<String> columnNames = new ArrayList<>( jdbcTypeCount );
				final List<ColumnReference> resultColumnReferences = new ArrayList<>( jdbcTypeCount );
				final NavigablePath navigablePath = pluralPartPath.getNavigablePath();
				// TRUE = max, FALSE = min, null = other aggregate function
				final Boolean max = functionName.equalsIgnoreCase( "max" ) ? Boolean.TRUE
						: ( functionName.equalsIgnoreCase( "min" ) ? Boolean.FALSE : null );
				final AbstractSqmSelfRenderingFunctionDescriptor functionDescriptor =
						resolveFunction( functionName );
				final List<ColumnReference> subQueryColumns = new ArrayList<>( jdbcTypeCount );
				modelPart.forEachSelectable(
						(selectionIndex, selectionMapping) -> {
							final ColumnReference columnReference = new ColumnReference(
									tableGroup.resolveTableReference(
											navigablePath,
											(ValuedModelPart) modelPart,
											selectionMapping.getContainingTableExpression()
									),
									selectionMapping
							);
							// Formula selections have no stable column name; synthesize one
							final String columnName;
							if ( selectionMapping.isFormula() ) {
								columnName = "col" + columnNames.size();
							}
							else {
								columnName = selectionMapping.getSelectionExpression();
							}
							columnNames.add( columnName );
							subQueryColumns.add( columnReference );
							if ( max != null ) {
								// min/max is emulated via order-by + fetch first 1 row
								subQuerySpec.addSortSpecification(
										new SortSpecification(
												columnReference,
												max ? SortDirection.DESCENDING : SortDirection.ASCENDING
										)
								);
							}
						}
				);

				if ( max != null ) {
					// min/max: select the raw columns, ordered, limited to one row
					for ( int i = 0; i < subQueryColumns.size(); i++ ) {
						subQuerySpec.getSelectClause().addSqlSelection(
								new SqlSelectionImpl(
										i,
										subQueryColumns.get( i )
								)
						);
						resultColumnReferences.add(
								new ColumnReference(
										identifierVariable,
										columnNames.get( i ),
										false,
										null,
										subQueryColumns.get( i ).getJdbcMapping()
								)
						);
					}
					subQuerySpec.setFetchClauseExpression(
							new QueryLiteral<>( 1, basicType( Integer.class ) ),
							FetchClauseType.ROWS_ONLY
					);
				}
				else {
					// other aggregates: select the aggregate function expression itself
					final List<? extends SqlAstNode> arguments;
					if ( jdbcTypeCount == 1 ) {
						arguments = subQueryColumns;
					}
					else {
						arguments = singletonList( new SqlTuple( subQueryColumns, modelPart ) );
					}
					final Expression expression = new SelfRenderingAggregateFunctionSqlAstExpression<>(
							functionDescriptor.getName(),
							functionDescriptor,
							arguments,
							null,
							(ReturnableType<?>) functionDescriptor.getReturnTypeResolver()
									.resolveFunctionReturnType( () -> null, arguments )
									.getJdbcMapping(),
							modelPart
					);

					subQuerySpec.getSelectClause().addSqlSelection( new SqlSelectionImpl( expression ) );
					resultColumnReferences.add(
							new ColumnReference(
									identifierVariable,
									columnNames.get( 0 ),
									false,
									null,
									expression.getExpressionType().getSingleJdbcMapping()
							)
					);
				}
				// Correlate the sub-query to the collection owner via the collection key
				subQuerySpec.applyPredicate(
						pluralAttributeMapping.getKeyDescriptor().generateJoinPredicate(
								parentFromClauseAccess.findTableGroup(
										pluralPartPath.getPluralDomainPath().getNavigablePath().getParent()
								),
								tableGroup,
								this
						)
				);
				lateralTableGroup = new QueryPartTableGroup(
						queryPath,
						null,
						new SelectStatement( subQuerySpec ),
						identifierVariable,
						columnNames,
						getCompatibleTableExpressions( modelPart ),
						true,
						false,
						creationContext.getSessionFactory()
				);
				if ( currentlyProcessingJoin == null ) {
					parentTableGroup.addTableGroupJoin(
							new TableGroupJoin(
									lateralTableGroup.getNavigablePath(),
									SqlAstJoinType.LEFT,
									lateralTableGroup
							)
					);
				}
				else {
					// In case this is used in the ON condition, we must prepend this lateral join
					final TableGroup targetTableGroup;
					if ( currentlyProcessingJoin.getLhs() == null ) {
						targetTableGroup = parentFromClauseAccess.getTableGroup(
								currentlyProcessingJoin.findRoot().getNavigablePath()
						);
					}
					else {
						targetTableGroup = parentFromClauseAccess.getTableGroup(
								currentlyProcessingJoin.getLhs().getNavigablePath()
						);
					}
					// Many databases would support modelling this as nested table group join,
					// but at least SQL Server doesn't like that, saying that the correlated columns can't be "bound"
					// Since there is no dependency on the currentlyProcessingJoin, we can safely prepend this join
					targetTableGroup.prependTableGroupJoin(
							currentlyProcessingJoin.getNavigablePath(),
							new TableGroupJoin(
									lateralTableGroup.getNavigablePath(),
									SqlAstJoinType.LEFT,
									lateralTableGroup
							)
					);
				}
				parentFromClauseAccess.registerTableGroup( lateralTableGroup.getNavigablePath(), lateralTableGroup );
				if ( jdbcTypeCount == 1 ) {
					// Single-column result: wrap the column reference so it renders as a bare expression
					return new SelfRenderingFunctionSqlAstExpression<>(
							pathName,
							(sqlAppender, sqlAstArguments, returnType, walker) -> sqlAstArguments.get( 0 ).accept( walker ),
							resultColumnReferences,
							(ReturnableType<?>) resultColumnReferences.get( 0 ).getJdbcMapping(),
							resultColumnReferences.get( 0 ).getJdbcMapping()
					);
				}
				else {
					return new SqlTuple( resultColumnReferences, modelPart );
				}
			}
			finally {
				popProcessingStateStack();
			}
		}
		// Reusing an existing lateral table group: rebuild the result expression from its columns
		final QueryPartTableReference tableReference = (QueryPartTableReference) lateralTableGroup.getPrimaryTableReference();
		if ( jdbcTypeCount == 1 ) {
			final List<SqlSelection> sqlSelections = tableReference.getQueryPart()
					.getFirstQuerySpec()
					.getSelectClause()
					.getSqlSelections();
			return new SelfRenderingFunctionSqlAstExpression<>(
					pathName,
					(sqlAppender, sqlAstArguments, returnType, walker) -> sqlAstArguments.get( 0 ).accept( walker ),
					singletonList(
							new ColumnReference(
									identifierVariable,
									tableReference.getColumnNames().get( 0 ),
									false,
									null,
									sqlSelections.get( 0 ).getExpressionType().getSingleJdbcMapping()
							)
					),
					(ReturnableType<?>) sqlSelections.get( 0 ).getExpressionType().getSingleJdbcMapping(),
					sqlSelections.get( 0 ).getExpressionType()
			);
		}
		else {
			final List<ColumnReference> resultColumnReferences = new ArrayList<>( jdbcTypeCount );
			modelPart.forEachSelectable(
					(selectionIndex, selectionMapping) -> resultColumnReferences.add(
							new ColumnReference(
									identifierVariable,
									tableReference.getColumnNames().get( selectionIndex ),
									false,
									null,
									selectionMapping.getJdbcMapping()
							)
					)
			);
			return new SqlTuple( resultColumnReferences, modelPart );
		}
	}
private static Set<String> getCompatibleTableExpressions(ModelPart modelPart) {
final BasicValuedModelPart basicPart = modelPart.asBasicValuedModelPart();
if ( basicPart != null ) {
return singleton( basicPart.getContainingTableExpression() );
}
else if ( modelPart instanceof EmbeddableValuedModelPart embeddableValuedModelPart ) {
return singleton( embeddableValuedModelPart.getContainingTableExpression() );
}
else {
return emptySet();
}
}
	/**
	 * Applies treat semantics to a path expression. Depending on the treat target and
	 * the expression type this either: ignores upcasts, registers a TREAT entity name
	 * usage (embedded treats and non-basic root treats), or wraps a basic-valued path
	 * whose column is shared between subclasses in a searched-case expression.
	 */
	private Expression withTreatRestriction(Expression expression, SqmPath<?> path) {
		final SqmPath<?> lhs;
		if ( path instanceof SqmTreatedPath<?, ?> ) {
			lhs = path;
		}
		else {
			lhs = path.getLhs();
		}
		if ( lhs instanceof SqmTreatedPath<?, ?> treatedPath ) {
			final ManagedDomainType<?> treatTarget = treatedPath.getTreatTarget();
			final Class<?> treatTargetJavaType = treatTarget.getJavaType();
			final SqmPath<?> wrappedPath = treatedPath.getWrappedPath();
			final Class<?> originalJavaType = wrappedPath.getJavaType();
			if ( treatTargetJavaType.isAssignableFrom( originalJavaType ) ) {
				// Treating a node to a super type can be ignored
				return expression;
			}
			if ( treatTarget instanceof EmbeddableDomainType<?> ) {
				// For embedded treats we simply register a TREAT use
				final TableGroup tableGroup = getFromClauseIndex().findTableGroup( wrappedPath.getNavigablePath() );
				registerEntityNameUsage( tableGroup, EntityNameUse.TREAT, treatTarget.getTypeName(), false );
				return expression;
			}
			if ( !( expression.getExpressionType() instanceof BasicValuedMapping ) ) {
				// A case wrapper for non-basic paths is not possible,
				// because a case expression must return a scalar value.
				if ( lhs instanceof SqmRoot ) {
					// For treated roots we need to add the type restriction predicate as conjunct
					// by registering the treat into tableGroupEntityNameUses.
					// Joins don't need the restriction as it will be embedded into
					// the joined table group itself by #pruneTableGroupJoins
					final TableGroup tableGroup = getFromClauseIndex().findTableGroup( wrappedPath.getNavigablePath() );
					registerEntityNameUsage( tableGroup, EntityNameUse.TREAT, treatTarget.getTypeName(), false );
				}
				return expression;
			}
			final BasicValuedPathInterpretation<?> basicPath = (BasicValuedPathInterpretation<?>) expression;
			final TableGroup tableGroup = basicPath.getTableGroup();
			// For plural table groups, the entity persister lives on the element sub-group
			final TableGroup elementTableGroup =
					tableGroup instanceof PluralTableGroup pluralTableGroup
							? pluralTableGroup.getElementTableGroup()
							: tableGroup;
			final EntityPersister persister = (EntityPersister) elementTableGroup.getModelPart().getPartMappingType();
			// Only need a case expression around the basic valued path for the parent treat expression
			// if the column of the basic valued path is shared between subclasses
			if ( persister.isSharedColumn( basicPath.getColumnReference().getColumnExpression() ) ) {
				return createCaseExpression( wrappedPath, (EntityDomainType<?>) treatTarget, expression );
			}
		}
		return expression;
	}
private Expression createCaseExpression(SqmPath<?> lhs, EntityDomainType<?> treatTarget, Expression expression) {
final Predicate treatTypeRestriction = createTreatTypeRestriction( lhs, treatTarget );
if ( treatTypeRestriction == null ) {
return expression;
}
final BasicValuedMapping mappingModelExpressible = (BasicValuedMapping) expression.getExpressionType();
final List<CaseSearchedExpression.WhenFragment> whenFragments = new ArrayList<>( 1 );
whenFragments.add(
new CaseSearchedExpression.WhenFragment(
treatTypeRestriction,
expression
)
);
return new CaseSearchedExpression(
mappingModelExpressible,
whenFragments,
null
);
}
	/**
	 * Consumes the treat type usages accumulated in {@code tableGroupEntityNameUses}
	 * and renders them as a single conjunct predicate, or {@code null} if there are none.
	 */
	private Predicate consumeConjunctTreatTypeRestrictions() {
		return consumeConjunctTreatTypeRestrictions( tableGroupEntityNameUses );
	}
	/**
	 * Renders the given per-table-group entity name usages as a conjunction of
	 * discriminator restrictions. Entity- and embeddable-typed table groups are
	 * handled separately; groups whose usages require no restriction are skipped.
	 * Returns {@code null} when no restriction is needed at all.
	 */
	private Predicate consumeConjunctTreatTypeRestrictions(Map<TableGroup, Map<String, EntityNameUse>> conjunctTreatUsages) {
		if ( conjunctTreatUsages == null || conjunctTreatUsages.isEmpty() ) {
			return null;
		}
		Predicate predicate = null;
		for ( Map.Entry<TableGroup, Map<String, EntityNameUse>> entry : conjunctTreatUsages.entrySet() ) {
			final TableGroup tableGroup = entry.getKey();
			final ModelPartContainer modelPart = tableGroup.getModelPart();
			final Set<String> typeNames;
			// Exactly one of entityMapping / embeddableMapping is non-null per table group
			final EntityMappingType entityMapping;
			final EmbeddableMappingType embeddableMapping;
			if ( modelPart instanceof PluralAttributeMapping pluralAttributeMapping ) {
				entityMapping = (EntityMappingType) pluralAttributeMapping.getElementDescriptor().getPartMappingType();
				embeddableMapping = null;
			}
			else if ( modelPart instanceof EntityValuedModelPart entityValuedModelPart ) {
				entityMapping = entityValuedModelPart.getEntityMappingType();
				embeddableMapping = null;
			}
			else if ( modelPart instanceof EmbeddableValuedModelPart embeddableValuedModelPart ) {
				embeddableMapping = embeddableValuedModelPart.getEmbeddableTypeDescriptor();
				entityMapping = null;
			}
			else {
				throw new IllegalStateException( "Unrecognized model part for treated table group: " + tableGroup );
			}
			final DiscriminatorPathInterpretation<?> typeExpression;
			if ( entityMapping != null ) {
				typeNames = determineEntityNamesForTreatTypeRestriction( entityMapping, entry.getValue() );
				if ( typeNames.isEmpty() ) {
					// All subtypes allowed (or a FILTER usage present) - no restriction needed
					continue;
				}

				typeExpression = new DiscriminatorPathInterpretation<>(
						tableGroup.getNavigablePath().append( EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME ),
						entityMapping,
						tableGroup,
						this
				);
				registerTypeUsage( tableGroup );
			}
			else {
				assert embeddableMapping != null;
				typeNames = determineEmbeddableNamesForTreatTypeRestriction( embeddableMapping, entry.getValue() );
				if ( typeNames.isEmpty() ) {
					continue;
				}

				typeExpression = new DiscriminatorPathInterpretation<>(
						tableGroup.getNavigablePath().append( EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME ),
						embeddableMapping.getDiscriminatorMapping(),
						tableGroup,
						this
				);
			}
			// We need to check if this is a treated left or full join, which case we should
			// allow null discriminator values to maintain correct semantics
			final TableGroupJoin join = getParentTableGroupJoin( tableGroup );
			final boolean allowNulls = join != null && ( join.getJoinType() == SqlAstJoinType.LEFT || join.getJoinType() == SqlAstJoinType.FULL );
			predicate = combinePredicates(
					predicate,
					createTreatTypeRestriction(
							typeExpression,
							typeNames,
							allowNulls,
							entityMapping != null
					)
			);
		}

		return predicate;
	}
private TableGroupJoin getParentTableGroupJoin(TableGroup tableGroup) {
final NavigablePath parentNavigablePath = tableGroup.getNavigablePath().getParent();
if ( parentNavigablePath != null ) {
final TableGroup parentTableGroup = getFromClauseIndex().findTableGroup( parentNavigablePath );
if ( parentTableGroup instanceof PluralTableGroup ) {
return getParentTableGroupJoin( parentTableGroup );
}
else if ( parentTableGroup != null ) {
return parentTableGroup.findTableGroupJoin( tableGroup );
}
}
return null;
}
private Set<String> determineEntityNamesForTreatTypeRestriction(
EntityMappingType partMappingType,
Map<String, EntityNameUse> entityNameUses) {
final Set<String> entityNameUsesSet = new HashSet<>( entityNameUses.size() );
for ( Map.Entry<String, EntityNameUse> entry : entityNameUses.entrySet() ) {
if ( entry.getValue() == EntityNameUse.PROJECTION ) {
continue;
}
entityNameUsesSet.add( entry.getKey() );
}
if ( entityNameUsesSet.containsAll( partMappingType.getSubclassEntityNames() ) ) {
// No need to create a restriction if all subclasses are used
return emptySet();
}
if ( entityNameUses.containsValue( EntityNameUse.FILTER ) ) {
// If the conjunct contains FILTER uses we can omit the treat type restriction
return emptySet();
}
final String baseEntityNameToAdd;
if ( entityNameUsesSet.contains( partMappingType.getEntityName() ) ) {
if ( !partMappingType.isAbstract() ) {
baseEntityNameToAdd = partMappingType.getEntityName();
}
else {
baseEntityNameToAdd = null;
}
if ( entityNameUses.size() == 1 ) {
return emptySet();
}
}
else {
baseEntityNameToAdd = null;
}
final Set<String> entityNames = new HashSet<>( entityNameUsesSet.size() );
for ( Map.Entry<String, EntityNameUse> entityNameUse : entityNameUses.entrySet() ) {
if ( entityNameUse.getValue() == EntityNameUse.TREAT ) {
final String entityName = entityNameUse.getKey();
final EntityPersister entityDescriptor = creationContext.getMappingMetamodel()
.findEntityDescriptor( entityName );
if ( !entityDescriptor.isAbstract() ) {
entityNames.add( entityDescriptor.getEntityName() );
}
for ( EntityMappingType subMappingType : entityDescriptor.getSubMappingTypes() ) {
if ( !subMappingType.isAbstract() ) {
entityNames.add( subMappingType.getEntityName() );
}
}
}
}
do {
entityNames.remove( partMappingType.getEntityName() );
partMappingType = partMappingType.getSuperMappingType();
} while ( partMappingType != null );
if ( !entityNames.isEmpty() && baseEntityNameToAdd != null ) {
entityNames.add( baseEntityNameToAdd );
}
return entityNames;
}
private Set<String> determineEmbeddableNamesForTreatTypeRestriction(
EmbeddableMappingType embeddableMappingType,
Map<String, EntityNameUse> entityNameUses) {
final EmbeddableDomainType<?> embeddableDomainType =
creationContext.getJpaMetamodel()
.embeddable( embeddableMappingType.getJavaType().getJavaTypeClass() );
final Set<String> entityNameUsesSet = new HashSet<>( entityNameUses.keySet() );
ManagedDomainType<?> superType = embeddableDomainType;
while ( superType != null ) {
entityNameUsesSet.remove( superType.getTypeName() );
superType = superType.getSuperType();
}
return entityNameUsesSet;
}
private Predicate createTreatTypeRestriction(SqmPath<?> lhs, EntityDomainType<?> treatTarget) {
final EntityPersister entityDescriptor = domainModel.findEntityDescriptor( treatTarget.getHibernateEntityName() );
if ( entityDescriptor.isPolymorphic() && lhs.getNodeType() != treatTarget ) {
final Set<String> subclassEntityNames = entityDescriptor.getSubclassEntityNames();
return createTreatTypeRestriction( lhs, subclassEntityNames );
}
return null;
}
private Predicate createTreatTypeRestriction(SqmPath<?> lhs, Set<String> subclassEntityNames) {
// Do what visitSelfInterpretingSqmPath does, except for calling preparingReusablePath
// as that would register a type usage for the table group that we don't want here
final EntityDiscriminatorSqmPath<?> discriminatorSqmPath = (EntityDiscriminatorSqmPath<?>) lhs.type();
registerTypeUsage( discriminatorSqmPath );
return createTreatTypeRestriction(
DiscriminatorPathInterpretation.from( discriminatorSqmPath, this ),
subclassEntityNames,
false,
true
);
}
private Predicate createTreatTypeRestriction(
SqmPathInterpretation<?> typeExpression,
Set<String> subtypeNames,
boolean allowNulls,
boolean entity) {
final Predicate discriminatorPredicate;
if ( subtypeNames.size() == 1 ) {
discriminatorPredicate = new ComparisonPredicate(
typeExpression,
ComparisonOperator.EQUAL,
getTypeLiteral( typeExpression, subtypeNames.iterator().next(), entity )
);
}
else {
final List<Expression> typeLiterals = new ArrayList<>( subtypeNames.size() );
for ( String subtypeName : subtypeNames ) {
typeLiterals.add( getTypeLiteral( typeExpression, subtypeName, entity ) );
}
discriminatorPredicate = new InListPredicate( typeExpression, typeLiterals );
}
if ( allowNulls ) {
return new Junction(
Junction.Nature.DISJUNCTION,
List.of( discriminatorPredicate, new NullnessPredicate( typeExpression ) ),
getBooleanType()
);
}
return discriminatorPredicate;
}
private Expression getTypeLiteral(SqmPathInterpretation<?> typeExpression, String typeName, boolean entity) {
if ( entity ) {
return new EntityTypeLiteral( domainModel.findEntityDescriptor( typeName ) );
}
else {
final EmbeddableDomainType<?> embeddable = creationContext.getJpaMetamodel().embeddable( typeName );
return new EmbeddableTypeLiteral( embeddable,
(BasicType<?>) typeExpression.getExpressionType().getSingleJdbcMapping() );
}
}
private MappingModelExpressible<?> resolveInferredType() {
final Supplier<MappingModelExpressible<?>> inferableTypeAccess = inferrableTypeAccessStack.getCurrent();
if ( inTypeInference || inferableTypeAccess == null ) {
return null;
}
else {
inTypeInference = true;
final MappingModelExpressible<?> inferredType = inferableTypeAccess.get();
inTypeInference = false;
return inferredType;
}
}
@Override
public boolean isInTypeInference() {
return inImpliedResultTypeInference || inTypeInference;
}
@Override
public MappingModelExpressible<?> resolveFunctionImpliedReturnType() {
if ( inImpliedResultTypeInference || inTypeInference || functionImpliedResultTypeAccess == null ) {
return null;
}
inImpliedResultTypeInference = true;
final MappingModelExpressible<?> inferredType = functionImpliedResultTypeAccess.get();
inImpliedResultTypeInference = false;
return inferredType;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// General expressions
@Override
public Expression visitLiteral(SqmLiteral<?> literal) {
if ( literal instanceof SqmLiteralNull<?> sqmLiteralNull ) {
return nullLiteral( sqmLiteralNull );
}
else {
final MappingModelExpressible<?> inferableExpressible = getInferredValueMapping();
if ( inferableExpressible instanceof DiscriminatorMapping discriminatorMapping ) {
return entityTypeLiteral( literal, discriminatorMapping );
}
else if ( inferableExpressible instanceof BasicValuedMapping basicValuedMapping ) {
@SuppressWarnings("rawtypes")
final BasicValueConverter valueConverter = basicValuedMapping.getJdbcMapping().getValueConverter();
if ( valueConverter != null ) {
return queryLiteral( literal, basicValuedMapping, valueConverter );
}
}
final MappingModelExpressible<?> expressible = literalExpressible( literal, inferableExpressible );
if ( expressible instanceof EntityIdentifierMapping identifierMapping
//TODO: remove test against impl | IdGeneratorParameter |
java | google__guava | guava-tests/benchmark/com/google/common/util/concurrent/FuturesGetCheckedBenchmark.java | {
"start": 2327,
"end": 2749
} | enum ____ {
NON_CACHING_WITH_CONSTRUCTOR_CHECK(nonCachingWithConstructorCheckValidator()),
NON_CACHING_WITHOUT_CONSTRUCTOR_CHECK(nonCachingWithoutConstructorCheckValidator()),
WEAK_SET(weakSetValidator()),
CLASS_VALUE(classValueValidator()),
;
final GetCheckedTypeValidator validator;
Validator(GetCheckedTypeValidator validator) {
this.validator = validator;
}
}
private | Validator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java | {
"start": 36620,
"end": 37096
} | class ____ {
A<String> f() {
return new A<>();
}
A<Object> g() {
// BUG: Diagnostic contains: instantiation of 'T' is not thread-safe, 'Object' is not
// thread-safe
return new A<>();
}
}
""")
.doTest();
}
@Test
public void threadSafeTypeParameterUsage() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.ThreadSafeTypeParameter;
| Test |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/language/BeanLanguageOGNLWithDotInParameterPropertyPlaceholderTest.java | {
"start": 1078,
"end": 2469
} | class ____ extends ContextTestSupport {
@Test
public void testDot() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedHeaderReceived("goto", "mock:MyAppV1.2.3/blah");
template.sendBodyAndHeader("direct:start", "Hello World", "id", "blah");
assertMockEndpointsSatisfied();
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myBean", new MyDestinationBean());
Properties myProp = new Properties();
myProp.put("myApp", "MyAppV1.2.3");
jndi.bind("myprop", myProp);
return jndi;
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.getPropertiesComponent().setLocation("ref:myprop");
return context;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").setHeader("goto").simple("${bean:myBean.whereToMate({{myApp}}, ${header.id})}")
.to("mock:result");
}
};
}
public static | BeanLanguageOGNLWithDotInParameterPropertyPlaceholderTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/expr/AliasTest_Type.java | {
"start": 456,
"end": 1134
} | class ____ extends PGTest {
public void test_timestamp() throws Exception {
String sql = "select column1 as TYPE from table1 where xx=1";
PGSQLStatementParser parser = new PGSQLStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
PGSchemaStatVisitor visitor = new PGSchemaStatVisitor();
statemen.accept(visitor);
String mergedSql = ParameterizedOutputVisitorUtils.parameterize(sql, JdbcUtils.POSTGRESQL);
System.out.println(mergedSql);
}
}
| AliasTest_Type |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/filter/TopicPartitionFilter.java | {
"start": 1398,
"end": 2155
} | class ____ implements TopicPartitionFilter {
private final IncludeList topicFilter;
private final PartitionFilter partitionFilter;
public TopicFilterAndPartitionFilter(IncludeList topicFilter, PartitionFilter partitionFilter) {
this.topicFilter = topicFilter;
this.partitionFilter = partitionFilter;
}
@Override
public boolean isTopicAllowed(String topic) {
return topicFilter.isTopicAllowed(topic, false);
}
@Override
public boolean isTopicPartitionAllowed(TopicPartition partition) {
return isTopicAllowed(partition.topic()) && partitionFilter.isPartitionAllowed(partition.partition());
}
}
| TopicFilterAndPartitionFilter |
java | quarkusio__quarkus | extensions/security/runtime/src/main/java/io/quarkus/security/runtime/SecurityProviderUtils.java | {
"start": 239,
"end": 3760
} | class ____ {
public static final String SUN_PROVIDER_NAME = "SUN";
public static final String SUN_JSSE_PROVIDER_NAME = "SunJSSE";
public static final String SUN_JSSE_PROVIDER_CLASS_NAME = "com.sun.net.ssl.internal.ssl.Provider";
public static final String BOUNCYCASTLE_PROVIDER_NAME = "BC";
public static final String BOUNCYCASTLE_JSSE_PROVIDER_NAME = BOUNCYCASTLE_PROVIDER_NAME + "JSSE";
public static final String BOUNCYCASTLE_FIPS_PROVIDER_NAME = "BCFIPS";
public static final String BOUNCYCASTLE_FIPS_JSSE_PROVIDER_NAME = BOUNCYCASTLE_FIPS_PROVIDER_NAME + "JSSE";
public static final String BOUNCYCASTLE_PROVIDER_CLASS_NAME = "org.bouncycastle.jce.provider.BouncyCastleProvider";
public static final String BOUNCYCASTLE_JSSE_PROVIDER_CLASS_NAME = "org.bouncycastle.jsse.provider.BouncyCastleJsseProvider";
public static final String BOUNCYCASTLE_FIPS_PROVIDER_CLASS_NAME = "org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider";
public static final Map<String, String> SUN_PROVIDERS = Map.of("SunPKCS11", "sun.security.pkcs11.SunPKCS11");
private SecurityProviderUtils() {
}
public static void addProvider(String provider) {
addProvider(loadProvider(provider));
}
public static void addProvider(Provider provider) {
try {
if (Security.getProvider(provider.getName()) == null) {
Security.addProvider(provider);
}
} catch (Exception t) {
final String errorMessage = String.format("Security provider %s can not be added", provider.getName());
throw new ConfigurationException(errorMessage, t);
}
}
public static void insertProvider(Provider provider, int index) {
try {
if (Security.getProvider(provider.getName()) == null) {
Security.insertProviderAt(provider, index);
}
} catch (Exception t) {
final String errorMessage = String.format("Security provider %s can not be inserted", provider.getName());
throw new ConfigurationException(errorMessage, t);
}
}
public static Provider loadProvider(String providerClassName) {
try {
return (Provider) Thread.currentThread().getContextClassLoader().loadClass(providerClassName)
.getDeclaredConstructor().newInstance();
} catch (Exception t) {
final String errorMessage = String.format("Security provider %s can not be registered", providerClassName);
throw new ConfigurationException(errorMessage, t);
}
}
public static Provider loadProviderWithParams(String providerClassName, Class<?>[] paramClasses, Object[] params) {
try {
Constructor<?> c = Thread.currentThread().getContextClassLoader().loadClass(providerClassName)
.getConstructor(paramClasses);
return (Provider) c.newInstance(params);
} catch (Exception t) {
final String errorMessage = String.format("Security provider %s can not be registered", providerClassName);
throw new ConfigurationException(errorMessage, t);
}
}
public static int findProviderIndex(String providerName) {
Provider[] providers = Security.getProviders();
for (int i = 0; i < providers.length; i++) {
if (providerName.equals(providers[i].getName())) {
return i + 1;
}
}
return 1;
}
}
| SecurityProviderUtils |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Rows.java | {
"start": 567,
"end": 1884
} | class ____ {
public static Schema schema(List<Attribute> attr) {
List<String> names = new ArrayList<>(attr.size());
List<DataType> types = new ArrayList<>(attr.size());
for (Attribute a : attr) {
names.add(a.name());
types.add(a.dataType());
}
return new Schema(names, types);
}
public static SchemaRowSet of(List<Attribute> attrs, List<List<?>> values) {
if (values.isEmpty()) {
return empty(attrs);
}
if (values.size() == 1) {
return singleton(attrs, values.get(0).toArray());
}
Schema schema = schema(attrs);
return new ListRowSet(schema, values);
}
public static SchemaRowSet singleton(List<Attribute> attrs, Object... values) {
return singleton(schema(attrs), values);
}
public static SchemaRowSet singleton(Schema schema, Object... values) {
Check.isTrue(schema.size() == values.length, "Schema {} and values {} are out of sync", schema, values);
return new SingletonRowSet(schema, values);
}
public static SchemaRowSet empty(Schema schema) {
return new EmptyRowSet(schema);
}
public static SchemaRowSet empty(List<Attribute> attrs) {
return new EmptyRowSet(schema(attrs));
}
}
| Rows |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/support/ResourceInteroperabilityTests.java | {
"start": 562,
"end": 928
} | class ____ {
@Test
void newAndOldResourcesAreLogicallyEquivalent() {
var oldResource = new DefaultResource("foo", URI.create("foo"));
var newResource = Resource.of("foo", URI.create("foo"));
var differentResource = Resource.of("foo", URI.create("bar"));
assertEqualsAndHashCode(oldResource, newResource, differentResource);
}
}
| ResourceInteroperabilityTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/misc/DateTimeSchemasTest.java | {
"start": 554,
"end": 8861
} | class ____ implements JsonFormatVisitorWrapper {
SerializationContext serializationContext;
final String baseName;
final Map<String, String> traversedProperties;
public VisitorWrapper(SerializationContext ctxt, String baseName, Map<String, String> traversedProperties) {
this.serializationContext = ctxt;
this.baseName = baseName;
this.traversedProperties = traversedProperties;
}
VisitorWrapper createSubtraverser(String bn) {
return new VisitorWrapper(getContext(), bn, traversedProperties);
}
public Map<String, String> getTraversedProperties() {
return traversedProperties;
}
@Override
public JsonObjectFormatVisitor expectObjectFormat(JavaType type) {
return new JsonObjectFormatVisitor.Base(serializationContext) {
@Override
public void property(BeanProperty prop) {
anyProperty(prop);
}
@Override
public void optionalProperty(BeanProperty prop) {
anyProperty(prop);
}
private void anyProperty(BeanProperty prop) {
final String propertyName = prop.getFullName().toString();
traversedProperties.put(baseName + propertyName, "");
serializationContext.findPrimaryPropertySerializer(prop.getType(), prop)
.acceptJsonFormatVisitor(createSubtraverser(baseName + propertyName + "."), prop.getType());
}
};
}
@Override
public JsonArrayFormatVisitor expectArrayFormat(JavaType type) {
traversedProperties.put(baseName, "ARRAY/"+type.getGenericSignature());
return null;
}
@Override
public JsonStringFormatVisitor expectStringFormat(JavaType type) {
return new JsonStringFormatVisitor.Base() {
@Override
public void format(JsonValueFormat format) {
traversedProperties.put(baseName, "STRING/"+format.name());
}
};
}
@Override
public JsonNumberFormatVisitor expectNumberFormat(JavaType type) {
return new JsonNumberFormatVisitor.Base() {
@Override
public void numberType(JsonParser.NumberType format) {
traversedProperties.put(baseName, "NUMBER/"+format.name());
}
};
}
@Override
public JsonIntegerFormatVisitor expectIntegerFormat(JavaType type) {
return new JsonIntegerFormatVisitor.Base() {
@Override
public void numberType(JsonParser.NumberType numberType) {
traversedProperties.put(baseName + "numberType", "INTEGER/" + numberType.name());
}
@Override
public void format(JsonValueFormat format) {
traversedProperties.put(baseName + "format", "INTEGER/" + format.name());
}
};
}
@Override
public JsonBooleanFormatVisitor expectBooleanFormat(JavaType type) {
traversedProperties.put(baseName, "BOOLEAN");
return new JsonBooleanFormatVisitor.Base();
}
@Override
public JsonNullFormatVisitor expectNullFormat(JavaType type) {
return new JsonNullFormatVisitor.Base();
}
@Override
public JsonAnyFormatVisitor expectAnyFormat(JavaType type) {
traversedProperties.put(baseName, "ANY");
return new JsonAnyFormatVisitor.Base();
}
@Override
public JsonMapFormatVisitor expectMapFormat(JavaType type) {
traversedProperties.put(baseName, "MAP");
return new JsonMapFormatVisitor.Base(serializationContext);
}
@Override
public SerializationContext getContext() {
return serializationContext;
}
@Override
public void setContext(SerializationContext ctxt) {
this.serializationContext = ctxt;
}
}
// 05-Feb-2025, tatu: Change defaults to Jackson 2.x wrt serialization
// shape (as Timestamps vs Strings)
private final ObjectMapper MAPPER = mapperBuilder()
.enable(DateTimeFeature.WRITE_DATES_AS_TIMESTAMPS)
.build();
// // // Local date/time types
// [modules-java8#105]
@Test
public void testLocalTimeSchema() throws Exception
{
VisitorWrapper wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer().acceptJsonFormatVisitor(LocalTime.class, wrapper);
Map<String, String> properties = wrapper.getTraversedProperties();
// By default, serialized as an int array, so:
assertEquals(1, properties.size());
_verifyIntArrayType(properties.get(""));
// but becomes date/time
wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer().without(DateTimeFeature.WRITE_DATES_AS_TIMESTAMPS)
.acceptJsonFormatVisitor(LocalTime.class, wrapper);
properties = wrapper.getTraversedProperties();
_verifyTimeType(properties.get(""));
}
@Test
public void testLocalDateSchema() throws Exception
{
VisitorWrapper wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer().acceptJsonFormatVisitor(LocalDate.class, wrapper);
Map<String, String> properties = wrapper.getTraversedProperties();
// By default, serialized as an int array, so:
assertEquals(1, properties.size());
_verifyIntArrayType(properties.get(""));
// but becomes date/time
wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer().without(DateTimeFeature.WRITE_DATES_AS_TIMESTAMPS)
.acceptJsonFormatVisitor(LocalDate.class, wrapper);
properties = wrapper.getTraversedProperties();
_verifyDateType(properties.get(""));
}
// // // Zoned date/time types
@Test
public void testDateTimeSchema() throws Exception
{
VisitorWrapper wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer().acceptJsonFormatVisitor(ZonedDateTime.class, wrapper);
Map<String, String> properties = wrapper.getTraversedProperties();
// By default, serialized as an int array, so:
assertEquals(1, properties.size());
_verifyBigDecimalType(properties.get(""));
// but becomes long
wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer()
.without(DateTimeFeature.WRITE_DATE_TIMESTAMPS_AS_NANOSECONDS)
.acceptJsonFormatVisitor(ZonedDateTime.class, wrapper);
properties = wrapper.getTraversedProperties();
_verifyLongType(properties.get("numberType"));
_verifyLongFormat(properties.get("format"));
// but becomes date/time
wrapper = new VisitorWrapper(null, "", new HashMap<String, String>());
MAPPER.writer().without(DateTimeFeature.WRITE_DATES_AS_TIMESTAMPS)
.acceptJsonFormatVisitor(ZonedDateTime.class, wrapper);
properties = wrapper.getTraversedProperties();
_verifyDateTimeType(properties.get(""));
}
private void _verifyIntArrayType(String desc) {
assertEquals("ARRAY/Ljava/util/List<Ljava/lang/Integer;>;", desc);
}
private void _verifyTimeType(String desc) {
assertEquals("STRING/TIME", desc);
}
private void _verifyDateType(String desc) {
assertEquals("STRING/DATE", desc);
}
private void _verifyDateTimeType(String desc) {
assertEquals("STRING/DATE_TIME", desc);
}
private void _verifyBigDecimalType(String desc) {
assertEquals("NUMBER/BIG_DECIMAL", desc);
}
private void _verifyLongType(String desc) {
assertEquals("INTEGER/LONG", desc);
}
private void _verifyLongFormat(String desc) {
assertEquals("INTEGER/UTC_MILLISEC", desc);
}
}
| VisitorWrapper |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/devmode/NotFoundPageDisplayableEndpointBuildItem.java | {
"start": 222,
"end": 1351
} | class ____ extends MultiBuildItem {
private final String endpoint;
private final String description;
private final boolean absolutePath;
public NotFoundPageDisplayableEndpointBuildItem(String endpoint, String description) {
this(endpoint, description, false);
}
public NotFoundPageDisplayableEndpointBuildItem(String endpoint, String description, boolean isAbsolutePath) {
this.endpoint = endpoint;
this.description = description;
this.absolutePath = isAbsolutePath;
}
public NotFoundPageDisplayableEndpointBuildItem(String endpoint) {
this(endpoint, null);
}
public String getEndpoint() {
return endpoint;
}
public String getDescription() {
return description;
}
public boolean isAbsolutePath() {
return absolutePath;
}
public String getEndpoint(HttpRootPathBuildItem httpRoot) {
if (absolutePath) {
return endpoint;
} else {
return TemplateHtmlBuilder.adjustRoot(httpRoot.getRootPath(), endpoint);
}
}
}
| NotFoundPageDisplayableEndpointBuildItem |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/suite/engine/testsuites/LifecycleMethodsSuites.java | {
"start": 4892,
"end": 5072
} | class ____ {
@AfterSuite
static void parameterAcceptingAfterSuite(String param) {
fail("Should not be called");
}
}
@TestSuite
public static | ParameterAcceptingAfterSuite |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/support/BeanRegistryAdapterTests.java | {
"start": 11651,
"end": 11893
} | class ____ implements BeanRegistrar {
@Override
public void register(BeanRegistry registry, Environment env) {
registry.registerBean("foo", Foo.class, BeanRegistry.Spec::notAutowirable);
}
}
private static | NotAutowirableBeanRegistrar |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MoreThanOneQualifierTest.java | {
"start": 5171,
"end": 5315
} | class ____ {
@Foo private int n;
@Foo
public TestClass2() {}
@Foo
public void setN(@Foo int n) {}
}
/**
* A | TestClass2 |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFtpSimpleNoEndpointPathRelativeMoveToRelativeNotStepwiseIT.java | {
"start": 871,
"end": 1247
} | class ____
extends FromFtpSimpleNoEndpointPathRelativeMoveToRelativeIT {
@Override
protected String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}?password=admin&recursive=true&binary=false"
+ "&move=.done&initialDelay=2500&delay=5000&stepwise=false";
}
}
| FromFtpSimpleNoEndpointPathRelativeMoveToRelativeNotStepwiseIT |
java | google__guava | guava/src/com/google/common/collect/CompactLinkedHashMap.java | {
"start": 7992,
"end": 8308
} | class ____ extends EntrySetView {
@Override
public Spliterator<Entry<K, V>> spliterator() {
return Spliterators.spliterator(this, Spliterator.ORDERED | Spliterator.DISTINCT);
}
}
return new EntrySetImpl();
}
@Override
Set<K> createKeySet() {
@WeakOuter
final | EntrySetImpl |
java | elastic__elasticsearch | x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java | {
"start": 974,
"end": 6845
} | class ____ extends ESRestTestCase {
public static final String JDBC_ES_URL_PREFIX = "jdbc:es://";
@After
public void checkSearchContent() throws IOException {
// Some context might linger due to fire and forget nature of PIT cleanup
assertNoSearchContexts();
}
/**
* Read an address for Elasticsearch suitable for the JDBC driver from the system properties.
*/
public String elasticsearchAddress() {
String cluster = getTestRestCluster();
// JDBC only supports a single node at a time so we just give it one.
return cluster.split(",")[0];
/* This doesn't include "jdbc:es://" because we want the example in
* esJdbc to be obvious and because we want to use getProtocol to add
* https if we are running against https. */
}
public Connection esJdbc() throws SQLException {
return esJdbc(connectionProperties());
}
public Connection esJdbc(Properties props) throws SQLException {
return createConnection(props);
}
protected Connection createConnection(Properties connectionProperties) throws SQLException {
String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress();
String address = JDBC_ES_URL_PREFIX + elasticsearchAddress;
Connection connection;
if (randomBoolean()) {
connection = DriverManager.getConnection(address, connectionProperties);
} else {
EsDataSource dataSource = new EsDataSource();
dataSource.setUrl(address);
dataSource.setProperties(connectionProperties);
connection = dataSource.getConnection();
}
assertNotNull("The timezone should be specified", connectionProperties.getProperty("timezone"));
return connection;
}
//
// methods below are used inside the documentation only
//
protected Connection useDriverManager() throws SQLException {
String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress();
// tag::connect-dm
String address = "jdbc:es://" + elasticsearchAddress; // <1>
Properties connectionProperties = connectionProperties(); // <2>
Connection connection =
DriverManager.getConnection(address, connectionProperties);
// end::connect-dm
assertNotNull("The timezone should be specified", connectionProperties.getProperty("timezone"));
return connection;
}
protected Connection useDataSource() throws SQLException {
String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress();
// tag::connect-ds
EsDataSource dataSource = new EsDataSource();
String address = "jdbc:es://" + elasticsearchAddress; // <1>
dataSource.setUrl(address);
Properties connectionProperties = connectionProperties(); // <2>
dataSource.setProperties(connectionProperties);
Connection connection = dataSource.getConnection();
// end::connect-ds
assertNotNull("The timezone should be specified", connectionProperties.getProperty("timezone"));
return connection;
}
public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
index(index, "1", body);
}
public static void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
Request request = new Request("PUT", "/" + index + "/_doc/" + documentId);
request.addParameter("refresh", "true");
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
body.accept(builder);
builder.endObject();
request.setJsonEntity(Strings.toString(builder));
client().performRequest(request);
}
public static void delete(String index, String documentId) throws IOException {
Request request = new Request("DELETE", "/" + index + "/_doc/" + documentId);
request.addParameter("refresh", "true");
client().performRequest(request);
}
/**
* The properties used to build the connection.
*/
protected Properties connectionProperties() {
Properties connectionProperties = new Properties();
connectionProperties.put(JdbcTestUtils.JDBC_TIMEZONE, randomZone().getId());
// in the tests, don't be lenient towards multi values
connectionProperties.put("field.multi.value.leniency", "false");
return connectionProperties;
}
private static Map<String, Object> searchStats() throws IOException {
Response response = client().performRequest(new Request("GET", "/_stats/search"));
try (InputStream content = response.getEntity().getContent()) {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
}
}
@SuppressWarnings("unchecked")
private static int getOpenContexts(Map<String, Object> stats, String index) {
stats = (Map<String, Object>) stats.get("indices");
stats = (Map<String, Object>) stats.get(index);
stats = (Map<String, Object>) stats.get("total");
stats = (Map<String, Object>) stats.get("search");
return (Integer) stats.get("open_contexts");
}
static void assertNoSearchContexts() throws IOException {
Map<String, Object> stats = searchStats();
@SuppressWarnings("unchecked")
Map<String, Object> indicesStats = (Map<String, Object>) stats.get("indices");
for (String index : indicesStats.keySet()) {
if (index.startsWith(".") == false) { // We are not interested in internal indices
assertEquals(index + " should have no search contexts", 0, getOpenContexts(stats, index));
}
}
}
}
| JdbcIntegrationTestCase |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/CreateSchedulerTest_directCreate.java | {
"start": 2483,
"end": 2905
} | class ____ extends MockDriver {
public MockConnection createMockConnection(MockDriver driver, String url, Properties connectProperties) {
try {
Thread.sleep(1000 * 1);
} catch (InterruptedException e) {
e.printStackTrace();
}
return super.createMockConnection(driver, url, connectProperties);
}
}
public static | SlowDriver |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/formatstring/FormatStringTest.java | {
"start": 10177,
"end": 10665
} | class ____ {
void f() {
System.err.printf("%tY", LocalDateTime.ofInstant(Instant.now(), ZoneId.systemDefault()));
System.err.printf("%tQ", Instant.now());
System.err.printf(
"%tZ",
ZonedDateTime.of(
LocalDate.of(2018, 12, 27), LocalTime.of(17, 0), ZoneId.of("Europe/London")));
}
}
""")
.doTest();
}
@Test
public void number() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java | {
"start": 1666,
"end": 3118
} | class ____ {
public static final Logger LOG = LoggerFactory.getLogger(
AsyncCallHandler.class);
private static final ThreadLocal<AsyncGet<?, Exception>>
LOWER_LAYER_ASYNC_RETURN = new ThreadLocal<>();
private static final ThreadLocal<AsyncGet<Object, Throwable>>
ASYNC_RETURN = new ThreadLocal<>();
/**
* @return the async return value from {@link AsyncCallHandler}.
* @param <T> T.
* @param <R> R.
*/
@InterfaceStability.Unstable
@SuppressWarnings("unchecked")
public static <R, T extends Throwable> AsyncGet<R, T> getAsyncReturn() {
final AsyncGet<R, T> asyncGet = (AsyncGet<R, T>)ASYNC_RETURN.get();
if (asyncGet != null) {
ASYNC_RETURN.set(null);
return asyncGet;
} else {
return (AsyncGet<R, T>) getLowerLayerAsyncReturn();
}
}
/**
* For the lower rpc layers to set the async return value.
* @param asyncReturn asyncReturn.
*/
@InterfaceStability.Unstable
public static void setLowerLayerAsyncReturn(
AsyncGet<?, Exception> asyncReturn) {
LOWER_LAYER_ASYNC_RETURN.set(asyncReturn);
}
private static AsyncGet<?, Exception> getLowerLayerAsyncReturn() {
final AsyncGet<?, Exception> asyncGet = LOWER_LAYER_ASYNC_RETURN.get();
Preconditions.checkNotNull(asyncGet);
LOWER_LAYER_ASYNC_RETURN.set(null);
return asyncGet;
}
/** A simple concurrent queue which keeping track the empty start time. */
static | AsyncCallHandler |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/core/v2/client/ClientSyncDatumSnapshot.java | {
"start": 790,
"end": 1153
} | class ____ {
private List<ClientSyncData> clientSyncDataList = new LinkedList<>();
public List<ClientSyncData> getClientSyncDataList() {
return clientSyncDataList;
}
public void setClientSyncDataList(List<ClientSyncData> clientSyncDataList) {
this.clientSyncDataList = clientSyncDataList;
}
}
| ClientSyncDatumSnapshot |
java | apache__camel | components/camel-cassandraql/src/test/java/org/apache/camel/component/cassandra/ResultSetConversionStrategiesTest.java | {
"start": 1388,
"end": 2794
} | class ____ {
public ResultSetConversionStrategiesTest() {
}
@Test
public void testAll() {
ResultSetConversionStrategy strategy = ResultSetConversionStrategies.fromName("ALL");
ResultSet resultSet = mock(ResultSet.class);
List<Row> rows = Collections.nCopies(20, mock(Row.class));
when(resultSet.all()).thenReturn(rows);
Object body = strategy.getBody(resultSet);
assertTrue(body instanceof List);
assertSame(rows, body);
}
@Test
public void testOne() {
ResultSetConversionStrategy strategy = ResultSetConversionStrategies.fromName("ONE");
ResultSet resultSet = mock(ResultSet.class);
Row row = mock(Row.class);
when(resultSet.one()).thenReturn(row);
Object body = strategy.getBody(resultSet);
assertTrue(body instanceof Row);
assertSame(row, body);
}
@Test
public void testLimit() {
ResultSetConversionStrategy strategy = ResultSetConversionStrategies.fromName("LIMIT_10");
ResultSet resultSet = mock(ResultSet.class);
List<Row> rows = Collections.nCopies(20, mock(Row.class));
when(resultSet.iterator()).thenReturn(rows.iterator());
Object body = strategy.getBody(resultSet);
assertTrue(body instanceof List);
assertEquals(10, ((List<?>) body).size());
}
}
| ResultSetConversionStrategiesTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/auth/KafkaPrincipalBuilder.java | {
"start": 882,
"end": 1253
} | interface ____ supports both SSL authentication through
* {@link SslAuthenticationContext} and SASL through {@link SaslAuthenticationContext}.
*
* Note that the {@link org.apache.kafka.common.Configurable} and {@link java.io.Closeable}
* interfaces are respected if implemented. Additionally, implementations must provide a
* default no-arg constructor.
*/
public | which |
java | quarkusio__quarkus | core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/discovery/DiscoveryConfigProperty.java | {
"start": 4734,
"end": 7890
} | class ____ {
private String name;
private final String sourceType;
private final String sourceElementName;
private final SourceElementType sourceElementType;
private final ResolvedType type;
private String defaultValue;
private String defaultValueForDoc;
private boolean escapeDefaultValueForDoc = true;
private Deprecation deprecation;
private String mapKey;
private boolean unnamedMapKey = false;
private boolean converted = false;
private boolean enforceHyphenateEnumValue = false;
private boolean section = false;
private boolean sectionGenerated = false;
public Builder(String sourceType, String sourceElementName, SourceElementType sourceElementType, ResolvedType type) {
this.sourceType = sourceType;
this.sourceElementName = sourceElementName;
this.sourceElementType = sourceElementType;
this.type = type;
}
public Builder name(String name) {
this.name = name;
return this;
}
public Builder defaultValue(String defaultValue) {
this.defaultValue = defaultValue;
return this;
}
public Builder defaultValueForDoc(String defaultValueForDoc) {
this.defaultValueForDoc = defaultValueForDoc;
return this;
}
public Builder escapeDefaultValueForDoc(boolean escapeDefaultValueForDoc) {
this.escapeDefaultValueForDoc = escapeDefaultValueForDoc;
return this;
}
public Builder deprecated(String since, String replacement, String reason) {
this.deprecation = new Deprecation(since, replacement);
return this;
}
public Builder mapKey(String mapKey) {
this.mapKey = mapKey;
return this;
}
public Builder unnamedMapKey() {
this.unnamedMapKey = true;
return this;
}
public Builder converted() {
this.converted = true;
return this;
}
public Builder enforceHyphenateEnumValues() {
this.enforceHyphenateEnumValue = true;
return this;
}
public Builder section(boolean generated) {
this.section = true;
this.sectionGenerated = generated;
return this;
}
public DiscoveryConfigProperty build() {
if (type.isPrimitive() && defaultValue == null) {
defaultValue = TypeUtil.getPrimitiveDefaultValue(type.qualifiedName());
}
if (type.isDuration() && !Strings.isBlank(defaultValue)) {
defaultValue = TypeUtil.normalizeDurationValue(defaultValue);
}
return new DiscoveryConfigProperty(name, sourceType, sourceElementName, sourceElementType, defaultValue,
defaultValueForDoc, escapeDefaultValueForDoc,
deprecation, mapKey, unnamedMapKey, type, converted, enforceHyphenateEnumValue, section, sectionGenerated);
}
}
}
| Builder |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/annotations/Select.java | {
"start": 1269,
"end": 1780
} | interface ____ {
* @Select({ "<script>", "select * from users", "where name = #{name}",
* "<if test=\"age != null\"> age = #{age} </if>", "</script>" })
* User select(@NotNull String name, @Nullable Integer age);
* }
* }</pre>
*
* </li>
* </ul>
*
* @author Clinton Begin
*
* @see <a href="https://mybatis.org/mybatis-3/dynamic-sql.html">How to use Dynamic SQL</a>
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Repeatable(Select.List.class)
public @ | UserMapper |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/ResourceResolverSupport.java | {
"start": 1178,
"end": 1250
} | class ____ {@link ResourceResolver} implementations.
*/
public abstract | for |
java | lettuce-io__lettuce-core | src/test/jmh/io/lettuce/core/EmptyRedisChannelWriter.java | {
"start": 341,
"end": 1389
} | class ____ implements RedisChannelWriter {
public static final EmptyRedisChannelWriter INSTANCE = new EmptyRedisChannelWriter();
private static final CompletableFuture CLOSE_FUTURE = CompletableFuture.completedFuture(null);
@Override
public <K, V, T> RedisCommand<K, V, T> write(RedisCommand<K, V, T> command) {
return null;
}
@Override
public <K, V> Collection<RedisCommand<K, V, ?>> write(Collection<? extends RedisCommand<K, V, ?>> redisCommands) {
return (Collection) redisCommands;
}
@Override
public void close() {
}
@Override
public CompletableFuture<Void> closeAsync() {
return CLOSE_FUTURE;
}
@Override
public void setConnectionFacade(ConnectionFacade connection) {
}
@Override
public ClientResources getClientResources() {
return EmptyClientResources.INSTANCE;
}
@Override
public void setAutoFlushCommands(boolean autoFlush) {
}
@Override
public void flushCommands() {
}
}
| EmptyRedisChannelWriter |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/instant/InstantAssertBaseTest.java | {
"start": 728,
"end": 971
} | class ____ {
public static final Instant REFERENCE = Instant.now();
public static final Instant BEFORE = REFERENCE.minus(1, ChronoUnit.HOURS);
public static final Instant AFTER = REFERENCE.plus(1, ChronoUnit.HOURS);
}
| InstantAssertBaseTest |
java | apache__camel | components/camel-graphql/src/test/java/org/apache/camel/component/graphql/server/GraphqlServer.java | {
"start": 1686,
"end": 2270
} | class ____ {
private final GraphQL graphql;
private final HttpServer server;
public GraphqlServer() {
this.graphql = GraphqlFactory.newGraphQL();
this.server = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost")
.register("/graphql", new GraphqlHandler())
.create();
}
public void start() throws IOException {
server.start();
}
public void shutdown() {
server.close();
}
public int getPort() {
return server.getLocalPort();
}
| GraphqlServer |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleCloudFunctionsEndpointBuilderFactory.java | {
"start": 9156,
"end": 11405
} | interface ____ {
/**
* Google Cloud Functions (camel-google-functions)
* Manage and invoke Google Cloud Functions
*
* Category: cloud
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-google-functions
*
* @return the dsl builder for the headers' name.
*/
default GoogleCloudFunctionsHeaderNameBuilder googleFunctions() {
return GoogleCloudFunctionsHeaderNameBuilder.INSTANCE;
}
/**
* Google Cloud Functions (camel-google-functions)
* Manage and invoke Google Cloud Functions
*
* Category: cloud
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-google-functions
*
* Syntax: <code>google-functions:functionName</code>
*
* Path parameter: functionName (required)
* The user-defined name of the function
*
* @param path functionName
* @return the dsl builder
*/
default GoogleCloudFunctionsEndpointBuilder googleFunctions(String path) {
return GoogleCloudFunctionsEndpointBuilderFactory.endpointBuilder("google-functions", path);
}
/**
* Google Cloud Functions (camel-google-functions)
* Manage and invoke Google Cloud Functions
*
* Category: cloud
* Since: 3.9
* Maven coordinates: org.apache.camel:camel-google-functions
*
* Syntax: <code>google-functions:functionName</code>
*
* Path parameter: functionName (required)
* The user-defined name of the function
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path functionName
* @return the dsl builder
*/
default GoogleCloudFunctionsEndpointBuilder googleFunctions(String componentName, String path) {
return GoogleCloudFunctionsEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Google Cloud Functions component.
*/
public static | GoogleCloudFunctionsBuilders |
java | google__guice | core/test/com/google/inject/internal/MessagesTest.java | {
"start": 574,
"end": 2604
} | class ____ extends ErrorDetail<ExampleErrorDetail> {
ExampleErrorDetail(String message) {
super(message, ImmutableList.of(), null);
}
@Override
public void formatDetail(List<ErrorDetail<?>> mergeableErrors, Formatter formatter) {
formatter.format("Duplicate count: %s\n", mergeableErrors.size() + 1);
}
@Override
public boolean isMergeable(ErrorDetail<?> otherError) {
return otherError instanceof ExampleErrorDetail
&& otherError.getMessage().equals(getMessage());
}
@Override
public ExampleErrorDetail withSources(List<Object> unused) {
return new ExampleErrorDetail(getMessage());
}
}
@Test
public void customErrorMessage() {
List<Message> messages = new ArrayList<>();
Throwable cause = null;
messages.add(new Message("example", cause));
messages.add(exampleError("a"));
messages.add(exampleError("b"));
messages.add(exampleError("a"));
String result = Messages.formatMessages("Example", messages);
assertThat(result)
.isEqualTo(
"Example:\n\n"
+ "1) example\n\n"
+ "2) a\n"
+ "Duplicate count: 2\n\n"
+ "3) b\n"
+ "Duplicate count: 1\n\n"
+ "3 errors");
}
@Test
public void provisionExceptionWithCustomErrorMessageIsSerializable() {
Throwable cause = null;
ProvisionException exception =
new ProvisionException(
ImmutableList.of(exampleError("Custom error"), new Message("Generic error", cause)));
assertThat(reserialize(exception))
.hasMessageThat()
.isEqualTo(
"Unable to provision, see the following errors:\n\n"
+ "1) Custom error\n\n"
+ "2) Generic error\n\n"
+ "2 errors");
}
private static Message exampleError(String message) {
return new Message(
GuiceInternal.GUICE_INTERNAL, ErrorId.OTHER, new ExampleErrorDetail(message));
}
}
| ExampleErrorDetail |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 951759,
"end": 953865
} | class ____ extends YamlDeserializerBase<RouteContextRefDefinition> {
public RouteContextRefDefinitionDeserializer() {
super(RouteContextRefDefinition.class);
}
@Override
protected RouteContextRefDefinition newInstance() {
return new RouteContextRefDefinition();
}
@Override
protected RouteContextRefDefinition newInstance(String value) {
return new RouteContextRefDefinition(value);
}
@Override
protected boolean setProperty(RouteContextRefDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "ref": {
String val = asText(node);
target.setRef(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "route",
inline = true,
types = org.apache.camel.model.RouteDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Route",
description = "A Camel route",
deprecated = false,
properties = {
@YamlProperty(name = "description", type = "string", description = "Sets the description of this node", displayName = "Description"),
@YamlProperty(name = "disabled", type = "boolean"),
@YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"),
@YamlProperty(name = "note", type = "string", description = "Sets the note of this node", displayName = "Note"),
@YamlProperty(name = "steps", type = "array:org.apache.camel.model.ProcessorDefinition")
}
)
public static | RouteContextRefDefinitionDeserializer |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/type/AbstractAnnotationMetadataTests.java | {
"start": 6721,
"end": 6775
} | interface ____ {
}
public static final | TestAnnotation |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/jdbc/Sql.java | {
"start": 4083,
"end": 4990
} | interface ____ {
/**
* Alias for {@link #scripts}.
* <p>This attribute may <strong>not</strong> be used in conjunction with
* {@link #scripts}, but it may be used instead of {@link #scripts}.
* @see #scripts
* @see #statements
*/
@AliasFor("scripts")
String[] value() default {};
/**
* The paths to the SQL scripts to execute.
* <p>This attribute may <strong>not</strong> be used in conjunction with
* {@link #value}, but it may be used instead of {@link #value}. Similarly,
* this attribute may be used in conjunction with or instead of
* {@link #statements}.
* <h4>Path Resource Semantics</h4>
* <p>Each path will be interpreted as a Spring
* {@link org.springframework.core.io.Resource Resource}. A plain path
* — for example, {@code "schema.sql"} — will be treated as a
* classpath resource that is <em>relative</em> to the package in which the
* test | Sql |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/filter/subclass/singletable/SingleTableInheritanceFilterTest.java | {
"start": 3494,
"end": 3548
} | class ____ extends AbstractSuperClass {}
}
| ChildEntityTwo |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/analyzer/BindFailureAnalyzerTests.java | {
"start": 7970,
"end": 8342
} | class ____ {
private int value;
private @Nullable Class<?> type;
int getValue() {
return this.value;
}
void setValue(int value) {
this.value = value;
}
@Nullable Class<?> getType() {
return this.type;
}
void setType(@Nullable Class<?> type) {
this.type = type;
}
}
@ConfigurationProperties("test.foo")
static | GenericFailureProperties |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client-jackson/runtime/src/main/java/io/quarkus/rest/client/reactive/jackson/runtime/serialisers/ClientJacksonMessageBodyWriter.java | {
"start": 1224,
"end": 5103
} | class ____ implements ClientMessageBodyWriter<Object> {
private final ObjectWriter defaultWriter;
private final ConcurrentMap<ObjectMapper, ObjectWriter> objectWriterMap = new ConcurrentHashMap<>();
@Inject
public ClientJacksonMessageBodyWriter(ObjectMapper mapper) {
this.defaultWriter = createDefaultWriter(mapper);
}
@Override
public boolean isWriteable(Class type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return true;
}
@Override
public void writeTo(Object o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
doLegacyWrite(o, annotations, httpHeaders, entityStream, getEffectiveWriter(mediaType, annotations, null));
}
@Override
public void writeTo(Object o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream,
RestClientRequestContext context) throws IOException, WebApplicationException {
doLegacyWrite(o, annotations, httpHeaders, entityStream, getEffectiveWriter(mediaType, annotations, context));
}
protected ObjectWriter getEffectiveWriter(MediaType responseMediaType, Annotation[] annotations,
RestClientRequestContext context) {
ObjectWriter result = defaultWriter;
ObjectMapper objectMapper = getObjectMapperFromContext(responseMediaType, context);
if (objectMapper != null) {
result = objectWriterMap.computeIfAbsent(objectMapper, new Function<>() {
@Override
public ObjectWriter apply(ObjectMapper objectMapper) {
return createDefaultWriter(objectMapper);
}
});
}
return applyJsonViewIfPresent(result, effectiveView(annotations, context));
}
private Optional<Class<?>> effectiveView(Annotation[] annotations, RestClientRequestContext context) {
Optional<Class<?>> fromAnnotations = matchingView(annotations);
if (fromAnnotations.isPresent()) {
return fromAnnotations;
}
// now check the method parameters for a @JsonView on the body parameter
if (context != null && context.getInvokedMethod() != null) {
Parameter[] parameters = context.getInvokedMethod().getParameters();
if (parameters != null) {
for (Parameter parameter : parameters) {
Annotation[] paramAnnotations = parameter.getAnnotations();
boolean isBodyParameter = true;
for (Annotation paramAnnotation : paramAnnotations) {
String paramTypeClassName = paramAnnotation.annotationType().getName();
// TODO: this should be centralized somewhere
if (paramTypeClassName.startsWith("jakarta.ws.rs")
|| paramTypeClassName.startsWith("io.quarkus.rest.client")
|| paramTypeClassName.startsWith("org.jboss.resteasy.reactive")) {
isBodyParameter = false;
break;
}
}
if (isBodyParameter) {
return matchingView(paramAnnotations);
}
}
}
}
return Optional.empty();
}
private static ObjectWriter applyJsonViewIfPresent(ObjectWriter writer, Optional<Class<?>> maybeView) {
if (maybeView.isPresent()) {
return writer.withView(maybeView.get());
} else {
return writer;
}
}
}
| ClientJacksonMessageBodyWriter |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_CidrAddress.java | {
"start": 1095,
"end": 1351
} | class ____ {
static Target_org_wildfly_common_net_CidrAddress newInstance(InetAddress networkAddress, int netmaskBits) {
return Target_org_wildfly_common_net_CidrAddress.create(networkAddress, netmaskBits);
}
}
}
| CidrAddressUtil |
java | apache__camel | components/camel-aws/camel-aws2-mq/src/test/java/org/apache/camel/component/aws2/mq/MQComponentConfigurationTest.java | {
"start": 1157,
"end": 4626
} | class ____ extends CamelTestSupport {
@Test
public void createEndpointWithComponentElements() throws Exception {
MQ2Component component = context.getComponent("aws2-mq", MQ2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
MQ2Endpoint endpoint = (MQ2Endpoint) component.createEndpoint("aws2-mq://MyQueue");
assertEquals("XXX", endpoint.getConfiguration().getAccessKey());
assertEquals("YYY", endpoint.getConfiguration().getSecretKey());
}
@Test
public void createEndpointWithComponentAndEndpointElements() throws Exception {
MQ2Component component = context.getComponent("aws2-mq", MQ2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
MQ2Endpoint endpoint
= (MQ2Endpoint) component.createEndpoint("aws2-mq://MyQueue?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
}
@Test
public void createEndpointWithComponentEndpointElementsAndProxy() throws Exception {
MQ2Component component = context.getComponent("aws2-mq", MQ2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
MQ2Endpoint endpoint = (MQ2Endpoint) component
.createEndpoint(
"aws2-mq://label?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1&proxyHost=localhost&proxyPort=9000&proxyProtocol=HTTP");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
assertEquals(Protocol.HTTP, endpoint.getConfiguration().getProxyProtocol());
assertEquals("localhost", endpoint.getConfiguration().getProxyHost());
assertEquals(Integer.valueOf(9000), endpoint.getConfiguration().getProxyPort());
}
@Test
public void createEndpointWithOverride() throws Exception {
MQ2Component component = context.getComponent("aws2-mq", MQ2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
MQ2Endpoint endpoint
= (MQ2Endpoint) component.createEndpoint(
"aws2-mq://MyQueue?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1&overrideEndpoint=true&uriEndpointOverride=http://localhost:9090");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
assertTrue(endpoint.getConfiguration().isOverrideEndpoint());
assertEquals("http://localhost:9090", endpoint.getConfiguration().getUriEndpointOverride());
}
}
| MQComponentConfigurationTest |
java | apache__flink | flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PriorityThreadsDispatcher.java | {
"start": 1859,
"end": 1932
} | class ____ Pekko will instantiate
* ...
* }
* </pre>
*/
public | that |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/artifact/repository/DefaultArtifactRepository.java | {
"start": 1325,
"end": 7870
} | class ____ extends Repository implements ArtifactRepository {
private ArtifactRepositoryLayout layout;
private ArtifactRepositoryPolicy snapshots;
private ArtifactRepositoryPolicy releases;
private boolean blacklisted;
private Authentication authentication;
private Proxy proxy;
private List<ArtifactRepository> mirroredRepositories = Collections.emptyList();
private boolean blocked;
/**
* Create a local repository or a test repository.
*
* @param id the unique identifier of the repository
* @param url the URL of the repository
* @param layout the layout of the repository
*/
public DefaultArtifactRepository(String id, String url, ArtifactRepositoryLayout layout) {
this(id, url, layout, null, null);
}
/**
* Create a remote deployment repository.
*
* @param id the unique identifier of the repository
* @param url the URL of the repository
* @param layout the layout of the repository
* @param uniqueVersion whether to assign each snapshot a unique version
*/
public DefaultArtifactRepository(String id, String url, ArtifactRepositoryLayout layout, boolean uniqueVersion) {
super(id, url);
this.layout = layout;
}
/**
* Create a remote download repository.
*
* @param id the unique identifier of the repository
* @param url the URL of the repository
* @param layout the layout of the repository
* @param snapshots the policies to use for snapshots
* @param releases the policies to use for releases
*/
public DefaultArtifactRepository(
String id,
String url,
ArtifactRepositoryLayout layout,
ArtifactRepositoryPolicy snapshots,
ArtifactRepositoryPolicy releases) {
super(id, url);
this.layout = layout;
if (snapshots == null) {
snapshots = new ArtifactRepositoryPolicy(
true,
ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS,
ArtifactRepositoryPolicy.CHECKSUM_POLICY_IGNORE);
}
this.snapshots = snapshots;
if (releases == null) {
releases = new ArtifactRepositoryPolicy(
true,
ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS,
ArtifactRepositoryPolicy.CHECKSUM_POLICY_IGNORE);
}
this.releases = releases;
}
@Override
public String pathOf(Artifact artifact) {
return layout.pathOf(artifact);
}
@Override
public String pathOfRemoteRepositoryMetadata(ArtifactMetadata artifactMetadata) {
return layout.pathOfRemoteRepositoryMetadata(artifactMetadata);
}
@Override
public String pathOfLocalRepositoryMetadata(ArtifactMetadata metadata, ArtifactRepository repository) {
return layout.pathOfLocalRepositoryMetadata(metadata, repository);
}
@Override
public void setLayout(ArtifactRepositoryLayout layout) {
this.layout = layout;
}
@Override
public ArtifactRepositoryLayout getLayout() {
return layout;
}
@Override
public void setSnapshotUpdatePolicy(ArtifactRepositoryPolicy snapshots) {
this.snapshots = snapshots;
}
@Override
public ArtifactRepositoryPolicy getSnapshots() {
return snapshots;
}
@Override
public void setReleaseUpdatePolicy(ArtifactRepositoryPolicy releases) {
this.releases = releases;
}
@Override
public ArtifactRepositoryPolicy getReleases() {
return releases;
}
@Override
public String getKey() {
return getId();
}
@Override
public boolean isBlacklisted() {
return blacklisted;
}
@Override
public void setBlacklisted(boolean blacklisted) {
this.blacklisted = blacklisted;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(256);
sb.append(" id: ").append(getId()).append('\n');
sb.append(" url: ").append(getUrl()).append('\n');
sb.append(" layout: ").append(layout != null ? layout : "none").append('\n');
if (snapshots != null) {
sb.append("snapshots: [enabled => ").append(snapshots.isEnabled());
sb.append(", update => ").append(snapshots.getUpdatePolicy()).append("]\n");
}
if (releases != null) {
sb.append(" releases: [enabled => ").append(releases.isEnabled());
sb.append(", update => ").append(releases.getUpdatePolicy()).append("]\n");
}
return sb.toString();
}
@Override
public Artifact find(Artifact artifact) {
File artifactFile = new File(getBasedir(), pathOf(artifact));
// We need to set the file here or the resolver will fail with an NPE, not fully equipped to deal
// with multiple local repository implementations yet.
artifact.setFile(artifactFile);
if (artifactFile.exists()) {
artifact.setResolved(true);
}
return artifact;
}
@Override
public List<String> findVersions(Artifact artifact) {
return Collections.emptyList();
}
@Override
public boolean isProjectAware() {
return false;
}
@Override
public Authentication getAuthentication() {
return authentication;
}
@Override
public void setAuthentication(Authentication authentication) {
this.authentication = authentication;
}
@Override
public Proxy getProxy() {
return proxy;
}
@Override
public void setProxy(Proxy proxy) {
this.proxy = proxy;
}
@Override
public boolean isUniqueVersion() {
return true;
}
@Override
public List<ArtifactRepository> getMirroredRepositories() {
return mirroredRepositories;
}
@Override
public void setMirroredRepositories(List<ArtifactRepository> mirroredRepositories) {
if (mirroredRepositories != null) {
this.mirroredRepositories = Collections.unmodifiableList(mirroredRepositories);
} else {
this.mirroredRepositories = Collections.emptyList();
}
}
@Override
public boolean isBlocked() {
return blocked;
}
@Override
public void setBlocked(boolean blocked) {
this.blocked = blocked;
}
}
| DefaultArtifactRepository |
java | google__guice | core/test/com/google/inject/CircularDependencyTest.java | {
"start": 18497,
"end": 18541
} | interface ____ {}
@SimpleSingleton
static | J |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.