language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | core/src/test/java/com/google/errorprone/suppress/CustomSuppressionTest.java | {
"start": 4448,
"end": 5089
} | class ____ {
@SuppressMyChecker2
int identity(int value) {
// BUG: Diagnostic contains:
return value;
}
}
""")
.doTest();
}
@Test
public void myChecker2IsSuppressedWithEitherCustomAnnotation() {
CompilationTestHelper.newInstance(MyChecker2.class, getClass())
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.suppress.CustomSuppressionTest.SuppressBothCheckers;
import com.google.errorprone.suppress.CustomSuppressionTest.SuppressMyChecker2;
| Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/android/StaticOrDefaultInterfaceMethodTest.java | {
"start": 1357,
"end": 1738
} | interface ____ {
// BUG: Diagnostic contains: StaticOrDefaultInterfaceMethod
default void test() {
System.out.println();
}
}
""")
.doTest();
}
@Test
public void positiveCaseStatic() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/xml/NestedBeansElementAttributeRecursionTests.java | {
"start": 6946,
"end": 7500
} | class ____ {
boolean initMethod1Called;
boolean initMethod2Called;
boolean initMethod3Called;
boolean destroyMethod1Called;
boolean destroyMethod2Called;
boolean destroyMethod3Called;
void initMethod1() { this.initMethod1Called = true; }
void initMethod2() { this.initMethod2Called = true; }
void initMethod3() { this.initMethod3Called = true; }
void destroyMethod1() { this.destroyMethod1Called = true; }
void destroyMethod2() { this.destroyMethod2Called = true; }
void destroyMethod3() { this.destroyMethod3Called = true; }
}
| InitDestroyBean |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/BDDMockito.java | {
"start": 8556,
"end": 9952
} | class ____<T> implements Then<T> {
private final T mock;
ThenImpl(T mock) {
this.mock = mock;
}
/**
* @see #verify(Object)
* @since 1.10.5
*/
public T should() {
return verify(mock);
}
/**
* @see #verify(Object, VerificationMode)
* @since 1.10.5
*/
public T should(VerificationMode mode) {
return verify(mock, mode);
}
/**
* @see InOrder#verify(Object)
* @since 2.1.0
*/
public T should(InOrder inOrder) {
return inOrder.verify(mock);
}
/**
* @see InOrder#verify(Object, VerificationMode)
* @since 2.1.0
*/
public T should(InOrder inOrder, VerificationMode mode) {
return inOrder.verify(mock, mode);
}
/**
* @see #verifyNoMoreInteractions(Object...)
* @since 2.1.0
*/
public void shouldHaveNoMoreInteractions() {
verifyNoMoreInteractions(mock);
}
/**
* @see #verifyNoInteractions(Object...)
* @since 3.0.1
*/
public void shouldHaveNoInteractions() {
verifyNoInteractions(mock);
}
}
/**
* See original {@link Stubber}
* @since 1.8.0
*/
public | ThenImpl |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/HierarchyTraversalMode.java | {
"start": 572,
"end": 695
} | class ____.
*
* @since 1.0
* @see #TOP_DOWN
* @see #BOTTOM_UP
*/
@API(status = MAINTAINED, since = "1.0")
public | hierarchy |
java | apache__spark | mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java | {
"start": 1162,
"end": 6907
} | class ____ extends SharedSparkSession {
private void validatePrediction(
MatrixFactorizationModel model,
int users,
int products,
double[] trueRatings,
double matchThreshold,
boolean implicitPrefs,
double[] truePrefs) {
List<Tuple2<Integer, Integer>> localUsersProducts = new ArrayList<>(users * products);
for (int u = 0; u < users; ++u) {
for (int p = 0; p < products; ++p) {
localUsersProducts.add(new Tuple2<>(u, p));
}
}
JavaPairRDD<Integer, Integer> usersProducts = jsc.parallelizePairs(localUsersProducts);
List<Rating> predictedRatings = model.predict(usersProducts).collect();
Assertions.assertEquals(users * products, predictedRatings.size());
if (!implicitPrefs) {
for (Rating r : predictedRatings) {
double prediction = r.rating();
double correct = trueRatings[r.product() * users + r.user()];
Assertions.assertTrue(Math.abs(prediction - correct) < matchThreshold,
String.format("Prediction=%2.4f not below match threshold of %2.2f",
prediction, matchThreshold));
}
} else {
// For implicit prefs we use the confidence-weighted RMSE to test
// (ref Mahout's implicit ALS tests)
double sqErr = 0.0;
double denom = 0.0;
for (Rating r : predictedRatings) {
double prediction = r.rating();
double truePref = truePrefs[r.product() * users + r.user()];
double confidence = 1.0 +
/* alpha = 1.0 * ... */ Math.abs(trueRatings[r.product() * users + r.user()]);
double err = confidence * (truePref - prediction) * (truePref - prediction);
sqErr += err;
denom += confidence;
}
double rmse = Math.sqrt(sqErr / denom);
Assertions.assertTrue(rmse < matchThreshold,
String.format("Confidence-weighted RMSE=%2.4f above threshold of %2.2f",
rmse, matchThreshold));
}
}
@Test
public void runALSUsingStaticMethods() {
int features = 1;
int iterations = 15;
int users = 50;
int products = 100;
Tuple3<List<Rating>, double[], double[]> testData =
ALSSuite.generateRatingsAsJava(users, products, features, 0.7, false, false);
JavaRDD<Rating> data = jsc.parallelize(testData._1());
MatrixFactorizationModel model = ALS.train(data.rdd(), features, iterations);
validatePrediction(model, users, products, testData._2(), 0.3, false, testData._3());
}
@Test
public void runALSUsingConstructor() {
int features = 2;
int iterations = 15;
int users = 100;
int products = 200;
Tuple3<List<Rating>, double[], double[]> testData =
ALSSuite.generateRatingsAsJava(users, products, features, 0.7, false, false);
JavaRDD<Rating> data = jsc.parallelize(testData._1());
MatrixFactorizationModel model = new ALS().setRank(features)
.setIterations(iterations)
.run(data);
validatePrediction(model, users, products, testData._2(), 0.3, false, testData._3());
}
@Test
public void runImplicitALSUsingStaticMethods() {
int features = 1;
int iterations = 15;
int users = 80;
int products = 160;
Tuple3<List<Rating>, double[], double[]> testData =
ALSSuite.generateRatingsAsJava(users, products, features, 0.7, true, false);
JavaRDD<Rating> data = jsc.parallelize(testData._1());
MatrixFactorizationModel model = ALS.trainImplicit(data.rdd(), features, iterations);
validatePrediction(model, users, products, testData._2(), 0.4, true, testData._3());
}
@Test
public void runImplicitALSUsingConstructor() {
int features = 2;
int iterations = 15;
int users = 100;
int products = 200;
Tuple3<List<Rating>, double[], double[]> testData =
ALSSuite.generateRatingsAsJava(users, products, features, 0.7, true, false);
JavaRDD<Rating> data = jsc.parallelize(testData._1());
MatrixFactorizationModel model = new ALS().setRank(features)
.setIterations(iterations)
.setImplicitPrefs(true)
.run(data.rdd());
validatePrediction(model, users, products, testData._2(), 0.4, true, testData._3());
}
@Test
public void runImplicitALSWithNegativeWeight() {
int features = 2;
int iterations = 15;
int users = 80;
int products = 160;
Tuple3<List<Rating>, double[], double[]> testData =
ALSSuite.generateRatingsAsJava(users, products, features, 0.7, true, true);
JavaRDD<Rating> data = jsc.parallelize(testData._1());
MatrixFactorizationModel model = new ALS().setRank(features)
.setIterations(iterations)
.setImplicitPrefs(true)
.setSeed(8675309L)
.run(data.rdd());
validatePrediction(model, users, products, testData._2(), 0.4, true, testData._3());
}
@Test
public void runRecommend() {
int features = 5;
int iterations = 10;
int users = 200;
int products = 50;
List<Rating> testData = ALSSuite.generateRatingsAsJava(
users, products, features, 0.7, true, false)._1();
JavaRDD<Rating> data = jsc.parallelize(testData);
MatrixFactorizationModel model = new ALS().setRank(features)
.setIterations(iterations)
.setImplicitPrefs(true)
.setSeed(8675309L)
.run(data.rdd());
validateRecommendations(model.recommendProducts(1, 10), 10);
validateRecommendations(model.recommendUsers(1, 20), 20);
}
private static void validateRecommendations(Rating[] recommendations, int howMany) {
Assertions.assertEquals(howMany, recommendations.length);
for (int i = 1; i < recommendations.length; i++) {
Assertions.assertTrue(recommendations[i - 1].rating() >= recommendations[i].rating());
}
Assertions.assertTrue(recommendations[0].rating() > 0.7);
}
}
| JavaALSSuite |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/runtime/jobmaster/JobMasterITCase.java | {
"start": 5400,
"end": 5566
} | class ____ implements SourceSplit {
@Override
public String splitId() {
throw new UnsupportedOperationException();
}
}
}
| MockSplit |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hbm/inheritance/AnimalReport.java | {
"start": 150,
"end": 554
} | class ____ {
private long id;
private String name;
private Animal animal;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Animal getAnimal() {
return animal;
}
public void setAnimal(Animal animal) {
this.animal = animal;
}
}
| AnimalReport |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java | {
"start": 532,
"end": 1628
} | class ____ extends SqlClientException {
private final int line;
private final int column;
public FoldingException(Node<?> source, String message, Object... args) {
super(message, args);
Location loc = Location.EMPTY;
if (source != null && source.source() != null) {
loc = source.source().source();
}
this.line = loc.getLineNumber();
this.column = loc.getColumnNumber();
}
public FoldingException(Node<?> source, String message, Throwable cause) {
super(message, cause);
Location loc = Location.EMPTY;
if (source != null && source.source() != null) {
loc = source.source().source();
}
this.line = loc.getLineNumber();
this.column = loc.getColumnNumber();
}
public int getLineNumber() {
return line;
}
public int getColumnNumber() {
return column;
}
@Override
public String getMessage() {
return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), super.getMessage());
}
}
| FoldingException |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/ai/remote/request/ReleaseAgentCardRequestTest.java | {
"start": 1079,
"end": 3799
} | class ____ extends BasicRequestTest {
@Test
void testSerialize() throws Exception {
ReleaseAgentCardRequest request = new ReleaseAgentCardRequest();
String id = UUID.randomUUID().toString();
request.setRequestId("1");
request.setNamespaceId(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE);
request.setAgentName("testAgent");
request.setRegistrationType(AiConstants.A2a.A2A_ENDPOINT_TYPE_SERVICE);
request.setSetAsLatest(true);
AgentCard agentCard = new AgentCard();
agentCard.setName("testAgentCard");
agentCard.setVersion("1.0.0");
request.setAgentCard(agentCard);
String json = mapper.writeValueAsString(request);
assertNotNull(json);
assertTrue(json.contains("\"requestId\":\"1\""));
assertTrue(json.contains("\"namespaceId\":\"public\""));
assertTrue(json.contains("\"agentName\":\"testAgent\""));
assertTrue(json.contains("\"registrationType\":\"SERVICE\""));
assertTrue(json.contains("\"setAsLatest\":true"));
assertTrue(json.contains("\"name\":\"testAgentCard\""));
assertTrue(json.contains("\"version\":\"1.0.0\""));
}
@Test
void testDeserialize() throws Exception {
String json = "{\"headers\":{},\"requestId\":\"1\",\"namespaceId\":\"public\",\"agentName\":\"testAgent\","
+ "\"agentCard\":{\"protocolVersion\":null,\"name\":\"testAgentCard\",\"description\":null,\"version\":\"1.0.0\","
+ "\"iconUrl\":null,\"capabilities\":null,\"skills\":null,\"url\":null,\"preferredTransport\":null,"
+ "\"additionalInterfaces\":null,\"provider\":null,\"documentationUrl\":null,\"securitySchemes\":null,"
+ "\"security\":null,\"defaultInputModes\":null,\"defaultOutputModes\":null,"
+ "\"supportsAuthenticatedExtendedCard\":null},"
+ "\"registrationType\":\"SERVICE\",\"setAsLatest\":true,\"module\":\"ai\"}";
ReleaseAgentCardRequest result = mapper.readValue(json, ReleaseAgentCardRequest.class);
assertNotNull(result);
assertEquals("1", result.getRequestId());
assertEquals(AiConstants.Mcp.MCP_DEFAULT_NAMESPACE, result.getNamespaceId());
assertEquals("testAgent", result.getAgentName());
assertEquals(AiConstants.A2a.A2A_ENDPOINT_TYPE_SERVICE, result.getRegistrationType());
assertEquals(true, result.isSetAsLatest());
AgentCard agentCard = result.getAgentCard();
assertNotNull(agentCard);
assertEquals("testAgentCard", agentCard.getName());
assertEquals("1.0.0", agentCard.getVersion());
}
} | ReleaseAgentCardRequestTest |
java | elastic__elasticsearch | libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslDiagnostics.java | {
"start": 2466,
"end": 3257
} | enum ____ {
CLIENT,
SERVER
}
private record IssuerTrust(List<X509Certificate> issuerCerts, boolean verified) {
private static IssuerTrust noMatchingCertificate() {
return new IssuerTrust(null, false);
}
private static IssuerTrust verifiedCertificates(List<X509Certificate> issuerCert) {
return new IssuerTrust(issuerCert, true);
}
private static IssuerTrust unverifiedCertificates(List<X509Certificate> issuerCert) {
return new IssuerTrust(issuerCert, false);
}
boolean isVerified() {
return issuerCerts != null && verified;
}
boolean foundCertificateForDn() {
return issuerCerts != null;
}
}
private static | PeerType |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByExpression.java | {
"start": 4688,
"end": 4808
} | class ____ accessed via a magic 'outer$'
* field.
*
* <p>Example:
*
* <pre>
* <code>
* | is |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/metrics/groups/TaskManagerJobGroupTest.java | {
"start": 1558,
"end": 5188
} | class ____ {
private MetricRegistryImpl registry;
@BeforeEach
void setup() {
registry =
new MetricRegistryImpl(
MetricRegistryTestUtils.defaultMetricRegistryConfiguration());
}
@AfterEach
void teardown() throws Exception {
if (registry != null) {
registry.closeAsync().get();
}
}
@Test
void testGenerateScopeDefault() {
TaskManagerMetricGroup tmGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "theHostName", new ResourceID("test-tm-id"));
JobMetricGroup jmGroup =
new TaskManagerJobMetricGroup(registry, tmGroup, new JobID(), "myJobName");
assertThat(jmGroup.getScopeComponents())
.containsExactly("theHostName", "taskmanager", "test-tm-id", "myJobName");
assertThat(jmGroup.getMetricIdentifier("name"))
.isEqualTo("theHostName.taskmanager.test-tm-id.myJobName.name");
}
@Test
void testGenerateScopeCustom() throws Exception {
Configuration cfg = new Configuration();
cfg.set(MetricOptions.SCOPE_NAMING_TM, "abc");
cfg.set(MetricOptions.SCOPE_NAMING_TM_JOB, "some-constant.<job_name>");
MetricRegistryImpl registry =
new MetricRegistryImpl(MetricRegistryTestUtils.fromConfiguration(cfg));
JobID jid = new JobID();
TaskManagerMetricGroup tmGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "theHostName", new ResourceID("test-tm-id"));
JobMetricGroup jmGroup = new TaskManagerJobMetricGroup(registry, tmGroup, jid, "myJobName");
assertThat(jmGroup.getScopeComponents()).containsExactly("some-constant", "myJobName");
assertThat(jmGroup.getMetricIdentifier("name")).isEqualTo("some-constant.myJobName.name");
registry.closeAsync().get();
}
@Test
void testGenerateScopeCustomWildcard() throws Exception {
Configuration cfg = new Configuration();
cfg.set(MetricOptions.SCOPE_NAMING_TM, "peter.<tm_id>");
cfg.set(MetricOptions.SCOPE_NAMING_TM_JOB, "*.some-constant.<job_id>");
MetricRegistryImpl registry =
new MetricRegistryImpl(MetricRegistryTestUtils.fromConfiguration(cfg));
JobID jid = new JobID();
TaskManagerMetricGroup tmGroup =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "theHostName", new ResourceID("test-tm-id"));
JobMetricGroup jmGroup = new TaskManagerJobMetricGroup(registry, tmGroup, jid, "myJobName");
assertThat(jmGroup.getScopeComponents())
.containsExactly("peter", "test-tm-id", "some-constant", jid.toString());
assertThat(jmGroup.getMetricIdentifier("name"))
.isEqualTo("peter.test-tm-id.some-constant." + jid + ".name");
registry.closeAsync().get();
}
@Test
void testCreateQueryServiceMetricInfo() {
JobID jid = new JobID();
TaskManagerMetricGroup tm =
TaskManagerMetricGroup.createTaskManagerMetricGroup(
registry, "host", new ResourceID("id"));
TaskManagerJobMetricGroup job = new TaskManagerJobMetricGroup(registry, tm, jid, "jobname");
QueryScopeInfo.JobQueryScopeInfo info =
job.createQueryServiceMetricInfo(new DummyCharacterFilter());
assertThat(info.scope).isEmpty();
assertThat(info.jobID).isEqualTo(jid.toString());
}
}
| TaskManagerJobGroupTest |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/HeadersConfigurerEagerHeadersTests.java | {
"start": 1994,
"end": 2943
} | class ____ {
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
MockMvc mvc;
@Test
public void requestWhenHeadersEagerlyConfiguredThenHeadersAreWritten() throws Exception {
this.spring.register(HeadersAtTheBeginningOfRequestConfig.class, HomeController.class).autowire();
this.mvc.perform(get("/").secure(true))
.andExpect(header().string("X-Content-Type-Options", "nosniff"))
.andExpect(header().string("X-Frame-Options", "DENY"))
.andExpect(header().string("Strict-Transport-Security", "max-age=31536000 ; includeSubDomains"))
.andExpect(header().string(HttpHeaders.CACHE_CONTROL, "no-cache, no-store, max-age=0, must-revalidate"))
.andExpect(header().string(HttpHeaders.EXPIRES, "0"))
.andExpect(header().string(HttpHeaders.PRAGMA, "no-cache"))
.andExpect(header().string("X-XSS-Protection", "0"));
}
@Configuration
@EnableWebSecurity
public static | HeadersConfigurerEagerHeadersTests |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/arrow/ArrowWriter.java | {
"start": 1198,
"end": 2590
} | class ____<IN> {
/** Container that holds a set of vectors for the rows to be sent to the Python worker. */
private final VectorSchemaRoot root;
/**
* An array of writers which are responsible for the serialization of each column of the rows.
*/
private final ArrowFieldWriter<IN>[] fieldWriters;
public ArrowWriter(VectorSchemaRoot root, ArrowFieldWriter<IN>[] fieldWriters) {
this.root = Preconditions.checkNotNull(root);
this.fieldWriters = Preconditions.checkNotNull(fieldWriters);
}
/** Gets the field writers. */
public ArrowFieldWriter<IN>[] getFieldWriters() {
return fieldWriters;
}
/** Writes the specified row which is serialized into Arrow format. */
public void write(IN row) {
for (int i = 0; i < fieldWriters.length; i++) {
fieldWriters[i].write(row, i);
}
}
/** Finishes the writing of the current row batch. */
public void finish() {
root.setRowCount(fieldWriters[0].getCount());
for (ArrowFieldWriter<IN> fieldWriter : fieldWriters) {
fieldWriter.finish();
}
}
/** Resets the state of the writer to write the next batch of rows. */
public void reset() {
root.setRowCount(0);
for (ArrowFieldWriter fieldWriter : fieldWriters) {
fieldWriter.reset();
}
}
}
| ArrowWriter |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/lifecycle/DefaultLifecyclesTest.java | {
"start": 1529,
"end": 3984
} | class ____ {
@Inject
private DefaultLifecycles defaultLifeCycles;
@Test
void testDefaultLifecycles() {
final List<Lifecycle> lifecycles = defaultLifeCycles.getLifeCycles();
assertEquals(3, lifecycles.size());
assertEquals(3, DefaultLifecycles.STANDARD_LIFECYCLES.length);
}
@Test
void testDefaultLifecycle() {
final Lifecycle lifecycle = getLifeCycleById("default");
assertEquals("default", lifecycle.getId());
assertEquals(54, lifecycle.getPhases().size());
}
@Test
void testCleanLifecycle() {
final Lifecycle lifecycle = getLifeCycleById("clean");
assertEquals("clean", lifecycle.getId());
assertEquals(3, lifecycle.getPhases().size());
}
@Test
void testSiteLifecycle() {
final Lifecycle lifecycle = getLifeCycleById("site");
assertEquals("site", lifecycle.getId());
assertEquals(6, lifecycle.getPhases().size());
}
@Test
void testCustomLifecycle() throws ComponentLookupException {
List<Lifecycle> myLifecycles = new ArrayList<>();
Lifecycle myLifecycle =
new Lifecycle("etl", Arrays.asList("extract", "transform", "load"), Collections.emptyMap());
myLifecycles.add(myLifecycle);
myLifecycles.addAll(defaultLifeCycles.getLifeCycles());
Map<String, Lifecycle> lifeCycles = myLifecycles.stream().collect(Collectors.toMap(Lifecycle::getId, l -> l));
PlexusContainer mockedPlexusContainer = mock(PlexusContainer.class);
when(mockedPlexusContainer.lookupMap(Lifecycle.class)).thenReturn(lifeCycles);
DefaultLifecycles dl = new DefaultLifecycles(
new DefaultLifecycleRegistry(
List.of(new DefaultLifecycleRegistry.LifecycleWrapperProvider(mockedPlexusContainer))),
new DefaultLookup(mockedPlexusContainer));
assertEquals("clean", dl.getLifeCycles().get(0).getId());
assertEquals("default", dl.getLifeCycles().get(1).getId());
assertEquals("site", dl.getLifeCycles().get(2).getId());
assertEquals("etl", dl.getLifeCycles().get(3).getId());
}
private Lifecycle getLifeCycleById(String id) {
return defaultLifeCycles.getLifeCycles().stream()
.filter(l -> id.equals(l.getId()))
.findFirst()
.orElseThrow(IllegalArgumentException::new);
}
}
| DefaultLifecyclesTest |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/StringFormatFactory.java | {
"start": 981,
"end": 1299
} | class ____ extends AbstractFormatFactory {
private final StringFormat stringFormat = new StringFormat();
{
supportedClasses.add(String.class);
}
@Override
public Format<?> build(FormattingOptions formattingOptions) {
return stringFormat;
}
private static | StringFormatFactory |
java | micronaut-projects__micronaut-core | test-suite-javax-inject/src/test/java/org/atinject/javaxtck/auto/Convertible.java | {
"start": 1371,
"end": 7432
} | class ____ implements Car {
@Inject @Drivers Seat driversSeatA;
@Inject @Drivers Seat driversSeatB;
@Inject SpareTire spareTire;
@Inject Cupholder cupholder;
@Inject Provider<Engine> engineProvider;
private boolean methodWithZeroParamsInjected;
private boolean methodWithMultipleParamsInjected;
private boolean methodWithNonVoidReturnInjected;
private Seat constructorPlainSeat;
private Seat constructorDriversSeat;
private Tire constructorPlainTire;
private Tire constructorSpareTire;
private Provider<Seat> constructorPlainSeatProvider = nullProvider();
private Provider<Seat> constructorDriversSeatProvider = nullProvider();
private Provider<Tire> constructorPlainTireProvider = nullProvider();
private Provider<Tire> constructorSpareTireProvider = nullProvider();
@Inject protected Seat fieldPlainSeat;
@Inject @Drivers protected Seat fieldDriversSeat;
@Inject protected Tire fieldPlainTire;
@Inject @Named("spare") protected Tire fieldSpareTire;
@Inject protected Provider<Seat> fieldPlainSeatProvider = nullProvider();
@Inject @Drivers protected Provider<Seat> fieldDriversSeatProvider = nullProvider();
@Inject protected Provider<Tire> fieldPlainTireProvider = nullProvider();
@Inject @Named("spare") protected Provider<Tire> fieldSpareTireProvider = nullProvider();
private Seat methodPlainSeat;
private Seat methodDriversSeat;
private Tire methodPlainTire;
private Tire methodSpareTire;
private Provider<Seat> methodPlainSeatProvider = nullProvider();
private Provider<Seat> methodDriversSeatProvider = nullProvider();
private Provider<Tire> methodPlainTireProvider = nullProvider();
private Provider<Tire> methodSpareTireProvider = nullProvider();
@Inject static Seat staticFieldPlainSeat;
@Inject @Drivers static Seat staticFieldDriversSeat;
@Inject static Tire staticFieldPlainTire;
@Inject @Named("spare") static Tire staticFieldSpareTire;
@Inject static Provider<Seat> staticFieldPlainSeatProvider = nullProvider();
@Inject @Drivers static Provider<Seat> staticFieldDriversSeatProvider = nullProvider();
@Inject static Provider<Tire> staticFieldPlainTireProvider = nullProvider();
@Inject @Named("spare") static Provider<Tire> staticFieldSpareTireProvider = nullProvider();
private static Seat staticMethodPlainSeat;
private static Seat staticMethodDriversSeat;
private static Tire staticMethodPlainTire;
private static Tire staticMethodSpareTire;
private static Provider<Seat> staticMethodPlainSeatProvider = nullProvider();
private static Provider<Seat> staticMethodDriversSeatProvider = nullProvider();
private static Provider<Tire> staticMethodPlainTireProvider = nullProvider();
private static Provider<Tire> staticMethodSpareTireProvider = nullProvider();
@Inject Convertible(
Seat plainSeat,
@Drivers Seat driversSeat,
Tire plainTire,
@Named("spare") Tire spareTire,
Provider<Seat> plainSeatProvider,
@Drivers Provider<Seat> driversSeatProvider,
Provider<Tire> plainTireProvider,
@Named("spare") Provider<Tire> spareTireProvider) {
constructorPlainSeat = plainSeat;
constructorDriversSeat = driversSeat;
constructorPlainTire = plainTire;
constructorSpareTire = spareTire;
constructorPlainSeatProvider = plainSeatProvider;
constructorDriversSeatProvider = driversSeatProvider;
constructorPlainTireProvider = plainTireProvider;
constructorSpareTireProvider = spareTireProvider;
}
Convertible() {
throw new AssertionError("Unexpected call to non-injectable constructor");
}
void setSeat(Seat unused) {
throw new AssertionError("Unexpected call to non-injectable method");
}
@Inject void injectMethodWithZeroArgs() {
methodWithZeroParamsInjected = true;
}
@Inject String injectMethodWithNonVoidReturn() {
methodWithNonVoidReturnInjected = true;
return "unused";
}
@Inject void injectInstanceMethodWithManyArgs(
Seat plainSeat,
@Drivers Seat driversSeat,
Tire plainTire,
@Named("spare") Tire spareTire,
Provider<Seat> plainSeatProvider,
@Drivers Provider<Seat> driversSeatProvider,
Provider<Tire> plainTireProvider,
@Named("spare") Provider<Tire> spareTireProvider) {
methodWithMultipleParamsInjected = true;
methodPlainSeat = plainSeat;
methodDriversSeat = driversSeat;
methodPlainTire = plainTire;
methodSpareTire = spareTire;
methodPlainSeatProvider = plainSeatProvider;
methodDriversSeatProvider = driversSeatProvider;
methodPlainTireProvider = plainTireProvider;
methodSpareTireProvider = spareTireProvider;
}
@Inject static void injectStaticMethodWithManyArgs(
Seat plainSeat,
@Drivers Seat driversSeat,
Tire plainTire,
@Named("spare") Tire spareTire,
Provider<Seat> plainSeatProvider,
@Drivers Provider<Seat> driversSeatProvider,
Provider<Tire> plainTireProvider,
@Named("spare") Provider<Tire> spareTireProvider) {
staticMethodPlainSeat = plainSeat;
staticMethodDriversSeat = driversSeat;
staticMethodPlainTire = plainTire;
staticMethodSpareTire = spareTire;
staticMethodPlainSeatProvider = plainSeatProvider;
staticMethodDriversSeatProvider = driversSeatProvider;
staticMethodPlainTireProvider = plainTireProvider;
staticMethodSpareTireProvider = spareTireProvider;
}
/**
* Returns a provider that always returns null. This is used as a default
* value to avoid null checks for omitted provider injections.
*/
private static <T> Provider<T> nullProvider() {
return new NullProvider<>();
}
static | Convertible |
java | apache__flink | flink-tests-java17/src/test/java/org/apache/flink/api/java/typeutils/runtime/PojoRecordSerializerUpgradeTestSpecifications.java | {
"start": 1911,
"end": 2781
} | class ____ {
public int id;
public String name;
public PojoBeforeUpgrade() {}
public PojoBeforeUpgrade(int id, String name) {
this.id = id;
this.name = name;
}
}
@Override
public TypeSerializer<PojoBeforeUpgrade> createPriorSerializer() {
TypeSerializer<PojoBeforeUpgrade> serializer =
TypeExtractor.createTypeInfo(PojoBeforeUpgrade.class)
.createSerializer(new SerializerConfigImpl());
assertThat(serializer.getClass()).isSameAs(PojoSerializer.class);
return serializer;
}
@Override
public PojoBeforeUpgrade createTestData() {
return new PojoBeforeUpgrade(911108, "Gordon");
}
}
public static final | PojoBeforeUpgrade |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/client/internal/RemoteClusterClient.java | {
"start": 934,
"end": 2326
} | interface ____ {
/**
* Executes an action, denoted by an {@link ActionType}, on the remote cluster.
*/
default <Request extends ActionRequest, Response extends TransportResponse> void execute(
RemoteClusterActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
getConnection(
request,
listener.delegateFailureAndWrap((responseListener, connection) -> execute(connection, action, request, responseListener))
);
}
/**
* Executes an action, denoted by an {@link ActionType}, using a connection to the remote cluster obtained using {@link #getConnection}.
*/
<Request extends ActionRequest, Response extends TransportResponse> void execute(
Transport.Connection connection,
RemoteClusterActionType<Response> action,
Request request,
ActionListener<Response> listener
);
/**
* Obtain a connection to the remote cluster for use with the {@link #execute} override that allows to specify the connection. Useful
* for cases where you need to inspect {@link Transport.Connection#getTransportVersion} before deciding the exact remote action to
* invoke.
*/
<Request extends ActionRequest> void getConnection(@Nullable Request request, ActionListener<Transport.Connection> listener);
}
| RemoteClusterClient |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/ioc/validation/custom/DurationPatternValidator.java | {
"start": 1003,
"end": 1421
} | class ____ implements ConstraintValidator<DurationPattern, CharSequence> {
@Override
public boolean isValid(
@Nullable CharSequence value,
@NonNull AnnotationValue<DurationPattern> annotationMetadata,
@NonNull ConstraintValidatorContext context) {
return value == null || value.toString().matches("^PT?[\\d]+[SMHD]{1}$");
}
}
// end::class[]
| DurationPatternValidator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hhh12076/AnnotationMappingJoinClassTest.java | {
"start": 22422,
"end": 22541
} | enum ____ {
RESERVED, ALLOCATED, PAID, VOID, DENIED
}
@Entity(name = "SettlementTask")
public static | SettlementStatus |
java | netty__netty | transport/src/main/java/io/netty/channel/ChannelId.java | {
"start": 1826,
"end": 2167
} | interface ____ extends Serializable, Comparable<ChannelId> {
/**
* Returns the short but globally non-unique string representation of the {@link ChannelId}.
*/
String asShortText();
/**
* Returns the long yet globally unique string representation of the {@link ChannelId}.
*/
String asLongText();
}
| ChannelId |
java | netty__netty | common/src/main/java/io/netty/util/DefaultAttributeMap.java | {
"start": 1755,
"end": 6393
} | class ____.
*/
private static int searchAttributeByKey(DefaultAttribute[] sortedAttributes, AttributeKey<?> key) {
int low = 0;
int high = sortedAttributes.length - 1;
while (low <= high) {
int mid = low + high >>> 1;
DefaultAttribute midVal = sortedAttributes[mid];
AttributeKey midValKey = midVal.key;
if (midValKey == key) {
return mid;
}
int midValKeyId = midValKey.id();
int keyId = key.id();
assert midValKeyId != keyId;
boolean searchRight = midValKeyId < keyId;
if (searchRight) {
low = mid + 1;
} else {
high = mid - 1;
}
}
return -(low + 1);
}
private static void orderedCopyOnInsert(DefaultAttribute[] sortedSrc, int srcLength, DefaultAttribute[] copy,
DefaultAttribute toInsert) {
// let's walk backward, because as a rule of thumb, toInsert.key.id() tends to be higher for new keys
final int id = toInsert.key.id();
int i;
for (i = srcLength - 1; i >= 0; i--) {
DefaultAttribute attribute = sortedSrc[i];
assert attribute.key.id() != id;
if (attribute.key.id() < id) {
break;
}
copy[i + 1] = sortedSrc[i];
}
copy[i + 1] = toInsert;
final int toCopy = i + 1;
if (toCopy > 0) {
System.arraycopy(sortedSrc, 0, copy, 0, toCopy);
}
}
private volatile DefaultAttribute[] attributes = EMPTY_ATTRIBUTES;
@SuppressWarnings("unchecked")
@Override
public <T> Attribute<T> attr(AttributeKey<T> key) {
ObjectUtil.checkNotNull(key, "key");
DefaultAttribute newAttribute = null;
for (;;) {
final DefaultAttribute[] attributes = this.attributes;
final int index = searchAttributeByKey(attributes, key);
final DefaultAttribute[] newAttributes;
if (index >= 0) {
final DefaultAttribute attribute = attributes[index];
assert attribute.key() == key;
if (!attribute.isRemoved()) {
return attribute;
}
// let's try replace the removed attribute with a new one
if (newAttribute == null) {
newAttribute = new DefaultAttribute<T>(this, key);
}
final int count = attributes.length;
newAttributes = Arrays.copyOf(attributes, count);
newAttributes[index] = newAttribute;
} else {
if (newAttribute == null) {
newAttribute = new DefaultAttribute<T>(this, key);
}
final int count = attributes.length;
newAttributes = new DefaultAttribute[count + 1];
orderedCopyOnInsert(attributes, count, newAttributes, newAttribute);
}
if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
return newAttribute;
}
}
}
@Override
public <T> boolean hasAttr(AttributeKey<T> key) {
ObjectUtil.checkNotNull(key, "key");
return searchAttributeByKey(attributes, key) >= 0;
}
private <T> void removeAttributeIfMatch(AttributeKey<T> key, DefaultAttribute<T> value) {
for (;;) {
final DefaultAttribute[] attributes = this.attributes;
final int index = searchAttributeByKey(attributes, key);
if (index < 0) {
return;
}
final DefaultAttribute attribute = attributes[index];
assert attribute.key() == key;
if (attribute != value) {
return;
}
final int count = attributes.length;
final int newCount = count - 1;
final DefaultAttribute[] newAttributes =
newCount == 0? EMPTY_ATTRIBUTES : new DefaultAttribute[newCount];
// perform 2 bulk copies
System.arraycopy(attributes, 0, newAttributes, 0, index);
final int remaining = count - index - 1;
if (remaining > 0) {
System.arraycopy(attributes, index + 1, newAttributes, index, remaining);
}
if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
return;
}
}
}
@SuppressWarnings("serial")
private static final | checks |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostReactiveMethodSecurityConfigurationTests.java | {
"start": 4122,
"end": 22109
} | class ____ {
public final SpringTestContext spring = new SpringTestContext(this);
@Test
@WithMockUser
void getCardNumberWhenPostAuthorizeAndNotAdminThenReturnMasked() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.CardNumberMaskingPostProcessor.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeGetCardNumberIfAdmin("4444-3333-2222-1111"))
.expectNext("****-****-****-1111")
.verifyComplete();
}
@Test
@WithMockUser
void getCardNumberWhenPreAuthorizeAndNotAdminThenReturnMasked() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class, ReactiveMethodSecurityService.StarMaskingHandler.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeGetCardNumberIfAdmin("4444-3333-2222-1111"))
.expectNext("***")
.verifyComplete();
}
@Test
@WithMockUser
void getCardNumberWhenPreAuthorizeAndNotAdminAndChildHandlerThenResolveCorrectHandlerAndReturnMasked() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class, ReactiveMethodSecurityService.StarMaskingHandler.class,
ReactiveMethodSecurityService.StartMaskingHandlerChild.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeWithHandlerChildGetCardNumberIfAdmin("4444-3333-2222-1111"))
.expectNext("***-child")
.verifyComplete();
}
@Test
@WithMockUser
void preAuthorizeWhenDeniedAndHandlerWithCustomAnnotationThenHandlerCanUseMaskFromOtherAnnotation() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationHandler.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeDeniedMethodWithMaskAnnotation())
.expectNext("methodmask")
.verifyComplete();
}
@Test
@WithMockUser
void preAuthorizeWhenDeniedAndHandlerWithCustomAnnotationInClassThenHandlerCanUseMaskFromOtherAnnotation() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationHandler.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeDeniedMethodWithNoMaskAnnotation())
.expectNext("classmask")
.verifyComplete();
}
@Test
@WithMockUser(roles = "ADMIN")
void postAuthorizeWhenHandlerAndAccessDeniedNotThrownFromPostAuthorizeThenNotHandled() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.PostMaskingPostProcessor.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeThrowAccessDeniedManually()).expectNext("***").verifyComplete();
}
@Test
@WithMockUser(roles = "ADMIN")
void preAuthorizeWhenHandlerAndAccessDeniedNotThrownFromPreAuthorizeThenHandled() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class, ReactiveMethodSecurityService.StarMaskingHandler.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeThrowAccessDeniedManually()).expectNext("***").verifyComplete();
}
@Test
@WithMockUser
void postAuthorizeWhenNullDeniedMetaAnnotationThanWorks() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class, ReactiveMethodSecurityService.NullPostProcessor.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeDeniedWithNullDenied()).verifyComplete();
}
@Test
@WithMockUser
void postAuthorizeWhenDeniedAndHandlerWithCustomAnnotationThenHandlerCanUseMaskFromOtherAnnotation() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationPostProcessor.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeDeniedMethodWithMaskAnnotation())
.expectNext("methodmask")
.verifyComplete();
}
@Test
@WithMockUser
void postAuthorizeWhenDeniedAndHandlerWithCustomAnnotationInClassThenHandlerCanUseMaskFromOtherAnnotation() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationPostProcessor.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeDeniedMethodWithNoMaskAnnotation())
.expectNext("classmask")
.verifyComplete();
}
@Test
@WithMockUser
void postAuthorizeWhenDeniedAndHandlerWithCustomAnnotationUsingBeanThenHandlerCanUseMaskFromOtherAnnotation() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationPostProcessor.class, MyMasker.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeWithMaskAnnotationUsingBean())
.expectNext("ok-masked")
.verifyComplete();
}
@Test
@WithMockUser(roles = "ADMIN")
void postAuthorizeWhenAllowedAndHandlerWithCustomAnnotationUsingBeanThenInvokeMethodNormally() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationPostProcessor.class, MyMasker.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.postAuthorizeWithMaskAnnotationUsingBean()).expectNext("ok").verifyComplete();
}
@Test
@WithMockUser
void preAuthorizeWhenDeniedAndHandlerWithCustomAnnotationUsingBeanThenHandlerCanUseMaskFromOtherAnnotation() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationHandler.class, MyMasker.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeWithMaskAnnotationUsingBean()).expectNext("mask").verifyComplete();
}
@Test
@WithMockUser(roles = "ADMIN")
void preAuthorizeWhenAllowedAndHandlerWithCustomAnnotationUsingBeanThenInvokeMethodNormally() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class,
ReactiveMethodSecurityService.MaskAnnotationHandler.class, MyMasker.class)
.autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
StepVerifier.create(service.preAuthorizeWithMaskAnnotationUsingBean()).expectNext("ok").verifyComplete();
}
@Test
@WithMockUser(roles = "ADMIN")
public void preAuthorizeWhenCustomMethodSecurityExpressionHandlerThenUses() {
this.spring.register(MethodSecurityServiceEnabledConfig.class, PermissionEvaluatorConfig.class).autowire();
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
PermissionEvaluator permissionEvaluator = this.spring.getContext().getBean(PermissionEvaluator.class);
given(permissionEvaluator.hasPermission(any(), eq("grant"), any())).willReturn(true);
given(permissionEvaluator.hasPermission(any(), eq("deny"), any())).willReturn(false);
StepVerifier.create(service.preAuthorizeHasPermission("grant")).expectNext("ok").verifyComplete();
StepVerifier.create(service.preAuthorizeHasPermission("deny"))
.expectError(AuthorizationDeniedException.class)
.verify();
verify(permissionEvaluator, times(2)).hasPermission(any(), any(), any());
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser
public void methodeWhenParameterizedPreAuthorizeMetaAnnotationThenPasses(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.hasRole("USER").block()).isTrue();
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser
public void methodRoleWhenPreAuthorizeMetaAnnotationHardcodedParameterThenPasses(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.hasUserRole().block()).isTrue();
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
public void methodWhenParameterizedAnnotationThenFails(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> service.placeholdersOnlyResolvedByMetaAnnotations().block());
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser(authorities = "SCOPE_message:read")
public void methodWhenMultiplePlaceholdersHasAuthorityThenPasses(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.readMessage().block()).isEqualTo("message");
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser(roles = "ADMIN")
public void methodWhenMultiplePlaceholdersHasRoleThenPasses(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.readMessage().block()).isEqualTo("message");
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser
public void methodWhenPostAuthorizeMetaAnnotationThenAuthorizes(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
service.startsWithDave("daveMatthews");
assertThatExceptionOfType(AccessDeniedException.class)
.isThrownBy(() -> service.startsWithDave("jenniferHarper").block());
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser
public void methodWhenPreFilterMetaAnnotationThenFilters(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.parametersContainDave(Flux.just("dave", "carla", "vanessa", "paul")).collectList().block())
.containsExactly("dave");
}
@ParameterizedTest
@ValueSource(classes = { MetaAnnotationPlaceholderConfig.class })
@WithMockUser
public void methodWhenPostFilterMetaAnnotationThenFilters(Class<?> config) {
this.spring.register(config).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.resultsContainDave(Flux.just("dave", "carla", "vanessa", "paul")).collectList().block())
.containsExactly("dave");
}
@Test
@WithMockUser(authorities = "airplane:read")
public void findByIdWhenAuthorizedResultThenAuthorizes() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
Flight flight = flights.findById("1").block();
assertThatNoException().isThrownBy(flight::getAltitude);
assertThatNoException().isThrownBy(flight::getSeats);
}
@Test
@WithMockUser(authorities = "seating:read")
public void findByIdWhenUnauthorizedResultThenDenies() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
Flight flight = flights.findById("1").block();
assertThatNoException().isThrownBy(flight::getSeats);
assertThatExceptionOfType(AccessDeniedException.class).isThrownBy(() -> flight.getAltitude().block());
}
@Test
@WithMockUser(authorities = "seating:read")
public void findAllWhenUnauthorizedResultThenDenies() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
flights.findAll().collectList().block().forEach((flight) -> {
assertThatNoException().isThrownBy(flight::getSeats);
assertThatExceptionOfType(AccessDeniedException.class).isThrownBy(() -> flight.getAltitude().block());
});
}
@Test
public void removeWhenAuthorizedResultThenRemoves() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
flights.remove("1");
}
@Test
@WithMockUser(authorities = "airplane:read")
public void findAllWhenPostFilterThenFilters() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
flights.findAll()
.collectList()
.block()
.forEach((flight) -> assertThat(flight.getPassengers().collectList().block())
.extracting((p) -> p.getName().block())
.doesNotContain("Kevin Mitnick"));
}
@Test
@WithMockUser(authorities = "airplane:read")
public void findAllWhenPreFilterThenFilters() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
flights.findAll().collectList().block().forEach((flight) -> {
flight.board(Flux.just("John")).block();
assertThat(flight.getPassengers().collectList().block()).extracting((p) -> p.getName().block())
.doesNotContain("John");
flight.board(Flux.just("John Doe")).block();
assertThat(flight.getPassengers().collectList().block()).extracting((p) -> p.getName().block())
.contains("John Doe");
});
}
@Test
@WithMockUser(authorities = "seating:read")
public void findAllWhenNestedPreAuthorizeThenAuthorizes() {
this.spring.register(AuthorizeResultConfig.class).autowire();
FlightRepository flights = this.spring.getContext().getBean(FlightRepository.class);
flights.findAll().collectList().block().forEach((flight) -> {
List<Passenger> passengers = flight.getPassengers().collectList().block();
passengers.forEach((passenger) -> assertThatExceptionOfType(AccessDeniedException.class)
.isThrownBy(() -> passenger.getName().block()));
});
}
// gh-15352
@Test
void annotationsInChildClassesDoNotAffectSuperclasses() {
this.spring.register(AbstractClassConfig.class).autowire();
this.spring.getContext().getBean(ClassInheritingAbstractClassWithNoAnnotations.class).method();
}
// gh-15592
@Test
void autowireWhenDefaultsThenCreatesExactlyOneAdvisorPerAnnotation() {
this.spring.register(MethodSecurityServiceEnabledConfig.class).autowire();
AuthorizationAdvisorProxyFactory proxyFactory = this.spring.getContext()
.getBean(AuthorizationAdvisorProxyFactory.class);
assertThat(proxyFactory).hasSize(5);
assertThat(this.spring.getContext().getBeanNamesForType(AuthorizationAdvisor.class)).hasSize(5)
.containsExactlyInAnyOrder("preFilterAuthorizationMethodInterceptor",
"preAuthorizeAuthorizationMethodInterceptor", "postAuthorizeAuthorizationMethodInterceptor",
"postFilterAuthorizationMethodInterceptor", "authorizeReturnObjectMethodInterceptor");
}
// gh-15592
@Test
void autowireWhenAspectJAutoProxyAndFactoryBeanThenExactlyOneAdvisorPerAnnotation() {
this.spring.register(AspectJAwareAutoProxyAndFactoryBeansConfig.class).autowire();
AuthorizationAdvisorProxyFactory proxyFactory = this.spring.getContext()
.getBean(AuthorizationAdvisorProxyFactory.class);
assertThat(proxyFactory).hasSize(5);
assertThat(this.spring.getContext().getBeanNamesForType(AuthorizationAdvisor.class)).hasSize(5)
.containsExactlyInAnyOrder("preFilterAuthorizationMethodInterceptor",
"preAuthorizeAuthorizationMethodInterceptor", "postAuthorizeAuthorizationMethodInterceptor",
"postFilterAuthorizationMethodInterceptor", "authorizeReturnObjectMethodInterceptor");
}
// gh-15651
@Test
@WithMockUser(roles = "ADMIN")
public void adviseWhenPrePostEnabledThenEachInterceptorRunsExactlyOnce() {
this.spring
.register(MethodSecurityServiceEnabledConfig.class, CustomMethodSecurityExpressionHandlerConfig.class)
.autowire();
MethodSecurityExpressionHandler expressionHandler = this.spring.getContext()
.getBean(MethodSecurityExpressionHandler.class);
ReactiveMethodSecurityService service = this.spring.getContext().getBean(ReactiveMethodSecurityService.class);
service.manyAnnotations(Mono.just(new ArrayList<>(Arrays.asList("harold", "jonathan", "tim", "bo")))).block();
verify(expressionHandler, times(4)).createEvaluationContext(any(Authentication.class), any());
}
// gh-15721
@Test
@WithMockUser(roles = "uid")
public void methodWhenMetaAnnotationPropertiesHasClassProperties() {
this.spring.register(MetaAnnotationPlaceholderConfig.class).autowire();
MetaAnnotationService service = this.spring.getContext().getBean(MetaAnnotationService.class);
assertThat(service.getIdPath("uid").block()).isEqualTo("uid");
}
@Configuration
@EnableReactiveMethodSecurity
static | PrePostReactiveMethodSecurityConfigurationTests |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java | {
"start": 1525,
"end": 2355
} | class ____ implements State {
private WordList rackNameState = new WordList("rack");
private WordList hostNameState = new WordList("host");
@Override
@JsonIgnore
public boolean isUpdated() {
return rackNameState.isUpdated() || hostNameState.isUpdated();
}
public WordList getRackNameState() {
return rackNameState;
}
public WordList getHostNameState() {
return hostNameState;
}
public void setRackNameState(WordList state) {
this.rackNameState = state;
}
public void setHostNameState(WordList state) {
this.hostNameState = state;
}
@Override
public String getName() {
return "node";
}
@Override
public void setName(String name) {
// for now, simply assert since this | NodeNameState |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/AbstractWebEndpointIntegrationTests.java | {
"start": 24606,
"end": 24877
} | class ____ {
@Bean
ResourceWebEndpointResponseEndpoint resourceEndpoint() {
return new ResourceWebEndpointResponseEndpoint();
}
}
@Configuration(proxyBeanMethods = false)
@Import(BaseConfiguration.class)
static | ResourceWebEndpointResponseEndpointConfiguration |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/scanning/ScannedSerializer.java | {
"start": 199,
"end": 2289
} | class ____ {
private final ClassInfo classInfo;
private final String className;
private final String handledClassName;
private final List<String> mediaTypeStrings;
private final RuntimeType runtimeType;
private final boolean builtin;
private final Integer priority;
public ScannedSerializer(ClassInfo classInfo, String handledClassName, List<String> mediaTypeStrings) {
this(classInfo, handledClassName, mediaTypeStrings, null, true, Priorities.USER);
}
// used only for testing
public ScannedSerializer(String className, String handledClassName, List<String> mediaTypeStrings) {
this(null, className, handledClassName, mediaTypeStrings, null, true, Priorities.USER);
}
public ScannedSerializer(ClassInfo classInfo, String handledClassName, List<String> mediaTypeStrings,
RuntimeType runtimeType, boolean builtin, Integer priority) {
this(classInfo, classInfo.name().toString(), handledClassName, mediaTypeStrings, runtimeType, builtin, priority);
}
private ScannedSerializer(ClassInfo classInfo, String className, String handledClassName, List<String> mediaTypeStrings,
RuntimeType runtimeType, boolean builtin, Integer priority) {
this.classInfo = classInfo;
this.className = className;
this.handledClassName = handledClassName;
this.mediaTypeStrings = mediaTypeStrings;
this.runtimeType = runtimeType;
this.builtin = builtin;
this.priority = priority;
}
// used only for tests
public ClassInfo getClassInfo() {
return classInfo;
}
public String getClassName() {
return className;
}
public String getHandledClassName() {
return handledClassName;
}
public List<String> getMediaTypeStrings() {
return mediaTypeStrings;
}
public RuntimeType getRuntimeType() {
return runtimeType;
}
public boolean isBuiltin() {
return builtin;
}
public Integer getPriority() {
return priority;
}
}
| ScannedSerializer |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/BeanFactoryLockingTests.java | {
"start": 1022,
"end": 1468
} | class ____ {
@Test
void fallbackForThreadDuringInitialization() {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.registerBeanDefinition("bean1",
new RootBeanDefinition(ThreadDuringInitialization.class));
beanFactory.registerBeanDefinition("bean2",
new RootBeanDefinition(TestBean.class, () -> new TestBean("tb")));
beanFactory.preInstantiateSingletons();
}
static | BeanFactoryLockingTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/InputsLocationsRetriever.java | {
"start": 1319,
"end": 2384
} | interface ____ {
/**
* Get the consumed result partition groups of an execution vertex.
*
* @param executionVertexId identifies the execution vertex
* @return the consumed result partition groups
*/
Collection<ConsumedPartitionGroup> getConsumedPartitionGroups(
ExecutionVertexID executionVertexId);
/**
* Get the producer execution vertices of a consumed result partition group.
*
* @param consumedPartitionGroup the consumed result partition group
* @return the ids of producer execution vertices
*/
Collection<ExecutionVertexID> getProducersOfConsumedPartitionGroup(
ConsumedPartitionGroup consumedPartitionGroup);
/**
* Get the task manager location future for an execution vertex.
*
* @param executionVertexId identifying the execution vertex
* @return the task manager location future
*/
Optional<CompletableFuture<TaskManagerLocation>> getTaskManagerLocation(
ExecutionVertexID executionVertexId);
}
| InputsLocationsRetriever |
java | junit-team__junit5 | junit-platform-suite-api/src/main/java/org/junit/platform/suite/api/ExcludeClassNamePatterns.java | {
"start": 1318,
"end": 1423
} | interface ____ {
/**
* Regular expressions used to match against fully qualified | ExcludeClassNamePatterns |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/internal/DefaultLifecycleStarter.java | {
"start": 1575,
"end": 5917
} | class ____ implements LifecycleStarter {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final ExecutionEventCatapult eventCatapult;
private final DefaultLifecycles defaultLifeCycles;
private final BuildListCalculator buildListCalculator;
private final LifecycleDebugLogger lifecycleDebugLogger;
private final LifecycleTaskSegmentCalculator lifecycleTaskSegmentCalculator;
private final Map<String, Builder> builders;
@Inject
public DefaultLifecycleStarter(
ExecutionEventCatapult eventCatapult,
DefaultLifecycles defaultLifeCycles,
BuildListCalculator buildListCalculator,
LifecycleDebugLogger lifecycleDebugLogger,
LifecycleTaskSegmentCalculator lifecycleTaskSegmentCalculator,
Map<String, Builder> builders) {
this.eventCatapult = eventCatapult;
this.defaultLifeCycles = defaultLifeCycles;
this.buildListCalculator = buildListCalculator;
this.lifecycleDebugLogger = lifecycleDebugLogger;
this.lifecycleTaskSegmentCalculator = lifecycleTaskSegmentCalculator;
this.builders = builders;
}
@Override
public void execute(MavenSession session) {
eventCatapult.fire(ExecutionEvent.Type.SessionStarted, session, null);
ReactorContext reactorContext = null;
ProjectBuildList projectBuilds = null;
MavenExecutionResult result = session.getResult();
try {
if (buildExecutionRequiresProject(session) && projectIsNotPresent(session)) {
throw new MissingProjectException("The goal you specified requires a project to execute"
+ " but there is no POM in this directory (" + session.getExecutionRootDirectory() + ")."
+ " Please verify you invoked Maven from the correct directory.");
}
List<TaskSegment> taskSegments = lifecycleTaskSegmentCalculator.calculateTaskSegments(session);
projectBuilds = buildListCalculator.calculateProjectBuilds(session, taskSegments);
if (projectBuilds.isEmpty()) {
throw new NoGoalSpecifiedException("No goals have been specified for this build."
+ " You must specify a valid lifecycle phase or a goal in the format <plugin-prefix>:<goal> or"
+ " <plugin-group-id>:<plugin-artifact-id>[:<plugin-version>]:<goal>."
+ " Available lifecycle phases are: " + defaultLifeCycles.getLifecyclePhaseList() + ".");
}
if (logger.isDebugEnabled()) {
lifecycleDebugLogger.debugReactorPlan(projectBuilds);
}
ClassLoader oldContextClassLoader = Thread.currentThread().getContextClassLoader();
ReactorBuildStatus reactorBuildStatus = new ReactorBuildStatus(session.getProjectDependencyGraph());
reactorContext = new ReactorContext(result, oldContextClassLoader, reactorBuildStatus);
String builderId = session.getRequest().getBuilderId();
Builder builder = builders.get(builderId);
if (builder == null) {
throw new BuilderNotFoundException(
String.format("The builder requested using id = %s cannot be" + " found", builderId));
}
int degreeOfConcurrency = session.getRequest().getDegreeOfConcurrency();
if (degreeOfConcurrency > 1) {
logger.info("");
logger.info(String.format(
"Using the %s implementation with a thread count of %d",
builder.getClass().getSimpleName(), degreeOfConcurrency));
}
builder.build(session, reactorContext, projectBuilds, taskSegments, reactorBuildStatus);
} catch (Exception e) {
result.addException(e);
} finally {
eventCatapult.fire(ExecutionEvent.Type.SessionEnded, session, null);
}
}
private boolean buildExecutionRequiresProject(MavenSession session) {
return lifecycleTaskSegmentCalculator.requiresProject(session);
}
private boolean projectIsNotPresent(MavenSession session) {
return !session.getRequest().isProjectPresent();
}
}
| DefaultLifecycleStarter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/CoreCacheProvider.java | {
"start": 872,
"end": 2001
} | class ____ implements TypePool.CacheProvider {
private final ConcurrentMap<String, TypePool.Resolution> storage = new ConcurrentHashMap<>();
private final CorePrefixFilter acceptedPrefixes;
CoreCacheProvider(final CorePrefixFilter acceptedPrefixes) {
this.acceptedPrefixes = Objects.requireNonNull( acceptedPrefixes );
register( OBJECT_CLASS_NAME,
new TypePool.Resolution.Simple( TypeDescription.ForLoadedType.of( Object.class ) ) );
}
/**
* {@inheritDoc}
*/
@Override
public TypePool.Resolution find(final String name) {
return storage.get( name );
}
/**
* {@inheritDoc}
*/
@Override
public TypePool.Resolution register(String name, TypePool.Resolution resolution) {
//Ensure we DO NOT cache anything from a non-core namespace, to not leak application specific code:
if ( acceptedPrefixes.isCoreClassName( name ) ) {
TypePool.Resolution cached = storage.putIfAbsent( name, resolution );
return cached == null
? resolution
: cached;
}
else {
return resolution;
}
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
storage.clear();
}
}
| CoreCacheProvider |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/action/CohereActionVisitor.java | {
"start": 692,
"end": 1000
} | interface ____ {
ExecutableAction create(CohereEmbeddingsModel model, Map<String, Object> taskSettings);
ExecutableAction create(CohereRerankModel model, Map<String, Object> taskSettings);
ExecutableAction create(CohereCompletionModel model, Map<String, Object> taskSettings);
}
| CohereActionVisitor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idgen/enhanced/HiloOptimizerConcurrencyTest.java | {
"start": 2812,
"end": 3441
} | class ____ {
@Id
@GeneratedValue(generator = "HIB_TGEN")
@GenericGenerator(name = "HIB_TGEN", strategy = "org.hibernate.id.enhanced.TableGenerator", parameters = {
@Parameter(name = "table_name", value = "HIB_TGEN"),
@Parameter(name = "prefer_entity_table_as_segment_value", value = "true"),
@Parameter(name = "optimizer", value = "hilo"),
@Parameter(name = "initial_value", value = "1"),
@Parameter(name = "increment_size", value = "5")
})
private long id = -1;
public HibPerson() {
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
}
| HibPerson |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/data/LocalUriFetcher.java | {
"start": 756,
"end": 4241
} | class ____<T> implements DataFetcher<T> {
protected final boolean useMediaStoreApisIfAvailable;
private static final String TAG = "LocalUriFetcher";
private final Uri uri;
private final ContentResolver contentResolver;
private T data;
/**
* Opens an input stream for a uri pointing to a local asset. Only certain uris are supported
*
* @param contentResolver Any {@link android.content.ContentResolver}.
* @param uri A Uri pointing to a local asset. This load will fail if the uri isn't openable by
* {@link ContentResolver#openInputStream(android.net.Uri)}
* @see ContentResolver#openInputStream(android.net.Uri)
*/
// Public API.
@SuppressWarnings("WeakerAccess")
public LocalUriFetcher(ContentResolver contentResolver, Uri uri) {
this(contentResolver, uri, /* useMediaStoreApisIfAvailable */ false);
}
/**
* Opens an input stream for a uri pointing to a local asset. Only certain uris are supported
*
* @param contentResolver Any {@link android.content.ContentResolver}.
* @param uri A Uri pointing to a local asset. This load will fail if the uri isn't openable by
* {@link ContentResolver#openInputStream(android.net.Uri)}
* @param useMediaStoreApisIfAvailable used to decide if the uri should be opened using MediaStore
* APIs
* @see ContentResolver#openInputStream(android.net.Uri)
*/
LocalUriFetcher(ContentResolver contentResolver, Uri uri, boolean useMediaStoreApisIfAvailable) {
this.contentResolver = contentResolver;
this.uri = uri;
this.useMediaStoreApisIfAvailable = useMediaStoreApisIfAvailable;
}
@Override
public final void loadData(
@NonNull Priority priority, @NonNull DataCallback<? super T> callback) {
try {
data = loadResource(uri, contentResolver);
callback.onDataReady(data);
} catch (FileNotFoundException e) {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "Failed to open Uri", e);
}
callback.onLoadFailed(e);
}
}
@Override
public void cleanup() {
if (data != null) {
try {
close(data);
} catch (IOException e) {
// Ignored.
}
}
}
@Override
public void cancel() {
// Do nothing.
}
@NonNull
@Override
public DataSource getDataSource() {
return DataSource.LOCAL;
}
/**
* Opens an {@link AssetFileDescriptor} for a uri pointing to a local asset. Depending on the
* {@code useMediaStoreApisIfAvailable} flag and the availability of MediaStore APIs, the uri may
* be opened using MediaStore APIs or {@link
* ContentResolver#openAssetFileDescriptor(android.net.Uri, String)}.
*
* @param uri A Uri pointing to a local asset.
*/
protected AssetFileDescriptor openAssetFileDescriptor(Uri uri) throws FileNotFoundException {
return useMediaStoreApisIfAvailable
&& MediaStoreUtil.isMediaStoreUri(uri)
&& MediaStoreUtil.isMediaStoreOpenFileApisAvailable()
? MediaStoreUtil.openAssetFileDescriptor(uri, contentResolver)
: contentResolver.openAssetFileDescriptor(uri, "r");
}
/**
* Returns a concrete data type from the given {@link android.net.Uri} using the given {@link
* android.content.ContentResolver}.
*/
protected abstract T loadResource(Uri uri, ContentResolver contentResolver)
throws FileNotFoundException;
/**
* Closes the concrete data type if necessary.
*
* <p>Note - We can't rely on the closeable | LocalUriFetcher |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/builder/RouteBuilderLifecycleStrategy.java | {
"start": 949,
"end": 1558
} | interface ____ extends Ordered {
/**
* This method is invoked before the {@link RouteBuilder#configure()} method is invoked.
*/
default void beforeConfigure(RouteBuilder builder) {
}
/**
* This method is invoked after the {@link RouteBuilder#configure()} method is invoked.
*/
default void afterConfigure(RouteBuilder builder) {
}
/**
* Gets the order.
* <p/>
* Default to {@link Ordered#LOWEST}.
*
* @return the order
*/
@Override
default int getOrder() {
return Ordered.LOWEST;
}
}
| RouteBuilderLifecycleStrategy |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/ssl/JksSslBundleProperties.java | {
"start": 944,
"end": 1337
} | class ____ extends SslBundleProperties {
/**
* Keystore properties.
*/
private final Store keystore = new Store();
/**
* Truststore properties.
*/
private final Store truststore = new Store();
public Store getKeystore() {
return this.keystore;
}
public Store getTruststore() {
return this.truststore;
}
/**
* Store properties.
*/
public static | JksSslBundleProperties |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java | {
"start": 68945,
"end": 193391
} | enum ____ the DDL side,
// but varchar on the ORM expectation side, let's treat the types as equivalent
|| isEnumType(typeCode1) && isVarcharType(typeCode2)
|| sameColumnType(typeCode1, typeCode2);
}
/**
* Tolerate storing {@code short} in {@code INTEGER} or {@code BIGINT}
* or {@code int} in {@code BIGINT} for the purposes of schema validation
* and migration.
*/
private boolean isCompatibleIntegralType(int typeCode1, int typeCode2) {
return switch (typeCode1) {
case TINYINT -> typeCode2 == TINYINT
|| typeCode2 == SMALLINT
|| typeCode2 == INTEGER
|| typeCode2 == BIGINT;
case SMALLINT -> typeCode2 == SMALLINT
|| typeCode2 == INTEGER
|| typeCode2 == BIGINT;
case INTEGER -> typeCode2 == INTEGER
|| typeCode2 == BIGINT;
default -> false;
};
}
private boolean sameColumnType(int typeCode1, int typeCode2) {
try {
return Objects.equals( columnType(typeCode1), columnType(typeCode2) );
}
catch (IllegalArgumentException iae) {
return false;
}
}
/**
* Retrieve a set of default Hibernate properties for this database.
* <p>
* An implementation may set configuration properties from
* {@link #initDefaultProperties()}, though it is discouraged.
* @return the Hibernate configuration properties
*
* @see #initDefaultProperties()
*/
public Properties getDefaultProperties() {
return properties;
}
/**
* The default value to use for the configuration property
* {@value org.hibernate.cfg.Environment#STATEMENT_BATCH_SIZE}.
*/
public int getDefaultStatementBatchSize() {
return 1;
}
/**
* The default value to use for the configuration property
* {@value org.hibernate.cfg.Environment#NON_CONTEXTUAL_LOB_CREATION}.
*/
public boolean getDefaultNonContextualLobCreation() {
return false;
}
/**
* The default value to use for the configuration property
* {@value org.hibernate.cfg.Environment#USE_GET_GENERATED_KEYS}.
*/
public boolean getDefaultUseGetGeneratedKeys() {
return true;
}
@Override
public String toString() {
return getClass().getName() + ", version: " + getVersion();
}
// database type mapping support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public void contribute(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
contributeTypes( typeContributions, serviceRegistry );
}
/**
* A callback which allows the {@code Dialect} to contribute types.
*
* @param typeContributions Callback to contribute the types
* @param serviceRegistry The service registry
*/
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
// by default, not much to do...
registerColumnTypes( typeContributions, serviceRegistry );
final NationalizationSupport nationalizationSupport = getNationalizationSupport();
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
if ( nationalizationSupport == NationalizationSupport.EXPLICIT ) {
jdbcTypeRegistry.addDescriptor( NCharJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( NVarcharJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( LongNVarcharJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( NClobJdbcType.DEFAULT );
}
if ( getTimeZoneSupport() == TimeZoneSupport.NATIVE ) {
jdbcTypeRegistry.addDescriptor( TimestampUtcAsOffsetDateTimeJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( TimeUtcAsOffsetTimeJdbcType.INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptor( TimestampUtcAsJdbcTimestampJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( TimeUtcAsJdbcTimeJdbcType.INSTANCE );
}
if ( supportsStandardArrays() ) {
jdbcTypeRegistry.addTypeConstructorIfAbsent( ArrayJdbcTypeConstructor.INSTANCE );
}
if ( supportsMaterializedLobAccess() ) {
jdbcTypeRegistry.addDescriptor( SqlTypes.MATERIALIZED_BLOB, BlobJdbcType.MATERIALIZED );
jdbcTypeRegistry.addDescriptor( SqlTypes.MATERIALIZED_CLOB, ClobJdbcType.MATERIALIZED );
jdbcTypeRegistry.addDescriptor( SqlTypes.MATERIALIZED_NCLOB, NClobJdbcType.MATERIALIZED );
}
}
/**
* A {@link LobMergeStrategy} representing the legacy behavior of Hibernate.
* LOBs are not processed by merge.
*/
@SuppressWarnings("unused")
protected static final LobMergeStrategy LEGACY_LOB_MERGE_STRATEGY = new LobMergeStrategy() {
@Override
public Blob mergeBlob(Blob original, Blob target, SharedSessionContractImplementor session) {
return target;
}
@Override
public Clob mergeClob(Clob original, Clob target, SharedSessionContractImplementor session) {
return target;
}
@Override
public NClob mergeNClob(NClob original, NClob target, SharedSessionContractImplementor session) {
return target;
}
};
/**
* A {@link LobMergeStrategy} based on transferring contents using streams.
*/
@SuppressWarnings("unused")
protected static final LobMergeStrategy STREAM_XFER_LOB_MERGE_STRATEGY = new LobMergeStrategy() {
@Override
public Blob mergeBlob(Blob original, Blob target, SharedSessionContractImplementor session) {
if ( original != target ) {
try {
// the BLOB just read during the load phase of merge
final OutputStream connectedStream = target.setBinaryStream( 1L );
// the BLOB from the detached state
final InputStream detachedStream = original.getBinaryStream();
detachedStream.transferTo( connectedStream );
return target;
}
catch (IOException e ) {
throw new HibernateException( "Unable to copy stream content", e );
}
catch (SQLException e ) {
throw session.getFactory().getJdbcServices().getSqlExceptionHelper()
.convert( e, "unable to merge BLOB data" );
}
}
else {
return NEW_LOCATOR_LOB_MERGE_STRATEGY.mergeBlob( original, target, session );
}
}
@Override
public Clob mergeClob(Clob original, Clob target, SharedSessionContractImplementor session) {
if ( original != target ) {
try {
// the CLOB just read during the load phase of merge
final OutputStream connectedStream = target.setAsciiStream( 1L );
// the CLOB from the detached state
final InputStream detachedStream = original.getAsciiStream();
detachedStream.transferTo( connectedStream );
return target;
}
catch (IOException e ) {
throw new HibernateException( "Unable to copy stream content", e );
}
catch (SQLException e ) {
throw session.getFactory().getJdbcServices().getSqlExceptionHelper()
.convert( e, "unable to merge CLOB data" );
}
}
else {
return NEW_LOCATOR_LOB_MERGE_STRATEGY.mergeClob( original, target, session );
}
}
@Override
public NClob mergeNClob(NClob original, NClob target, SharedSessionContractImplementor session) {
if ( original != target ) {
try {
// the NCLOB just read during the load phase of merge
final OutputStream connectedStream = target.setAsciiStream( 1L );
// the NCLOB from the detached state
final InputStream detachedStream = original.getAsciiStream();
detachedStream.transferTo( connectedStream );
return target;
}
catch (IOException e ) {
throw new HibernateException( "Unable to copy stream content", e );
}
catch (SQLException e ) {
throw session.getFactory().getJdbcServices().getSqlExceptionHelper()
.convert( e, "unable to merge NCLOB data" );
}
}
else {
return NEW_LOCATOR_LOB_MERGE_STRATEGY.mergeNClob( original, target, session );
}
}
};
/**
* A {@link LobMergeStrategy} based on creating a new LOB locator.
*/
protected static final LobMergeStrategy NEW_LOCATOR_LOB_MERGE_STRATEGY = new LobMergeStrategy() {
@Override
public Blob mergeBlob(Blob original, Blob target, SharedSessionContractImplementor session) {
if ( original == null && target == null ) {
return null;
}
final JdbcServices jdbcServices = session.getFactory().getJdbcServices();
try {
final LobCreator lobCreator = jdbcServices.getLobCreator( session );
return original == null
? lobCreator.createBlob( ArrayHelper.EMPTY_BYTE_ARRAY )
: lobCreator.createBlob( original.getBinaryStream(), original.length() );
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper()
.convert( e, "unable to merge BLOB data" );
}
}
@Override
public Clob mergeClob(Clob original, Clob target, SharedSessionContractImplementor session) {
if ( original == null && target == null ) {
return null;
}
final JdbcServices jdbcServices = session.getFactory().getJdbcServices();
try {
final LobCreator lobCreator = jdbcServices.getLobCreator( session );
return original == null
? lobCreator.createClob( "" )
: lobCreator.createClob( original.getCharacterStream(), original.length() );
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper()
.convert( e, "unable to merge CLOB data" );
}
}
@Override
public NClob mergeNClob(NClob original, NClob target, SharedSessionContractImplementor session) {
if ( original == null && target == null ) {
return null;
}
final JdbcServices jdbcServices = session.getFactory().getJdbcServices();
try {
final LobCreator lobCreator = jdbcServices.getLobCreator( session );
return original == null
? lobCreator.createNClob( "" )
: lobCreator.createNClob( original.getCharacterStream(), original.length() );
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper()
.convert( e, "unable to merge NCLOB data" );
}
}
};
/**
* Get the {@link LobMergeStrategy} to use, {@link #NEW_LOCATOR_LOB_MERGE_STRATEGY}
* by default.
*/
public LobMergeStrategy getLobMergeStrategy() {
return NEW_LOCATOR_LOB_MERGE_STRATEGY;
}
// native identifier generation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* The name identifying the "native" id generation strategy for this dialect.
* <p>
* This is the name of the id generation strategy which should be used when
* {@code "native"} is specified in {@code hbm.xml}.
*
* @return The name identifying the native generator strategy.
*
* @deprecated Use {@linkplain #getNativeValueGenerationStrategy()} instead
*
* @implNote Only used with {@code hbm.xml} and {@linkplain org.hibernate.annotations.GenericGenerator},
* both of which have been deprecated
*/
@Deprecated(since = "7.0", forRemoval = true)
public String getNativeIdentifierGeneratorStrategy() {
return getNativeValueGenerationStrategy().name().toLowerCase( Locale.ROOT );
}
/**
* The native type of generation supported by this Dialect.
*
* @see org.hibernate.annotations.NativeGenerator
* @since 7.0
*/
@Incubating
public GenerationType getNativeValueGenerationStrategy() {
return getIdentityColumnSupport().supportsIdentityColumns()
? GenerationType.IDENTITY
: GenerationType.SEQUENCE;
}
// IDENTITY support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Get the appropriate {@link IdentityColumnSupport} for this dialect.
*
* @return the IdentityColumnSupport
* @since 5.1
*/
public IdentityColumnSupport getIdentityColumnSupport() {
return IdentityColumnSupportImpl.INSTANCE;
}
// SEQUENCE support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Get the appropriate {@link SequenceSupport} for this dialect.
**/
public SequenceSupport getSequenceSupport() {
return NoSequenceSupport.INSTANCE;
}
/**
* Get the {@code select} command used retrieve the names of all sequences.
*
* @return The select command; or null if sequences are not supported.
*/
public String getQuerySequencesString() {
return null;
}
/**
* A {@link SequenceInformationExtractor} which is able to extract
* {@link org.hibernate.tool.schema.extract.spi.SequenceInformation}
* from the JDBC result set returned when {@link #getQuerySequencesString()}
* is executed.
*/
public SequenceInformationExtractor getSequenceInformationExtractor() {
return getQuerySequencesString() == null
? SequenceInformationExtractorNoOpImpl.INSTANCE
: SequenceInformationExtractorLegacyImpl.INSTANCE;
}
/**
* A {@link InformationExtractor} which is able to extract
* table, primary key, foreign key, index information etc. via JDBC.
*
* @since 7.2
*/
public InformationExtractor getInformationExtractor(ExtractionContext extractionContext) {
return new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );
}
// GUID support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Get the command used to select a GUID from the database.
* <p>
* Optional operation.
*
* @return The appropriate command.
*/
public String getSelectGUIDString() {
throw new UnsupportedOperationException( getClass().getName() + " does not support GUIDs" );
}
/**
* Does this database have some sort of support for temporary tables?
*
* @return true by default, since most do
* @deprecated Use {@link #getLocalTemporaryTableStrategy()} and {@link #getGlobalTemporaryTableStrategy()} to check instead
*/
@Deprecated(forRemoval = true, since = "7.1")
public boolean supportsTemporaryTables() {
// Most databases do
return true;
}
/**
* Does this database support primary keys for temporary tables?
*
* @return true by default, since most do
* @deprecated Moved to {@link TemporaryTableStrategy#supportsTemporaryTablePrimaryKey()}
*/
@Deprecated(forRemoval = true, since = "7.1")
public boolean supportsTemporaryTablePrimaryKey() {
// Most databases do
return true;
}
// limit/offset support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Obtain a {@link LimitHandler} that implements pagination support for
* {@link Query#setMaxResults(int)} and {@link Query#setFirstResult(int)}.
*/
public LimitHandler getLimitHandler() {
throw new UnsupportedOperationException("this dialect does not support query pagination");
}
// lock acquisition support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Access to various details and operations related to this
* Dialect's support for pessimistic locking.
*/
public LockingSupport getLockingSupport() {
return LockingSupportSimple.STANDARD_SUPPORT;
}
/**
* Whether this dialect supports {@code for update (of)}
*
* @deprecated See notes on {@linkplain LockingSupport.Metadata#supportsForUpdate()}
*/
@Deprecated
public boolean supportsForUpdate() {
return getLockingSupport().getMetadata().supportsForUpdate();
}
/**
* Does this dialect support {@code SKIP_LOCKED} timeout.
*
* @return {@code true} if SKIP_LOCKED is supported
*
* @deprecated See notes on {@linkplain LockingSupport.Metadata#supportsSkipLocked()}
*/
@Deprecated
public boolean supportsSkipLocked() {
return getLockingSupport().getMetadata().supportsSkipLocked();
}
/**
* Does this dialect support {@code NO_WAIT} timeout.
*
* @return {@code true} if {@code NO_WAIT} is supported
*
* @deprecated See notes on {@linkplain LockingSupport.Metadata#supportsNoWait()}
*/
@Deprecated
public boolean supportsNoWait() {
return getLockingSupport().getMetadata().supportsNoWait();
}
/**
* Does this dialect support {@code WAIT} timeout.
*
* @return {@code true} if {@code WAIT} is supported
*
* @deprecated See notes on {@linkplain LockingSupport.Metadata#supportsWait()}
*/
@Deprecated
public boolean supportsWait() {
return getLockingSupport().getMetadata().supportsWait();
}
/**
* Some dialects have trouble applying pessimistic locking depending
* upon what other query options are specified (paging, ordering, etc).
* This method allows these dialects to request that locking be applied
* by subsequent selects.
*
* @return {@code true} indicates that the dialect requests that locking
* be applied by subsequent select;
* {@code false} (the default) indicates that locking
* should be applied to the main SQL statement.
*
* @since 6.0
*
* @todo (db-locking) : determine how to best handle this w/ `LockingSupport`.
* "ideally" we'd move everything to SQL AST and SqlAstTranslator
* and base this on `PessimisticLockStyle` for the AST,
* plus LockingClauseStrategy or ConnectionLockTimeoutStrategy
* depending.
*/
public boolean useFollowOnLocking(String sql, QueryOptions queryOptions) {
return false;
}
/**
* @deprecated Use {@linkplain LockingSupport.Metadata#getPessimisticLockStyle()} instead.
* Here, fwiw, we use {@linkplain Timeouts#ONE_SECOND 1-second} to make the determination.
*/
@Deprecated
public PessimisticLockStyle getPessimisticLockStyle() {
return getLockingSupport().getMetadata().getPessimisticLockStyle();
}
/**
* The {@linkplain RowLockStrategy strategy} for indicating which rows
* to lock as part of a {@code for update of} style clause.
*
* @deprecated Use {@linkplain LockingSupport.Metadata#getWriteRowLockStrategy()},
* via {@linkplain #getLockingSupport()}, instead.
*/
@Deprecated
public RowLockStrategy getWriteRowLockStrategy() {
return getLockingSupport().getMetadata().getWriteRowLockStrategy();
}
/**
* The {@linkplain RowLockStrategy strategy} for indicating which rows
* to lock as part of a {@code for share of} style clause.
*
* @deprecated Use {@linkplain LockingSupport.Metadata#getReadRowLockStrategy()},
* via {@linkplain #getLockingSupport()}, instead.
*/
@Deprecated
public RowLockStrategy getReadRowLockStrategy() {
return getLockingSupport().getMetadata().getReadRowLockStrategy();
}
/**
* Strategy for handling {@linkplain PessimisticLockStyle#CLAUSE locking clause}
* as part of {@linkplain org.hibernate.sql.ast.SqlAstTranslator}.
*/
public LockingClauseStrategy getLockingClauseStrategy(QuerySpec querySpec, LockOptions lockOptions) {
if ( getPessimisticLockStyle() != PessimisticLockStyle.CLAUSE || lockOptions == null ) {
return NON_CLAUSE_STRATEGY;
}
final LockMode lockMode = lockOptions.getLockMode();
final PessimisticLockKind lockKind = PessimisticLockKind.interpret( lockMode );
if ( lockKind == PessimisticLockKind.NONE ) {
return NonLockingClauseStrategy.NON_CLAUSE_STRATEGY;
}
final RowLockStrategy rowLockStrategy;
switch ( lockKind ) {
case SHARE -> rowLockStrategy = getReadRowLockStrategy();
case UPDATE -> rowLockStrategy = getWriteRowLockStrategy();
default -> throw new IllegalStateException( "Should never happen due to checks above" );
}
return buildLockingClauseStrategy( lockKind, rowLockStrategy, lockOptions, querySpec.getRootPathsForLocking() );
}
protected LockingClauseStrategy buildLockingClauseStrategy(
PessimisticLockKind lockKind,
RowLockStrategy rowLockStrategy,
LockOptions lockOptions,
Set<NavigablePath> rootPathsForLocking) {
return new StandardLockingClauseStrategy( this, lockKind, rowLockStrategy, lockOptions, rootPathsForLocking );
}
/**
* A {@link LockingStrategy} which is able to acquire a database-level
* lock with the specified {@linkplain LockMode level}.
*
* @param lockable The persister for the entity to be locked.
* @param lockMode The type of lock to be acquired.
* @return The appropriate locking strategy.
*
* @since 7
*/
public LockingStrategy getLockingStrategy(EntityPersister lockable, LockMode lockMode, Locking.Scope lockScope) {
return switch (lockMode) {
case PESSIMISTIC_FORCE_INCREMENT -> buildPessimisticForceIncrementStrategy( lockable, lockMode, lockScope );
case UPGRADE_NOWAIT, UPGRADE_SKIPLOCKED, PESSIMISTIC_WRITE -> buildPessimisticWriteStrategy( lockable, lockMode, lockScope );
case PESSIMISTIC_READ -> buildPessimisticReadStrategy( lockable, lockMode, lockScope );
case OPTIMISTIC_FORCE_INCREMENT -> buildOptimisticForceIncrementStrategy( lockable, lockMode );
case OPTIMISTIC -> buildOptimisticStrategy( lockable, lockMode );
case READ -> buildReadStrategy( lockable, lockMode, lockScope );
default -> throw new IllegalArgumentException( "Unsupported lock mode : " + lockMode );
};
}
/**
* A {@link LockingStrategy} which is able to acquire a database-level
* lock with the specified {@linkplain LockMode level}.
*
* @param lockable The persister for the entity to be locked.
* @param lockMode The type of lock to be acquired.
* @return The appropriate locking strategy.
*
* @since 3.2
*
* @deprecated Use {@linkplain #getLockingStrategy(EntityPersister, LockMode, Locking.Scope)} instead.
*/
@Deprecated(since = "7", forRemoval = true)
public LockingStrategy getLockingStrategy(EntityPersister lockable, LockMode lockMode) {
return getLockingStrategy( lockable, lockMode, Locking.Scope.ROOT_ONLY );
}
protected LockingStrategy buildPessimisticForceIncrementStrategy(EntityPersister lockable, LockMode lockMode, Locking.Scope lockScope) {
return new PessimisticForceIncrementLockingStrategy( lockable, lockMode );
}
protected LockingStrategy buildPessimisticWriteStrategy(EntityPersister lockable, LockMode lockMode, Locking.Scope lockScope) {
return new SqlAstBasedLockingStrategy( lockable, lockMode, lockScope );
}
protected LockingStrategy buildPessimisticReadStrategy(EntityPersister lockable, LockMode lockMode, Locking.Scope lockScope) {
return new SqlAstBasedLockingStrategy( lockable, lockMode, lockScope );
}
protected LockingStrategy buildOptimisticForceIncrementStrategy(EntityPersister lockable, LockMode lockMode) {
return new OptimisticForceIncrementLockingStrategy( lockable, lockMode );
}
protected LockingStrategy buildOptimisticStrategy(EntityPersister lockable, LockMode lockMode) {
return new OptimisticLockingStrategy( lockable, lockMode );
}
protected LockingStrategy buildReadStrategy(EntityPersister lockable, LockMode lockMode, Locking.Scope lockScope) {
return new SelectLockingStrategy( lockable, lockMode );
}
/**
* Given a set of {@link LockOptions} (lock level, timeout),
* determine the appropriate {@code for update} fragment to
* use to obtain the lock.
*
* @param lockOptions contains the lock mode to apply.
* @return The appropriate {@code for update} fragment.
*/
public String getForUpdateString(LockOptions lockOptions) {
return getForUpdateString( lockOptions.getLockMode(), lockOptions.getTimeout() );
}
/**
* Given a {@linkplain LockMode lock level} and timeout,
* determine the appropriate {@code for update} fragment to
* use to obtain the lock.
*
* @param lockMode the lock mode to apply.
* @param timeout the timeout
* @return The appropriate {@code for update} fragment.
*/
public String getForUpdateString(LockMode lockMode, Timeout timeout) {
return switch (lockMode) {
case PESSIMISTIC_READ -> getReadLockString( timeout );
case PESSIMISTIC_WRITE -> getWriteLockString( timeout );
case UPGRADE_NOWAIT, PESSIMISTIC_FORCE_INCREMENT -> getForUpdateNowaitString();
case UPGRADE_SKIPLOCKED -> getForUpdateSkipLockedString();
default -> "";
};
}
/**
* Given a {@linkplain LockMode lock level} and timeout,
* determine the appropriate {@code for update} fragment to
* use to obtain the lock.
*
* @param lockMode the lock mode to apply.
* @param timeout the timeout
* @return The appropriate {@code for update} fragment.
*
* @deprecated Use {@linkplain #getForUpdateString(LockMode,Timeout)} instead
*/
@Deprecated(since = "7.0")
public String getForUpdateString(LockMode lockMode, int timeout) {
return switch (lockMode) {
case PESSIMISTIC_READ -> getReadLockString( timeout );
case PESSIMISTIC_WRITE -> getWriteLockString( timeout );
case UPGRADE_NOWAIT, PESSIMISTIC_FORCE_INCREMENT -> getForUpdateNowaitString();
case UPGRADE_SKIPLOCKED -> getForUpdateSkipLockedString();
default -> "";
};
}
/**
* Given a {@link LockMode}, determine the appropriate
* {@code for update} fragment to use to obtain the lock.
*
* @param lockMode The lock mode to apply.
* @return The appropriate for update fragment.
*/
public String getForUpdateString(LockMode lockMode) {
return getForUpdateString( lockMode, Timeouts.WAIT_FOREVER );
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire pessimistic UPGRADE locks for this dialect.
*
* @return The appropriate {@code FOR UPDATE} clause string.
*/
public String getForUpdateString() {
return " for update";
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire pessimistic WRITE locks for this dialect.
*
* @param timeout How long the database should wait to acquire the lock.
* See {@linkplain Timeouts} for some "magic values".
*
* @return The appropriate lock clause.
*/
public String getWriteLockString(Timeout timeout) {
if ( timeout.milliseconds() == Timeouts.SKIP_LOCKED_MILLI && supportsSkipLocked() ) {
return getForUpdateSkipLockedString();
}
else if ( timeout.milliseconds() == Timeouts.NO_WAIT_MILLI && supportsNoWait() ) {
return getForUpdateNowaitString();
}
else if ( Timeouts.isRealTimeout( timeout ) && supportsWait() ) {
return getForUpdateString( timeout );
}
else {
return getForUpdateString();
}
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire pessimistic WRITE locks for this dialect.
* <p>
* Location of the returned string is treated the same as
* {@link #getForUpdateString()}.
*
* @param timeout How long, in milliseconds, the database should wait to acquire the lock.
* See {@linkplain Timeouts} for some "magic values".
*
* @return The appropriate {@code LOCK} clause string.
*
* @deprecated Use {@linkplain #getWriteLockString(Timeout)} instead.
*/
@Deprecated(since = "7.0")
public String getWriteLockString(int timeout) {
if ( timeout == Timeouts.SKIP_LOCKED_MILLI && supportsSkipLocked() ) {
return getForUpdateSkipLockedString();
}
else if ( timeout == Timeouts.NO_WAIT_MILLI && supportsNoWait() ) {
return getForUpdateNowaitString();
}
else if ( Timeouts.isRealTimeout( timeout ) && supportsWait() ) {
return getForUpdateString( Timeout.milliseconds( timeout ) );
}
else {
return getForUpdateString();
}
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire WRITE locks for this dialect, given the aliases of
* the columns to be WRITE locked.
* *
* * @param timeout How long the database should wait to acquire the lock.
* <p>
* Location of the returned string is treated the same as
* {@link #getForUpdateString()}.
*
* @param aliases The columns to be read locked.
* @param timeout How long the database should wait to acquire the lock.
* See {@linkplain Timeouts} for some "magic values".
*
* @return The appropriate {@code LOCK} clause string.
*/
public String getWriteLockString(String aliases, Timeout timeout) {
// by default, we simply return getWriteLockString(timeout),
// since the default is no support for "FOR UPDATE OF ..."
return getWriteLockString( timeout );
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire WRITE locks for this dialect, given the aliases of
* the columns to be WRITE locked.
* <p>
* Location of the returned string is treated the same as
* {@link #getForUpdateString()}.
*
* @param aliases The columns to be read locked.
*
* @param timeout How long, in milliseconds, the database should wait to acquire the lock.
* See {@linkplain Timeouts} for some "magic values".
*
* @return The appropriate {@code LOCK} clause string.
*
* @deprecated Use {@linkplain #getWriteLockString(String, Timeout)} instead.
*/
@Deprecated(since = "7.0")
public String getWriteLockString(String aliases, int timeout) {
// by default, we simply return getWriteLockString(timeout),
// since the default is no support for "FOR UPDATE OF ..."
return getWriteLockString( timeout );
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire READ locks for this dialect.
*
* @param timeout How long the database should wait to acquire the lock.
* See {@linkplain Timeouts} for some "magic values".
*
* @return The appropriate {@code LOCK} clause string.
*/
public String getReadLockString(Timeout timeout) {
return getForUpdateString();
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire READ locks for this dialect.
* <p>
* Location of the returned string is treated the same as
* {@link #getForUpdateString()}.
*
* @param timeout in milliseconds, -1 for indefinite wait and 0 for no wait.
* @return The appropriate {@code LOCK} clause string.
*
* @deprecated Use {@linkplain #getReadLockString(Timeout)} instead.
*/
@Deprecated(since = "7.0")
public String getReadLockString(int timeout) {
return getForUpdateString();
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire READ locks for this dialect, given the aliases of
* the columns to be read locked.
*
* @param aliases The columns to be read locked.
* @param timeout How long the database should wait to acquire the lock.
* See {@linkplain Timeouts} for some "magic values".
*
* @return The appropriate {@code LOCK} clause string.
*
* @implNote By default, simply returns the {@linkplain #getReadLockString(Timeout)}
* result since the default is to say no support for "FOR UPDATE OF ...".
*/
public String getReadLockString(String aliases, Timeout timeout) {
return getReadLockString( timeout );
}
/**
* Get the string to append to {@code SELECT} statements to
* acquire READ locks for this dialect, given the aliases of
* the columns to be read locked.
* <p>
* Location of the returned string is treated the same as
* {@link #getForUpdateString()}.
*
* @param aliases The columns to be read locked.
* @param timeout in milliseconds, -1 for indefinite wait and 0 for no wait.
*
* @return The appropriate {@code LOCK} clause string.
*
* @deprecated Use {@linkplain #getReadLockString(String, Timeout)} instead.
*/
	@Deprecated(since = "7.0")
	public String getReadLockString(String aliases, int timeout) {
		// by default we simply return the getReadLockString(timeout) result since
		// the default is to say no support for "FOR UPDATE OF ..."
		// (the aliases argument is intentionally ignored here)
		return getReadLockString( timeout );
	}
/**
* Get the {@code FOR UPDATE OF column_list} fragment appropriate
* for this dialect, given the aliases of the columns to be write
* locked.
*
* @param aliases The columns to be write locked.
* @return The appropriate {@code FOR UPDATE OF column_list} clause string.
*/
	public String getForUpdateString(String aliases) {
		// by default we simply return the getForUpdateString() result since
		// the default is to say no support for "FOR UPDATE OF ..."
		return getForUpdateString();
	}
/**
* Get the {@code FOR UPDATE OF} or {@code FOR SHARE OF} fragment
* appropriate for this dialect, given the aliases of the columns
* to be locked.
*
* @param aliases The columns to be locked.
* @param lockOptions the lock options to apply
* @return The appropriate {@code FOR UPDATE OF column_list} clause string.
*/
public String getForUpdateString(String aliases, LockOptions lockOptions) {
LockMode lockMode = lockOptions.getLockMode();
lockOptions.setLockMode( lockMode );
return getForUpdateString( lockOptions );
}
/**
* Retrieves the {@code FOR UPDATE NOWAIT} syntax specific to this dialect.
*
* @return The appropriate {@code FOR UPDATE NOWAIT} clause string.
*/
	public String getForUpdateNowaitString() {
		// by default, we report no support for NOWAIT lock semantics
		// and fall back to the plain write-lock string
		return getForUpdateString();
	}
/**
* Retrieves the {@code FOR UPDATE SKIP LOCKED} syntax specific to this dialect.
*
* @return The appropriate {@code FOR UPDATE SKIP LOCKED} clause string.
*/
	public String getForUpdateSkipLockedString() {
		// by default, we report no support for SKIP_LOCKED lock semantics
		// and fall back to the plain write-lock string
		return getForUpdateString();
	}
/**
* Retrieves the {@code FOR UPDATE WAIT x} syntax specific to this dialect.
	 *
	 * @param timeout the lock timeout to apply
	 * @return The appropriate {@code FOR UPDATE WAIT} clause string.
*/
	public String getForUpdateString(Timeout timeout) {
		// Default: no WAIT-timeout syntax; the timeout is dropped and the
		// plain write-lock string is used.
		return getForUpdateString();
	}
/**
* Get the {@code FOR UPDATE OF column_list NOWAIT} fragment appropriate
* for this dialect, given the aliases of the columns to be write locked.
*
* @param aliases The columns to be write locked.
	 * @return The appropriate {@code FOR UPDATE OF column_list NOWAIT} clause string.
*/
	public String getForUpdateNowaitString(String aliases) {
		// Default: no NOWAIT support; delegate to the aliased FOR UPDATE form.
		return getForUpdateString( aliases );
	}
/**
* Get the {@code FOR UPDATE OF column_list SKIP LOCKED} fragment appropriate
* for this dialect, given the aliases of the columns to be write locked.
*
* @param aliases The columns to be write locked.
	 * @return The appropriate {@code FOR UPDATE column_list SKIP LOCKED} clause string.
*/
	public String getForUpdateSkipLockedString(String aliases) {
		// Default: no SKIP LOCKED support; delegate to the aliased FOR UPDATE form.
		return getForUpdateString( aliases );
	}
/**
* Some dialects support an alternative means to {@code SELECT FOR UPDATE},
* whereby a "lock hint" is appended to the table name in the {@code from}
* clause.
*
* @param lockOptions The lock options to apply
* @param tableName The name of the table to which to apply the lock hint.
* @return The table with any required lock hints.
*/
	public String appendLockHint(LockOptions lockOptions, String tableName){
		// Default: no table-level lock hints; return the table name unchanged.
		return tableName;
	}
/**
* Modifies the given SQL, applying the appropriate updates for the specified
* lock modes and key columns.
* <p>
* This allows emulation of {@code SELECT FOR UPDATE} for dialects which do not
* support the standard syntax.
*
* @param sql the SQL string to modify
* @param aliasedLockOptions lock options indexed by aliased table names.
* @param keyColumnNames a map of key columns indexed by aliased table names.
* @return the modified SQL string.
*/
public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) {
return sql + new ForUpdateFragment( this, aliasedLockOptions, keyColumnNames ).toFragmentString();
}
/**
* Does this dialect support {@code FOR UPDATE} in conjunction with
* outer-joined rows?
*
* @return True if outer-joined rows can be locked via {@code FOR UPDATE}.
*
* @deprecated Use {@linkplain LockingSupport.Metadata#getOuterJoinLockingType()} instead,
* via {@linkplain #getLockingSupport()}.
*/
@Deprecated
public boolean supportsOuterJoinForUpdate() {
return switch ( getLockingSupport().getMetadata().getOuterJoinLockingType() ) {
case FULL, IDENTIFIED -> true;
default -> false;
};
}
/**
* Whether this dialect supports specifying timeouts when requesting locks.
*
* @return True if this dialect supports specifying lock timeouts.
*
* @apiNote Specifically, we are interested here in whether the Dialect supports
* requesting a lock timeout as part of the SQL query.
*
* @deprecated Use {@linkplain LockingSupport.Metadata#getPessimisticLockStyle},
* via {@linkplain #getLockingSupport()}, instead.
*/
	@Deprecated
	public boolean supportsLockTimeouts() {
		// Probe with a representative (one second) timeout: QUERY means the
		// timeout can be rendered as part of the SQL itself.
		return getLockingSupport().getMetadata().getLockTimeoutType( Timeouts.ONE_SECOND ) == LockTimeoutType.QUERY;
	}
/**
* @deprecated Use {@linkplain Timeouts#getTimeoutInSeconds(int)} instead.
*/
	@Deprecated
	protected int getTimeoutInSeconds(int millis) {
		// Pure delegation kept for backwards compatibility.
		return Timeouts.getTimeoutInSeconds( millis );
	}
// table support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* The command used to create a table, usually {@code create table}.
*
* @return The command used to create a table.
*/
	public String getCreateTableString() {
		// ANSI default; dialects with special table kinds override this.
		return "create table";
	}
/**
* An arbitrary fragment appended to the end of the {@code create table}
* statement.
*
* @apiNote An example is the MySQL {@code engine} option specifying a
* storage engine.
*/
	public String getTableTypeString() {
		// No trailing fragment by default.
		return "";
	}
/**
* For dropping a table, can the phrase {@code if exists} be
* applied before the table name?
*
* @apiNote Only one or the other (or neither) of this and
* {@link #supportsIfExistsAfterTableName} should
* return true.
*
* @return {@code true} if {@code if exists} can be applied
* before the table name
*/
	public boolean supportsIfExistsBeforeTableName() {
		// Conservative default: not supported.
		return false;
	}
/**
* For dropping a table, can the phrase {@code if exists} be
* applied after the table name?
*
* @apiNote Only one or the other (or neither) of this and
* {@link #supportsIfExistsBeforeTableName} should
* return true.
*
* @return {@code true} if {@code if exists} can be applied
* after the table name
*/
	public boolean supportsIfExistsAfterTableName() {
		// Conservative default: not supported.
		return false;
	}
/**
* A command to execute before dropping tables.
*
* @return A SQL statement, or {@code null}
*/
	public String getBeforeDropStatement() {
		// null means "no preparatory statement is needed".
		return null;
	}
/**
* The command used to drop a table with the given name, usually
* {@code drop table tab_name}.
*
* @param tableName The name of the table to drop
*
* @return The {@code drop table} statement as a string
*
* @deprecated No longer used
*
* @see StandardTableExporter#getSqlDropStrings
*/
@Deprecated(since = "6.6")
public String getDropTableString(String tableName) {
final StringBuilder buf = new StringBuilder( "drop table " );
if ( supportsIfExistsBeforeTableName() ) {
buf.append( "if exists " );
}
buf.append( tableName ).append( getCascadeConstraintsString() );
if ( supportsIfExistsAfterTableName() ) {
buf.append( " if exists" );
}
return buf.toString();
}
/**
* The command used to create an index, usually {@code create index}
* or {@code create unique index}.
*
* @param unique {@code true} if the index is a unique index
* @return The command used to create an index.
*/
public String getCreateIndexString(boolean unique) {
return unique ? "create unique index" : "create index";
}
/**
* A string to be appended to the end of the {@code create index}
* command, usually to specify that {@code null} values are to be
* considered distinct.
*/
	public String getCreateIndexTail(boolean unique, List<Column> columns) {
		// No trailing fragment by default.
		return "";
	}
/**
* Do we need to qualify index names with the schema name?
*
* @return {@code true} if we do
*/
	public boolean qualifyIndexName() {
		// Most databases scope index names to the schema.
		return true;
	}
/**
* Slight variation on {@link #getCreateTableString}. Here, we have
* the command used to create a table when there is no primary key
* and duplicate rows are expected.
*
* @apiNote Most databases do not have this distinction; this method
* was originally added for Teradata which does.
*
* @return The command used to create a multiset table.
*/
	public String getCreateMultisetTableString() {
		// Most databases make no distinction; reuse the plain create command.
		return getCreateTableString();
	}
/**
* Does this dialect support the {@code ALTER TABLE} syntax?
*
* @return True if we support altering existing tables; false otherwise.
*/
	public boolean hasAlterTable() {
		// ALTER TABLE is assumed available unless a dialect says otherwise.
		return true;
	}
/**
* The command used to alter a table with the given name, usually
* {@code alter table tab_name} or
* {@code alter table tab_name if exists}.
* <p>
* We prefer the {@code if exists} form if supported.
*
* @param tableName The name of the table to alter
* @return The command used to alter a table.
*
* @since 5.2.11
*/
public String getAlterTableString(String tableName) {
final StringBuilder sb = new StringBuilder( "alter table " );
if ( supportsIfExistsAfterAlterTable() ) {
sb.append( "if exists " );
}
sb.append( tableName );
return sb.toString();
}
/**
* For an {@code alter table}, can the phrase {@code if exists} be
* applied?
*
* @return {@code true} if {@code if exists} can be applied after
* {@code alter table}
*
* @since 5.2.11
*/
	public boolean supportsIfExistsAfterAlterTable() {
		// Conservative default: not supported.
		return false;
	}
/**
* The subcommand of the {@code alter table} command used to add
* a column to a table, usually {@code add column} or {@code add}.
*
* @return The {@code add column} fragment.
*/
	public String getAddColumnString() {
		// Some dialects use just "add"; this is the common long form.
		return "add column";
	}
/**
* The syntax for the suffix used to add a column to a table.
*
* @return The suffix of the {@code add column} fragment.
*/
	public String getAddColumnSuffixString() {
		// No suffix by default.
		return "";
	}
/**
* Do we need to drop constraints before dropping tables in this dialect?
*
* @return True if constraints must be dropped prior to dropping the table;
* false otherwise.
*/
	public boolean dropConstraints() {
		// Safe default: drop constraints first so table drops cannot fail
		// on dangling foreign keys.
		return true;
	}
/**
* The subcommand of the {@code alter table} command used to drop
* a foreign key constraint, usually {@code drop constraint}.
*/
	public String getDropForeignKeyString() {
		// ANSI form; some dialects use "drop foreign key" instead.
		return "drop constraint";
	}
/**
* The subcommand of the {@code alter table} command used to drop
* a unique key constraint.
*/
	public String getDropUniqueKeyString() {
		// ANSI form; some dialects use "drop index" or similar instead.
		return "drop constraint";
	}
/**
* For dropping a constraint with an {@code alter table} statement,
* can the phrase {@code if exists} be applied before the constraint
* name?
*
* @apiNote Only one or the other (or neither) of this and
* {@link #supportsIfExistsAfterConstraintName} should
* return true
*
* @return {@code true} if {@code if exists} can be applied before
* the constraint name
*/
	public boolean supportsIfExistsBeforeConstraintName() {
		// Conservative default: not supported.
		return false;
	}
/**
* For dropping a constraint with an {@code alter table}, can the
* phrase {@code if exists} be applied after the constraint name?
*
* @apiNote Only one or the other (or neither) of this and
* {@link #supportsIfExistsBeforeConstraintName} should
* return true.
*
* @return {@code true} if {@code if exists} can be applied after
* the constraint name
*/
	public boolean supportsIfExistsAfterConstraintName() {
		// Conservative default: not supported.
		return false;
	}
/**
* Does this dialect support modifying the type of an existing column?
*/
	public boolean supportsAlterColumnType() {
		// Conservative default: not supported.
		return false;
	}
/**
* The fragment of an {@code alter table} command which modifies a
* column type, or null if column types cannot be modified.
* Often {@code alter column col_name set data type col_type}.
*
* @param columnName the name of the column
* @param columnType the new type of the column
* @param columnDefinition the full column definition
* @return a fragment to be appended to {@code alter table}
*/
	public String getAlterColumnTypeString(String columnName, String columnType, String columnDefinition) {
		// null signals "column types cannot be modified" (see supportsAlterColumnType()).
		return null;
	}
/**
* The syntax used to add a foreign key constraint to a table,
* with the referenced key columns explicitly specified.
*
* @param constraintName The foreign key constraint name
* @param foreignKey The names of the columns comprising the
* foreign key
* @param referencedTable The table referenced by the foreign key
* @param primaryKey The explicit columns in the referencedTable
* referenced by this foreign key.
* @param referencesPrimaryKey if false, constraint should be
* explicit about which column names
* the constraint refers to
*
* @return the "add FK" fragment
*/
public String getAddForeignKeyConstraintString(
String constraintName,
String[] foreignKey,
String referencedTable,
String[] primaryKey,
boolean referencesPrimaryKey) {
final StringBuilder res = new StringBuilder( 30 );
res.append( " add constraint " )
.append( quote( constraintName ) )
.append( " foreign key (" )
.append( join( ", ", foreignKey ) )
.append( ") references " )
.append( referencedTable );
if ( !referencesPrimaryKey ) {
res.append( " (" )
.append( join( ", ", primaryKey ) )
.append( ')' );
}
return res.toString();
}
/**
* The syntax used to add a foreign key constraint to a table,
* given the definition of the foreign key as a string.
*
* @param constraintName The foreign key constraint name
* @param foreignKeyDefinition The whole definition of the
* foreign key as a fragment
*/
public String getAddForeignKeyConstraintString(
String constraintName,
String foreignKeyDefinition) {
return " add constraint " + quote( constraintName )
+ " " + foreignKeyDefinition;
}
/**
* Does the dialect also need cross-references to get a complete
* list of foreign keys?
*/
	public boolean useCrossReferenceForeignKeys(){
		// Most dialects can enumerate FKs without cross-reference metadata.
		return false;
	}
/**
* Some dialects require a not null primaryTable filter.
* Sometimes a wildcard entry is sufficient for the like condition.
*/
	public String getCrossReferenceParentTableFilter(){
		// null means "no filter needed".
		return null;
	}
/**
* The syntax used to add a primary key constraint to a table.
*
* @param constraintName The name of the PK constraint.
*
* @apiNote Currently unused, since we never use {@code alter table}
* to add a primary key constraint.
*/
	public String getAddPrimaryKeyConstraintString(String constraintName) {
		// Fragment appended to "alter table"; column list follows the caller's rendering.
		return " add constraint " + constraintName + " primary key ";
	}
/**
* Is a list of column names required in the {@code create view} statement?
*
* @since 7.1
*/
	public boolean requiresColumnListInCreateView() {
		// Most databases infer the column list from the view's select.
		return false;
	}
/**
* The {@link SqmMultiTableMutationStrategy} to use when not specified by
* {@link org.hibernate.query.spi.QueryEngineOptions#getCustomSqmMultiTableMutationStrategy}.
*
* @see SqmMultiTableMutationStrategyProvider#createMutationStrategy
*/
	public SqmMultiTableMutationStrategy getFallbackSqmMutationStrategy(
			EntityMappingType entityDescriptor,
			RuntimeModelCreationContext runtimeModelCreationContext) {
		// Default fallback: persistent (physical) id tables for multi-table mutations.
		return new PersistentTableMutationStrategy( entityDescriptor, runtimeModelCreationContext );
	}
/**
* The {@link SqmMultiTableInsertStrategy} to use when not specified by
* {@link org.hibernate.query.spi.QueryEngineOptions#getCustomSqmMultiTableInsertStrategy}.
*
* @see SqmMultiTableMutationStrategyProvider#createInsertStrategy
*/
	public SqmMultiTableInsertStrategy getFallbackSqmInsertStrategy(
			EntityMappingType entityDescriptor,
			RuntimeModelCreationContext runtimeModelCreationContext) {
		// Default fallback: persistent (physical) id tables for multi-table inserts.
		return new PersistentTableInsertStrategy( entityDescriptor, runtimeModelCreationContext );
	}
// UDT support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* The kind of user-defined type to create, or the empty
* string if this does not need to be specified. Included
* after {@code create type type_name as}, but before the
* list of members.
*/
	public String getCreateUserDefinedTypeKindString() {
		// Empty: no explicit UDT kind keyword is needed by default.
		return "";
	}
/**
* An arbitrary extension to append to the end of the UDT
* {@code create type} command.
*/
	public String getCreateUserDefinedTypeExtensionsString() {
		// No trailing fragment by default.
		return "";
	}
/**
* For dropping a type, can the phrase {@code if exists} be
* applied before the type name?
*
* @apiNote Only one or the other (or neither) of this and
* {@link #supportsIfExistsAfterTypeName} should
* return true.
*
* @return {@code true} if {@code if exists} can be applied
* before the type name
*/
	public boolean supportsIfExistsBeforeTypeName() {
		// Conservative default: not supported.
		return false;
	}
/**
* For dropping a type, can the phrase {@code if exists} be
* applied after the type name?
*
* @apiNote Only one or the other (or neither) of this and
* {@link #supportsIfExistsBeforeTypeName} should
* return true.
*
* @return {@code true} if {@code if exists} can be applied
* after the type name
*/
	public boolean supportsIfExistsAfterTypeName() {
		// Conservative default: not supported.
		return false;
	}
	/**
	 * The separator used between catalog and schema/table names in
	 * qualified identifiers, almost always {@code "."}.
	 *
	 * @return the catalog separator string
	 */
	public String getCatalogSeparator() {
		return ".";
	}
// callable statement support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Registers a parameter capable of returning a {@link ResultSet}
* <em>by position</em>, either an {@code OUT} parameter, or a
* {@link Types#REF_CURSOR REF_CURSOR} parameter as defined in Java 8.
*
* @apiNote Before Java 8, support for {@link ResultSet}-returning
* parameters was very uneven across database and drivers,
* leading to its inclusion as part of the {@code Dialect}
* contract.
*
* @param statement The callable statement.
* @param position The bind position at which to register the output param.
*
* @return The number of (contiguous) bind positions used.
*
* @throws SQLException Indicates problems registering the param.
*/
public int registerResultSetOutParameter(CallableStatement statement, int position)
throws SQLException {
throw new UnsupportedOperationException(
getClass().getName() +
" does not support resultsets via stored procedures"
);
}
/**
* Registers a parameter capable of returning a {@link ResultSet}
* <em>by name</em>, either an {@code OUT} parameter, or a
* {@link Types#REF_CURSOR REF_CURSOR} parameter as defined in Java 8.
*
* @apiNote Before Java 8, support for {@link ResultSet}-returning
* parameters was very uneven across database and drivers,
* leading to its inclusion as part of the {@code Dialect}
* contract.
*
* @param statement The callable statement.
* @param name The parameter name (for drivers which support named parameters).
*
* @return The number of (contiguous) bind positions used.
*
* @throws SQLException Indicates problems registering the param.
*/
@SuppressWarnings("UnusedParameters")
public int registerResultSetOutParameter(CallableStatement statement, String name)
throws SQLException {
throw new UnsupportedOperationException(
getClass().getName() +
" does not support resultsets via stored procedures"
);
}
/**
* Given a {@linkplain CallableStatement callable statement} previously
* processed by {@link #registerResultSetOutParameter}, extract the
* {@link ResultSet} from the {@code OUT} parameter.
*
* @param statement The callable statement.
* @return The extracted result set.
* @throws SQLException Indicates problems extracting the result set.
*/
public ResultSet getResultSet(CallableStatement statement) throws SQLException {
throw new UnsupportedOperationException(
getClass().getName() + " does not support resultsets via stored procedures"
);
}
/**
* Given a {@linkplain CallableStatement callable statement} previously
* processed by {@link #registerResultSetOutParameter}, extract the
* {@link ResultSet} from the positional {@code OUT} parameter.
*
* @param statement The callable statement.
* @param position The bind position at which to register the output param.
*
* @return The extracted result set.
*
* @throws SQLException Indicates problems extracting the result set.
*/
@SuppressWarnings("UnusedParameters")
public ResultSet getResultSet(CallableStatement statement, int position) throws SQLException {
throw new UnsupportedOperationException(
getClass().getName() + " does not support resultsets via stored procedures"
);
}
/**
* Given a {@linkplain CallableStatement callable statement} previously
* processed by {@link #registerResultSetOutParameter}, extract the
* {@link ResultSet} from the named {@code OUT} parameter.
*
* @param statement The callable statement.
* @param name The parameter name (for drivers which support named parameters).
*
* @return The extracted result set.
*
* @throws SQLException Indicates problems extracting the result set.
*/
@SuppressWarnings("UnusedParameters")
public ResultSet getResultSet(CallableStatement statement, String name) throws SQLException {
throw new UnsupportedOperationException(
getClass().getName() + " does not support resultsets via stored procedures"
);
}
// current timestamp support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Does this dialect support some way to retrieve the current timestamp
* value from the database?
*
* @return True if the current timestamp can be retrieved; false otherwise.
*/
	public boolean supportsCurrentTimestampSelection() {
		// Conservative default: not supported.
		return false;
	}
/**
* Is the command returned by {@link #getCurrentTimestampSelectString}
* treated as callable?
* <p>
* Typically, this indicates the use of the JDBC escape syntax.
*
	 * @return {@code true} if the {@link #getCurrentTimestampSelectString} is
* treated as callable; false otherwise.
*/
	public boolean isCurrentTimestampSelectStringCallable() {
		// Only meaningful when supportsCurrentTimestampSelection() is overridden to true.
		throw new UnsupportedOperationException( "Database not known to define a current timestamp function" );
	}
/**
* The command used to retrieve the current timestamp from the database.
*/
	public String getCurrentTimestampSelectString() {
		// Only meaningful when supportsCurrentTimestampSelection() is overridden to true.
		throw new UnsupportedOperationException( "Database not known to define a current timestamp function" );
	}
/**
* Does this dialect have an ANSI SQL {@code current_timestamp} function?
*/
	public boolean supportsStandardCurrentTimestampFunction() {
		// ANSI current_timestamp is assumed available.
		return true;
	}
// SQLException support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* An instance of {@link SQLExceptionConversionDelegate} for interpreting
* dialect-specific {@linkplain SQLException#getErrorCode() error} or
* {@linkplain SQLException#getSQLState() SQLState} codes.
* <p>
* If this method is overridden to return a non-null value, the default
* {@link SQLExceptionConverter} will use the returned
* {@link SQLExceptionConversionDelegate} in addition to the following
* standard delegates:
* <ol>
* <li>a "static" delegate based on the JDBC4-defined {@link SQLException}
* hierarchy, and
* <li>a delegate that interprets SQLState codes as either X/Open or
* SQL-2003 codes, depending on what is
* {@linkplain java.sql.DatabaseMetaData#getSQLStateType reported}
* by the JDBC driver.
* </ol>
* <p>
* It is strongly recommended that every {@code Dialect} implementation
* override this method, since interpretation of a SQL error is much
* more accurate when based on the vendor-specific
* {@linkplain SQLException#getErrorCode() error code}, rather than on
* the SQLState.
*
* @return The {@link SQLExceptionConversionDelegate} for this dialect
*/
	public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
		// null means "use only the standard delegates" (see the javadoc above);
		// dialects are strongly encouraged to override with vendor-specific codes.
		return null;
	}
	// Default extractor: never identifies a constraint name (returns null for every SQLException).
	private static final ViolatedConstraintNameExtractor EXTRACTOR = sqle -> null;
/**
* A {@link ViolatedConstraintNameExtractor} for extracting the name of
* a violated constraint from a {@link SQLException}.
*/
	public ViolatedConstraintNameExtractor getViolatedConstraintNameExtractor() {
		// Shared no-op extractor; dialects override to parse vendor messages.
		return EXTRACTOR;
	}
// union subclass support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Given a {@linkplain Types JDBC type code}, return the expression
* for a literal null value of that type, to use in a {@code select}
* clause.
* <p>
* The {@code select} query will be an element of a {@code UNION}
* or {@code UNION ALL}.
*
* @implNote Some databases require an explicit type cast.
*
* @param sqlType The {@link Types} type code.
* @param typeConfiguration The type configuration
* @return The appropriate select clause value fragment.
* @deprecated Use {@link #getSelectClauseNullString(SqlTypedMapping, TypeConfiguration)} instead
*/
	@Deprecated(forRemoval = true)
	public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfiguration) {
		// Default: a bare untyped null literal; dialects needing a cast override.
		return "null";
	}
/**
* Given a type mapping, return the expression
* for a literal null value of that type, to use in a {@code select}
* clause.
* <p>
* The {@code select} query will be an element of a {@code UNION}
* or {@code UNION ALL}.
*
* @implNote Some databases require an explicit type cast.
*
* @param sqlTypeMapping The type mapping.
* @param typeConfiguration The type configuration
* @return The appropriate select clause value fragment.
*/
	public String getSelectClauseNullString(SqlTypedMapping sqlTypeMapping, TypeConfiguration typeConfiguration) {
		// Delegate to the (deprecated) type-code variant using the mapping's DDL type code.
		return getSelectClauseNullString( sqlTypeMapping.getJdbcMapping().getJdbcType().getDdlTypeCode(), typeConfiguration );
	}
/**
* Does this dialect support {@code UNION ALL}?
*
* @return True if {@code UNION ALL} is supported; false otherwise.
*/
	public boolean supportsUnionAll() {
		// UNION ALL is assumed available.
		return true;
	}
/**
* Does this dialect support {@code UNION} in a subquery.
*
* @return True if {@code UNION} is supported in a subquery; false otherwise.
*/
	public boolean supportsUnionInSubquery() {
		// Tied to UNION ALL support by default.
		return supportsUnionAll();
	}
// miscellaneous support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* The fragment used to insert a row without specifying any column values,
* usually just {@code ()}, but sometimes {@code default values}.
*
* @implNote On the other hand, this is simply not possible on some databases!
*
* @return The appropriate empty values clause.
*
* @deprecated Override the method {@code renderInsertIntoNoColumns()}
* on the {@link #getSqlAstTranslatorFactory() translator}
* returned by this dialect.
*/
	@Deprecated( since = "6" )
	public String getNoColumnsInsertString() {
		// Default empty values clause for column-less inserts.
		return "values ( )";
	}
/**
* Is the {@code INSERT} statement is allowed to contain no columns?
*
* @return if this dialect supports no-column {@code INSERT}.
*/
	public boolean supportsNoColumnsInsert() {
		// Column-less INSERT is assumed possible.
		return true;
	}
/**
* The name of the SQL function that transforms a string to lowercase,
* almost always {@code lower}.
*
* @return The dialect-specific lowercase function.
*/
	public String getLowercaseFunction() {
		// ANSI standard function name.
		return "lower";
	}
/**
* The name of the SQL operator that performs case-insensitive {@code LIKE}
* comparisons.
*
* @return The dialect-specific case-insensitive like operator.
*/
	public String getCaseInsensitiveLike(){
		// Plain "like" by default; e.g. PostgreSQL-style dialects return "ilike".
		return "like";
	}
/**
* Does this dialect support case-insensitive {@code LIKE} comparisons?
*
* @return {@code true} if the database supports case-insensitive like
* comparisons, {@code false} otherwise.
* The default is {@code false}.
*/
	public boolean supportsCaseInsensitiveLike(){
		// Conservative default: not supported (see getCaseInsensitiveLike()).
		return false;
	}
/**
* Does this dialect support truncation of values to a specified length
* via a {@code cast}?
*
* @return {@code true} if the database supports truncation via a cast,
* {@code false} otherwise.
* The default is {@code true}.
*/
	public boolean supportsTruncateWithCast(){
		// Truncation via cast to a length-limited type is assumed available.
		return true;
	}
/**
* Does this dialect support the {@code is true} and {@code is false}
* operators?
*
* @return {@code true} if the database supports {@code is true} and
* {@code is false}, or {@code false} if it does not. The
* default is {@code is false}.
*/
	public boolean supportsIsTrue() {
		// Conservative default: not supported.
		return false;
	}
/**
* Meant as a means for end users to affect the select strings being sent
* to the database and perhaps manipulate them in some fashion.
*
* @param select The select command
* @return The mutated select command, or the same as was passed in.
*/
	public String transformSelectString(String select) {
		// Identity transform by default; extension hook for users/dialects.
		return select;
	}
/**
* What is the maximum length Hibernate can use for generated aliases?
*
* @implNote
* The maximum here should account for the fact that Hibernate often needs
* to append "uniqueing" information to the end of generated aliases.
* That "uniqueing" information will be added to the end of an identifier
* generated to the length specified here; so be sure to leave some room
* (generally speaking 5 positions will suffice).
*
* @return The maximum length.
*/
	public int getMaxAliasLength() {
		// Deliberately small default, leaving room for uniquing suffixes (see javadoc).
		return 10;
	}
/**
* What is the maximum identifier length supported by this dialect?
*
* @return The maximum length.
*/
	public int getMaxIdentifierLength() {
		// "Unlimited" unless the dialect declares a real bound.
		return Integer.MAX_VALUE;
	}
/**
* The SQL literal expression representing the given boolean value.
*
* @param bool The boolean value
* @return The appropriate SQL literal.
*/
public String toBooleanValueString(boolean bool) {
final StringBuilder sb = new StringBuilder();
appendBooleanValueString( new StringBuilderSqlAppender( sb ), bool );
return sb.toString();
}
/**
* Append the SQL literal expression representing the given boolean
* value to the given {@link SqlAppender}.
*
* @param bool The boolean value
* @param appender The {@link SqlAppender} to append the literal expression to
*/
public void appendBooleanValueString(SqlAppender appender, boolean bool) {
appender.appendSql( bool ? '1' : '0' );
}
// keyword support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Register a keyword.
*
* @param word a reserved word in this SQL dialect
*/
	protected void registerKeyword(String word) {
		// When tokens are checked for keywords, they are always compared against the lowercase version of the token.
		// For instance, Template#renderWhereStringTemplate transforms all tokens to lowercase too.
		// Locale.ROOT avoids locale-sensitive case surprises (e.g. Turkish dotless i).
		sqlKeywords.add( word.toLowerCase( Locale.ROOT ) );
	}
/**
* The keywords of this SQL dialect.
*/
	public Set<String> getKeywords() {
		// Live (mutable) view of the keywords registered via registerKeyword().
		return sqlKeywords;
	}
/**
* The {@link IdentifierHelper} indicated by this dialect for handling identifier conversions.
* Returning {@code null} is allowed and indicates that Hibernate should fall back to building
* a "standard" helper. In the fallback path, any changes made to the IdentifierHelperBuilder
* during this call will still be incorporated into the built IdentifierHelper.
* <p>
* The incoming builder will have the following set:
* <ul>
* <li>{@link IdentifierHelperBuilder#isGloballyQuoteIdentifiers()}</li>
* <li>{@link IdentifierHelperBuilder#getUnquotedCaseStrategy()} - initialized to UPPER</li>
* <li>{@link IdentifierHelperBuilder#getQuotedCaseStrategy()} - initialized to MIXED</li>
* </ul>
* <p>
* By default, Hibernate will do the following:
* <ul>
* <li>Call {@link IdentifierHelperBuilder#applyIdentifierCasing(DatabaseMetaData)}
* <li>Call {@link IdentifierHelperBuilder#applyReservedWords(DatabaseMetaData)}
* <li>Applies {@link AnsiSqlKeywords#sql2003()} as reserved words</li>
* <li>Applies the {@link #sqlKeywords} collected here as reserved words</li>
* <li>Applies the Dialect's {@link NameQualifierSupport}, if it defines one</li>
* </ul>
*
* @param builder A partially-configured {@link IdentifierHelperBuilder}.
* @param metadata Access to the metadata returned from the driver if needed and if available.
* <em>WARNING:</em> it may be {@code null}.
*
* @return The {@link IdentifierHelper} instance to use,
* or {@code null} to indicate Hibernate should use its fallback path
*
* @throws SQLException Accessing the {@link DatabaseMetaData} can throw it.
* Just rethrow and Hibernate will handle it.
*
* @see #getNameQualifierSupport()
*/
	public IdentifierHelper buildIdentifierHelper(
			IdentifierHelperBuilder builder,
			@Nullable DatabaseMetaData metadata) throws SQLException {
		// Let the driver metadata determine identifier casing (metadata may be null per contract).
		builder.applyIdentifierCasing( metadata );
		// Add this dialect's reserved words collected via registerKeyword().
		builder.applyReservedWords( sqlKeywords );
		builder.setNameQualifierSupport( getNameQualifierSupport() );
		return builder.build();
	}
// identifier quoting support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* The character specific to this dialect used to begin a quoted identifier.
*
* @return The dialect-specific open quote character.
*/
	public char openQuote() {
		// ANSI double quote by default.
		return '"';
	}
/**
* The character specific to this dialect used to close a quoted identifier.
*
* @return The dialect-specific close quote character.
*/
	public char closeQuote() {
		// ANSI double quote by default.
		return '"';
	}
/**
* Apply dialect-specific quoting.
*
* @param name The value to be quoted.
* @return The quoted value.
* @see #openQuote()
* @see #closeQuote()
*/
public String toQuotedIdentifier(String name) {
if ( name == null ) {
return null;
}
return openQuote() + name + closeQuote();
}
/**
* Apply dialect-specific quoting if the given name is quoted using backticks.
* <p>
* By default, the incoming name is checked to see if its first character is
* a backtick ({@code `}). If it is, the dialect specific quoting is applied.
*
* @param name The value to be quoted.
* @return The quoted (or unmodified, if not starting with backtick) value.
* @see #openQuote()
* @see #closeQuote()
*/
public String quote(String name) {
if ( name == null ) {
return null;
}
if ( name.charAt( 0 ) == '`' ) {
return openQuote() + name.substring( 1, name.length() - 1 ) + closeQuote();
}
else {
return name;
}
}
// DDL support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* The {@link SchemaManagementTool} to use if none is explicitly specified.
*
* @apiNote Allows implementations to override how schema tooling works by default
*
* @return a {@link HibernateSchemaManagementTool} by default
*/
@Incubating
public SchemaManagementTool getFallbackSchemaManagementTool(
Map<String, Object> configurationValues,
ServiceRegistryImplementor registry) {
return new HibernateSchemaManagementTool();
}
private final StandardTableExporter tableExporter = new StandardTableExporter( this );
private final StandardUserDefinedTypeExporter userDefinedTypeExporter = new StandardUserDefinedTypeExporter( this );
private final StandardSequenceExporter sequenceExporter = new StandardSequenceExporter( this );
private final StandardIndexExporter indexExporter = new StandardIndexExporter( this );
private final StandardForeignKeyExporter foreignKeyExporter = new StandardForeignKeyExporter( this );
private final StandardUniqueKeyExporter uniqueKeyExporter = new StandardUniqueKeyExporter( this );
private final StandardAuxiliaryDatabaseObjectExporter auxiliaryObjectExporter = new StandardAuxiliaryDatabaseObjectExporter( this );
private final StandardTemporaryTableExporter temporaryTableExporter = new StandardTemporaryTableExporter( this );
private final StandardTableMigrator tableMigrator = new StandardTableMigrator( this );
private final StandardTableCleaner tableCleaner = new StandardTableCleaner( this );
/**
* Get an {@link Exporter} for {@link Table}s,
* usually {@link StandardTableExporter}.
*/
public Exporter<Table> getTableExporter() {
return tableExporter;
}
/**
* Get a {@link TableMigrator},
* usually {@link StandardTableMigrator}.
*/
public TableMigrator getTableMigrator() {
return tableMigrator;
}
/**
* Get a schema {@link Cleaner},
* usually {@link StandardTableCleaner}.
*/
public Cleaner getTableCleaner() {
return tableCleaner;
}
/**
* Get an {@link Exporter} for {@link UserDefinedType user defined types},
* usually {@link StandardUserDefinedTypeExporter}.
*/
public Exporter<UserDefinedType> getUserDefinedTypeExporter() {
return userDefinedTypeExporter;
}
/**
* Get an {@link Exporter} for {@linkplain Sequence sequences},
* usually {@link StandardSequenceExporter}.
*/
public Exporter<Sequence> getSequenceExporter() {
return sequenceExporter;
}
/**
* Get an {@link Exporter} for {@linkplain Index indexes},
* usually {@link StandardIndexExporter}.
*/
public Exporter<Index> getIndexExporter() {
return indexExporter;
}
/**
* Get an {@link Exporter} for {@linkplain ForeignKey foreign key} constraints,
* usually {@link StandardForeignKeyExporter}.
*/
public Exporter<ForeignKey> getForeignKeyExporter() {
return foreignKeyExporter;
}
/**
* Get an {@link Exporter} for {@linkplain UniqueKey unique key} constraints,
* usually {@link StandardUniqueKeyExporter}.
*/
public Exporter<UniqueKey> getUniqueKeyExporter() {
return uniqueKeyExporter;
}
/**
* Get an {@link Exporter} for {@link AuxiliaryDatabaseObject}s,
* usually {@link StandardAuxiliaryDatabaseObjectExporter}.
*/
public Exporter<AuxiliaryDatabaseObject> getAuxiliaryDatabaseObjectExporter() {
return auxiliaryObjectExporter;
}
// Temporary table support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Get a {@link TemporaryTableExporter},
* usually {@link StandardTemporaryTableExporter}.
*/
public TemporaryTableExporter getTemporaryTableExporter() {
return temporaryTableExporter;
}
/**
* The strategy to use for persistent temporary tables.
*
* @since 7.1
*/
public TemporaryTableStrategy getPersistentTemporaryTableStrategy() {
return getSupportedTemporaryTableKind() == TemporaryTableKind.PERSISTENT
? new LegacyTemporaryTableStrategy( this )
: persistentTemporaryTableStrategy;
}
/**
* The strategy to use for local temporary tables.
*
* @since 7.1
*/
public @Nullable TemporaryTableStrategy getLocalTemporaryTableStrategy() {
return getSupportedTemporaryTableKind() == TemporaryTableKind.LOCAL ? new LegacyTemporaryTableStrategy( this )
: null;
}
/**
* The strategy to use for global temporary tables.
*
* @since 7.1
*/
public @Nullable TemporaryTableStrategy getGlobalTemporaryTableStrategy() {
return getSupportedTemporaryTableKind() == TemporaryTableKind.GLOBAL ? new LegacyTemporaryTableStrategy( this )
: null;
}
/**
* The kind of temporary tables that are supported on this database.
*/
@Deprecated(forRemoval = true, since = "7.1")
public TemporaryTableKind getSupportedTemporaryTableKind() {
return TemporaryTableKind.PERSISTENT;
}
/**
* An arbitrary SQL fragment appended to the end of the statement to
* create a temporary table, specifying dialect-specific options, or
* {@code null} if there are no options to specify.
*/
@Deprecated(forRemoval = true, since = "7.1")
public String getTemporaryTableCreateOptions() {
return null;
}
/**
* The command to create a temporary table.
*/
@Deprecated(forRemoval = true, since = "7.1")
public String getTemporaryTableCreateCommand() {
return switch ( getSupportedTemporaryTableKind() ) {
case PERSISTENT -> "create table";
case LOCAL -> "create local temporary table";
case GLOBAL -> "create global temporary table";
};
}
/**
* The command to drop a temporary table.
*/
@Deprecated(forRemoval = true, since = "7.1")
public String getTemporaryTableDropCommand() {
return "drop table";
}
/**
* The command to truncate a temporary table.
*/
@Deprecated(forRemoval = true, since = "7.1")
public String getTemporaryTableTruncateCommand() {
return "delete from";
}
/**
* Annotation to be appended to the end of each COLUMN clause for temporary tables.
*
* @param sqlTypeCode The SQL type code
* @return The annotation to be appended, for example, {@code COLLATE DATABASE_DEFAULT} in SQL Server
*/
@Deprecated(forRemoval = true, since = "7.1")
public String getCreateTemporaryTableColumnAnnotation(int sqlTypeCode) {
return "";
}
/**
* The sort of {@linkplain TempTableDdlTransactionHandling transaction handling}
* to use when creating or dropping temporary tables.
*
* @deprecated No dialect currently overrides this, so it's obsolete
*/
@Deprecated(since = "7.0")
public TempTableDdlTransactionHandling getTemporaryTableDdlTransactionHandling() {
return TempTableDdlTransactionHandling.NONE;
}
/**
* The action to take after finishing use of a temporary table.
*/
@Deprecated(forRemoval = true, since = "7.1")
public AfterUseAction getTemporaryTableAfterUseAction() {
return AfterUseAction.CLEAN;
}
/**
* The action to take before beginning use of a temporary table.
*/
@Deprecated(forRemoval = true, since = "7.1")
public BeforeUseAction getTemporaryTableBeforeUseAction() {
return BeforeUseAction.NONE;
}
// Catalog / schema creation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Does this dialect support creating and dropping catalogs?
*
* @return True if the dialect supports catalog creation; false otherwise.
*/
public boolean canCreateCatalog() {
return false;
}
/**
* Get the SQL command used to create the named catalog.
*
* @param catalogName The name of the catalog to be created.
*
* @return The creation commands
*/
public String[] getCreateCatalogCommand(String catalogName) {
throw new UnsupportedOperationException( "No create catalog syntax supported by " + getClass().getName() );
}
/**
* Get the SQL command used to drop the named catalog.
*
* @param catalogName The name of the catalog to be dropped.
*
* @return The drop commands
*/
public String[] getDropCatalogCommand(String catalogName) {
throw new UnsupportedOperationException( "No drop catalog syntax supported by " + getClass().getName() );
}
/**
* Does this dialect support creating and dropping schema?
*
* @return True if the dialect supports schema creation; false otherwise.
*/
public boolean canCreateSchema() {
return true;
}
/**
* Get the SQL command used to create the named schema.
*
* @param schemaName The name of the schema to be created.
*
* @return The creation commands
*/
public String[] getCreateSchemaCommand(String schemaName) {
return new String[] {"create schema " + schemaName};
}
/**
* Get the SQL command used to drop the named schema.
*
* @param schemaName The name of the schema to be dropped.
*
* @return The drop commands
*/
public String[] getDropSchemaCommand(String schemaName) {
return new String[] {"drop schema " + schemaName};
}
/**
* Get the SQL command used to retrieve the current schema name.
* <p>
* Works in conjunction with {@link #getSchemaNameResolver()},
* unless the resulting {@link SchemaNameResolver} does not need
* this information. For example, a custom implementation might
* make use of the Java 1.7 {@link Connection#getSchema()} method.
*
* @return The current schema retrieval SQL
*
* @deprecated Since Hibernate now baselines on Java 17,
* {@link Connection#getSchema()} is always available directly.
* Never used internally.
*/
@Deprecated(since = "7.0")
public String getCurrentSchemaCommand() {
return null;
}
/**
* Get the strategy for determining the schema name from a JDBC
* {@link Connection}, usually {@link DefaultSchemaNameResolver}.
*
* @return The schema name resolver strategy
*/
public SchemaNameResolver getSchemaNameResolver() {
return DefaultSchemaNameResolver.INSTANCE;
}
/**
	 * Does the database/driver have a bug in deleting rows that refer to
	 * other rows being deleted in the same query?
*
* @implNote The main culprit is MySQL.
*
* @return {@code true} if the database/driver has this bug
*/
public boolean hasSelfReferentialForeignKeyBug() {
return false;
}
/**
* The keyword used to specify a nullable column, usually {@code ""},
* but sometimes {@code " null"}.
*/
public String getNullColumnString() {
return "";
}
/**
* The keyword used to specify a nullable column of the given SQL type.
*
* @implNote The culprit is {@code timestamp} columns on MySQL.
*/
public String getNullColumnString(String columnType) {
return getNullColumnString();
}
/**
* Quote the given collation name if necessary.
*/
public String quoteCollation(String collation) {
return collation;
}
/**
* Does this dialect support commenting on tables and columns?
*
* @return {@code true} if commenting is supported
*/
public boolean supportsCommentOn() {
return false;
}
/**
* Get the comment into a form supported for table definition.
*
* @param comment The comment to apply
*
* @return The comment fragment
*/
public String getTableComment(String comment) {
return "";
}
/**
* Get the comment into a form supported for UDT definition.
*
* @param comment The comment to apply
*
* @return The comment fragment
*/
public String getUserDefinedTypeComment(String comment) {
return "";
}
/**
* Get the comment into a form supported for column definition.
*
* @param comment The comment to apply
*
* @return The comment fragment
*/
public String getColumnComment(String comment) {
return "";
}
/**
* Does this dialect support column-level check constraints?
*
* @return True if column-level {@code check} constraints are supported;
* false otherwise.
*/
public boolean supportsColumnCheck() {
return true;
}
/**
* Does this dialect support named column-level check constraints?
*
* @return True if named column-level {@code check} constraints are supported;
* false otherwise.
*/
public boolean supportsNamedColumnCheck() {
return supportsColumnCheck();
}
/**
* Does this dialect support table-level check constraints?
*
* @return True if table-level {@code check} constraints are supported;
* false otherwise.
*/
public boolean supportsTableCheck() {
return true;
}
/**
* Does this dialect support {@code on delete} actions in foreign key definitions?
*
* @return {@code true} if the dialect does support the {@code on delete} clause.
*/
public boolean supportsCascadeDelete() {
return true;
}
/**
* The keyword that specifies that a {@code drop table} operation
* should be cascaded to its constraints, typically
* {@code " cascade"} where the leading space is required, or
* the empty string if there is no such keyword in this dialect.
*
* @return The cascade drop keyword, if any, with a leading space
*/
public String getCascadeConstraintsString() {
return "";
}
/**
* A {@link ColumnAliasExtractor}, usually just {@link ResultSetMetaData#getColumnLabel}.
*/
public ColumnAliasExtractor getColumnAliasExtractor() {
return ColumnAliasExtractor.COLUMN_LABEL_EXTRACTOR;
}
// Informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Should LOBs (both BLOB and CLOB) be bound using stream operations,
* that is, using {@link PreparedStatement#setBinaryStream}).
*
* @return True if BLOBs and CLOBs should be bound using stream operations.
*
* @since 3.2
*/
public boolean useInputStreamToInsertBlob() {
return true;
}
/**
* Should {@link Blob}, {@link Clob}, and {@link NClob} be created solely
* using {@link Connection#createBlob()}, {@link Connection#createClob()},
* and {@link Connection#createNClob()}, instead of allowing the use of
* our own implementations.
*
* @return True if these types should be instantiated using {@link Connection}.
*
* @since 6.6
*/
public boolean useConnectionToCreateLob() {
return !useInputStreamToInsertBlob();
}
/**
* Does this dialect support references to result variables
* (i.e, select items) by column positions (1-origin) as defined
* by the select clause?
* @return true if result variable references by column positions
* are supported; false otherwise.
*
* @since 6.0.0
*/
public boolean supportsOrdinalSelectItemReference() {
return true;
}
/**
* Returns the default ordering of null.
*
* @since 6.0.0
*/
public NullOrdering getNullOrdering() {
return NullOrdering.GREATEST;
}
/**
* Does this dialect support {@code nulls first} and {@code nulls last}?
*/
public boolean supportsNullPrecedence() {
return true;
}
/**
* Does this dialect/database require casting of non-string arguments
* in the {@code concat()} function?
*
* @return {@code true} if casting using {@code cast()} is required
*
* @since 6.2
*/
public boolean requiresCastForConcatenatingNonStrings() {
return false;
}
/**
* Does this dialect require that integer divisions be wrapped in
* {@code cast()} calls to tell the db parser the expected type.
*
* @implNote The culprit is HSQLDB.
*
* @return True if integer divisions must be {@code cast()}ed to float
*/
public boolean requiresFloatCastingOfIntegerDivision() {
return false;
}
/**
* Does this dialect support asking the result set its positioning
* information on forward-only cursors?
* <p>
* Specifically, in the case of scrolling fetches, Hibernate needs
* to use {@link ResultSet#isAfterLast} and
* {@link ResultSet#isBeforeFirst}. Certain drivers do not allow
* access to these methods for forward-only cursors.
*
* @apiNote This is highly driver dependent!
*
* @return True if methods like {@link ResultSet#isAfterLast} and
* {@link ResultSet#isBeforeFirst} are supported for forward
* only cursors; false otherwise.
*
* @since 3.2
*/
public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() {
return true;
}
/**
* Does this dialect support definition of cascade delete constraints
* which can cause circular chains?
*
* @return True if circular cascade delete constraints are supported;
* false otherwise.
*
* @since 3.2
*/
public boolean supportsCircularCascadeDeleteConstraints() {
return true;
}
/**
* Is a subselect supported as the left-hand side (LHS) of an {@code IN}
* predicates?
* <p>
* In other words, is syntax like {@code <subquery> IN (1, 2, 3)} supported?
*
* @return True if a subselect can appear as the LHS of an in-predicate;
* false otherwise.
*
* @since 3.2
*/
public boolean supportsSubselectAsInPredicateLHS() {
return true;
}
/**
* "Expected" LOB usage pattern is such that I can perform an insert via
* prepared statement with a parameter binding for a LOB value without
* crazy casting to JDBC driver implementation-specific classes.
*
* @implNote Part of the trickiness here is the fact that this is largely
* driver-dependent. For example, Oracle (which is notoriously
* bad with LOB support in their drivers historically) actually
	 *             does a pretty good job with LOB support as of the 10.2.x
	 *             versions of their driver.
*
* @return True if normal LOB usage patterns can be used with this driver;
* false if driver-specific hookiness needs to be applied.
*
* @since 3.2
*/
public boolean supportsExpectedLobUsagePattern() {
return true;
}
/**
* Does the dialect support propagating changes to LOB values back
* to the database? Talking about mutating the internal value of
* the locator, as opposed to supplying a new locator instance.
* <ul>
* <li>For BLOBs, the internal value might be changed by:
* {@link Blob#setBinaryStream},
* {@link Blob#setBytes(long, byte[])},
* {@link Blob#setBytes(long, byte[], int, int)},
* or {@link Blob#truncate(long)}.
* <li>For CLOBs, the internal value might be changed by:
* {@link Clob#setAsciiStream(long)},
* {@link Clob#setCharacterStream(long)},
* {@link Clob#setString(long, String)},
* {@link Clob#setString(long, String, int, int)},
* or {@link Clob#truncate(long)}.
*</ul>
*
* @implNote I do not know the correct answer currently for databases
* which (1) are not part of the cruise control process, or
* (2) do not {@link #supportsExpectedLobUsagePattern}.
*
* @return True if the changes are propagated back to the database;
* false otherwise.
*
* @since 3.2
*/
public boolean supportsLobValueChangePropagation() {
// todo : pretty sure this is the same as the
// java.sql.DatabaseMetaData.locatorsUpdateCopy()
// method added in JDBC 4, see HHH-6046
return true;
}
/**
* Is it supported to materialize a LOB locator outside the transaction
* in which it was created?
*
* @implNote Again, part of the trickiness here is the fact that this is
* largely driver-dependent. All database I have tested which
* {@link #supportsExpectedLobUsagePattern()} also support the
* ability to materialize a LOB outside the owning transaction.
*
* @return True if unbounded materialization is supported; false otherwise.
*
* @since 3.2
*/
public boolean supportsUnboundedLobLocatorMaterialization() {
return true;
}
/**
* Does this dialect support referencing the table being mutated in a
* subquery? The "table being mutated" is the table referenced in an
* update or delete query. And so can that table then be referenced
* in a subquery of the update or delete query?
* <p>
* For example, would the following two syntaxes be supported:
* <ul>
* <li>{@code delete from TABLE_A where ID not in (select ID from TABLE_A)}
* <li>{@code update TABLE_A set NON_ID = 'something' where ID in (select ID from TABLE_A)}
* </ul>
*
* @return True if this dialect allows references the mutating table
* from a subquery.
*/
public boolean supportsSubqueryOnMutatingTable() {
return true;
}
/**
* Does the dialect support an exists statement in the select clause?
*
* @return True if exists checks are allowed in the select clause;
* false otherwise.
*/
public boolean supportsExistsInSelect() {
return true;
}
/**
* For the underlying database, is {@code READ_COMMITTED} isolation
* implemented by forcing readers to wait for write locks to be released?
*
* @return True if writers block readers to achieve {@code READ_COMMITTED};
* false otherwise.
*/
public boolean doesReadCommittedCauseWritersToBlockReaders() {
return false;
}
/**
* For the underlying database, is {@code REPEATABLE_READ} isolation
* implemented by forcing writers to wait for read locks to be released?
*
* @return True if readers block writers to achieve {@code REPEATABLE_READ};
* false otherwise.
*/
public boolean doesRepeatableReadCauseReadersToBlockWriters() {
return false;
}
/**
* Does this dialect support using a JDBC bind parameter as an argument
* to a function or procedure call?
*
* @return Returns {@code true} if the database supports accepting bind
* params as args, {@code false} otherwise. The default is
* {@code true}.
*/
public boolean supportsBindAsCallableArgument() {
return true;
}
/**
* Does this dialect support {@code count(a,b)}?
*
* @return True if the database supports counting tuples; false otherwise.
*/
public boolean supportsTupleCounts() {
return false;
}
/**
* If {@link #supportsTupleCounts()} is true, does this dialect require
* the tuple to be delimited with parentheses?
*
* @return boolean
*/
public boolean requiresParensForTupleCounts() {
return supportsTupleCounts();
}
/**
* Does this dialect support {@code count(distinct a,b)}?
*
* @return True if the database supports counting distinct tuples;
* false otherwise.
*/
public boolean supportsTupleDistinctCounts() {
// oddly most database in fact seem to, so true is the default.
return true;
}
/**
* If {@link #supportsTupleDistinctCounts()} is true, does this dialect
* require the tuple to be delimited with parentheses?
*
* @return boolean
*/
public boolean requiresParensForTupleDistinctCounts() {
return false;
}
/**
* Return the limit that the underlying database places on the number of
* elements in an {@code IN} predicate. If the database defines no such
* limits, simply return zero or a number smaller than zero.
*
* @return The limit, or a non-positive integer to indicate no limit.
*/
public int getInExpressionCountLimit() {
return 0;
}
/**
* Return the limit that the underlying database places on the number of parameters
* that can be defined for a PreparedStatement. If the database defines no such
* limits, simply return zero or a number smaller than zero. By default, Dialect
* returns the same value as {@link #getInExpressionCountLimit()}.
*
* @return The limit, or a non-positive integer to indicate no limit.
*/
public int getParameterCountLimit() {
return getInExpressionCountLimit();
}
/**
* Must LOB values occur last in inserts and updates?
*
* @implNote Oracle is the culprit here, see HHH-4635.
*
* @return boolean True if Lob values should be last, false if it
* does not matter.
*/
public boolean forceLobAsLastValue() {
return false;
}
/**
* Return whether the dialect considers an empty string value to be null.
*
* @implNote Once again, the culprit is Oracle.
*
* @return boolean True if an empty string is treated as null, false otherwise.
*/
public boolean isEmptyStringTreatedAsNull() {
return false;
}
/**
* Get the {@link UniqueDelegate} supported by this dialect
*
* @return The UniqueDelegate
*/
public UniqueDelegate getUniqueDelegate() {
return new AlterTableUniqueDelegate( this );
}
/**
* Apply a hint to the given SQL query.
* <p>
* The entire query is provided, allowing full control over the placement
* and syntax of the hint.
* <p>
* By default, ignore the hint and simply return the query.
*
* @param query The query to which to apply the hint.
* @param hintList The hints to apply
* @return The modified SQL
*/
public String getQueryHintString(String query, List<String> hintList) {
if ( hintList.isEmpty() ) {
return query;
}
else {
final String hints = join( ", ", hintList );
return isEmpty( hints ) ? query : getQueryHintString( query, hints );
}
}
/**
* Apply a hint to the given SQL query.
* <p>
* The entire query is provided, allowing full control over the placement
* and syntax of the hint.
* <p>
* By default, ignore the hint and simply return the query.
*
* @param query The query to which to apply the hint.
* @param hints The hints to apply
* @return The modified SQL
*/
public String getQueryHintString(String query, String hints) {
return query;
}
/**
* A default {@link ScrollMode} to be used by {@link Query#scroll()}.
*
* @apiNote Certain dialects support a subset of {@link ScrollMode}s.
*
* @return the default {@link ScrollMode} to use.
*/
public ScrollMode defaultScrollMode() {
return ScrollMode.SCROLL_INSENSITIVE;
}
/**
* Does this dialect support {@code offset} in subqueries?
* <p>
* For example:
* <pre>
* select * from Table1 where col1 in (select col1 from Table2 order by col2 limit 1 offset 1)
* </pre>
*
* @return {@code true} if it does
*/
public boolean supportsOffsetInSubquery() {
return false;
}
/**
* Does this dialect support the {@code order by} clause in subqueries?
* <p>
* For example:
* <pre>
* select * from Table1 where col1 in (select col1 from Table2 order by col2 limit 1)
* </pre>
*
* @return {@code true} if it does
*/
public boolean supportsOrderByInSubquery() {
return true;
}
/**
* Does this dialect support subqueries in the {@code select} clause?
* <p>
* For example:
* <pre>
* select col1, (select col2 from Table2 where ...) from Table1
* </pre>
*
* @return {@code true} if it does
*/
public boolean supportsSubqueryInSelect() {
return true;
}
/**
* Does this dialect fully support returning arbitrary generated column values
* after execution of an {@code insert} statement, using native SQL syntax?
* <p>
* Support for identity columns is insufficient here, we require something like:
* <ol>
* <li>{@code insert ... returning ...}, or
* <li>{@code select from final table (insert ... )}.
* </ol>
*
* @return {@code true} if {@link org.hibernate.id.insert.InsertReturningDelegate}
* works for any sort of primary key column (not just identity columns), or
* {@code false} if {@code InsertReturningDelegate} does not work, or only
* works for specialized identity/"autoincrement" columns
*
* @see org.hibernate.id.insert.InsertReturningDelegate
*
* @since 6.2
*/
public boolean supportsInsertReturning() {
return false;
}
/**
	 * Does this dialect support returning the {@link org.hibernate.annotations.RowId} column
* after execution of an {@code insert} statement, using native SQL syntax?
*
* @return {@code true} is the dialect supports returning the rowid column
*
* @see #supportsInsertReturning()
* @since 6.5
*/
public boolean supportsInsertReturningRowId() {
return supportsInsertReturning();
}
/**
* Does this dialect fully support returning arbitrary generated column values
* after execution of an {@code update} statement, using native SQL syntax?
* <p>
* Defaults to the value of {@link #supportsInsertReturning()} but can be overridden
* to explicitly disable this for updates.
*
* @see #supportsInsertReturning()
* @since 6.5
*/
public boolean supportsUpdateReturning() {
return supportsInsertReturning();
}
/**
* Does this dialect fully support returning arbitrary generated column values
* after execution of an {@code insert} statement, using the JDBC method
* {@link Connection#prepareStatement(String, String[])}.
* <p>
* Support for returning the generated value of an identity column via the JDBC
* method {@link Connection#prepareStatement(String, int)} is insufficient here.
*
* @return {@code true} if {@link org.hibernate.id.insert.GetGeneratedKeysDelegate}
* works for any sort of primary key column (not just identity columns), or
* {@code false} if {@code GetGeneratedKeysDelegate} does not work, or only
* works for specialized identity/"autoincrement" columns
*
* @see org.hibernate.generator.OnExecutionGenerator#getGeneratedIdentifierDelegate
* @see org.hibernate.id.insert.GetGeneratedKeysDelegate
*
* @since 6.2
*/
public boolean supportsInsertReturningGeneratedKeys() {
return false;
}
/**
* Does this dialect require unquoting identifiers when passing them to the
* {@link Connection#prepareStatement(String, String[])} JDBC method.
*
* @see Dialect#supportsInsertReturningGeneratedKeys()
*/
public boolean unquoteGetGeneratedKeys() {
return false;
}
/**
* Does this dialect support the given {@code FETCH} clause type.
*
* @param type The fetch clause type
* @return {@code true} if the underlying database supports the given
* fetch clause type, {@code false} otherwise.
* The default is {@code false}.
*/
public boolean supportsFetchClause(FetchClauseType type) {
return false;
}
/**
* Does this dialect support window functions like {@code row_number() over (..)}?
*
* @return {@code true} if the underlying database supports window
* functions, {@code false} otherwise.
* The default is {@code false}.
*/
public boolean supportsWindowFunctions() {
return false;
}
/**
* Does this dialect support the SQL {@code lateral} keyword or a
* proprietary alternative?
*
* @return {@code true} if the underlying database supports lateral,
* {@code false} otherwise. The default is {@code false}.
*/
public boolean supportsLateral() {
return false;
}
/**
* The {@link CallableStatementSupport} for this database.
* Does this database support returning cursors?
*/
public CallableStatementSupport getCallableStatementSupport() {
// most databases do not support returning cursors (ref_cursor)...
return StandardCallableStatementSupport.NO_REF_CURSOR_INSTANCE;
}
/**
* The {@linkplain NameQualifierSupport support for qualified identifiers}.
* <p>
* By default, decide based on {@link DatabaseMetaData}.
*
* @return The {@link NameQualifierSupport}, or null to use {@link DatabaseMetaData}.
*/
public NameQualifierSupport getNameQualifierSupport() {
return null;
}
/**
* The strategy used to determine the appropriate number of keys
* to load in a single SQL query with multi-key loading.
* @see org.hibernate.Session#byMultipleIds
* @see org.hibernate.Session#byMultipleNaturalId
*/
public MultiKeyLoadSizingStrategy getMultiKeyLoadSizingStrategy() {
return STANDARD_MULTI_KEY_LOAD_SIZING_STRATEGY;
}
/**
* The strategy used to determine the appropriate number of keys
* to load in a single SQL query with batch-fetch loading.
*
* @implNote By default, the same as {@linkplain #getMultiKeyLoadSizingStrategy}
*
* @see org.hibernate.annotations.BatchSize
*/
public MultiKeyLoadSizingStrategy getBatchLoadSizingStrategy() {
return getMultiKeyLoadSizingStrategy();
}
private int calculateBatchSize(int numberOfColumns, int numberOfKeys, boolean padToPowerOfTwo) {
final int batchSize = padToPowerOfTwo ? ceilingPowerOfTwo( numberOfKeys ) : numberOfKeys;
final int maxBatchSize = getParameterCountLimit() / numberOfColumns;
return maxBatchSize > 0 && batchSize > maxBatchSize ? maxBatchSize : batchSize;
}
protected final MultiKeyLoadSizingStrategy STANDARD_MULTI_KEY_LOAD_SIZING_STRATEGY = this::calculateBatchSize;
/**
* Is JDBC statement warning logging enabled by default?
*
* @since 5.1
*/
public boolean isJdbcLogWarningsEnabledByDefault() {
return true;
}
public void augmentPhysicalTableTypes(List<String> tableTypesList) {
// nothing to do
}
public void augmentRecognizedTableTypes(List<String> tableTypesList) {
// nothing to do
}
/**
	 * Does this dialect support {@code partition by} in window
* functions?
*
* @apiNote This has nothing to do with table partitioning.
*
* @since 5.2
*/
public boolean supportsPartitionBy() {
return false;
}
/**
* Does this dialect require that the columns listed in
* {@code partition by} also occur in the primary key,
* when defining table partitioning?
*
* @apiNote This has nothing to do with window functions.
*
* @since 7.1
*/
@Incubating
public boolean addPartitionKeyToPrimaryKey() {
return false;
}
/**
* Override {@link DatabaseMetaData#supportsNamedParameters()}.
*
* @throws SQLException Accessing the {@link DatabaseMetaData} cause
* an exception. Just rethrow and Hibernate will
* handle it.
*/
public boolean supportsNamedParameters(DatabaseMetaData databaseMetaData) throws SQLException {
return databaseMetaData != null && databaseMetaData.supportsNamedParameters();
}
/**
* Determines whether this database requires the use of explicitly
* nationalized character (Unicode) data types.
* <p>
* That is, whether the use of {@link Types#NCHAR}, {@link Types#NVARCHAR},
* and {@link Types#NCLOB} is required for nationalized character data.
*/
public NationalizationSupport getNationalizationSupport() {
return NationalizationSupport.EXPLICIT;
}
/**
* Checks whether the JDBC driver implements methods for handling nationalized character data types
* {@link ResultSet#getNString(int)} / {@link java.sql.PreparedStatement#setNString(int, String)},
* {@link ResultSet#getNClob(int)} /{@link java.sql.PreparedStatement#setNClob(int, NClob)},
* {@link ResultSet#getNCharacterStream(int)} / {@link java.sql.PreparedStatement#setNCharacterStream(int, Reader, long)}
*
* @return {@code true} if the driver implements these methods
*/
public boolean supportsNationalizedMethods(){
return true;
}
/**
* How does this dialect support aggregate types like {@link SqlTypes#STRUCT}.
*
* @since 6.2
*/
public AggregateSupport getAggregateSupport() {
return AggregateSupportImpl.INSTANCE;
}
/**
* Does the database support user defined types?
*
* @see org.hibernate.annotations.Struct
*
* @since 7.1
*/
public boolean supportsUserDefinedTypes() {
return false;
}
/**
* Does this database have native support for ANSI SQL standard arrays which
* are expressed in terms of the element type name: {@code integer array}.
*
* @implNote Oracle doesn't have this; we must instead use named array types.
*
* @return boolean
* @since 6.1
*/
public boolean supportsStandardArrays() {
return false;
}
/**
 * Does this database prefer to use array types for multi-valued parameters?
 * <p>
 * Only {@code true} when the dialect both supports standard arrays and
 * actually maps arrays to {@link SqlTypes#ARRAY} (rather than, say,
 * {@code VARBINARY}).
 *
 * @return boolean
 *
 * @since 6.3
 */
public boolean useArrayForMultiValuedParameters() {
	return supportsStandardArrays() && getPreferredSqlTypeCodeForArray() == SqlTypes.ARRAY;
}
/**
 * The SQL type name for the array type with elements of the given type name,
 * using the ANSI-standard syntax {@code <element type> array}, with an
 * optional maximum length, e.g. {@code integer array[10]}.
 *
 * @return the array type name, or {@code null} if this dialect has no
 *         standard array types
 *
 * @since 6.1
 */
public String getArrayTypeName(String javaElementTypeName, String elementTypeName, Integer maxLength) {
	if ( !supportsStandardArrays() ) {
		return null;
	}
	final String arraySuffix = maxLength == null ? " array" : " array[" + maxLength + "]";
	return elementTypeName + arraySuffix;
}
/**
 * Append an ANSI-style array literal, {@code ARRAY[e1,e2,...]}, with the
 * given elements to the given {@link SqlAppender}. Null elements are
 * rendered as the keyword {@code null}.
 *
 * @throws UnsupportedOperationException if this dialect does not support
 *         standard arrays
 */
public void appendArrayLiteral(
		SqlAppender appender,
		Object[] literal,
		JdbcLiteralFormatter<Object> elementFormatter,
		WrapperOptions wrapperOptions) {
	if ( !supportsStandardArrays() ) {
		throw new UnsupportedOperationException( getClass().getName() + " does not support array literals" );
	}
	appender.appendSql( "ARRAY[" );
	for ( int index = 0; index < literal.length; index++ ) {
		if ( index > 0 ) {
			// comma before every element except the first
			appender.appendSql( ',' );
		}
		final Object element = literal[index];
		if ( element == null ) {
			appender.appendSql( "null" );
		}
		else {
			elementFormatter.appendJdbcLiteral( appender, element, this, wrapperOptions );
		}
	}
	appender.appendSql( ']' );
}
/**
* Does this dialect support some kind of {@code distinct from} predicate?
* <p>
* That is, does it support syntax like:
* <pre>
* ... where FIRST_NAME IS DISTINCT FROM LAST_NAME
* </pre>
*
* @return True if this SQL dialect is known to support some kind of
* {@code distinct from} predicate; false otherwise
*
* @since 6.1
*/
public boolean supportsDistinctFromPredicate() {
return false;
}
/**
* The JDBC {@linkplain SqlTypes type code} to use for mapping
* properties of basic Java array or {@code Collection} types.
* <p>
* Usually {@link SqlTypes#ARRAY} or {@link SqlTypes#VARBINARY}.
*
* @return one of the type codes defined by {@link SqlTypes}.
*
* @since 6.1
*/
public int getPreferredSqlTypeCodeForArray() {
return supportsStandardArrays() ? ARRAY : VARBINARY;
}
/**
* The JDBC {@linkplain Types type code} to use for mapping
* properties of Java type {@code boolean}.
* <p>
* Usually {@link Types#BOOLEAN} or {@link Types#BIT}.
*
* @return one of the type codes defined by {@link Types}.
*/
public int getPreferredSqlTypeCodeForBoolean() {
return Types.BOOLEAN;
}
/**
* Does this dialect support insert, update, and delete statements
* with Common Table Expressions (CTEs)?
*
* @return {@code true} if non-query statements are supported with CTE
*/
public boolean supportsNonQueryWithCTE() {
return false;
}
/**
* Does this dialect/database support recursive CTEs?
*
* @return {@code true} if recursive CTEs are supported
*
* @since 6.2
*/
public boolean supportsRecursiveCTE() {
return false;
}
/**
* Does this dialect support the {@code conflict} clause for insert statements
* that appear in a CTE?
*
* @return {@code true} if {@code conflict} clause is supported
* @since 6.5
*/
public boolean supportsConflictClauseForInsertCTE() {
return false;
}
/**
* Does this dialect support {@code values} lists of form
* {@code VALUES (1), (2), (3)}?
*
* @return {@code true} if {@code values} list are supported
*/
public boolean supportsValuesList() {
return false;
}
/**
* Does this dialect support {@code values} lists of form
* {@code VALUES (1), (2), (3)} in insert statements?
*
* @return {@code true} if {@code values} list are allowed
* in insert statements
*/
public boolean supportsValuesListForInsert() {
return true;
}
/**
* Does this dialect support the {@code from} clause for update statements?
*
* @return {@code true} if {@code from} clause is supported
* @since 6.5
*/
public boolean supportsFromClauseInUpdate() {
return false;
}
/**
 * Append a string literal, enclosed in single quotes with embedded single
 * quotes escaped, to the given {@link SqlAppender}.
 *
 * @apiNote Needed because MySQL has nonstandard escape characters
 */
public void appendLiteral(SqlAppender appender, String literal) {
	appender.appendSingleQuoteEscapedString( literal );
}
/**
 * Append a binary literal to the given {@link SqlAppender} using the
 * {@code X'...'} hexadecimal string syntax.
 */
public void appendBinaryLiteral(SqlAppender appender, byte[] bytes) {
	appender.appendSql( "X'" );
	// renders each byte as hex digits between the quotes
	PrimitiveByteArrayJavaType.INSTANCE.appendString( appender, bytes );
	appender.appendSql( '\'' );
}
/**
* Check whether the JDBC {@link Connection} supports creating LOBs via
* {@link Connection#createBlob()}, {@link Connection#createNClob()}, or
* {@link Connection#createClob()}.
*
* @param databaseMetaData JDBC {@link DatabaseMetaData} which can be used
* if LOB creation is supported only starting from
* a given driver version
*
* @return {@code true} if LOBs can be created via the JDBC Connection.
*/
public boolean supportsJdbcConnectionLobCreation(DatabaseMetaData databaseMetaData) {
return true;
}
/**
* Check whether the JDBC driver allows setting LOBs via
* {@link PreparedStatement#setBytes(int, byte[])},
* {@link PreparedStatement#setNString(int, String)}, or
* {@link PreparedStatement#setString(int, String)} APIs.
*
* @return {@code true} if LOBs can be set with the materialized APIs.
*
* @since 6.2
*/
public boolean supportsMaterializedLobAccess() {
// Most drivers support this
return true;
}
/**
* Whether to switch:
* <ul>
* <li>from {@code VARCHAR}-like types to {@link SqlTypes#MATERIALIZED_CLOB} types
* when the requested size for a type exceeds the {@link #getMaxVarcharCapacity()},
* <li>from {@code NVARCHAR}-like types to {@link SqlTypes#MATERIALIZED_NCLOB} types
* when the requested size for a type exceeds the {@link #getMaxNVarcharCapacity()},
* and
* <li>from {@code VARBINARY}-like types to {@link SqlTypes#MATERIALIZED_BLOB} types
* when the requested size for a type exceeds the {@link #getMaxVarbinaryCapacity()}.
* </ul>
*
* @return {@code true} if materialized LOBs should be used for capacity exceeding types.
*
* @since 6.2
*/
public boolean useMaterializedLobWhenCapacityExceeded() {
return supportsMaterializedLobAccess();
}
/**
 * Modify the SQL, adding hints or comments, if necessary.
 *
 * @param sql the finalized SQL statement
 * @param queryOptions source of database hints and the optional comment
 * @param commentsEnabled whether SQL comments are enabled at all
 * @return the SQL, possibly decorated with hints and/or a leading comment
 *
 * @see #getQueryHintString(String,List)
 * @see #prependComment
 */
public String addSqlHintOrComment(String sql, QueryOptions queryOptions, boolean commentsEnabled) {
	// Keep this here, rather than moving to Select: some dialects need the
	// hint at the very end or beginning of the finalized SQL statement, so
	// wait until everything else is processed.
	final List<String> databaseHints = queryOptions.getDatabaseHints();
	if ( databaseHints != null && !databaseHints.isEmpty() ) {
		sql = getQueryHintString( sql, databaseHints );
	}
	final String comment = queryOptions.getComment();
	if ( commentsEnabled && comment != null ) {
		sql = prependComment( sql, comment );
	}
	return sql;
}
/**
 * Adds an {@code INDEX} query hint as follows:
 *
 * <pre>
 * SELECT *
 * FROM TEST
 * USE INDEX (hint1, hint2)
 * WHERE X=1
 * </pre>
 *
 * @param query the SQL query to decorate
 * @param hints comma-separated index names to place in the hint clause
 * @return the query with the hint spliced in, or the unmodified query
 *         when it does not match {@code QUERY_PATTERN}
 *
 * @since 7.0
 */
public static String addUseIndexQueryHint(String query, String hints) {
	final Matcher matcher = QUERY_PATTERN.matcher( query );
	if ( matcher.matches() && matcher.groupCount() > 1 ) {
		// group(1) captures the leading part of the statement (presumably up to
		// and including the FROM clause — see QUERY_PATTERN); the hint clause
		// is inserted immediately after it
		final String startToken = matcher.group( 1 );
		return startToken + " use index (" + hints + ")" + query.substring( startToken.length() );
	}
	else {
		return query;
	}
}
/**
 * Prepend a comment to the given SQL fragment, wrapped in the standard
 * SQL block-comment delimiters, after escaping the comment text.
 *
 * @see #escapeComment(String)
 */
protected String prependComment(String sql, String comment) {
	return "/* " + escapeComment( comment ) + " */ " + sql;
}
/**
 * Perform necessary character escaping on the text of the comment.
 * <p>
 * Rewrites any closing {@code *}{@code /} sequence as {@code *}{@code \/}
 * and any opening {@code /}{@code *} sequence as {@code /}{@code \*}, so
 * the text cannot prematurely terminate, or appear to nest, the SQL
 * comment it is embedded in. Empty or null comments are returned as-is.
 */
public static String escapeComment(String comment) {
	if ( isNotEmpty( comment ) ) {
		// note: these are regex *replacement* strings, so the Java literal
		// "*\\\\/" produces the replacement text containing a single backslash
		final String escaped = ESCAPE_CLOSING_COMMENT_PATTERN.matcher( comment ).replaceAll( "*\\\\/" );
		return ESCAPE_OPENING_COMMENT_PATTERN.matcher( escaped ).replaceAll( "/\\\\*" );
	}
	return comment;
}
/**
* Return an {@link HqlTranslator} specific to this dialect, or {@code null}
* to use the {@linkplain org.hibernate.query.hql.internal.StandardHqlTranslator
* standard translator}.
* <p>
* Note that {@link SessionFactoryOptions#getCustomHqlTranslator()} has higher
* precedence since it comes directly from the user config.
*
* @see org.hibernate.query.hql.internal.StandardHqlTranslator
* @see org.hibernate.query.spi.QueryEngine#getHqlTranslator()
*/
public HqlTranslator getHqlTranslator() {
return null;
}
/**
* Return a {@link SqmTranslatorFactory} specific to this dialect, or {@code null}
* to use the {@linkplain org.hibernate.query.sqm.sql.internal.StandardSqmTranslator
* standard translator}.
* <p>
* Note that {@link SessionFactoryOptions#getCustomSqmTranslatorFactory()} has higher
* precedence since it comes directly from the user config.
*
* @see org.hibernate.query.sqm.sql.internal.StandardSqmTranslator
* @see org.hibernate.query.spi.QueryEngine#getSqmTranslatorFactory()
*/
public SqmTranslatorFactory getSqmTranslatorFactory() {
return null;
}
/**
* Return a {@link SqlAstTranslatorFactory} specific to this dialect, or {@code null}
* to use the {@linkplain org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory
* standard translator}.
*
* @see org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory
* @see JdbcEnvironment#getSqlAstTranslatorFactory()
*/
public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
return null;
}
/**
* Determine how selected items are referenced in the {@code group by} clause.
*/
public SelectItemReferenceStrategy getGroupBySelectItemReferenceStrategy() {
return SelectItemReferenceStrategy.EXPRESSION;
}
/**
* A custom {@link SizeStrategy} for column types.
*/
public SizeStrategy getSizeStrategy() {
return sizeStrategy;
}
/**
* The biggest size value that can be supplied as argument to a
* {@link java.sql.Types#VARCHAR}-like type.
* <p>
* For longer column lengths, use some sort of {@code text}-like
* type for the column.
*/
public int getMaxVarcharLength() {
//the longest possible length of a Java string
return Length.LONG32;
}
/**
* The biggest size value that can be supplied as argument to a
* {@link java.sql.Types#NVARCHAR}-like type.
* <p>
* For longer column lengths, use some sort of {@code ntext}-like
* type for the column.
*/
public int getMaxNVarcharLength() {
//for most databases it's the same as for VARCHAR
return getMaxVarcharLength();
}
/**
* The biggest size value that can be supplied as argument to a
* {@link java.sql.Types#VARBINARY}-like type.
* <p>
* For longer column lengths, use some sort of {@code image}-like
* type for the column.
*/
public int getMaxVarbinaryLength() {
//for most databases it's the same as for VARCHAR
return getMaxVarcharLength();
}
/**
* The longest possible length of a {@link java.sql.Types#VARCHAR}-like
* column.
* <p>
* For longer column lengths, use some sort of {@code clob}-like type
* for the column.
*/
public int getMaxVarcharCapacity() {
return getMaxVarcharLength();
}
/**
* The longest possible length of a {@link java.sql.Types#NVARCHAR}-like
* column.
* <p>
* For longer column lengths, use some sort of {@code nclob}-like type
* for the column.
*/
public int getMaxNVarcharCapacity() {
return getMaxNVarcharLength();
}
/**
* The longest possible length of a {@link java.sql.Types#VARBINARY}-like
* column.
* <p>
* For longer column lengths, use some sort of {@code blob}-like type for
* the column.
*/
public int getMaxVarbinaryCapacity() {
return getMaxVarbinaryLength();
}
/**
* This is the default length for a generated column of type
* {@link SqlTypes#BLOB BLOB} or {@link SqlTypes#CLOB CLOB}
* mapped to {@link Blob} or {@link Clob}, if LOB columns
* have a length in this dialect.
*
* @return {@value Size#DEFAULT_LOB_LENGTH} by default
*
* @see Length#LOB_DEFAULT
* @see org.hibernate.type.descriptor.java.BlobJavaType
* @see org.hibernate.type.descriptor.java.ClobJavaType
*/
public long getDefaultLobLength() {
return Size.DEFAULT_LOB_LENGTH;
}
/**
* This is the default precision for a generated column of
* exact numeric type {@link SqlTypes#DECIMAL DECIMAL} or
* {@link SqlTypes#NUMERIC NUMERIC} mapped to a
* {@link java.math.BigInteger} or
* {@link java.math.BigDecimal}.
* <p>
* Usually returns the maximum precision of the
* database, except when there is no such maximum
* precision, or the maximum precision is very high.
*
* @return the default precision, in decimal digits
*
* @see org.hibernate.type.descriptor.java.BigDecimalJavaType
* @see org.hibernate.type.descriptor.java.BigIntegerJavaType
*/
public int getDefaultDecimalPrecision() {
//this is the maximum for Oracle, SQL Server,
//Sybase, and Teradata, so it makes a reasonable
//default (uses 17 bytes on SQL Server and MySQL)
return 38;
}
/**
* This is the default precision for a generated column of
* type {@link SqlTypes#TIMESTAMP TIMESTAMP} mapped to a
* {@link Timestamp} or {@link java.time.LocalDateTime}.
* <p>
* Usually 6 (microseconds) or 3 (milliseconds).
*
* @return the default precision, in decimal digits,
* of the fractional seconds field
*
* @see org.hibernate.type.descriptor.java.JdbcTimestampJavaType
* @see org.hibernate.type.descriptor.java.LocalDateTimeJavaType
* @see org.hibernate.type.descriptor.java.OffsetDateTimeJavaType
* @see org.hibernate.type.descriptor.java.ZonedDateTimeJavaType
* @see org.hibernate.type.descriptor.java.InstantJavaType
*/
public int getDefaultTimestampPrecision() {
//milliseconds or microseconds is the maximum
//for most dialects that support explicit
//precision, with the exception of Oracle,
//which accepts up to 9 digits, and DB2 which
//accepts up to 12 digits!
return 6; //microseconds!
}
/**
* This is the default scale for a generated column of type
* {@link SqlTypes#INTERVAL_SECOND INTERVAL SECOND} mapped
* to a {@link Duration}.
* <p>
* Usually 9 (nanoseconds) or 6 (microseconds).
*
* @return the default scale, in decimal digits,
* of the fractional seconds field
*
* @see org.hibernate.type.descriptor.java.DurationJavaType
*/
public int getDefaultIntervalSecondScale(){
// The default scale necessary is 9 i.e. nanosecond resolution
return 9;
}
/**
* Does this dialect round a temporal when converting from a precision higher to a lower one?
*
* @return true if rounding is applied, false if truncation is applied
*/
public boolean doesRoundTemporalOnOverflow() {
return true;
}
/**
* This is the default precision for a generated
* column mapped to a Java {@link Float} or
* {@code float}. That is, a value representing
* "single precision".
* <p>
* Usually 24 binary digits, at least for
* databases with a conventional interpretation
* of the ANSI SQL specification.
*
* @return a value representing "single precision",
* usually in binary digits, but sometimes
* in decimal digits
*/
public int getFloatPrecision() {
return 24;
}
/**
* This is the default precision for a generated
* column mapped to a Java {@link Double} or
* {@code double}. That is, a value representing
* "double precision".
* <p>
* Usually 53 binary digits, at least for
* databases with a conventional interpretation
* of the ANSI SQL specification.
*
* @return a value representing "double precision",
* usually in binary digits, but sometimes
* in decimal digits
*/
public int getDoublePrecision() {
return 53;
}
/**
* The "native" precision for arithmetic with datetimes
* and day-to-second durations. Datetime differences
* will be calculated with this precision except when a
* precision is explicitly specified as a
* {@link TemporalUnit}.
* <p>
* Usually 1 (nanoseconds), 1_000 (microseconds), or
* 1_000_000 (milliseconds).
*
* @return the precision, specified as a quantity of
* nanoseconds
*
* @see TemporalUnit#NATIVE
*
* @implNote Getting this right is very important. It
* would be great if all platforms supported
* datetime arithmetic with nanosecond
* precision, since that is how we represent
* {@link Duration}. But they don't, and we
* don't want to fill up the SQL expression
* with many conversions to/from nanoseconds.
* (Not to mention the problems with numeric
* overflow that this sometimes causes.) So
* we need to pick the right value here,
* and implement {@link #timestampaddPattern}
* and {@link #timestampdiffPattern} consistent
* with our choice.
*/
public long getFractionalSecondPrecisionInNanos() {
return 1; //default to nanoseconds for now
}
/**
* Does this dialect have a true SQL {@link Types#BIT BIT} type
* with just two values (0 and 1) or, even better, a proper SQL
* {@link Types#BOOLEAN BOOLEAN} type, or does {@link Types#BIT}
* get mapped to a numeric type with more than two values?
*
* @return true if there is a {@code BIT} or {@code BOOLEAN} type
*/
public boolean supportsBitType() {
return true;
}
/**
* Whether a predicate like {@code a > 0} can appear in an expression
* context, for example, in a {@code select} list item.
*/
protected boolean supportsPredicateAsExpression() {
// Most databases seem to allow that
return true;
}
/**
 * Obtain a {@link RowLockStrategy} for the given {@link LockMode}.
 * <p>
 * Pessimistic read locks map to the {@linkplain #getReadRowLockStrategy
 * read strategy}; all write-level lock modes map to the
 * {@linkplain #getWriteRowLockStrategy write strategy}; any other mode
 * acquires no row lock.
 */
public RowLockStrategy getLockRowIdentifier(LockMode lockMode) {
	return switch (lockMode) {
		case PESSIMISTIC_READ ->
				getReadRowLockStrategy();
		case WRITE, PESSIMISTIC_FORCE_INCREMENT, PESSIMISTIC_WRITE, UPGRADE_SKIPLOCKED, UPGRADE_NOWAIT ->
				getWriteRowLockStrategy();
		default -> RowLockStrategy.NONE;
	};
}
/**
 * The {@code generated as} clause, or similar, for generated column
 * declarations in DDL statements.
 * <p>
 * This default implementation emits a stored generated column:
 * {@code generated always as (<expression>) stored}.
 *
 * @param generatedAs a SQL expression used to generate the column value
 * @return The {@code generated as} clause containing the given expression
 */
public String generatedAs(String generatedAs) {
	return " generated always as (" + generatedAs + ") stored";
}
/**
* Is an explicit column type required for {@code generated as} columns?
*
* @return {@code true} if an explicit type is required
*/
public boolean hasDataTypeBeforeGeneratedAs() {
return true;
}
/**
* Create a {@link MutationOperation} for a updating an optional table
*/
public MutationOperation createOptionalTableUpdateOperation(
EntityMutationTarget mutationTarget,
OptionalTableUpdate optionalTableUpdate,
SessionFactoryImplementor factory) {
return new OptionalTableUpdateOperation( mutationTarget, optionalTableUpdate, factory );
}
/**
* Is there some way to disable foreign key constraint checking while
* truncating tables? (If there's no way to do it, and if we can't
* {@linkplain #canBatchTruncate() batch truncate}, we must drop and
* recreate the constraints instead.)
*
* @return {@code true} if there is some way to do it
*
* @see #getDisableConstraintsStatement()
* @see #getDisableConstraintStatement(String, String)
*/
public boolean canDisableConstraints() {
return false;
}
/**
* A SQL statement that temporarily disables foreign key constraint
* checking for all tables.
*/
public String getDisableConstraintsStatement() {
return null;
}
/**
* A SQL statement that re-enables foreign key constraint checking for
* all tables.
*/
public String getEnableConstraintsStatement() {
return null;
}
/**
* A SQL statement that temporarily disables checking of the given
* foreign key constraint.
*
* @param tableName the name of the table
* @param name the name of the constraint
*/
public String getDisableConstraintStatement(String tableName, String name) {
return null;
}
/**
* A SQL statement that re-enables checking of the given foreign key
* constraint.
*
* @param tableName the name of the table
* @param name the name of the constraint
*/
public String getEnableConstraintStatement(String tableName, String name) {
return null;
}
/**
* Does the {@link #getTruncateTableStatement(String) truncate table}
* statement accept multiple tables?
*
* @return {@code true} if it does
*/
public boolean canBatchTruncate() {
return false;
}
/**
 * A SQL statement or statements that truncate the given tables.
 * <p>
 * When the dialect {@linkplain #canBatchTruncate can batch truncate},
 * a single statement naming every table is produced; otherwise one
 * statement per table.
 *
 * @param tableNames the names of the tables
 */
public String[] getTruncateTableStatements(String[] tableNames) {
	if ( canBatchTruncate() ) {
		// a single statement listing all the tables, comma-separated
		return new String[] { getTruncateTableStatement( String.join( ", ", tableNames ) ) };
	}
	// one truncate statement per table
	final String[] statements = new String[tableNames.length];
	for ( int i = 0; i < tableNames.length; i++ ) {
		statements[i] = getTruncateTableStatement( tableNames[i] );
	}
	return statements;
}
/**
* A SQL statement that truncates the given table.
*
* @param tableName the name of the table
*/
public String getTruncateTableStatement(String tableName) {
return "truncate table " + tableName;
}
/**
* Support for native parameter markers.
* <p/>
* This is generally dependent on both the database and the driver.
*
* @return May return {@code null} to indicate that the JDBC
* {@linkplain ParameterMarkerStrategyStandard standard} strategy should be used
*/
public ParameterMarkerStrategy getNativeParameterMarkerStrategy() {
return null;
}
/**
* Whether this Dialect supports {@linkplain PreparedStatement#addBatch() batch updates}.
*
* @return {@code true} indicates it does; {@code false} indicates it does not; {@code null} indicates
* it might and that database-metadata should be consulted.
*
* @see org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData#supportsBatchUpdates
*/
public Boolean supportsBatchUpdates() {
// are there any databases/drivers which don't?
return true;
}
/**
* Whether this Dialect supports the JDBC {@link java.sql.Types#REF_CURSOR} type.
*
* @return {@code true} indicates it does; {@code false} indicates it does not; {@code null} indicates
* it might and that database-metadata should be consulted
*
* @see org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData#supportsRefCursors
*/
public Boolean supportsRefCursors() {
return null;
}
/**
* Returns the default name of the ordinality column for a set-returning function
* if it supports that, otherwise returns {@code null}.
*/
public @Nullable String getDefaultOrdinalityColumnName() {
return null;
}
/**
* Pluggable strategy for determining the {@link Size} to use for
* columns of a given SQL type.
* <p>
* Allows dialects, integrators, and users a chance to apply column
* size defaults and limits in certain situations based on the mapped
* SQL and Java types. For example, when mapping a {@code UUID} to a
* {@code VARCHAR} column, we know the default {@code Size} should
* have {@link Size#getLength() Size.length == 36}.
*/
public | on |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/model/ast/TableUpdate.java | {
"start": 518,
"end": 1616
/**
 * A table mutation describing an update: the column/value assignments to
 * apply, plus any columns whose values should be returned to the caller.
 * Extends {@link RestrictedTableMutation} with the update's value bindings.
 */
interface ____<O extends MutationOperation>
		extends RestrictedTableMutation<O> {
	/**
	 * The value bindings for each column.
	 *
	 * @implNote Table key column(s) are not included here as
	 * those are not ever updated
	 */
	List<ColumnValueBinding> getValueBindings();
	/**
	 * The number of value bindings
	 *
	 * @see #getValueBindings()
	 */
	default int getNumberOfValueBindings() {
		return getValueBindings().size();
	}
	/**
	 * Visit each value binding, receiving its position and the binding itself.
	 *
	 * @see #getValueBindings()
	 */
	void forEachValueBinding(BiConsumer<Integer, ColumnValueBinding> consumer);
	/**
	 * The columns to return from the insert.
	 */
	List<ColumnReference> getReturningColumns();
	/**
	 * The number of columns being returned; zero when the list is null or empty.
	 *
	 * @see #getReturningColumns
	 */
	default int getNumberOfReturningColumns() {
		final List<ColumnReference> returningColumns = getReturningColumns();
		return CollectionHelper.size( returningColumns );
	}
	/**
	 * Visit each return-column, receiving its position and the column reference.
	 *
	 * @see #getReturningColumns
	 */
	void forEachReturningColumn(BiConsumer<Integer,ColumnReference> consumer);
}
| TableUpdate |
java | google__guice | extensions/persist/src/com/google/inject/persist/jpa/JpaLocalTxnInterceptor.java | {
"start": 1045,
"end": 1310
} | class ____ implements MethodInterceptor {
// TODO(gak): Move these args to the cxtor & make these final.
@Inject private JpaPersistService emProvider = null;
@Inject private UnitOfWork unitOfWork = null;
@Transactional
private static | JpaLocalTxnInterceptor |
java | google__dagger | dagger-android-proguard-processor/main/java/dagger/android/internal/proguard/ProguardProcessor.java | {
"start": 1625,
"end": 2053
/**
 * Annotation processor that delegates its work to a single
 * {@link ProguardProcessingStep}, built on XProcessing's javac adapter.
 */
class ____ extends JavacBasicAnnotationProcessor {
	// processing environment captured in initialize() and handed to the step
	private XProcessingEnv env;
	@Override
	public void initialize(XProcessingEnv env) {
		this.env = env;
	}
	@Override
	public Iterable<XProcessingStep> processingSteps() {
		return ImmutableList.of(new ProguardProcessingStep(env));
	}
	@Override
	public SourceVersion getSupportedSourceVersion() {
		// support whatever source version this JDK supports
		return SourceVersion.latestSupported();
	}
}
| ProguardProcessor |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/graph/BaseRepartitionNode.java | {
"start": 3877,
"end": 6306
/**
 * Fluent builder collecting the serdes, sink/source/topic names, processor
 * parameters, and partitioning details needed to construct a concrete
 * {@link BaseRepartitionNode} subtype. Every {@code withX} method stores
 * its argument and returns {@code this} for chaining.
 */
class ____<K, V, RepartitionNode extends BaseRepartitionNode<K, V>> {
	protected String nodeName;
	protected ProcessorParameters<K, V, K, V> processorParameters;
	protected Serde<K> keySerde;
	protected Serde<V> valueSerde;
	protected String sinkName;
	protected String sourceName;
	protected String repartitionTopic;
	protected StreamPartitioner<K, V> partitioner;
	// defaults to empty properties when never explicitly configured
	protected InternalTopicProperties internalTopicProperties = InternalTopicProperties.empty();
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withProcessorParameters(final ProcessorParameters<K, V, K, V> processorParameters) {
		this.processorParameters = processorParameters;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withKeySerde(final Serde<K> keySerde) {
		this.keySerde = keySerde;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withValueSerde(final Serde<V> valueSerde) {
		this.valueSerde = valueSerde;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withSinkName(final String sinkName) {
		this.sinkName = sinkName;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withSourceName(final String sourceName) {
		this.sourceName = sourceName;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withRepartitionTopic(final String repartitionTopic) {
		this.repartitionTopic = repartitionTopic;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withStreamPartitioner(final StreamPartitioner<K, V> partitioner) {
		this.partitioner = partitioner;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withNodeName(final String nodeName) {
		this.nodeName = nodeName;
		return this;
	}
	public BaseRepartitionNodeBuilder<K, V, RepartitionNode> withInternalTopicProperties(final InternalTopicProperties internalTopicProperties) {
		this.internalTopicProperties = internalTopicProperties;
		return this;
	}
	/** Creates the concrete repartition node from the collected state. */
	public abstract RepartitionNode build();
}
}
| BaseRepartitionNodeBuilder |
java | quarkusio__quarkus | core/runtime/src/test/java/io/quarkus/logging/GenerateLog.java | {
"start": 1645,
"end": 7553
} | class ____, during build time, replaced by invocations\n" +
"of the same methods on a generated instance of {@link Logger}.";
public static void main(String[] args) throws Exception {
String source = BasicLogger.class.getProtectionDomain().getCodeSource().getLocation().getPath();
Matcher matcher = Pattern.compile("\\d+\\.\\d+\\.\\d+\\.Final").matcher(source);
if (matcher.find()) {
String version = matcher.group();
String url = "https://raw.githubusercontent.com/jboss-logging/jboss-logging/" + version
+ "/src/main/java/org/jboss/logging/BasicLogger.java";
HttpClient client = HttpClient.newBuilder()
.followRedirects(HttpClient.Redirect.NORMAL)
.connectTimeout(Duration.ofSeconds(10))
.build();
HttpRequest request = HttpRequest.newBuilder(new URI(url)).build();
HttpResponse<String> response = client.send(request, BodyHandlers.ofString(StandardCharsets.UTF_8));
if (response.statusCode() == 200) {
generateLogClass(response.body());
} else {
throw new Exception("Failed fetching " + url);
}
} else {
throw new Exception("Couldn't find JBoss Logging version in " + source);
}
}
private static void generateLogClass(String templateSource) {
CompilationUnit templateUnit = StaticJavaParser.parse(templateSource);
ClassOrInterfaceDeclaration templateClass = (ClassOrInterfaceDeclaration) templateUnit.getTypes().get(0);
CompilationUnit unit = new CompilationUnit();
unit.setPackageDeclaration("io.quarkus.logging");
unit.addImport(Logger.class);
unit.addImport(Application.class);
unit.addOrphanComment(new LineComment(" automatically generated by io.quarkus.logging.GenerateLog"));
ClassOrInterfaceDeclaration clazz = unit.addClass("Log", Modifier.Keyword.PUBLIC, Modifier.Keyword.FINAL)
.setJavadocComment(CLASS_JAVADOC);
clazz.addFieldWithInitializer("StackWalker", "stackWalker",
StaticJavaParser.parseExpression("StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE)"),
Modifier.Keyword.PRIVATE, Modifier.Keyword.STATIC, Modifier.Keyword.FINAL);
clazz.addFieldWithInitializer(PrimitiveType.booleanType(), "shouldFail",
StaticJavaParser.parseExpression("shouldFail()"),
Modifier.Keyword.PRIVATE, Modifier.Keyword.STATIC, Modifier.Keyword.FINAL);
{
MethodDeclaration method = clazz.addMethod("shouldFail");
method.setPrivate(true);
method.setStatic(true);
method.setType(PrimitiveType.booleanType());
BlockStmt body = new BlockStmt();
Expression ifCondition = StaticJavaParser.parseExpression("Application.currentApplication() != null");
Statement thenPart = StaticJavaParser.parseStatement("return true;");
body.addOrphanComment(new LineComment(" inside Quarkus, all call sites should be rewritten"));
body.addStatement(new IfStmt(ifCondition, thenPart, null));
BlockStmt tryPart = new BlockStmt();
tryPart.addStatement("Class.forName(\"org.junit.jupiter.api.Assertions\");");
tryPart.addStatement("return false;");
BlockStmt catchPart = new BlockStmt();
catchPart.addStatement("return true;");
CatchClause catchClause = new CatchClause(
new Parameter(StaticJavaParser.parseType(ClassNotFoundException.class.getName()), "ignored"),
catchPart);
body.addOrphanComment(new LineComment(" outside Quarkus, allow in tests"));
body.addStatement(new TryStmt(tryPart, new NodeList<>(catchClause), null));
method.setBody(body);
}
for (MethodDeclaration methodTemplate : templateClass.getMethods()) {
MethodDeclaration method = clazz.addMethod(methodTemplate.getNameAsString());
method.setJavadocComment(methodTemplate.getJavadoc().orElseThrow());
method.setPublic(true);
method.setStatic(true);
method.setType(methodTemplate.getType());
method.setParameters(methodTemplate.getParameters());
BlockStmt body = new BlockStmt();
body.addStatement("if (shouldFail) { throw fail(); }");
Expression logger = StaticJavaParser
.parseExpression("Logger.getLogger(stackWalker.getCallerClass())");
List<Expression> forwardParams = methodTemplate.getParameters()
.stream()
.map(NodeWithSimpleName::getNameAsExpression)
.collect(Collectors.toList());
MethodCallExpr forwardCall = new MethodCallExpr(logger, methodTemplate.getName().getIdentifier(),
new NodeList<>(forwardParams));
if (methodTemplate.getType().isVoidType()) {
body.addStatement(forwardCall);
} else {
body.addStatement(new ReturnStmt(forwardCall));
}
method.setBody(body);
}
{
MethodDeclaration method = clazz.addMethod("fail", Modifier.Keyword.PRIVATE, Modifier.Keyword.STATIC);
method.setType(UnsupportedOperationException.class);
BlockStmt body = new BlockStmt();
body.addStatement("return new UnsupportedOperationException(\"Using " + Log.class.getName()
+ " is only possible with Quarkus bytecode transformation;"
+ " make sure the archive is indexed, for example by including a beans.xml file\");");
method.setBody(body);
}
System.out.println(unit);
}
}
| are |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/TestTypedRootValueSerialization.java | {
"start": 376,
"end": 651
} | interface ____ {
public int getA();
}
// If this annotation is added, things will work:
//@tools.jackson.databind.annotation.JsonSerialize(as=Issue822Interface.class)
// but it should not be necessary when root type is passed
static | Issue822Interface |
java | google__dagger | javatests/artifacts/hilt-android/simple/app/src/sharedTest/java/dagger/hilt/android/simple/AliasOfMultipleScopesTest.java | {
"start": 2640,
"end": 2720
} | interface ____ {}
@Module
@InstallIn(SingletonComponent.class)
| AliasScopedDep |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/security/HttpUpgradeDenyAllAnnotationTest.java | {
"start": 2573,
"end": 2966
} | class ____ {
@Inject
UserService userService;
@Inject
AdminService adminService;
@OnTextMessage
String echo(String message) {
return message.equals("hello") ? adminService.ping() : userService.ping();
}
@OnError
String error(ForbiddenException t) {
return "forbidden";
}
}
}
| Endpoint |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/immutable/Contract.java | {
"start": 273,
"end": 2483
} | class ____ implements Serializable {
private long id;
private long version;
private String customerName;
private String type;
private List variations;
private Contract parent;
private Set subcontracts;
private Set plans;
private Set parties;
private Set infos;
public Contract() {
super();
}
public Contract(Plan plan, String customerName, String type) {
plans = new HashSet();
if ( plan != null ) {
plans.add( plan );
plan.getContracts().add( this );
}
this.customerName = customerName;
this.type = type;
variations = new ArrayList();
subcontracts = new HashSet();
parties = new HashSet();
infos = new HashSet();
}
public long getVersion() {
return version;
}
public void setVersion(long version) {
this.version = version;
}
public Set getPlans() {
return plans;
}
public void setPlans(Set plans) {
this.plans = plans;
}
public String getCustomerName() {
return customerName;
}
public void setCustomerName(String customerName) {
this.customerName = customerName;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public List<ContractVariation> getVariations() {
return variations;
}
public void setVariations(List variations) {
this.variations = variations;
}
public Contract getParent() {
return parent;
}
public void setParent(Contract parent) {
this.parent = parent;
}
public Set getSubcontracts() {
return subcontracts;
}
public void setSubcontracts(Set subcontracts) {
this.subcontracts = subcontracts;
}
public void addSubcontract(Contract subcontract) {
subcontracts.add( subcontract );
subcontract.setParent( this );
}
public Set getParties() {
return parties;
}
public void setParties(Set parties) {
this.parties = parties;
}
public void addParty(Party party) {
parties.add( party );
party.setContract( this );
}
public void removeParty(Party party) {
parties.remove( party );
party.setContract( null );
}
public Set getInfos() {
return infos;
}
public void setInfos(Set infos) {
this.infos = infos;
}
}
| Contract |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/ide/IdeProcessor.java | {
"start": 8753,
"end": 10232
} | class ____ {
// the executable pathname of the process.
private final String command;
private final String commandLine;
private final String[] arguments;
public ProcessInfo(String command, String commandLine, String[] arguments) {
this.command = command;
this.commandLine = commandLine;
this.arguments = arguments;
}
public String getCommand() {
return command;
}
public String getCommandLine() {
return commandLine;
}
public String[] getArguments() {
return arguments;
}
private boolean containInCommand(String value) {
return this.command.contains(value) || this.commandLine.contains(value);
}
private boolean containInArguments(String value) {
if (arguments != null) {
for (String argument : arguments) {
if (argument.contains(value)) {
return true;
}
}
}
return false;
}
private String getArgumentThatContains(String contain) {
if (arguments != null) {
for (String argument : arguments) {
if (argument.contains(contain)) {
return argument;
}
}
}
return null;
}
}
}
| ProcessInfo |
java | apache__rocketmq | common/src/main/java/org/apache/rocketmq/common/message/MessageExtBrokerInner.java | {
"start": 1115,
"end": 3991
} | class ____ extends MessageExt {
private static final long serialVersionUID = 7256001576878700634L;
private String propertiesString;
private long tagsCode;
private ByteBuffer encodedBuff;
private volatile boolean encodeCompleted;
private MessageVersion version = MessageVersion.MESSAGE_VERSION_V1;
public ByteBuffer getEncodedBuff() {
return encodedBuff;
}
public void setEncodedBuff(ByteBuffer encodedBuff) {
this.encodedBuff = encodedBuff;
}
public static long tagsString2tagsCode(final TopicFilterType filter, final String tags) {
if (Strings.isNullOrEmpty(tags)) { return 0; }
return tags.hashCode();
}
public static long tagsString2tagsCode(final String tags) {
return tagsString2tagsCode(null, tags);
}
public String getPropertiesString() {
return propertiesString;
}
public void setPropertiesString(String propertiesString) {
this.propertiesString = propertiesString;
}
public void deleteProperty(String name) {
super.clearProperty(name);
if (propertiesString != null) {
this.setPropertiesString(MessageUtils.deleteProperty(propertiesString, name));
}
}
public long getTagsCode() {
return tagsCode;
}
public void setTagsCode(long tagsCode) {
this.tagsCode = tagsCode;
}
public MessageVersion getVersion() {
return version;
}
public void setVersion(MessageVersion version) {
this.version = version;
}
public void removeWaitStorePropertyString() {
if (this.getProperties().containsKey(MessageConst.PROPERTY_WAIT_STORE_MSG_OK)) {
// There is no need to store "WAIT=true", remove it from propertiesString to save 9 bytes for each message.
// It works for most case. In some cases msgInner.setPropertiesString invoked later and replace it.
String waitStoreMsgOKValue = this.getProperties().remove(MessageConst.PROPERTY_WAIT_STORE_MSG_OK);
this.setPropertiesString(MessageDecoder.messageProperties2String(this.getProperties()));
// Reput to properties, since msgInner.isWaitStoreMsgOK() will be invoked later
this.getProperties().put(MessageConst.PROPERTY_WAIT_STORE_MSG_OK, waitStoreMsgOKValue);
} else {
this.setPropertiesString(MessageDecoder.messageProperties2String(this.getProperties()));
}
}
public boolean isEncodeCompleted() {
return encodeCompleted;
}
public void setEncodeCompleted(boolean encodeCompleted) {
this.encodeCompleted = encodeCompleted;
}
public boolean needDispatchLMQ() {
return StringUtils.isNoneBlank(getProperty(MessageConst.PROPERTY_INNER_MULTI_DISPATCH))
&& MixAll.topicAllowsLMQ(getTopic());
}
}
| MessageExtBrokerInner |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/DataSourceJsonSerializationTests.java | {
"start": 2865,
"end": 3592
} | class ____ extends ValueSerializer<DataSource> {
private final ConversionService conversionService = new DefaultConversionService();
@Override
public void serialize(DataSource value, JsonGenerator jgen, SerializationContext context) {
jgen.writeStartObject();
for (PropertyDescriptor property : BeanUtils.getPropertyDescriptors(DataSource.class)) {
Method reader = property.getReadMethod();
if (reader != null && property.getWriteMethod() != null
&& this.conversionService.canConvert(String.class, property.getPropertyType())) {
jgen.writePOJOProperty(property.getName(), ReflectionUtils.invokeMethod(reader, value));
}
}
jgen.writeEndObject();
}
}
static | TomcatDataSourceSerializer |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/sample/AuditableEmbeddable.java | {
"start": 1031,
"end": 1462
} | class ____ {
@CreatedDate //
private Instant dateCreated;
@LastModifiedDate //
private Instant dateUpdated;
public Instant getDateCreated() {
return dateCreated;
}
public void setDateCreated(Instant dateCreated) {
this.dateCreated = dateCreated;
}
public Instant getDateUpdated() {
return dateUpdated;
}
public void setDateUpdated(Instant dateUpdated) {
this.dateUpdated = dateUpdated;
}
}
| AuditableEmbeddable |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/BoxedPrimitiveConstructorTest.java | {
"start": 9051,
"end": 9518
} | class ____ {
void f(float f) {
// BUG: Diagnostic contains: (double) f;
Double d = new Double(f);
// BUG: Diagnostic contains: (short) (byte) 0;
Short s = new Short((byte) 0);
}
}
""")
.doTest();
}
@Test
public void autoboxGenerics() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/disallowdotsonnames/PersonMapper.java | {
"start": 739,
"end": 907
} | interface ____ {
Person selectByIdFlush(int id);
Person selectByIdNoFlush(int id);
List<Person> selectAllFlush();
List<Person> selectAllNoFlush();
}
| PersonMapper |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/resource/bitmap/BitmapDrawableResourceTest.java | {
"start": 785,
"end": 1686
} | class ____ {
private BitmapDrawableResourceHarness harness;
@Before
public void setUp() {
harness = new BitmapDrawableResourceHarness();
}
@Test
public void testReturnsGivenBitmapFromGet() {
assertEquals(harness.bitmap, harness.create().get().getBitmap());
}
@Test
public void testReturnsDifferentDrawableEachTime() {
BitmapDrawableResource resource = harness.create();
BitmapDrawable first = resource.get();
BitmapDrawable second = resource.get();
assertNotSame(first, second);
}
@Test
public void testReturnsSizeFromGivenBitmap() {
assertEquals(
harness.bitmap.getHeight() * harness.bitmap.getRowBytes(), harness.create().getSize());
}
@Test
public void testBitmapIsReturnedToPoolOnRecycle() {
harness.create().recycle();
verify(harness.bitmapPool).put(eq(harness.bitmap));
}
private static | BitmapDrawableResourceTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/failover/RestartAllFailoverStrategyTest.java | {
"start": 1304,
"end": 2176
} | class ____ {
@Test
void testGetTasksNeedingRestart() {
final TestingSchedulingTopology topology = new TestingSchedulingTopology();
final TestingSchedulingExecutionVertex v1 = topology.newExecutionVertex();
final TestingSchedulingExecutionVertex v2 = topology.newExecutionVertex();
final TestingSchedulingExecutionVertex v3 = topology.newExecutionVertex();
topology.connect(v1, v2, ResultPartitionType.PIPELINED);
topology.connect(v2, v3, ResultPartitionType.BLOCKING);
final RestartAllFailoverStrategy strategy = new RestartAllFailoverStrategy(topology);
assertThat(new HashSet<>(Arrays.asList(v1.getId(), v2.getId(), v3.getId())))
.isEqualTo(
strategy.getTasksNeedingRestart(v1.getId(), new Exception("Test failure")));
}
}
| RestartAllFailoverStrategyTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeAction.java | {
"start": 7146,
"end": 7709
} | class ____ extends SimpleBatchedExecutor<UpdateModeStateListener, Void> {
@Override
public Tuple<ClusterState, Void> executeTask(UpdateModeStateListener clusterStateListener, ClusterState clusterState) {
return Tuple.tuple(createUpdatedState(clusterStateListener.request(), clusterState), null);
}
@Override
public void taskSucceeded(UpdateModeStateListener clusterStateListener, Void unused) {
clusterStateListener.listener().onResponse(AcknowledgedResponse.TRUE);
}
}
}
| UpdateModeExecutor |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/assertj/AbstractHttpServletRequestAssertTests.java | {
"start": 4047,
"end": 4240
} | class ____ extends AbstractHttpServletRequestAssert<RequestAssert, HttpServletRequest> {
RequestAssert(HttpServletRequest actual) {
super(actual, RequestAssert.class);
}
}
}
| RequestAssert |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/impl/AuditingIntegration.java | {
"start": 1652,
"end": 1719
} | class ____ any backport.
*/
@InterfaceAudience.Private
public final | on |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/builders/HttpSecurityAuthenticationManagerTests.java | {
"start": 2096,
"end": 3358
} | class ____ {
@Autowired
MockMvc mvc;
public final SpringTestContext spring = new SpringTestContext(this);
@Test
public void authenticationManagerWhenConfiguredThenUsed() throws Exception {
this.spring.register(AuthenticationManagerConfig.class).autowire();
given(AuthenticationManagerConfig.AUTHENTICATION_MANAGER.authenticate(any()))
.willReturn(new TestingAuthenticationToken("user", "test", "ROLE_USER"));
this.mvc.perform(get("/").with(httpBasic("user", "test")));
verify(AuthenticationManagerConfig.AUTHENTICATION_MANAGER).authenticate(any());
}
@Test
public void authenticationManagerWhenBuilderAndAuthenticationManagerConfiguredThenBuilderIgnored()
throws Exception {
this.spring.register(AuthenticationManagerBuilderConfig.class).autowire();
given(AuthenticationManagerBuilderConfig.AUTHENTICATION_MANAGER.authenticate(any()))
.willReturn(new TestingAuthenticationToken("user", "test", "ROLE_USER"));
this.mvc.perform(get("/").with(httpBasic("user", "test")));
verify(AuthenticationManagerBuilderConfig.AUTHENTICATION_MANAGER).authenticate(any());
verifyNoInteractions(AuthenticationManagerBuilderConfig.USER_DETAILS_SERVICE);
}
@Configuration
@EnableWebSecurity
static | HttpSecurityAuthenticationManagerTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/AnnotationMatcherTest.java | {
"start": 1797,
"end": 1948
} | interface ____ {}
""");
writeFile(
"SampleAnnotation2.java",
"""
package com.google;
public @ | SampleAnnotation1 |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/generics/OwbWildcardTypeImpl.java | {
"start": 933,
"end": 2737
} | class ____ implements WildcardType {
private final Type[] upperBounds;
private final Type[] lowerBounds;
public OwbWildcardTypeImpl(Type[] upperBounds, Type[] lowerBounds) {
this.upperBounds = upperBounds.clone();
this.lowerBounds = lowerBounds.clone();
}
@Override
public Type[] getUpperBounds() {
return upperBounds.clone();
}
@Override
public Type[] getLowerBounds() {
return lowerBounds.clone();
}
@Override
public String toString() {
StringBuilder buffer = new StringBuilder("?");
if (upperBounds.length > 0) {
buffer.append(" extends");
boolean first = true;
for (Type upperBound : upperBounds) {
if (first) {
first = false;
} else {
buffer.append(',');
}
buffer.append(' ');
if (upperBound instanceof Class) {
buffer.append(((Class<?>) upperBound).getSimpleName());
} else {
buffer.append(upperBound);
}
}
}
if (lowerBounds.length > 0) {
buffer.append(" super");
boolean first = true;
for (Type lowerBound : lowerBounds) {
if (first) {
first = false;
} else {
buffer.append(',');
}
buffer.append(' ');
if (lowerBound instanceof Class) {
buffer.append(((Class<?>) lowerBound).getSimpleName());
} else {
buffer.append(lowerBound);
}
}
}
return buffer.toString();
}
}
| OwbWildcardTypeImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/EntityHierarchySourceImpl.java | {
"start": 4674,
"end": 14349
} | class ____ use.",
rootEntitySource.origin()
);
}
return new IdentifierSourceNonAggregatedCompositeImpl( rootEntitySource );
}
else {
if ( compositeId.isMapped() ) {
throw new MappingException(
"cannot combine mapped=\"true\" with specified name",
rootEntitySource.origin()
);
}
return new IdentifierSourceAggregatedCompositeImpl( rootEntitySource );
}
}
}
private static VersionAttributeSource interpretVersionSource(RootEntitySourceImpl rootEntitySource) {
final JaxbHbmRootEntityType entityElement = rootEntitySource.jaxbEntityMapping();
if ( entityElement.getVersion() != null ) {
return new VersionAttributeSourceImpl(
rootEntitySource.sourceMappingDocument(),
rootEntitySource,
entityElement.getVersion()
);
}
else if ( entityElement.getTimestamp() != null ) {
return new TimestampAttributeSourceImpl(
rootEntitySource.sourceMappingDocument(),
rootEntitySource,
entityElement.getTimestamp()
);
}
return null;
}
private static DiscriminatorSource interpretDiscriminatorSource(final RootEntitySourceImpl rootEntitySource) {
final JaxbHbmEntityDiscriminatorType jaxbDiscriminatorMapping =
rootEntitySource.jaxbEntityMapping().getDiscriminator();
if ( jaxbDiscriminatorMapping == null ) {
return null;
}
final RelationalValueSource relationalValueSource = RelationalValueSourceHelper.buildValueSource(
rootEntitySource.sourceMappingDocument(),
null,
new RelationalValueSourceHelper.AbstractColumnsAndFormulasSource() {
@Override
public XmlElementMetadata getSourceType() {
return XmlElementMetadata.DISCRIMINATOR;
}
@Override
public String getSourceName() {
return null;
}
@Override
public SizeSource getSizeSource() {
return Helper.interpretSizeSource(
jaxbDiscriminatorMapping.getLength(),
(Integer) null,
null
);
}
@Override
public String getFormulaAttribute() {
return jaxbDiscriminatorMapping.getFormulaAttribute();
}
@Override
public String getColumnAttribute() {
return jaxbDiscriminatorMapping.getColumnAttribute();
}
private List columnOrFormulas;
@Override
public List getColumnOrFormulaElements() {
if ( columnOrFormulas == null ) {
if ( jaxbDiscriminatorMapping.getColumn() != null ) {
if ( jaxbDiscriminatorMapping.getFormula() != null ) {
throw new MappingException(
String.format(
Locale.ENGLISH,
"discriminator mapping [%s] named both <column/> and <formula/>, but only one or other allowed",
rootEntitySource.getEntityNamingSource().getEntityName()
),
rootEntitySource.sourceMappingDocument().getOrigin()
);
}
else {
columnOrFormulas = Collections.singletonList( jaxbDiscriminatorMapping.getColumn() );
}
}
else {
if ( jaxbDiscriminatorMapping.getFormula() != null ) {
columnOrFormulas = Collections.singletonList( jaxbDiscriminatorMapping.getFormula() );
}
else {
columnOrFormulas = Collections.emptyList();
}
}
}
return columnOrFormulas;
}
@Override
public Boolean isNullable() {
return !jaxbDiscriminatorMapping.isNotNull();
}
}
);
return new DiscriminatorSource() {
@Override
public EntityNaming getEntityNaming() {
return rootEntitySource.getEntityNamingSource();
}
@Override
public MetadataBuildingContext getBuildingContext() {
return rootEntitySource.metadataBuildingContext();
}
@Override
public RelationalValueSource getDiscriminatorRelationalValueSource() {
return relationalValueSource;
}
@Override
public String getExplicitHibernateTypeName() {
return jaxbDiscriminatorMapping.getType();
}
@Override
public boolean isForced() {
return jaxbDiscriminatorMapping.isForce();
}
@Override
public boolean isInserted() {
return jaxbDiscriminatorMapping.isInsert();
}
};
}
private static MultiTenancySource interpretMultiTenancySource(final RootEntitySourceImpl rootEntitySource) {
final JaxbHbmMultiTenancyType jaxbMultiTenancy = rootEntitySource.jaxbEntityMapping().getMultiTenancy();
if ( jaxbMultiTenancy == null ) {
return null;
}
final RelationalValueSource relationalValueSource = RelationalValueSourceHelper.buildValueSource(
rootEntitySource.sourceMappingDocument(),
null,
new RelationalValueSourceHelper.AbstractColumnsAndFormulasSource() {
@Override
public XmlElementMetadata getSourceType() {
return XmlElementMetadata.MULTI_TENANCY;
}
@Override
public String getSourceName() {
return null;
}
@Override
public String getFormulaAttribute() {
return jaxbMultiTenancy.getFormulaAttribute();
}
@Override
public String getColumnAttribute() {
return jaxbMultiTenancy.getColumnAttribute();
}
private List columnOrFormulas;
@Override
public List getColumnOrFormulaElements() {
if ( columnOrFormulas == null ) {
if ( jaxbMultiTenancy.getColumn() != null ) {
if ( jaxbMultiTenancy.getFormula() != null ) {
throw new MappingException(
String.format(
Locale.ENGLISH,
"discriminator mapping [%s] named both <column/> and <formula/>, but only one or other allowed",
rootEntitySource.getEntityNamingSource().getEntityName()
),
rootEntitySource.sourceMappingDocument().getOrigin()
);
}
else {
columnOrFormulas = Collections.singletonList( jaxbMultiTenancy.getColumn() );
}
}
else {
if ( jaxbMultiTenancy.getFormula() != null ) {
columnOrFormulas = Collections.singletonList( jaxbMultiTenancy.getColumn() );
}
else {
columnOrFormulas = Collections.emptyList();
}
}
}
return columnOrFormulas;
}
@Override
public Boolean isNullable() {
return false;
}
}
);
return new MultiTenancySource() {
@Override
public RelationalValueSource getRelationalValueSource() {
return relationalValueSource;
}
@Override
public boolean isShared() {
return jaxbMultiTenancy.isShared();
}
@Override
public boolean bindAsParameter() {
return jaxbMultiTenancy.isBindAsParam();
}
};
}
@Override
public InheritanceType getHierarchyInheritanceType() {
return hierarchyInheritanceType;
}
@Override
public RootEntitySourceImpl getRoot() {
return rootEntitySource;
}
public void processSubclass(SubclassEntitySourceImpl subclassEntitySource) {
final InheritanceType inheritanceType = Helper.interpretInheritanceType( subclassEntitySource.jaxbEntityMapping() );
if ( hierarchyInheritanceType == InheritanceType.NO_INHERITANCE ) {
hierarchyInheritanceType = inheritanceType;
}
else if ( hierarchyInheritanceType != inheritanceType ) {
throw new MappingException( "Mixed inheritance strategies not supported", subclassEntitySource.getOrigin() );
}
collectedEntityNames.add( subclassEntitySource.getEntityNamingSource().getEntityName() );
}
protected JaxbHbmRootEntityType entityElement() {
return rootEntitySource.jaxbEntityMapping();
}
@Override
public IdentifierSource getIdentifierSource() {
return identifierSource;
}
@Override
public VersionAttributeSource getVersionAttributeSource() {
return versionAttributeSource;
}
@Override
public boolean isMutable() {
return entityElement().isMutable();
}
@Override
public boolean isExplicitPolymorphism() {
return JaxbHbmPolymorphismEnum.EXPLICIT == entityElement().getPolymorphism();
}
@Override
public String getWhere() {
return entityElement().getWhere();
}
@Override
public String getRowId() {
return entityElement().getRowid();
}
@Override
public OptimisticLockStyle getOptimisticLockStyle() {
return entityElement().getOptimisticLock();
}
@Override
public Caching getCaching() {
return caching;
}
@Override
public Caching getNaturalIdCaching() {
return naturalIdCaching;
}
@Override
public DiscriminatorSource getDiscriminatorSource() {
return discriminatorSource;
}
@Override
public MultiTenancySource getMultiTenancySource() {
return multiTenancySource;
}
/**
* Package-protected to allow IdentifierSource implementations to access it.
*
* @param mappingDocument The source mapping document
* @param entityNaming The entity naming
* @param jaxbGeneratorMapping The identifier generator mapping
*
* @return The collected information.
*/
static IdentifierGeneratorDefinition interpretGeneratorDefinition(
MappingDocument mappingDocument,
EntityNamingSource entityNaming,
JaxbHbmGeneratorSpecificationType jaxbGeneratorMapping) {
if ( jaxbGeneratorMapping == null ) {
return null;
}
final String generatorName = jaxbGeneratorMapping.getClazz();
IdentifierGeneratorDefinition identifierGeneratorDefinition = mappingDocument.getMetadataCollector()
.getIdentifierGenerator( generatorName );
if ( identifierGeneratorDefinition == null ) {
identifierGeneratorDefinition = new IdentifierGeneratorDefinition(
entityNaming.getEntityName() + '.' + generatorName,
generatorName,
Helper.extractParameters( jaxbGeneratorMapping.getConfigParameters() )
);
}
return identifierGeneratorDefinition;
}
public Set<String> getContainedEntityNames() {
return collectedEntityNames;
}
}
| to |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/path/JSONPath_none_root.java | {
"start": 242,
"end": 1079
} | class ____ extends TestCase {
public void test_root() throws Exception {
List<Object> list = new ArrayList<Object>();
list.add(new Object());
Assert.assertSame(list.get(0), new JSONPath("[0]").eval(list));
}
public void test_null() throws Exception {
Assert.assertNull(new JSONPath("name").eval(null));
}
public void test_map() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
map.put("val", new Object());
Assert.assertSame(map.get("val"), new JSONPath("val").eval(map));
}
public void test_entity() throws Exception {
Entity entity = new Entity();
entity.setValue(new Object());
Assert.assertSame(entity.getValue(), new JSONPath("value").eval(entity));
}
public static | JSONPath_none_root |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PreferredInterfaceTypeTest.java | {
"start": 27574,
"end": 27937
} | class ____ {
final ImmutableSet<String> NON_STATIC = ImmutableSet.of();
}
""")
.doTest();
}
@Test
public void nonFinal() {
testHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.collect.ImmutableSet;
import java.util.Set;
| Test |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/processor/TransactionProcessor.java | {
"start": 1268,
"end": 3374
} | class ____ extends AbstractProcessor {
public TransactionProcessor(MessagingProcessor messagingProcessor,
ServiceManager serviceManager) {
super(messagingProcessor, serviceManager);
}
public CompletableFuture<Void> endTransaction(ProxyContext ctx, String topic, String transactionId, String messageId, String producerGroup,
TransactionStatus transactionStatus, boolean fromTransactionCheck, long timeoutMillis) {
CompletableFuture<Void> future = new CompletableFuture<>();
try {
EndTransactionRequestData headerData = serviceManager.getTransactionService().genEndTransactionRequestHeader(
ctx,
topic,
producerGroup,
buildCommitOrRollback(transactionStatus),
fromTransactionCheck,
messageId,
transactionId
);
if (headerData == null) {
future.completeExceptionally(new ProxyException(ProxyExceptionCode.TRANSACTION_DATA_NOT_FOUND, "cannot found transaction data"));
return future;
}
return this.serviceManager.getMessageService().endTransactionOneway(
ctx,
headerData.getBrokerName(),
headerData.getRequestHeader(),
timeoutMillis
);
} catch (Throwable t) {
future.completeExceptionally(t);
}
return future;
}
protected int buildCommitOrRollback(TransactionStatus transactionStatus) {
switch (transactionStatus) {
case COMMIT:
return MessageSysFlag.TRANSACTION_COMMIT_TYPE;
case ROLLBACK:
return MessageSysFlag.TRANSACTION_ROLLBACK_TYPE;
default:
return MessageSysFlag.TRANSACTION_NOT_TYPE;
}
}
public void addTransactionSubscription(ProxyContext ctx, String producerGroup, String topic) {
this.serviceManager.getTransactionService().addTransactionSubscription(ctx, producerGroup, topic);
}
}
| TransactionProcessor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitShm.java | {
"start": 1712,
"end": 4029
} | class ____ {
public static final Logger LOG = LoggerFactory.getLogger(
TestShortCircuitShm.class);
private static final File TEST_BASE = GenericTestUtils.getTestDir();
@BeforeEach
public void before() {
assumeTrue(null ==
SharedFileDescriptorFactory.getLoadingFailureReason());
}
@Test
@Timeout(value = 60)
public void testStartupShutdown() throws Exception {
File path = new File(TEST_BASE, "testStartupShutdown");
path.mkdirs();
SharedFileDescriptorFactory factory =
SharedFileDescriptorFactory.create("shm_",
new String[] { path.getAbsolutePath() } );
FileInputStream stream =
factory.createDescriptor("testStartupShutdown", 4096);
ShortCircuitShm shm = new ShortCircuitShm(ShmId.createRandom(), stream);
shm.free();
stream.close();
FileUtil.fullyDelete(path);
}
@Test
@Timeout(value = 60)
public void testAllocateSlots() throws Exception {
File path = new File(TEST_BASE, "testAllocateSlots");
path.mkdirs();
SharedFileDescriptorFactory factory =
SharedFileDescriptorFactory.create("shm_",
new String[] { path.getAbsolutePath() });
FileInputStream stream =
factory.createDescriptor("testAllocateSlots", 4096);
ShortCircuitShm shm = new ShortCircuitShm(ShmId.createRandom(), stream);
int numSlots = 0;
ArrayList<Slot> slots = new ArrayList<Slot>();
while (!shm.isFull()) {
Slot slot = shm.allocAndRegisterSlot(new ExtendedBlockId(123L, "test_bp1"));
slots.add(slot);
numSlots++;
}
LOG.info("allocated " + numSlots + " slots before running out.");
int slotIdx = 0;
for (Iterator<Slot> iter = shm.slotIterator();
iter.hasNext(); ) {
assertTrue(slots.contains(iter.next()));
}
for (Slot slot : slots) {
assertFalse(slot.addAnchor());
assertEquals(slotIdx++, slot.getSlotIdx());
}
for (Slot slot : slots) {
slot.makeAnchorable();
}
for (Slot slot : slots) {
assertTrue(slot.addAnchor());
}
for (Slot slot : slots) {
slot.removeAnchor();
}
for (Slot slot : slots) {
shm.unregisterSlot(slot.getSlotIdx());
slot.makeInvalid();
}
shm.free();
stream.close();
FileUtil.fullyDelete(path);
}
}
| TestShortCircuitShm |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/StopWatch.java | {
"start": 4441,
"end": 25312
} | enum ____ {
RUNNING {
@Override
boolean isStarted() {
return true;
}
@Override
boolean isStopped() {
return false;
}
@Override
boolean isSuspended() {
return false;
}
},
STOPPED {
@Override
boolean isStarted() {
return false;
}
@Override
boolean isStopped() {
return true;
}
@Override
boolean isSuspended() {
return false;
}
},
SUSPENDED {
@Override
boolean isStarted() {
return true;
}
@Override
boolean isStopped() {
return false;
}
@Override
boolean isSuspended() {
return true;
}
},
UNSTARTED {
@Override
boolean isStarted() {
return false;
}
@Override
boolean isStopped() {
return true;
}
@Override
boolean isSuspended() {
return false;
}
};
/**
* Tests whether this StopWatch is started. A suspended StopWatch is also started.
*
* @return boolean If this StopWatch is started.
*/
abstract boolean isStarted();
/**
* Tests whether this StopWatch is stopped. A StopWatch which is not yet started and explicitly stopped is considered stopped.
*
* @return boolean If this StopWatch is stopped.
*/
abstract boolean isStopped();
/**
* Tests whether this StopWatch is suspended.
*
* @return boolean If this StopWatch is suspended.
*/
abstract boolean isSuspended();
}
private static final long NANO_2_MILLIS = 1_000_000L;
/**
* Creates a StopWatch.
*
* @return StopWatch a StopWatch.
* @since 3.10
*/
public static StopWatch create() {
return new StopWatch();
}
/**
* Creates and starts a StopWatch.
*
* @return StopWatch a started StopWatch.
* @since 3.5
*/
public static StopWatch createStarted() {
final StopWatch sw = new StopWatch();
sw.start();
return sw;
}
/**
 * An optional message reported as a prefix by {@link #toString()} and {@link #toSplitString()}.
 *
 * @since 3.10
 */
private final String message;
/**
 * The current running state of this StopWatch (UNSTARTED, RUNNING, SUSPENDED or STOPPED).
 */
private State runningState = State.UNSTARTED;
/**
 * Whether this StopWatch currently has a split time recorded.
 */
private SplitState splitState = SplitState.UNSPLIT;
/**
 * The start time in nanoseconds, as sampled from {@link System#nanoTime()}.
 *
 * This field can be removed once we move off of Java 8.
 */
private long startTimeNanos;
/**
 * The start Instant (wall-clock time of the most recent {@link #start()}).
 * <p>
 * nanoTime is only for elapsed time so we need to also store the currentTimeMillis to maintain the old getStartTime API.
 * </p>
 * <p>
 * On Java 8, Instant has millisecond precision, later versions use nanoseconds.
 * </p>
 */
private Instant startInstant;
/**
 * The end Instant (wall-clock time of the most recent {@link #stop()} or {@link #suspend()}).
 * <p>
 * nanoTime is only for elapsed time so we need to also store the currentTimeMillis to maintain the old getStartTime API.
 * </p>
 * <p>
 * On Java 8, Instant has millisecond precision, later versions use nanoseconds.
 * </p>
 */
private Instant stopInstant;
/**
 * The stop time in nanoseconds, as sampled from {@link System#nanoTime()}.
 *
 * This field can be removed once we move off of Java 8.
 */
private long stopTimeNanos;
/**
 * The split list, appended to by {@link #split()} and cleared by {@link #reset()}.
 */
private final List<Split> splits = new ArrayList<>();
/**
 * Constructs a new instance with no message.
 */
public StopWatch() {
    this(null); // Delegate to the message constructor with no message.
}
/**
 * Constructs a new instance.
 *
 * @param message A message for string presentation; may be null.
 * @since 3.10
 */
public StopWatch(final String message) {
    this.message = message;
}
/**
 * Renders the latest split time via {@link DurationFormatUtils#formatDurationHMS}.
 *
 * @return the split time formatted by {@link DurationFormatUtils#formatDurationHMS}.
 * @since 3.10
 */
public String formatSplitTime() {
    final long splitMillis = getSplitDuration().toMillis();
    return DurationFormatUtils.formatDurationHMS(splitMillis);
}
/**
 * Renders the elapsed time via {@link DurationFormatUtils#formatDurationHMS}.
 *
 * @return the time formatted by {@link DurationFormatUtils#formatDurationHMS}.
 * @since 3.10
 */
public String formatTime() {
    final long elapsedMillis = getTime();
    return DurationFormatUtils.formatDurationHMS(elapsedMillis);
}
/**
 * Delegates to {@link Supplier#get()} while recording the duration of the call.
 *
 * @param <T> the type of results supplied by this supplier.
 * @param supplier The supplier to {@link Supplier#get()}.
 * @return a result from the given Supplier.
 * @since 3.18.0
 */
public <T> T get(final Supplier<T> supplier) {
    // Start (if stopped) or resume (if suspended) before timing the call.
    startResume();
    try {
        return supplier.get();
    } finally {
        // Always suspend so only the supplier's execution time is accumulated.
        suspend();
    }
}
/**
 * Gets the Duration on this StopWatch.
 *
 * <p>
 * While running this measures up to the moment of the call; once stopped it is the fixed interval
 * between start and stop.
 * </p>
 *
 * @return the Duration.
 * @since 3.16.0
 */
public Duration getDuration() {
    final long elapsedNanos = getNanoTime();
    return Duration.ofNanos(elapsedNanos);
}
/**
 * Gets the message for string presentation.
 *
 * @return the message for string presentation, possibly null.
 * @since 3.10
 */
public String getMessage() {
    return this.message;
}
/**
 * Gets the <em>elapsed</em> time in nanoseconds.
 *
 * <p>
 * While running this is measured against the current {@link System#nanoTime()}; once stopped or
 * suspended it is the fixed interval between the recorded start and stop samples.
 * </p>
 *
 * @return the <em>elapsed</em> time in nanoseconds.
 * @see System#nanoTime()
 * @since 3.0
 */
public long getNanoTime() {
    switch (runningState) {
        case UNSTARTED:
            return 0;
        case RUNNING:
            return System.nanoTime() - startTimeNanos;
        case STOPPED:
        case SUSPENDED:
            return stopTimeNanos - startTimeNanos;
        default:
            // Unreachable unless a new State constant is added without updating this switch.
            throw new IllegalStateException("Illegal running state has occurred.");
    }
}
/**
 * Gets the split Duration on this StopWatch, i.e. the Duration between start and the latest split.
 *
 * @return the split Duration.
 * @throws IllegalStateException if this StopWatch has not yet been split.
 * @since 3.16.0
 */
public Duration getSplitDuration() {
    return Duration.ofNanos(getSplitNanoTime());
}
/**
 * Gets the split time in nanoseconds, i.e. the time between start and the latest split.
 *
 * @return the split time in nanoseconds.
 * @throws IllegalStateException if this StopWatch has not yet been split.
 * @since 3.0
 */
public long getSplitNanoTime() {
    if (splitState != SplitState.SPLIT) {
        throw new IllegalStateException("Stopwatch must be split to get the split time.");
    }
    final int lastIndex = splits.size() - 1;
    return splits.get(lastIndex).getRight().toNanos();
}
/**
 * Gets the split list.
 *
 * @return an unmodifiable view of the splits (backed by the live internal list).
 * @since 3.20.0
 */
public List<Split> getSplits() {
    return Collections.unmodifiableList(splits);
}
/**
 * Gets the split time on this StopWatch.
 *
 * <p>
 * This is the time between start and latest split.
 * </p>
 *
 * @return the split time in milliseconds.
 * @throws IllegalStateException if this StopWatch has not yet been split.
 * @since 2.1
 * @deprecated Use {@link #getSplitDuration()}.
 */
@Deprecated
public long getSplitTime() {
    return nanosToMillis(getSplitNanoTime());
}
/**
 * Gets the Instant this StopWatch was started, between the current time and midnight, January 1, 1970 UTC.
 *
 * @return the Instant this StopWatch was started.
 * @throws IllegalStateException if this StopWatch has not been started.
 * @since 3.16.0
 */
public Instant getStartInstant() {
    if (State.UNSTARTED == runningState) {
        throw new IllegalStateException("Stopwatch has not been started");
    }
    return this.startInstant;
}
/**
 * Gets the time this StopWatch was started in milliseconds, between the current time and midnight, January 1, 1970 UTC.
 *
 * @return the start time in epoch milliseconds.
 * @throws IllegalStateException if this StopWatch has not been started.
 * @since 2.4
 * @deprecated Use {@link #getStartInstant()}.
 */
@Deprecated
public long getStartTime() {
    final Instant started = getStartInstant();
    return started.toEpochMilli();
}
/**
 * Gets the Instant this StopWatch was stopped, between the current time and midnight, January 1, 1970 UTC.
 *
 * @return the Instant this StopWatch was stopped.
 * @throws IllegalStateException if this StopWatch has not been started.
 * @since 3.16.0
 */
public Instant getStopInstant() {
    if (State.UNSTARTED == runningState) {
        throw new IllegalStateException("Stopwatch has not been started");
    }
    return this.stopInstant;
}
/**
 * Gets the time this StopWatch was stopped in milliseconds, between the current time and midnight, January 1, 1970 UTC.
 *
 * @return the stop time in epoch milliseconds.
 * @throws IllegalStateException if this StopWatch has not been started.
 * @since 3.12.0
 * @deprecated Use {@link #getStopInstant()}.
 */
@Deprecated
public long getStopTime() {
    // stopInstant (not stopTimeNanos) carries wall-clock time; nanos are elapsed-time only.
    final Instant stopped = getStopInstant();
    return stopped.toEpochMilli();
}
/**
 * Delegates to {@link FailableSupplier#get()} while recording the duration of the call.
 *
 * @param <T> the type of results supplied by this supplier.
 * @param <E> The kind of thrown exception or error.
 * @param supplier The supplier to {@link FailableSupplier#get()}.
 * @return a result from the given Supplier.
 * @throws Throwable if the supplier fails.
 * @since 3.18.0
 */
public <T, E extends Throwable> T getT(final FailableSupplier<T, E> supplier) throws Throwable {
    // Start (if stopped) or resume (if suspended) before timing the call.
    startResume();
    try {
        return supplier.get();
    } finally {
        // Always suspend so only the supplier's execution time is accumulated.
        suspend();
    }
}
/**
 * Gets the time on this StopWatch.
 *
 * <p>
 * This is either the time between the start and the moment this method is called, or the amount of time between start and stop.
 * </p>
 *
 * @return the time in milliseconds.
 * @see #getDuration()
 */
public long getTime() {
    return nanosToMillis(getNanoTime());
}
/**
 * Gets the time in the specified TimeUnit.
 *
 * <p>
 * This is either the time between the start and the moment this method is called, or the amount of time between start and stop. The resulting time will be
 * expressed in the desired TimeUnit with any remainder rounded down. For example, if the specified unit is {@code TimeUnit.HOURS} and this StopWatch time
 * is 59 minutes, then the result returned will be {@code 0}.
 * </p>
 *
 * @param timeUnit the unit of time, not null.
 * @return the time in the specified TimeUnit, rounded down.
 * @since 3.5
 */
public long getTime(final TimeUnit timeUnit) {
    return timeUnit.convert(getNanoTime(), TimeUnit.NANOSECONDS);
}
/**
 * Tests whether this StopWatch is started. A suspended StopWatch is also considered started.
 *
 * @return boolean If this StopWatch is started.
 * @since 3.2
 */
public boolean isStarted() {
    return runningState.isStarted();
}
/**
 * Tests whether this StopWatch is stopped. A StopWatch that is not yet started, or that has been
 * explicitly stopped, is considered stopped.
 *
 * @return boolean If this StopWatch is stopped.
 * @since 3.2
 */
public boolean isStopped() {
    return runningState.isStopped();
}
/**
 * Tests whether this StopWatch is suspended.
 *
 * @return boolean If this StopWatch is suspended.
 * @since 3.2
 */
public boolean isSuspended() {
    return runningState.isSuspended();
}
/**
 * Converts nanoseconds to milliseconds, truncating toward zero.
 *
 * @param nanos nanoseconds to convert.
 * @return milliseconds conversion result.
 */
private long nanosToMillis(final long nanos) {
    return TimeUnit.NANOSECONDS.toMillis(nanos);
}
/**
 * Resets this StopWatch so it can be reused, stopping it if need be.
 *
 * <p>
 * Clears the recorded splits and returns the watch to its initial, unstarted state.
 * </p>
 */
public void reset() {
    splits.clear();
    splitState = SplitState.UNSPLIT;
    runningState = State.UNSTARTED;
}
/**
 * Resumes this StopWatch after a suspend.
 *
 * <p>
 * This method resumes the watch after it was suspended. The watch will not include time between the suspend and resume calls in the total time.
 * </p>
 *
 * @throws IllegalStateException if this StopWatch has not been suspended.
 */
public void resume() {
    if (runningState != State.SUSPENDED) {
        throw new IllegalStateException("Stopwatch must be suspended to resume.");
    }
    // Shift the start reference forward by the length of the pause (stopTimeNanos was set by
    // suspend()), so the suspended interval is excluded from the elapsed time.
    startTimeNanos += System.nanoTime() - stopTimeNanos;
    runningState = State.RUNNING;
}
/**
 * Delegates to {@link Runnable#run()} while recording the duration of the call.
 *
 * @param runnable The runnable to {@link Runnable#run()}.
 * @since 3.18.0
 */
public void run(final Runnable runnable) {
    // Start (if stopped) or resume (if suspended) before timing the call.
    startResume();
    try {
        runnable.run();
    } finally {
        // Always suspend so only the runnable's execution time is accumulated.
        suspend();
    }
}
/**
 * Delegates to {@link FailableRunnable#run()} while recording the duration of the call.
 *
 * @param <E> The kind of {@link Throwable}.
 * @param runnable The runnable to {@link FailableRunnable#run()}.
 * @throws Throwable Thrown by {@link FailableRunnable#run()}.
 * @since 3.18.0
 */
public <E extends Throwable> void runT(final FailableRunnable<E> runnable) throws Throwable {
    // Start (if stopped) or resume (if suspended) before timing the call.
    startResume();
    try {
        runnable.run();
    } finally {
        // Always suspend so only the runnable's execution time is accumulated.
        suspend();
    }
}
/**
 * Splits the time.
 *
 * <p>
 * This method sets the stop time of the watch to allow a time to be extracted. The start time is unaffected, enabling {@link #unsplit()} to continue the
 * timing from the original start point.
 * </p>
 *
 * @throws IllegalStateException if this StopWatch is not running.
 */
public void split() {
    // Delegate to the labelled variant to avoid duplicating the split logic; the default
    // label is the split's index, which matches what the labelled variant would record.
    split(String.valueOf(splits.size()));
}
/**
 * Splits the time with a label.
 *
 * <p>
 * This method sets the stop time of the watch to allow a time to be extracted. The start time is unaffected, enabling {@link #unsplit()} to continue the
 * timing from the original start point.
 * </p>
 *
 * @param label A message for string presentation.
 * @throws IllegalStateException if the StopWatch is not running.
 * @since 3.20.0
 */
public void split(final String label) {
    if (runningState != State.RUNNING) {
        throw new IllegalStateException("Stopwatch is not running.");
    }
    // Record the split sample; the start reference is untouched so timing continues from it.
    stopTimeNanos = System.nanoTime();
    splitState = SplitState.SPLIT;
    splits.add(new Split(label, Duration.ofNanos(stopTimeNanos - startTimeNanos)));
}
/**
 * Starts this StopWatch.
 *
 * <p>
 * This method starts a new timing session, clearing any previous values.
 * </p>
 *
 * @throws IllegalStateException if this StopWatch is already running.
 */
public void start() {
    if (runningState == State.STOPPED) {
        throw new IllegalStateException("Stopwatch must be reset before being restarted.");
    }
    if (runningState != State.UNSTARTED) {
        throw new IllegalStateException("Stopwatch already started.");
    }
    // nanoTime() drives elapsed-time math; Instant.now() preserves the wall-clock start time.
    startTimeNanos = System.nanoTime();
    startInstant = Instant.now();
    runningState = State.RUNNING;
    splits.clear();
}
/**
 * Starts this StopWatch if it is stopped (or unstarted), or resumes it if it is suspended.
 */
private void startResume() {
    if (isSuspended()) {
        resume();
    } else if (isStopped()) {
        start();
    }
}
/**
 * Stops this StopWatch.
 *
 * <p>
 * This method ends a new timing session, allowing the time to be retrieved.
 * </p>
 *
 * @throws IllegalStateException if this StopWatch is not running.
 */
public void stop() {
    if (runningState != State.RUNNING && runningState != State.SUSPENDED) {
        throw new IllegalStateException("Stopwatch is not running.");
    }
    if (runningState == State.RUNNING) {
        // When SUSPENDED, suspend() already captured the stop samples; do not overwrite them.
        stopTimeNanos = System.nanoTime();
        stopInstant = Instant.now();
    }
    runningState = State.STOPPED;
}
/**
 * Suspends this StopWatch for later resumption.
 *
 * <p>
 * This method suspends the watch until it is resumed. The watch will not include time between the suspend and resume calls in the total time.
 * </p>
 *
 * @throws IllegalStateException if this StopWatch is not currently running.
 */
public void suspend() {
    if (runningState != State.RUNNING) {
        throw new IllegalStateException("Stopwatch must be running to suspend.");
    }
    // Capture the pause point; resume() uses stopTimeNanos to exclude the paused interval.
    stopTimeNanos = System.nanoTime();
    stopInstant = Instant.now();
    runningState = State.SUSPENDED;
}
/**
 * Gets a summary of the last split time that this StopWatch recorded as a string.
 *
 * <p>
 * The format used is ISO 8601-like, [<em>message</em> ]<em>hours</em>:<em>minutes</em>:<em>seconds</em>.<em>milliseconds</em>.
 * </p>
 *
 * @return the split time as a String.
 * @since 2.1
 * @since 3.10 Returns the prefix {@code "message "} if the message is set.
 */
public String toSplitString() {
    final String formatted = formatSplitTime();
    final String prefix = Objects.toString(message, StringUtils.EMPTY);
    if (prefix.isEmpty()) {
        return formatted;
    }
    return prefix + StringUtils.SPACE + formatted;
}
/**
 * Gets a summary of the time that this StopWatch recorded as a string.
 *
 * <p>
 * The format used is ISO 8601-like, [<em>message</em> ]<em>hours</em>:<em>minutes</em>:<em>seconds</em>.<em>milliseconds</em>.
 * </p>
 *
 * @return the time as a String.
 * @since 3.10 Returns the prefix {@code "message "} if the message is set.
 */
@Override
public String toString() {
    final String formatted = formatTime();
    final String prefix = Objects.toString(message, StringUtils.EMPTY);
    if (prefix.isEmpty()) {
        return formatted;
    }
    return prefix + StringUtils.SPACE + formatted;
}
/**
 * Removes the most recent split.
 *
 * <p>
 * Clears the stop time; the start time is unaffected, so timing from the original start point
 * continues.
 * </p>
 *
 * @throws IllegalStateException if this StopWatch has not been split.
 */
public void unsplit() {
    if (splitState != SplitState.SPLIT) {
        throw new IllegalStateException("Stopwatch has not been split.");
    }
    splits.remove(splits.size() - 1);
    splitState = SplitState.UNSPLIT;
}
}
| State |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/health/Diagnosis.java | {
"start": 1766,
"end": 6550
} | enum ____ {
INDEX("indices"),
NODE("nodes"),
SLM_POLICY("slm_policies"),
ILM_POLICY("ilm_policies"),
FEATURE_STATE("feature_states"),
SNAPSHOT_REPOSITORY("snapshot_repositories");
private final String displayValue;
Type(String displayValue) {
this.displayValue = displayValue;
}
}
private final Type type;
@Nullable
private Collection<String> values;
@Nullable
private Collection<DiscoveryNode> nodes;
public Resource(Type type, Collection<String> values) {
if (type == Type.NODE) {
throw new IllegalArgumentException("Nodes should be modelled using the dedicated constructor");
}
this.type = type;
this.values = values;
}
public Resource(Collection<DiscoveryNode> nodes) {
this.type = Type.NODE;
this.nodes = nodes;
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
final Iterator<? extends ToXContent> valuesIterator;
if (nodes != null) {
valuesIterator = Iterators.map(nodes.iterator(), node -> (builder, params) -> {
builder.startObject();
builder.field(ID_FIELD, node.getId());
if (node.getName() != null) {
builder.field(NAME_FIELD, node.getName());
}
builder.endObject();
return builder;
});
} else {
valuesIterator = Iterators.map(values.iterator(), value -> (builder, params) -> builder.value(value));
}
return ChunkedToXContentHelper.array(type.displayValue, valuesIterator);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Resource resource = (Resource) o;
return type == resource.type && Objects.equals(values, resource.values) && Objects.equals(nodes, resource.nodes);
}
@Override
public int hashCode() {
return Objects.hash(type, values, nodes);
}
public Type getType() {
return type;
}
@Nullable
public Collection<String> getValues() {
return values;
}
@Nullable
public Collection<DiscoveryNode> getNodes() {
return nodes;
}
}
/**
* Details a diagnosis - cause and a potential action that a user could take to clear an issue identified by a {@link HealthService}.
*
* @param indicatorName The name of the health indicator service that will generate this diagnosis
* @param id An identifier unique to this diagnosis across the health indicator that generates it
* @param cause A description of the cause of the problem
* @param action A description of the action to be taken to remedy the problem
* @param helpURL Optional evergreen url to a help document
*/
public record Definition(String indicatorName, String id, String cause, String action, String helpURL) {
public String getUniqueId() {
return HEALTH_API_ID_PREFIX + indicatorName + ":diagnosis:" + id;
}
}
private boolean hasResources() {
return affectedResources != null && affectedResources.isEmpty() == false;
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
return Iterators.concat(ChunkedToXContentHelper.chunk((builder, params) -> {
builder.startObject();
builder.field("id", definition.getUniqueId());
builder.field("cause", definition.cause);
builder.field("action", definition.action);
builder.field("help_url", definition.helpURL);
if (hasResources()) {
// don't want to have a new chunk & nested iterator for this, so we start the object here
builder.startObject("affected_resources");
}
return builder;
}),
hasResources()
? Iterators.flatMap(affectedResources.iterator(), s -> s.toXContentChunked(outerParams))
: Collections.emptyIterator(),
ChunkedToXContentHelper.chunk((b, p) -> {
if (hasResources()) {
b.endObject();
}
return b.endObject();
})
);
}
}
| Type |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java | {
"start": 3877,
"end": 3988
} | class ____ do contain term
frequencies.N10 = supersetFreq - subsetFreq;
// documents in | and |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/internal/UntargettedBindingImpl.java | {
"start": 886,
"end": 1792
} | class ____<T> extends BindingImpl<T> implements UntargettedBinding<T> {
public UntargettedBindingImpl(Injector injector, Key<T> key, Object source) {
super(injector, key, source, (errors, context, dependency) -> { throw new AssertionError(); }, Scoping.UNSCOPED);
}
public UntargettedBindingImpl(Object source, Key<T> key, Scoping scoping) {
super(source, key, scoping);
}
@Override
public <V> void acceptTargetVisitor(BindingTargetVisitor<? super T, V> visitor) {
visitor.visit(this);
}
@Override
public BindingImpl<T> withEagerSingletonScoping() {
return new UntargettedBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON);
}
@Override
public String toString() {
return new ToStringBuilder(UntargettedBinding.class).add("key", getKey()).add("source", getSource()).toString();
}
}
| UntargettedBindingImpl |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ObjectEqualsForPrimitivesTest.java | {
"start": 894,
"end": 1283
} | class ____ {
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(ObjectEqualsForPrimitives.class, getClass());
@Test
public void boxedIntegers() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.Objects;
public | ObjectEqualsForPrimitivesTest |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java | {
"start": 2057,
"end": 13699
} | class ____ implements Writeable, ToXContentObject {
public static final String NO_TEMPLATE_STORED_WARNING = "Using default search application template which is subject to change. "
+ "We recommend storing a template to avoid breaking changes.";
public static final String NO_ALIAS_WARNING = "Alias is missing for the search application";
private static final TransportVersion INDICES_REMOVED_TRANSPORT_VERSION = TransportVersions.V_8_11_X;
private final String name;
@Nullable
private final String[] indices;
private final long updatedAtMillis;
private final String analyticsCollectionName;
private final SearchApplicationTemplate searchApplicationTemplate;
/**
* Public constructor.
*
* @param name The name of the search application.
* @param indices The list of indices targeted by this search application.
* @param analyticsCollectionName The name of the associated analytics collection.
* @param updatedAtMillis Last updated time in milliseconds for the search application.
* @param searchApplicationTemplate The search application template to be used on search
*/
public SearchApplication(
String name,
String[] indices,
@Nullable String analyticsCollectionName,
long updatedAtMillis,
@Nullable SearchApplicationTemplate searchApplicationTemplate
) {
if (Strings.isNullOrEmpty(name)) {
throw new IllegalArgumentException("Search Application name cannot be null or blank");
}
this.name = name;
Objects.requireNonNull(indices, "Search Application indices cannot be null");
this.indices = indices.clone();
Arrays.sort(this.indices);
this.analyticsCollectionName = analyticsCollectionName;
this.updatedAtMillis = updatedAtMillis;
this.searchApplicationTemplate = searchApplicationTemplate;
}
public SearchApplication(StreamInput in) throws IOException {
this(in, null);
}
public SearchApplication(StreamInput in, String[] indices) throws IOException {
this.name = in.readString();
if (in.getTransportVersion().onOrAfter(INDICES_REMOVED_TRANSPORT_VERSION)) {
this.indices = indices; // Uses the provided indices, as they are no longer serialized
} else {
this.indices = in.readStringArray(); // old behaviour, read it from input as it was serialized
}
this.analyticsCollectionName = in.readOptionalString();
this.updatedAtMillis = in.readLong();
this.searchApplicationTemplate = in.readOptionalWriteable(SearchApplicationTemplate::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
if (out.getTransportVersion().before(INDICES_REMOVED_TRANSPORT_VERSION)) {
out.writeStringArray(indices); // old behaviour. New behaviour does not serialize indices, so no need to do anything else
}
out.writeOptionalString(analyticsCollectionName);
out.writeLong(updatedAtMillis);
out.writeOptionalWriteable(searchApplicationTemplate);
}
private static final ConstructingObjectParser<SearchApplication, String> PARSER = new ConstructingObjectParser<>(
"search_application",
false,
(params, resourceName) -> {
final String name = (String) params[0];
// If name is provided, check that it matches the resource name. We don't want it to be updatable
if (name != null) {
if (name.equals(resourceName) == false) {
throw new IllegalArgumentException(
"Search Application name [" + name + "] does not match the resource name: [" + resourceName + "]"
);
}
}
@SuppressWarnings("unchecked")
final String[] indices = (params[1] != null) ? ((List<String>) params[1]).toArray(String[]::new) : new String[0];
final String analyticsCollectionName = (String) params[2];
final Long maybeUpdatedAtMillis = (Long) params[3];
long updatedAtMillis = (maybeUpdatedAtMillis != null ? maybeUpdatedAtMillis : System.currentTimeMillis());
final SearchApplicationTemplate template = (SearchApplicationTemplate) params[4];
return new SearchApplication(resourceName, indices, analyticsCollectionName, updatedAtMillis, template);
}
);
public static final ParseField NAME_FIELD = new ParseField("name");
public static final ParseField INDICES_FIELD = new ParseField("indices");
public static final ParseField ANALYTICS_COLLECTION_NAME_FIELD = new ParseField("analytics_collection_name");
public static final ParseField TEMPLATE_FIELD = new ParseField("template");
public static final ParseField TEMPLATE_SCRIPT_FIELD = new ParseField("script");
public static final ParseField UPDATED_AT_MILLIS_FIELD = new ParseField("updated_at_millis");
public static final ParseField BINARY_CONTENT_FIELD = new ParseField("binary_content");
static {
PARSER.declareStringOrNull(optionalConstructorArg(), NAME_FIELD);
PARSER.declareStringArray(optionalConstructorArg(), INDICES_FIELD);
PARSER.declareStringOrNull(optionalConstructorArg(), ANALYTICS_COLLECTION_NAME_FIELD);
PARSER.declareLong(optionalConstructorArg(), UPDATED_AT_MILLIS_FIELD);
PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> SearchApplicationTemplate.parse(p), null, TEMPLATE_FIELD);
}
/**
* Parses an {@link SearchApplication} from its {@param xContentType} representation in bytes.
*
* @param resourceName The name of the resource (must match the {@link SearchApplication} name).
* @param source The bytes that represents the {@link SearchApplication}.
* @param xContentType The format of the representation.
*
* @return The parsed {@link SearchApplication}.
*/
public static SearchApplication fromXContentBytes(String resourceName, BytesReference source, XContentType xContentType) {
try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) {
return SearchApplication.fromXContent(resourceName, parser);
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e);
}
}
/**
* Parses an {@link SearchApplication} through the provided {@param parser}.
*
* @param resourceName The name of the resource (must match the {@link SearchApplication} name).
* @param parser The {@link XContentType} parser.
*
* @return The parsed {@link SearchApplication}.
*/
public static SearchApplication fromXContent(String resourceName, XContentParser parser) throws IOException {
return PARSER.parse(parser, resourceName);
}
/**
* Converts the {@link SearchApplication} to XContent.
*
* @return The {@link XContentBuilder} containing the serialized {@link SearchApplication}.
*/
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME_FIELD.getPreferredName(), name);
if (indices != null) {
builder.field(INDICES_FIELD.getPreferredName(), indices);
}
if (analyticsCollectionName != null) {
builder.field(ANALYTICS_COLLECTION_NAME_FIELD.getPreferredName(), analyticsCollectionName);
}
builder.field(UPDATED_AT_MILLIS_FIELD.getPreferredName(), updatedAtMillis);
builder.field(TEMPLATE_FIELD.getPreferredName(), searchApplicationTemplate);
builder.endObject();
return builder;
}
/**
* Returns the name of the {@link SearchApplication}.
*
* @return The name of the {@link SearchApplication}.
*/
public String name() {
return name;
}
/**
* Returns the list of indices targeted by the {@link SearchApplication}.
*
* @return The list of indices targeted by the {@link SearchApplication}.
*/
public String[] indices() {
return indices;
}
/**
* Returns the name of the analytics collection linked with this {@link SearchApplication}.
*
* @return The analytics collection name.
*/
public @Nullable String analyticsCollectionName() {
return analyticsCollectionName;
}
/**
* Returns the timestamp in milliseconds that this {@link SearchApplication} was last modified.
*
* @return The last updated timestamp in milliseconds.
*/
public long updatedAtMillis() {
return updatedAtMillis;
}
public boolean hasStoredTemplate() {
return searchApplicationTemplate != null;
}
public SearchApplicationTemplate searchApplicationTemplateOrDefault() {
return hasStoredTemplate() ? searchApplicationTemplate : SearchApplicationTemplate.DEFAULT_TEMPLATE;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SearchApplication app = (SearchApplication) o;
return name.equals(app.name)
&& Objects.equals(analyticsCollectionName, app.analyticsCollectionName)
&& updatedAtMillis == app.updatedAtMillis()
&& Objects.equals(searchApplicationTemplate, app.searchApplicationTemplate);
}
@Override
public int hashCode() {
return Objects.hash(name, analyticsCollectionName, updatedAtMillis, searchApplicationTemplate);
}
@Override
public String toString() {
return Strings.toString(this);
}
/**
* Returns the merged {@link SearchApplication} from the current state and the provided {@param update}.
* This function returns the current instance if the update is a noop.
*
* @param update The source of the update represented in bytes.
* @param xContentType The format of the bytes.
* @return The merged {@link SearchApplication}.
*/
SearchApplication merge(BytesReference update, XContentType xContentType) throws IOException {
final Tuple<XContentType, Map<String, Object>> sourceAndContent;
try (BytesStreamOutput sourceBuffer = new BytesStreamOutput()) {
try (XContentBuilder builder = XContentFactory.jsonBuilder(sourceBuffer)) {
toXContent(builder, EMPTY_PARAMS);
}
sourceAndContent = XContentHelper.convertToMap(sourceBuffer.bytes(), true, XContentType.JSON);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
final Tuple<XContentType, Map<String, Object>> updateAndContent = XContentHelper.convertToMap(update, true, xContentType);
final Map<String, Object> newSourceAsMap = new HashMap<>(sourceAndContent.v2());
final boolean noop = XContentHelper.update(newSourceAsMap, updateAndContent.v2(), true) == false;
if (noop) {
return this;
}
try (BytesStreamOutput newSourceBuffer = new BytesStreamOutput()) {
try (XContentBuilder builder = XContentFactory.jsonBuilder(newSourceBuffer)) {
builder.value(newSourceAsMap);
}
return SearchApplication.fromXContentBytes(name, newSourceBuffer.bytes(), XContentType.JSON);
}
}
}
| SearchApplication |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java | {
"start": 1700,
"end": 1785
} | class ____ extends Encoder {
protected OutputStream out;
private | LegacyBinaryEncoder |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java | {
"start": 74713,
"end": 98401
} | class ____.
// This is NOT a problem because such metadata (eg containing GeoPoint) will most probably
// cause troubles in downstream code (eg storing the metadata), so this simply introduces a new failure mode.
// Also the malevolent metadata can only be produced by the transport client.
builder.field(RoleDescriptor.Fields.METADATA.getPreferredName(), roleDescriptor.getMetadata());
}
builder.endObject();
}
private static void withIndicesPrivileges(XContentBuilder builder, RoleDescriptor.IndicesPrivileges indicesPrivileges)
throws IOException {
builder.startObject();
builder.array("names", indicesPrivileges.getIndices());
builder.array("privileges", indicesPrivileges.getPrivileges());
if (indicesPrivileges.isUsingFieldLevelSecurity()) {
builder.startObject(RoleDescriptor.Fields.FIELD_PERMISSIONS.getPreferredName());
// always print the "grant" fields (even if the placeholder for all) because it looks better when avoiding the sole
// "except" field
builder.array(RoleDescriptor.Fields.GRANT_FIELDS.getPreferredName(), indicesPrivileges.getGrantedFields());
if (indicesPrivileges.hasDeniedFields()) {
builder.array(RoleDescriptor.Fields.EXCEPT_FIELDS.getPreferredName(), indicesPrivileges.getDeniedFields());
}
builder.endObject();
}
if (indicesPrivileges.isUsingDocumentLevelSecurity()) {
builder.field("query", indicesPrivileges.getQuery().utf8ToString());
}
// default for "allow_restricted_indices" is false, and it's very common to stay that way, so don't show it unless true
if (indicesPrivileges.allowRestrictedIndices()) {
builder.field("allow_restricted_indices", indicesPrivileges.allowRestrictedIndices());
}
builder.endObject();
}
LogEntryBuilder withRequestBody(DeleteUserRequest deleteUserRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_user");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.startObject("user")
.field("name", deleteUserRequest.username())
.endObject() // user
.endObject();
logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(DeleteRoleRequest deleteRoleRequest) throws IOException {
return withDeleteRole(deleteRoleRequest.name());
}
LogEntryBuilder withRequestBody(DeleteRoleMappingRequest deleteRoleMappingRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_role_mapping");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.startObject("role_mapping")
.field("name", deleteRoleMappingRequest.getName())
.endObject() // role_mapping
.endObject();
logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(InvalidateApiKeyRequest invalidateApiKeyRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "invalidate_apikeys");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject().startObject("apikeys");
if (invalidateApiKeyRequest.getIds() != null && invalidateApiKeyRequest.getIds().length > 0) {
builder.array("ids", invalidateApiKeyRequest.getIds());
}
if (Strings.hasLength(invalidateApiKeyRequest.getName())) {
builder.field("name", invalidateApiKeyRequest.getName());
}
builder.field("owned_by_authenticated_user", invalidateApiKeyRequest.ownedByAuthenticatedUser());
if (Strings.hasLength(invalidateApiKeyRequest.getUserName()) || Strings.hasLength(invalidateApiKeyRequest.getRealmName())) {
builder.startObject("user")
.field("name", invalidateApiKeyRequest.getUserName())
.field("realm", invalidateApiKeyRequest.getRealmName())
.endObject(); // user
}
builder.endObject() // apikeys
.endObject();
logEntry.with(INVALIDATE_API_KEYS_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(DeletePrivilegesRequest deletePrivilegesRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_privileges");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.startObject("privileges")
.field("application", deletePrivilegesRequest.application())
.array("privileges", deletePrivilegesRequest.privileges())
.endObject() // privileges
.endObject();
logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(CreateServiceAccountTokenRequest createServiceAccountTokenRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "create_service_token");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.startObject("service_token")
.field("namespace", createServiceAccountTokenRequest.getNamespace())
.field("service", createServiceAccountTokenRequest.getServiceName())
.field("name", createServiceAccountTokenRequest.getTokenName())
.endObject() // service_token
.endObject();
logEntry.with(CREATE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(DeleteServiceAccountTokenRequest deleteServiceAccountTokenRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_service_token");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.startObject("service_token")
.field("namespace", deleteServiceAccountTokenRequest.getNamespace())
.field("service", deleteServiceAccountTokenRequest.getServiceName())
.field("name", deleteServiceAccountTokenRequest.getTokenName())
.endObject() // service_token
.endObject();
logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(ActivateProfileRequest activateProfileRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "activate_user_profile");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject();
Grant grant = activateProfileRequest.getGrant();
withGrant(builder, grant);
builder.endObject();
logEntry.with(PUT_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(UpdateProfileDataRequest updateProfileDataRequest) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "update_user_profile_data");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.field("uid", updateProfileDataRequest.getUid())
.field("labels", updateProfileDataRequest.getLabels())
.field("data", updateProfileDataRequest.getData())
.endObject();
logEntry.with(PUT_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withRequestBody(SetProfileEnabledRequest setProfileEnabledRequest) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
if (setProfileEnabledRequest.isEnabled()) {
builder.startObject()
.startObject("enable")
.field("uid", setProfileEnabledRequest.getUid())
.endObject() // enable
.endObject();
logEntry.with(EVENT_ACTION_FIELD_NAME, "change_enable_user_profile");
} else {
builder.startObject()
.startObject("disable")
.field("uid", setProfileEnabledRequest.getUid())
.endObject() // disable
.endObject();
logEntry.with(EVENT_ACTION_FIELD_NAME, "change_disable_user_profile");
}
logEntry.with(CHANGE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
LogEntryBuilder withDeleteRole(String roleName) throws IOException {
logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_role");
XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.startObject()
.startObject("role")
.field("name", roleName)
.endObject() // role
.endObject();
logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder));
return this;
}
static void withGrant(XContentBuilder builder, Grant grant) throws IOException {
builder.startObject("grant").field("type", grant.getType());
if (grant.getUsername() != null) {
builder.startObject("user")
.field("name", grant.getUsername())
.field("has_password", grant.getPassword() != null)
.endObject(); // user
}
if (grant.getAccessToken() != null) {
builder.field("has_access_token", grant.getAccessToken() != null);
}
if (grant.getRunAsUsername() != null) {
builder.field("run_as", grant.getRunAsUsername());
}
builder.endObject();
}
LogEntryBuilder withRestUriAndMethod(HttpPreRequest request) {
final int queryStringIndex = request.uri().indexOf('?');
int queryStringLength = request.uri().indexOf('#');
if (queryStringLength < 0) {
queryStringLength = request.uri().length();
}
if (queryStringIndex < 0) {
logEntry.with(URL_PATH_FIELD_NAME, request.uri().substring(0, queryStringLength));
} else {
logEntry.with(URL_PATH_FIELD_NAME, request.uri().substring(0, queryStringIndex));
}
if (queryStringIndex > -1) {
logEntry.with(URL_QUERY_FIELD_NAME, request.uri().substring(queryStringIndex + 1, queryStringLength));
}
logEntry.with(REQUEST_METHOD_FIELD_NAME, request.method().toString());
return this;
}
LogEntryBuilder withRunAsSubject(Authentication authentication) {
logEntry.with(PRINCIPAL_FIELD_NAME, authentication.getAuthenticatingSubject().getUser().principal())
.with(PRINCIPAL_REALM_FIELD_NAME, authentication.getAuthenticatingSubject().getRealm().getName())
.with(PRINCIPAL_RUN_AS_FIELD_NAME, authentication.getEffectiveSubject().getUser().principal());
if (authentication.getAuthenticatingSubject().getRealm().getDomain() != null) {
logEntry.with(PRINCIPAL_DOMAIN_FIELD_NAME, authentication.getAuthenticatingSubject().getRealm().getDomain().name());
}
final Authentication.RealmRef lookedUpBy = authentication.isRunAs() ? authentication.getEffectiveSubject().getRealm() : null;
if (lookedUpBy != null) {
logEntry.with(PRINCIPAL_RUN_AS_REALM_FIELD_NAME, lookedUpBy.getName());
if (lookedUpBy.getDomain() != null) {
logEntry.with(PRINCIPAL_RUN_AS_DOMAIN_FIELD_NAME, lookedUpBy.getDomain().name());
}
}
return this;
}
LogEntryBuilder withRestOrigin(ThreadContext threadContext) {
assert LOCAL_ORIGIN_FIELD_VALUE.equals(logEntry.get(ORIGIN_TYPE_FIELD_NAME)); // this is the default
final InetSocketAddress restAddress = RemoteHostHeader.restRemoteAddress(threadContext);
if (restAddress != null) {
logEntry.with(ORIGIN_TYPE_FIELD_NAME, REST_ORIGIN_FIELD_VALUE)
.with(ORIGIN_ADDRESS_FIELD_NAME, NetworkAddress.format(restAddress));
}
// fall through to local_node default
return this;
}
LogEntryBuilder withRestOrTransportOrigin(TransportRequest transportRequest, ThreadContext threadContext) {
assert LOCAL_ORIGIN_FIELD_VALUE.equals(logEntry.get(ORIGIN_TYPE_FIELD_NAME)); // this is the default
final InetSocketAddress restAddress = RemoteHostHeader.restRemoteAddress(threadContext);
if (restAddress != null) {
logEntry.with(ORIGIN_TYPE_FIELD_NAME, REST_ORIGIN_FIELD_VALUE)
.with(ORIGIN_ADDRESS_FIELD_NAME, NetworkAddress.format(restAddress));
} else {
final InetSocketAddress address = transportRequest.remoteAddress();
if (address != null) {
logEntry.with(ORIGIN_TYPE_FIELD_NAME, TRANSPORT_ORIGIN_FIELD_VALUE)
.with(ORIGIN_ADDRESS_FIELD_NAME, NetworkAddress.format(address));
}
}
// fall through to local_node default
return this;
}
LogEntryBuilder withRequestBody(RestRequest request) {
if (includeRequestBody) {
final String requestContent = restRequestContent(request);
if (Strings.hasLength(requestContent)) {
logEntry.with(REQUEST_BODY_FIELD_NAME, requestContent);
}
}
return this;
}
LogEntryBuilder withRequestId(String requestId) {
if (requestId != null) {
logEntry.with(REQUEST_ID_FIELD_NAME, requestId);
}
return this;
}
LogEntryBuilder withThreadContext(ThreadContext threadContext) {
setThreadContextField(threadContext, AuditTrail.X_FORWARDED_FOR_HEADER, X_FORWARDED_FOR_FIELD_NAME);
setThreadContextField(threadContext, Task.X_OPAQUE_ID_HTTP_HEADER, OPAQUE_ID_FIELD_NAME);
setThreadContextField(threadContext, Task.TRACE_ID, TRACE_ID_FIELD_NAME);
return this;
}
private void setThreadContextField(ThreadContext threadContext, String threadContextFieldName, String auditLogFieldName) {
final String fieldValue = threadContext.getHeader(threadContextFieldName);
if (fieldValue != null) {
logEntry.with(auditLogFieldName, fieldValue);
}
}
LogEntryBuilder withAuthentication(Authentication authentication) {
addAuthenticationFieldsToLogEntry(logEntry, authentication);
return this;
}
static void addAuthenticationFieldsToLogEntry(StringMapMessage logEntry, Authentication authentication) {
assert false == authentication.isCloudApiKey() : "audit logging for Cloud API keys is not supported";
logEntry.with(PRINCIPAL_FIELD_NAME, authentication.getEffectiveSubject().getUser().principal());
logEntry.with(AUTHENTICATION_TYPE_FIELD_NAME, authentication.getAuthenticationType().toString());
if (authentication.isApiKey() || authentication.isCrossClusterAccess()) {
logEntry.with(
API_KEY_ID_FIELD_NAME,
(String) authentication.getAuthenticatingSubject().getMetadata().get(AuthenticationField.API_KEY_ID_KEY)
);
String apiKeyName = (String) authentication.getAuthenticatingSubject()
.getMetadata()
.get(AuthenticationField.API_KEY_NAME_KEY);
if (apiKeyName != null) {
logEntry.with(API_KEY_NAME_FIELD_NAME, apiKeyName);
}
final String creatorRealmName = ApiKeyService.getCreatorRealmName(authentication);
if (creatorRealmName != null) {
// can be null for API keys created before version 7.7
logEntry.with(PRINCIPAL_REALM_FIELD_NAME, creatorRealmName);
// No domain information is needed here since API key itself does not work across realms
}
if (authentication.isCrossClusterAccess()) {
final Authentication innerAuthentication = (Authentication) authentication.getAuthenticatingSubject()
.getMetadata()
.get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY);
final StringMapMessage crossClusterAccessLogEntry = logEntry.newInstance(Collections.emptyMap());
addAuthenticationFieldsToLogEntry(crossClusterAccessLogEntry, innerAuthentication);
try {
final XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true);
builder.map(crossClusterAccessLogEntry.getData());
logEntry.with(CROSS_CLUSTER_ACCESS_FIELD_NAME, Strings.toString(builder));
} catch (IOException e) {
throw new ElasticsearchSecurityException(
"Unexpected error while serializing cross cluster access authentication data",
e
);
}
}
} else {
final Authentication.RealmRef authenticatedBy = authentication.getAuthenticatingSubject().getRealm();
if (authentication.isRunAs()) {
final Authentication.RealmRef lookedUpBy = authentication.getEffectiveSubject().getRealm();
if (lookedUpBy != null) {
logEntry.with(PRINCIPAL_REALM_FIELD_NAME, lookedUpBy.getName());
if (lookedUpBy.getDomain() != null) {
logEntry.with(PRINCIPAL_DOMAIN_FIELD_NAME, lookedUpBy.getDomain().name());
}
}
logEntry.with(PRINCIPAL_RUN_BY_FIELD_NAME, authentication.getAuthenticatingSubject().getUser().principal())
// API key can run-as, when that happens, the following field will be _es_api_key,
// not the API key owner user's realm.
.with(PRINCIPAL_RUN_BY_REALM_FIELD_NAME, authenticatedBy.getName());
if (authenticatedBy.getDomain() != null) {
logEntry.with(PRINCIPAL_RUN_BY_DOMAIN_FIELD_NAME, authenticatedBy.getDomain().name());
}
// TODO: API key can run-as which means we could use extra fields (#84394)
} else {
logEntry.with(PRINCIPAL_REALM_FIELD_NAME, authenticatedBy.getName());
if (authenticatedBy.getDomain() != null) {
logEntry.with(PRINCIPAL_DOMAIN_FIELD_NAME, authenticatedBy.getDomain().name());
}
}
}
// TODO: service token info is logged in a separate authentication field (#84394)
if (authentication.isServiceAccount()) {
logEntry.with(
SERVICE_TOKEN_NAME_FIELD_NAME,
(String) authentication.getAuthenticatingSubject().getMetadata().get(TOKEN_NAME_FIELD)
)
.with(
SERVICE_TOKEN_TYPE_FIELD_NAME,
ServiceAccountSettings.REALM_TYPE
+ "_"
+ authentication.getAuthenticatingSubject().getMetadata().get(TOKEN_SOURCE_FIELD)
);
}
}
LogEntryBuilder with(String key, String value) {
if (value != null) {
logEntry.with(key, value);
}
return this;
}
LogEntryBuilder with(String key, String[] values) {
if (values != null) {
logEntry.with(key, toQuotedJsonArray(values));
}
return this;
}
LogEntryBuilder with(Map<String, Object> map) {
for (Entry<String, Object> entry : map.entrySet()) {
Object value = entry.getValue();
if (value.getClass().isArray()) {
logEntry.with(entry.getKey(), toQuotedJsonArray((Object[]) value));
} else {
logEntry.with(entry.getKey(), value);
}
}
return this;
}
void build() {
logger.info(AUDIT_MARKER, logEntry);
}
static String toQuotedJsonArray(Object[] values) {
assert values != null;
final StringBuilder stringBuilder = new StringBuilder();
final JsonStringEncoder jsonStringEncoder = JsonStringEncoder.getInstance();
stringBuilder.append("[");
for (final Object value : values) {
if (value != null) {
if (stringBuilder.length() > 1) {
stringBuilder.append(",");
}
stringBuilder.append("\"");
jsonStringEncoder.quoteAsString(value.toString(), stringBuilder);
stringBuilder.append("\"");
}
}
stringBuilder.append("]");
return stringBuilder.toString();
}
}
public static void registerSettings(List<Setting<?>> settings) {
settings.add(EMIT_HOST_ADDRESS_SETTING);
settings.add(EMIT_HOST_NAME_SETTING);
settings.add(EMIT_NODE_NAME_SETTING);
settings.add(EMIT_NODE_ID_SETTING);
settings.add(EMIT_CLUSTER_NAME_SETTING);
settings.add(EMIT_CLUSTER_UUID_SETTING);
settings.add(INCLUDE_EVENT_SETTINGS);
settings.add(EXCLUDE_EVENT_SETTINGS);
settings.add(INCLUDE_REQUEST_BODY);
settings.add(FILTER_POLICY_IGNORE_PRINCIPALS);
settings.add(FILTER_POLICY_IGNORE_INDICES);
settings.add(FILTER_POLICY_IGNORE_ROLES);
settings.add(FILTER_POLICY_IGNORE_REALMS);
settings.add(FILTER_POLICY_IGNORE_ACTIONS);
}
/**
* Builds the predicate for a single policy filter. The predicate matches events
* that will be ignored, aka filtered out, aka not logged. The event can be
* filtered by the following fields : `user`, `realm`, `role` and `index`.
* Predicates on each field are ANDed together to form the filter predicate of
* the policy.
*/
private static final | types |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/RememberMeConfigurerTests.java | {
"start": 23459,
"end": 24223
} | class ____ {
private SecurityContextRepository repository = spy(new SpySecurityContextRepository());
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize.anyRequest().authenticated())
.securityContext((context) -> context.securityContextRepository(this.repository))
.formLogin(withDefaults())
.rememberMe(withDefaults());
return http.build();
// @formatter:on
}
@Bean
SecurityContextRepository securityContextRepository() {
return this.repository;
}
@Bean
UserDetailsService userDetailsService() {
return new InMemoryUserDetailsManager(PasswordEncodedUser.user());
}
private static | SecurityContextRepositoryConfig |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/AtomicIntegerArrayAssertBaseTest.java | {
"start": 917,
"end": 1651
} | class ____ extends BaseTestTemplate<AtomicIntegerArrayAssert, AtomicIntegerArray> {
protected IntArrays arrays;
@Override
protected AtomicIntegerArrayAssert create_assertions() {
return new AtomicIntegerArrayAssert(new AtomicIntegerArray(emptyArray()));
}
@Override
protected void inject_internal_objects() {
super.inject_internal_objects();
arrays = mock(IntArrays.class);
assertions.arrays = arrays;
}
protected IntArrays getArrays(AtomicIntegerArrayAssert someAssertions) {
return someAssertions.arrays;
}
protected int[] internalArray() {
return array(getActual(assertions));
}
protected AssertionInfo info() {
return getInfo(assertions);
}
}
| AtomicIntegerArrayAssertBaseTest |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/observers/DeferredScalarObserverTest.java | {
"start": 1193,
"end": 4736
} | class ____ extends DeferredScalarObserver<Integer, Integer> {
private static final long serialVersionUID = -2793723002312330530L;
TakeFirst(Observer<? super Integer> downstream) {
super(downstream);
}
@Override
public void onNext(Integer value) {
upstream.dispose();
complete(value);
complete(value);
}
}
@Test
public void normal() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
TestObserver<Integer> to = new TestObserver<>();
TakeFirst source = new TakeFirst(to);
source.onSubscribe(Disposable.empty());
Disposable d = Disposable.empty();
source.onSubscribe(d);
assertTrue(d.isDisposed());
source.onNext(1);
to.assertResult(1);
TestHelper.assertError(errors, 0, ProtocolViolationException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void error() {
TestObserver<Integer> to = new TestObserver<>();
TakeFirst source = new TakeFirst(to);
source.onSubscribe(Disposable.empty());
source.onError(new TestException());
to.assertFailure(TestException.class);
}
@Test
public void complete() {
TestObserver<Integer> to = new TestObserver<>();
TakeFirst source = new TakeFirst(to);
source.onSubscribe(Disposable.empty());
source.onComplete();
to.assertResult();
}
@Test
public void dispose() {
TestObserver<Integer> to = new TestObserver<>();
TakeFirst source = new TakeFirst(to);
Disposable d = Disposable.empty();
source.onSubscribe(d);
assertFalse(d.isDisposed());
to.dispose();
assertTrue(d.isDisposed());
assertTrue(source.isDisposed());
}
@Test
public void fused() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
TestObserverEx<Integer> to = new TestObserverEx<>(QueueFuseable.ANY);
TakeFirst source = new TakeFirst(to);
Disposable d = Disposable.empty();
source.onSubscribe(d);
to.assertFuseable();
to.assertFusionMode(QueueFuseable.ASYNC);
source.onNext(1);
source.onNext(1);
source.onError(new TestException());
source.onComplete();
assertTrue(d.isDisposed());
to.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void fusedReject() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
TestObserverEx<Integer> to = new TestObserverEx<>(QueueFuseable.SYNC);
TakeFirst source = new TakeFirst(to);
Disposable d = Disposable.empty();
source.onSubscribe(d);
to.assertFuseable();
to.assertFusionMode(QueueFuseable.NONE);
source.onNext(1);
source.onNext(1);
source.onError(new TestException());
source.onComplete();
assertTrue(d.isDisposed());
to.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
static final | TakeFirst |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java | {
"start": 4755,
"end": 5455
} | class ____
out.writeString(inferenceId);
out.writeString(inferenceText);
out.writeString(field);
out.writeOptionalFloat(minScore);
if (out.getTransportVersion().supports(RERANKER_FAILURES_ALLOWED)) {
out.writeBoolean(failuresAllowed);
}
if (out.getTransportVersion().supports(RERANK_SNIPPETS)) {
out.writeOptionalWriteable(chunkScorerConfig);
}
}
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
// this object is not parsed, but it sometimes needs to be output as xcontent
// rankWindowSize serialization is handled by the parent | RankBuilder |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/server/firewall/HttpStatusExchangeRejectedHandler.java | {
"start": 1132,
"end": 2120
} | class ____ implements ServerExchangeRejectedHandler {
private static final Log logger = LogFactory.getLog(HttpStatusExchangeRejectedHandler.class);
private final HttpStatus status;
/**
* Constructs an instance which uses {@code 400} as response code.
*/
public HttpStatusExchangeRejectedHandler() {
this(HttpStatus.BAD_REQUEST);
}
/**
* Constructs an instance which uses a configurable http code as response.
* @param status http status code to use
*/
public HttpStatusExchangeRejectedHandler(HttpStatus status) {
this.status = status;
}
@Override
public Mono<Void> handle(ServerWebExchange exchange,
ServerExchangeRejectedException serverExchangeRejectedException) {
return Mono.fromRunnable(() -> {
logger.debug(
LogMessage.format("Rejecting request due to: %s", serverExchangeRejectedException.getMessage()),
serverExchangeRejectedException);
exchange.getResponse().setStatusCode(this.status);
});
}
}
| HttpStatusExchangeRejectedHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schematools/FallbackSchemaManagementToolTests.java | {
"start": 2665,
"end": 3953
} | class ____ {
@Test
public void testFallbackToolIsPickedUp(ServiceRegistryScope registryScope, DomainModelScope modelScope) {
final StandardServiceRegistry registry = registryScope.getRegistry();
final MetadataImplementor domainModel = modelScope.getDomainModel();
final HibernateSchemaManagementTool tool = (HibernateSchemaManagementTool) registry.getService( SchemaManagementTool.class );
final Map<String, Object> settings = registry.getService( ConfigurationService.class ).getSettings();
final SchemaCreator schemaCreator = tool.getSchemaCreator( settings );
schemaCreator.doCreation(
domainModel,
new ExecutionOptionsTestImpl(),
contributed -> true,
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
return SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
},
new TargetDescriptor() {
@Override
public EnumSet<TargetType> getTargetTypes() {
return EnumSet.of( TargetType.DATABASE );
}
@Override
public ScriptTargetOutput getScriptTargetOutput() {
return null;
}
}
);
assertThat( CollectingGenerationTarget.commands ).hasSize( 1 );
}
private static | FallbackSchemaManagementToolTests |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/jar/FastJarBuilder.java | {
"start": 27395,
"end": 28398
} | class ____ {
private Path transformedJar;
private Path generatedJar;
private Path runnerJar;
private final List<Path> dependencies = new ArrayList<>();
public FastJarJarsBuilder setTransformedJar(Path transformedJar) {
this.transformedJar = transformedJar;
return this;
}
public FastJarJarsBuilder setGeneratedJar(Path generatedJar) {
this.generatedJar = generatedJar;
return this;
}
public FastJarJarsBuilder setRunnerJar(Path runnerJar) {
this.runnerJar = runnerJar;
return this;
}
public FastJarJarsBuilder addDependency(Path dependency) {
this.dependencies.add(dependency);
return this;
}
public FastJarJars build() {
return new FastJarJars(this);
}
}
}
}
| FastJarJarsBuilder |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilderTests.java | {
"start": 24700,
"end": 24777
} | class ____ {
}
@RequestMapping("/${context.test.mapping}")
| InvalidController |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/test/CustomBlockHoundIntegration.java | {
"start": 812,
"end": 4453
} | class ____ implements BlockHoundIntegration {
@Override
public void applyTo(BlockHound.Builder builder) {
// builder.blockingMethodCallback(it -> {
// Error error = new Error(it.toString());
// error.printStackTrace();
// throw error;
// });
// Uses Unsafe#park
builder.allowBlockingCallsInside("reactor.core.scheduler.SchedulerTask", "dispose");
// Uses
// ch.qos.logback.classic.spi.PackagingDataCalculator#getImplementationVersion
builder.allowBlockingCallsInside(
"org.springframework.boot.autoconfigure.web.reactive.error.AbstractErrorWebExceptionHandler",
"logError");
builder.allowBlockingCallsInside("reactor.util.Loggers$Slf4JLogger", "debug");
builder.allowBlockingCallsInside("reactor.util.Loggers$Slf4JLogger", "info");
builder.allowBlockingCallsInside("reactor.util.Loggers$Slf4JLogger", "error");
// Uses org.springframework.util.JdkIdGenerator#generateId
// Uses UUID#randomUUID
builder.allowBlockingCallsInside("org.springframework.web.server.session.InMemoryWebSessionStore",
"lambda$createWebSession$0");
// Uses java.util.Random#nextInt
builder.allowBlockingCallsInside("org.springframework.util.MimeTypeUtils", "generateMultipartBoundary");
// SPRING DATA REDIS RELATED
// Uses Unsafe#park
builder.allowBlockingCallsInside("org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory",
"getReactiveConnection");
// NETTY RELATED
// Uses Thread#sleep
builder.allowBlockingCallsInside("io.netty.channel.nio.NioEventLoop", "handleLoopException");
builder.allowBlockingCallsInside("io.netty.util.concurrent.SingleThreadEventExecutor", "confirmShutdown");
// Uses Unsafe#park
builder.allowBlockingCallsInside("io.netty.util.concurrent.GlobalEventExecutor", "execute");
builder.allowBlockingCallsInside("io.netty.util.concurrent.SingleThreadEventExecutor$6", "run");
// builder.allowBlockingCallsInside("io.netty.util.concurrent.GlobalEventExecutor",
// "takeTask");
// builder.allowBlockingCallsInside("io.netty.util.concurrent.GlobalEventExecutor",
// "addTask");
builder.allowBlockingCallsInside("io.netty.util.concurrent.FastThreadLocalRunnable", "run");
// SECURITY RELATED
// For HTTPS traffic
builder.allowBlockingCallsInside("io.netty.handler.ssl.SslHandler", "channelActive");
builder.allowBlockingCallsInside("io.netty.handler.ssl.SslHandler", "channelInactive");
builder.allowBlockingCallsInside("io.netty.handler.ssl.SslHandler", "unwrap");
builder.allowBlockingCallsInside("io.netty.handler.ssl.SslContext", "newClientContextInternal");
// Uses org.springframework.security.crypto.bcrypt.BCrypt#gensalt
// Uses java.security.SecureRandom#nextBytes
builder.allowBlockingCallsInside(
"org.springframework.security.authentication.AbstractUserDetailsReactiveAuthenticationManager",
"lambda$authenticate$4");
// Uses java.io.RandomAccessFile#readBytes
builder.allowBlockingCallsInside("org.springframework.http.codec.AbstractJacksonEncoder", "encodeValue");
builder.allowBlockingCallsInside("org.springframework.context.annotation.ConfigurationClassParser", "parse");
builder.allowBlockingCallsInside(
"org.springframework.context.annotation.ConfigurationClassBeanDefinitionReader", "loadBeanDefinitions");
builder.allowBlockingCallsInside("org.springframework.core.type.classreading.SimpleMetadataReader",
"getClassReader");
builder.allowBlockingCallsInside("io.micrometer.context.ContextRegistry", "loadContextAccessors");
builder.allowBlockingCallsInside("io.micrometer.context.ContextRegistry", "loadThreadLocalAccessors");
}
}
| CustomBlockHoundIntegration |
java | netty__netty | transport-sctp/src/main/java/io/netty/channel/sctp/SctpNotificationHandler.java | {
"start": 1309,
"end": 2496
} | class ____ extends AbstractNotificationHandler<Object> {
private final SctpChannel sctpChannel;
public SctpNotificationHandler(SctpChannel sctpChannel) {
this.sctpChannel = ObjectUtil.checkNotNull(sctpChannel, "sctpChannel");
}
@Override
public HandlerResult handleNotification(AssociationChangeNotification notification, Object o) {
fireEvent(notification);
return HandlerResult.CONTINUE;
}
@Override
public HandlerResult handleNotification(PeerAddressChangeNotification notification, Object o) {
fireEvent(notification);
return HandlerResult.CONTINUE;
}
@Override
public HandlerResult handleNotification(SendFailedNotification notification, Object o) {
fireEvent(notification);
return HandlerResult.CONTINUE;
}
@Override
public HandlerResult handleNotification(ShutdownNotification notification, Object o) {
fireEvent(notification);
sctpChannel.close();
return HandlerResult.RETURN;
}
private void fireEvent(Notification notification) {
sctpChannel.pipeline().fireUserEventTriggered(notification);
}
}
| SctpNotificationHandler |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/specific/int$.java | {
"start": 4175,
"end": 6388
} | class ____ extends SpecificRecordBuilderBase<int$>
implements org.apache.avro.data.RecordBuilder<int$> {
/** Creates a new Builder */
private Builder() {
super(SCHEMA$, MODEL$);
}
/**
* Creates a Builder by copying an existing Builder.
*
* @param other The existing Builder to copy.
*/
private Builder(Builder other) {
super(other);
}
/**
* Creates a Builder by copying an existing int$ instance
*
* @param other The existing instance to copy.
*/
private Builder(int$ other) {
super(SCHEMA$, MODEL$);
}
@Override
@SuppressWarnings("unchecked")
public int$ build() {
try {
int$ record = new int$();
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<int$> WRITER$ = (org.apache.avro.io.DatumWriter<int$>) MODEL$
.createDatumWriter(SCHEMA$);
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<int$> READER$ = (org.apache.avro.io.DatumReader<int$>) MODEL$
.createDatumReader(SCHEMA$);
@Override
public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
@Override
protected boolean hasCustomCoders() {
return true;
}
@Override
public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException {
}
@Override
public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException {
org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
if (fieldOrder == null) {
} else {
for (int i = 0; i < 0; i++) {
switch (fieldOrder[i].pos()) {
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
}
| Builder |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/main/java/org/apache/camel/test/main/junit5/ReplaceInRegistry.java | {
"start": 2392,
"end": 2736
} | class ____ in a parent class.
* <p/>
* In the next example, the annotation {@code ReplaceInRegistry} on the method {@code myGreetings} whose return type is
* {@code Greetings} indicates that the bean with the same name and type should be replaced by an instance of
* {@code CustomGreetings}.
*
* <pre>
* <code>
*
* @CamelMainTest
* | or |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/util/ResourceLoaderTest.java | {
"start": 14289,
"end": 14326
} | class ____ {"));
}
}
}
| MyFoo |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java | {
"start": 13838,
"end": 15170
} | interface ____ {
void collect(int doc, long bucket) throws IOException;
}
@Override
public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
return buildAggregationsForFixedBucketCount(
owningBucketOrds,
ranges.length,
(offsetInOwningOrd, docCount, subAggregationResults) -> {
Range range = ranges[offsetInOwningOrd];
return new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, docCount, subAggregationResults);
},
buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata())
);
}
@Override
public InternalAggregation buildEmptyAggregation() {
// Create empty buckets with 0 count and with empty sub-aggs so we can merge them with non-empty aggs
InternalAggregations subAggs = buildEmptySubAggregations();
List<InternalBinaryRange.Bucket> buckets = new ArrayList<>(ranges.length);
for (Range range : ranges) {
InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, 0, subAggs);
buckets.add(bucket);
}
return new InternalBinaryRange(name, format, keyed, buckets, metadata());
}
}
| DocCollector |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/groupwindow/AbstractWindowProperty.java | {
"start": 1010,
"end": 1252
} | class ____ {@link WindowProperty}.
*
* @deprecated The POJOs in this package are used to represent the deprecated Group Window feature.
* Currently, they also used to configure Python operators.
*/
@Deprecated
@Internal
public abstract | of |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamWindowAggregate.java | {
"start": 2124,
"end": 4129
} | class ____<KIn, VIn, VAgg, W extends Window> implements KStreamAggProcessorSupplier<KIn, VIn, Windowed<KIn>, VAgg> {
private static final Logger log = LoggerFactory.getLogger(KStreamWindowAggregate.class);
private final String storeName;
private final StoreFactory storeFactory;
private final Windows<W> windows;
private final Initializer<VAgg> initializer;
private final Aggregator<? super KIn, ? super VIn, VAgg> aggregator;
private final EmitStrategy emitStrategy;
private boolean sendOldValues = false;
public KStreamWindowAggregate(final Windows<W> windows,
final StoreFactory storeFactory,
final EmitStrategy emitStrategy,
final Initializer<VAgg> initializer,
final Aggregator<? super KIn, ? super VIn, VAgg> aggregator) {
this.windows = windows;
this.storeName = storeFactory.storeName();
this.storeFactory = storeFactory;
this.emitStrategy = emitStrategy;
this.initializer = initializer;
this.aggregator = aggregator;
if (emitStrategy.type() == StrategyType.ON_WINDOW_CLOSE) {
if (!(windows instanceof TimeWindows)) {
throw new IllegalArgumentException("ON_WINDOW_CLOSE strategy is only supported for "
+ "TimeWindows and SlidingWindows for TimeWindowedKStream");
}
}
}
@Override
public Set<StoreBuilder<?>> stores() {
return Collections.singleton(new FactoryWrappingStoreBuilder<>(storeFactory));
}
@Override
public Processor<KIn, VIn, Windowed<KIn>, Change<VAgg>> get() {
return new KStreamWindowAggregateProcessor(storeName, emitStrategy, sendOldValues);
}
public Windows<W> windows() {
return windows;
}
@Override
public void enableSendingOldValues() {
sendOldValues = true;
}
private | KStreamWindowAggregate |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/functions/UserDefinedFunctionHelperTest.java | {
"start": 17777,
"end": 18130
} | class ____
extends TableAggregateFunction<String, String> {
public void accumulate(String acc, String in) {
// nothing to do
}
@Override
public String createAccumulator() {
return null;
}
}
/** Valid table function. */
public static | MissingEmitTableAggregateFunction |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java | {
"start": 2209,
"end": 26526
} | class ____ extends ESIntegTestCase {
@Override
protected int minimumNumberOfShards() {
return 2;
}
public String findNonMatchingRoutingValue(String index, String id) {
ClusterState state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).all().get().getState();
IndexMetadata metadata = state.metadata().getProject().index(index);
IndexMetadata withoutRoutingRequired = IndexMetadata.builder(metadata).putMapping("{}").build();
IndexRouting indexRouting = IndexRouting.fromIndexMetadata(withoutRoutingRequired);
int routing = -1;
int idShard;
int routingShard;
do {
idShard = indexRouting.getShard(id, null);
routingShard = indexRouting.getShard(id, Integer.toString(++routing));
} while (idShard == routingShard);
return Integer.toString(routing);
}
public void testSimpleCrudRouting() throws Exception {
createIndex("test");
ensureGreen();
String routingValue = findNonMatchingRoutingValue("test", "1");
logger.info("--> indexing with id [1], and routing [{}]", routingValue);
prepareIndex("test").setId("1")
.setRouting(routingValue)
.setSource("field", "value1")
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
logger.info("--> verifying get with no routing, should not find anything");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").get().isExists(), equalTo(false));
}
logger.info("--> verifying get with routing, should find");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").setRouting(routingValue).get().isExists(), equalTo(true));
}
logger.info("--> deleting with no routing, should not delete anything");
client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").get().isExists(), equalTo(false));
assertThat(client().prepareGet("test", "1").setRouting(routingValue).get().isExists(), equalTo(true));
}
logger.info("--> deleting with routing, should delete");
client().prepareDelete("test", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").get().isExists(), equalTo(false));
assertThat(client().prepareGet("test", "1").setRouting(routingValue).get().isExists(), equalTo(false));
}
logger.info("--> indexing with id [1], and routing [0]");
prepareIndex("test").setId("1")
.setRouting(routingValue)
.setSource("field", "value1")
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
logger.info("--> verifying get with no routing, should not find anything");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").get().isExists(), equalTo(false));
}
logger.info("--> verifying get with routing, should find");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").setRouting(routingValue).get().isExists(), equalTo(true));
}
}
public void testSimpleSearchRouting() {
createIndex("test");
ensureGreen();
String routingValue = findNonMatchingRoutingValue("test", "1");
logger.info("--> indexing with id [1], and routing [{}]", routingValue);
prepareIndex("test").setId("1")
.setRouting(routingValue)
.setSource("field", "value1")
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
logger.info("--> verifying get with no routing, should not find anything");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").get().isExists(), equalTo(false));
}
logger.info("--> verifying get with routing, should find");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "1").setRouting(routingValue).get().isExists(), equalTo(true));
}
logger.info("--> search with no routing, should fine one");
for (int i = 0; i < 5; i++) {
assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1L);
}
logger.info("--> search with wrong routing, should not find");
for (int i = 0; i < 5; i++) {
assertHitCount(
0,
prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()),
prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery())
);
}
logger.info("--> search with correct routing, should find");
for (int i = 0; i < 5; i++) {
assertHitCount(
1,
prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()),
prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery())
);
}
String secondRoutingValue = "1";
logger.info("--> indexing with id [{}], and routing [{}]", routingValue, secondRoutingValue);
prepareIndex("test").setId(routingValue)
.setRouting(secondRoutingValue)
.setSource("field", "value1")
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
logger.info("--> search with no routing, should fine two");
for (int i = 0; i < 5; i++) {
assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2);
assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2);
}
logger.info("--> search with {} routing, should find one", routingValue);
for (int i = 0; i < 5; i++) {
assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1);
assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1);
}
logger.info("--> search with {} routing, should find one", secondRoutingValue);
for (int i = 0; i < 5; i++) {
assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1);
assertHitCount(prepareSearch().setSize(0).setRouting(secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 1);
}
logger.info("--> search with {},{} indexRoutings , should find two", routingValue, "1");
for (int i = 0; i < 5; i++) {
assertHitCount(prepareSearch().setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 2);
assertHitCount(
prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()),
2
);
}
logger.info("--> search with {},{},{} indexRoutings , should find two", routingValue, secondRoutingValue, routingValue);
for (int i = 0; i < 5; i++) {
assertHitCount(
prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue).setQuery(QueryBuilders.matchAllQuery()),
2
);
assertHitCount(
prepareSearch().setSize(0)
.setRouting(routingValue, secondRoutingValue, routingValue)
.setQuery(QueryBuilders.matchAllQuery()),
2
);
}
}
public void testRequiredRoutingCrudApis() throws Exception {
indicesAdmin().prepareCreate("test")
.addAlias(new Alias("alias"))
.setMapping(
XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("_routing")
.field("required", true)
.endObject()
.endObject()
.endObject()
)
.get();
ensureGreen();
String routingValue = findNonMatchingRoutingValue("test", "1");
logger.info("--> indexing with id [1], and routing [{}]", routingValue);
prepareIndex(indexOrAlias()).setId("1")
.setRouting(routingValue)
.setSource("field", "value1")
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
logger.info("--> verifying get with no routing, should fail");
logger.info("--> indexing with id [1], with no routing, should fail");
try {
prepareIndex(indexOrAlias()).setId("1").setSource("field", "value1").get();
fail("index with missing routing when routing is required should fail");
} catch (ElasticsearchException e) {
assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class));
}
logger.info("--> verifying get with routing, should find");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).get().isExists(), equalTo(true));
}
logger.info("--> deleting with no routing, should fail");
try {
client().prepareDelete(indexOrAlias(), "1").get();
fail("delete with missing routing when routing is required should fail");
} catch (ElasticsearchException e) {
assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class));
}
for (int i = 0; i < 5; i++) {
try {
client().prepareGet(indexOrAlias(), "1").get().isExists();
fail("get with missing routing when routing is required should fail");
} catch (RoutingMissingException e) {
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]"));
}
assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).get().isExists(), equalTo(true));
}
try {
client().prepareUpdate(indexOrAlias(), "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").get();
fail("update with missing routing when routing is required should fail");
} catch (ElasticsearchException e) {
assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class));
}
client().prepareUpdate(indexOrAlias(), "1").setRouting(routingValue).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").get();
indicesAdmin().prepareRefresh().get();
for (int i = 0; i < 5; i++) {
try {
client().prepareGet(indexOrAlias(), "1").get().isExists();
fail();
} catch (RoutingMissingException e) {
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]"));
}
GetResponse getResponse = client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).get();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getSourceAsMap().get("field"), equalTo("value2"));
}
client().prepareDelete(indexOrAlias(), "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
for (int i = 0; i < 5; i++) {
try {
client().prepareGet(indexOrAlias(), "1").get().isExists();
fail();
} catch (RoutingMissingException e) {
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]"));
}
assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).get().isExists(), equalTo(false));
}
}
public void testRequiredRoutingBulk() throws Exception {
indicesAdmin().prepareCreate("test")
.addAlias(new Alias("alias"))
.setMapping(
XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("_routing")
.field("required", true)
.endObject()
.endObject()
.endObject()
)
.get();
ensureGreen();
{
String index = indexOrAlias();
BulkResponse bulkResponse = client().prepareBulk()
.add(new IndexRequest(index).id("1").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.get();
assertThat(bulkResponse.getItems().length, equalTo(1));
assertThat(bulkResponse.hasFailures(), equalTo(true));
for (BulkItemResponse bulkItemResponse : bulkResponse) {
assertThat(bulkItemResponse.isFailed(), equalTo(true));
assertThat(bulkItemResponse.getOpType(), equalTo(DocWriteRequest.OpType.INDEX));
assertThat(bulkItemResponse.getFailure().getStatus(), equalTo(RestStatus.BAD_REQUEST));
assertThat(bulkItemResponse.getFailure().getCause(), instanceOf(RoutingMissingException.class));
assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[1]"));
}
}
{
String index = indexOrAlias();
BulkResponse bulkResponse = client().prepareBulk()
.add(new IndexRequest(index).id("1").routing("0").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.get();
assertThat(bulkResponse.hasFailures(), equalTo(false));
}
{
BulkResponse bulkResponse = client().prepareBulk()
.add(new UpdateRequest(indexOrAlias(), "1").doc(Requests.INDEX_CONTENT_TYPE, "field", "value2"))
.get();
assertThat(bulkResponse.getItems().length, equalTo(1));
assertThat(bulkResponse.hasFailures(), equalTo(true));
for (BulkItemResponse bulkItemResponse : bulkResponse) {
assertThat(bulkItemResponse.isFailed(), equalTo(true));
assertThat(bulkItemResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
assertThat(bulkItemResponse.getFailure().getStatus(), equalTo(RestStatus.BAD_REQUEST));
assertThat(bulkItemResponse.getFailure().getCause(), instanceOf(RoutingMissingException.class));
assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[1]"));
}
}
{
BulkResponse bulkResponse = client().prepareBulk()
.add(new UpdateRequest(indexOrAlias(), "1").doc(Requests.INDEX_CONTENT_TYPE, "field", "value2").routing("0"))
.get();
assertThat(bulkResponse.hasFailures(), equalTo(false));
}
{
String index = indexOrAlias();
BulkResponse bulkResponse = client().prepareBulk().add(new DeleteRequest(index).id("1")).get();
assertThat(bulkResponse.getItems().length, equalTo(1));
assertThat(bulkResponse.hasFailures(), equalTo(true));
for (BulkItemResponse bulkItemResponse : bulkResponse) {
assertThat(bulkItemResponse.isFailed(), equalTo(true));
assertThat(bulkItemResponse.getOpType(), equalTo(DocWriteRequest.OpType.DELETE));
assertThat(bulkItemResponse.getFailure().getStatus(), equalTo(RestStatus.BAD_REQUEST));
assertThat(bulkItemResponse.getFailure().getCause(), instanceOf(RoutingMissingException.class));
assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[1]"));
}
}
{
String index = indexOrAlias();
BulkResponse bulkResponse = client().prepareBulk().add(new DeleteRequest(index).id("1").routing("0")).get();
assertThat(bulkResponse.getItems().length, equalTo(1));
assertThat(bulkResponse.hasFailures(), equalTo(false));
}
}
public void testRequiredRoutingMappingVariousAPIs() throws Exception {
indicesAdmin().prepareCreate("test")
.addAlias(new Alias("alias"))
.setMapping(
XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("_routing")
.field("required", true)
.endObject()
.endObject()
.endObject()
)
.get();
ensureGreen();
String routingValue = findNonMatchingRoutingValue("test", "1");
logger.info("--> indexing with id [1], and routing [{}]", routingValue);
prepareIndex(indexOrAlias()).setId("1").setRouting(routingValue).setSource("field", "value1").get();
logger.info("--> indexing with id [2], and routing [{}]", routingValue);
prepareIndex(indexOrAlias()).setId("2")
.setRouting(routingValue)
.setSource("field", "value2")
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
logger.info("--> verifying get with id [1] with routing [0], should succeed");
assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).get().isExists(), equalTo(true));
logger.info("--> verifying get with id [1], with no routing, should fail");
try {
client().prepareGet(indexOrAlias(), "1").get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]"));
}
logger.info("--> verifying explain with id [2], with routing [0], should succeed");
ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "2")
.setQuery(QueryBuilders.matchAllQuery())
.setRouting(routingValue)
.get();
assertThat(explainResponse.isExists(), equalTo(true));
assertThat(explainResponse.isMatch(), equalTo(true));
logger.info("--> verifying explain with id [2], with no routing, should fail");
try {
client().prepareExplain(indexOrAlias(), "2").setQuery(QueryBuilders.matchAllQuery()).get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[2]"));
}
logger.info("--> verifying term vector with id [1], with routing [0], should succeed");
TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "1").setRouting(routingValue).get();
assertThat(termVectorsResponse.isExists(), equalTo(true));
assertThat(termVectorsResponse.getId(), equalTo("1"));
try {
client().prepareTermVectors(indexOrAlias(), "1").get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]"));
}
UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1")
.setRouting(routingValue)
.setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1")
.get();
assertThat(updateResponse.getId(), equalTo("1"));
assertThat(updateResponse.getVersion(), equalTo(2L));
try {
client().prepareUpdate(indexOrAlias(), "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]"));
}
logger.info("--> verifying mget with ids [1,2], with routing [0], should succeed");
MultiGetResponse multiGetResponse = client().prepareMultiGet()
.add(new MultiGetRequest.Item(indexOrAlias(), "1").routing("0"))
.add(new MultiGetRequest.Item(indexOrAlias(), "2").routing("0"))
.get();
assertThat(multiGetResponse.getResponses().length, equalTo(2));
assertThat(multiGetResponse.getResponses()[0].isFailed(), equalTo(false));
assertThat(multiGetResponse.getResponses()[0].getResponse().getId(), equalTo("1"));
assertThat(multiGetResponse.getResponses()[1].isFailed(), equalTo(false));
assertThat(multiGetResponse.getResponses()[1].getResponse().getId(), equalTo("2"));
logger.info("--> verifying mget with ids [1,2], with no routing, should fail");
multiGetResponse = client().prepareMultiGet()
.add(new MultiGetRequest.Item(indexOrAlias(), "1"))
.add(new MultiGetRequest.Item(indexOrAlias(), "2"))
.get();
assertThat(multiGetResponse.getResponses().length, equalTo(2));
assertThat(multiGetResponse.getResponses()[0].isFailed(), equalTo(true));
assertThat(multiGetResponse.getResponses()[0].getFailure().getId(), equalTo("1"));
assertThat(multiGetResponse.getResponses()[0].getFailure().getMessage(), equalTo("routing is required for [test]/[1]"));
assertThat(multiGetResponse.getResponses()[1].isFailed(), equalTo(true));
assertThat(multiGetResponse.getResponses()[1].getFailure().getId(), equalTo("2"));
assertThat(multiGetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[2]"));
MultiTermVectorsResponse multiTermVectorsResponse = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "1").routing(routingValue))
.add(new TermVectorsRequest(indexOrAlias(), "2").routing(routingValue))
.get();
assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(false));
assertThat(multiTermVectorsResponse.getResponses()[0].getResponse().getId(), equalTo("1"));
assertThat(multiTermVectorsResponse.getResponses()[0].getResponse().isExists(), equalTo(true));
assertThat(multiTermVectorsResponse.getResponses()[1].getId(), equalTo("2"));
assertThat(multiTermVectorsResponse.getResponses()[1].isFailed(), equalTo(false));
assertThat(multiTermVectorsResponse.getResponses()[1].getResponse().getId(), equalTo("2"));
assertThat(multiTermVectorsResponse.getResponses()[1].getResponse().isExists(), equalTo(true));
multiTermVectorsResponse = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "1"))
.add(new TermVectorsRequest(indexOrAlias(), "2"))
.get();
assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(true));
assertThat(
multiTermVectorsResponse.getResponses()[0].getFailure().getCause().getMessage(),
equalTo("routing is required for [test]/[1]")
);
assertThat(multiTermVectorsResponse.getResponses()[0].getResponse(), nullValue());
assertThat(multiTermVectorsResponse.getResponses()[1].getId(), equalTo("2"));
assertThat(multiTermVectorsResponse.getResponses()[1].isFailed(), equalTo(true));
assertThat(multiTermVectorsResponse.getResponses()[1].getResponse(), nullValue());
assertThat(
multiTermVectorsResponse.getResponses()[1].getFailure().getCause().getMessage(),
equalTo("routing is required for [test]/[2]")
);
}
private static String indexOrAlias() {
return randomBoolean() ? "test" : "alias";
}
}
| SimpleRoutingIT |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/MethodParameter.java | {
"start": 5580,
"end": 7617
} | class ____ set.
* @param executable the Executable to specify a parameter for
* @param parameterIndex the index of the parameter
* @param containingClass the containing class
* @since 5.2
*/
MethodParameter(Executable executable, int parameterIndex, @Nullable Class<?> containingClass) {
Assert.notNull(executable, "Executable must not be null");
this.executable = executable;
this.parameterIndex = validateIndex(executable, parameterIndex);
this.nestingLevel = 1;
this.containingClass = containingClass;
}
/**
* Copy constructor, resulting in an independent MethodParameter object
* based on the same metadata and cache state that the original object was in.
* @param original the original MethodParameter object to copy from
*/
public MethodParameter(MethodParameter original) {
Assert.notNull(original, "Original must not be null");
this.executable = original.executable;
this.parameterIndex = original.parameterIndex;
this.parameter = original.parameter;
this.nestingLevel = original.nestingLevel;
this.typeIndexesPerLevel = original.typeIndexesPerLevel;
this.containingClass = original.containingClass;
this.parameterType = original.parameterType;
this.genericParameterType = original.genericParameterType;
this.parameterAnnotations = original.parameterAnnotations;
this.parameterNameDiscoverer = original.parameterNameDiscoverer;
this.parameterName = original.parameterName;
}
/**
* Return the wrapped Method, if any.
* <p>Note: Either Method or Constructor is available.
* @return the Method, or {@code null} if none
*/
public @Nullable Method getMethod() {
return (this.executable instanceof Method method ? method : null);
}
/**
* Return the wrapped Constructor, if any.
* <p>Note: Either Method or Constructor is available.
* @return the Constructor, or {@code null} if none
*/
public @Nullable Constructor<?> getConstructor() {
return (this.executable instanceof Constructor<?> constructor ? constructor : null);
}
/**
* Return the | already |
java | quarkusio__quarkus | integration-tests/reactive-messaging-mqtt/src/main/java/io/quarkus/it/mqtt/PeopleManager.java | {
"start": 502,
"end": 1275
} | class ____ {
@Inject
@Channel("people-out")
Emitter<byte[]> emitter;
private final Logger log = Logger.getLogger(PeopleManager.class);
private final List<String> list = new CopyOnWriteArrayList<>();
@Incoming("people-in")
public void consume(byte[] raw) {
list.add(new String(raw));
}
public List<String> getPeople() {
log.info("Returning people " + list);
return list;
}
public void seedPeople() {
Stream
.of("bob",
"alice",
"tom",
"jerry",
"anna",
"ken")
.forEach(s -> emitter.send(s.getBytes(StandardCharsets.UTF_8)));
}
}
| PeopleManager |
java | quarkusio__quarkus | extensions/hibernate-validator/spi/src/main/java/io/quarkus/hibernate/validator/spi/BeanValidationTraversableResolverBuildItem.java | {
"start": 212,
"end": 664
} | class ____ extends SimpleBuildItem {
private final BiPredicate<Object, String> attributeLoadedPredicate;
public BeanValidationTraversableResolverBuildItem(BiPredicate<Object, String> attributeLoadedPredicate) {
this.attributeLoadedPredicate = attributeLoadedPredicate;
}
public BiPredicate<Object, String> getAttributeLoadedPredicate() {
return attributeLoadedPredicate;
}
}
| BeanValidationTraversableResolverBuildItem |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/invoker/mvn/CommonsCliMavenOptions.java | {
"start": 1171,
"end": 7547
} | class ____ extends CommonsCliOptions implements MavenOptions {
public static CommonsCliMavenOptions parse(String source, String[] args) throws ParseException {
CLIManager cliManager = new CLIManager();
return new CommonsCliMavenOptions(source, cliManager, cliManager.parse(args));
}
protected CommonsCliMavenOptions(String source, CLIManager cliManager, CommandLine commandLine) {
super(source, cliManager, commandLine);
}
@Override
public Optional<String> alternatePomFile() {
if (commandLine.hasOption(CLIManager.ALTERNATE_POM_FILE)) {
return Optional.of(commandLine.getOptionValue(CLIManager.ALTERNATE_POM_FILE));
}
return Optional.empty();
}
@Override
public Optional<Boolean> nonRecursive() {
if (commandLine.hasOption(CLIManager.NON_RECURSIVE)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> updateSnapshots() {
if (commandLine.hasOption(CLIManager.UPDATE_SNAPSHOTS)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<List<String>> activatedProfiles() {
if (commandLine.hasOption(CLIManager.ACTIVATE_PROFILES)) {
return Optional.of(Arrays.asList(commandLine.getOptionValues(CLIManager.ACTIVATE_PROFILES)));
}
return Optional.empty();
}
@Override
public Optional<Boolean> suppressSnapshotUpdates() {
if (commandLine.hasOption(CLIManager.SUPPRESS_SNAPSHOT_UPDATES)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> strictChecksums() {
if (commandLine.hasOption(CLIManager.CHECKSUM_FAILURE_POLICY)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> relaxedChecksums() {
if (commandLine.hasOption(CLIManager.CHECKSUM_WARNING_POLICY)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> failFast() {
if (commandLine.hasOption(CLIManager.FAIL_FAST)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> failAtEnd() {
if (commandLine.hasOption(CLIManager.FAIL_AT_END)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> failNever() {
if (commandLine.hasOption(CLIManager.FAIL_NEVER)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> resume() {
if (commandLine.hasOption(CLIManager.RESUME)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<String> resumeFrom() {
if (commandLine.hasOption(CLIManager.RESUME_FROM)) {
return Optional.of(commandLine.getOptionValue(CLIManager.RESUME_FROM));
}
return Optional.empty();
}
@Override
public Optional<List<String>> projects() {
if (commandLine.hasOption(CLIManager.PROJECT_LIST)) {
return Optional.of(Arrays.asList(commandLine.getOptionValues(CLIManager.PROJECT_LIST)));
}
return Optional.empty();
}
@Override
public Optional<Boolean> alsoMake() {
if (commandLine.hasOption(CLIManager.ALSO_MAKE)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> alsoMakeDependents() {
if (commandLine.hasOption(CLIManager.ALSO_MAKE_DEPENDENTS)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<String> threads() {
if (commandLine.hasOption(CLIManager.THREADS)) {
return Optional.of(commandLine.getOptionValue(CLIManager.THREADS));
}
return Optional.empty();
}
@Override
public Optional<String> builder() {
if (commandLine.hasOption(CLIManager.BUILDER)) {
return Optional.of(commandLine.getOptionValue(CLIManager.BUILDER));
}
return Optional.empty();
}
@Override
public Optional<Boolean> noTransferProgress() {
if (commandLine.hasOption(CLIManager.NO_TRANSFER_PROGRESS)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<Boolean> cacheArtifactNotFound() {
if (commandLine.hasOption(CLIManager.CACHE_ARTIFACT_NOT_FOUND)) {
return Optional.of(Boolean.parseBoolean(commandLine.getOptionValue(CLIManager.CACHE_ARTIFACT_NOT_FOUND)));
}
return Optional.empty();
}
@Override
public Optional<Boolean> strictArtifactDescriptorPolicy() {
if (commandLine.hasOption(CLIManager.STRICT_ARTIFACT_DESCRIPTOR_POLICY)) {
return Optional.of(
Boolean.parseBoolean(commandLine.getOptionValue(CLIManager.STRICT_ARTIFACT_DESCRIPTOR_POLICY)));
}
return Optional.empty();
}
@Override
public Optional<Boolean> ignoreTransitiveRepositories() {
if (commandLine.hasOption(CLIManager.IGNORE_TRANSITIVE_REPOSITORIES)) {
return Optional.of(Boolean.TRUE);
}
return Optional.empty();
}
@Override
public Optional<String> atFile() {
if (commandLine.hasOption(CLIManager.AT_FILE)) {
return Optional.of(commandLine.getOptionValue(CLIManager.AT_FILE));
}
return Optional.empty();
}
@Override
public Optional<List<String>> goals() {
if (!commandLine.getArgList().isEmpty()) {
return Optional.of(commandLine.getArgList());
}
return Optional.empty();
}
@Override
protected CommonsCliMavenOptions copy(
String source, CommonsCliOptions.CLIManager cliManager, CommandLine commandLine) {
return new CommonsCliMavenOptions(source, (CLIManager) cliManager, commandLine);
}
protected static | CommonsCliMavenOptions |
java | apache__flink | flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/example/WordCount.java | {
"start": 5406,
"end": 5752
} | class ____
extends RichMapFunction<Tuple2<String, Integer>, Tuple2<Text, IntWritable>> {
@Override
public Tuple2<Text, IntWritable> map(Tuple2<String, Integer> value) throws Exception {
return new Tuple2<Text, IntWritable>(new Text(value.f0), new IntWritable(value.f1));
}
}
}
| HadoopDatatypeMapper |
java | playframework__playframework | dev-mode/sbt-plugin/src/sbt-test/play-sbt-plugin/routes-compiler-routes-compilation-java/app/utils/JavaScriptRouterGenerator.java | {
"start": 505,
"end": 8649
} | class ____ {
public static void main(String[] args) throws IOException {
var routes = new ArrayList<JavaScriptReverseRoute>();
routes.addAll(applicationControllerRoutes());
routes.addAll(methodControllerRoutes());
routes.addAll(assetsControllerRoutes());
routes.addAll(booleanControllerRoutes());
routes.addAll(characterControllerRoutes());
routes.addAll(stringControllerRoutes());
routes.addAll(shortControllerRoutes());
routes.addAll(integerControllerRoutes());
routes.addAll(longControllerRoutes());
routes.addAll(optionalControllerRoutes());
routes.addAll(doubleControllerRoutes());
routes.addAll(floatControllerRoutes());
routes.addAll(uuidControllerRoutes());
routes.addAll(userControllerRoutes());
var jsFile =
JavaScriptReverseRouter.apply("jsRoutes", Option.empty(), "localhost", Scala.toSeq(routes))
.body();
// Add module exports for node
var jsModule = jsFile + "\nmodule.exports = jsRoutes";
var path = Paths.get(args[0]);
Files.createDirectories(path.getParent());
Files.writeString(path, jsModule);
}
private static List<JavaScriptReverseRoute> booleanControllerRoutes() {
return List.of(
BooleanController.path(),
BooleanController.query(),
BooleanController.queryDefault(),
BooleanController.queryFixed(),
BooleanController.queryNullable(),
BooleanController.queryOptional(),
BooleanController.queryOptionalDefault(),
BooleanController.queryList(),
BooleanController.queryListDefault(),
BooleanController.queryListNullable(),
BooleanController.queryListOptional(),
BooleanController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> characterControllerRoutes() {
return List.of(
CharacterController.path(),
CharacterController.query(),
CharacterController.queryDefault(),
CharacterController.queryFixed(),
CharacterController.queryNullable(),
CharacterController.queryOptional(),
CharacterController.queryOptionalDefault(),
CharacterController.queryList(),
CharacterController.queryListDefault(),
CharacterController.queryListNullable(),
CharacterController.queryListOptional(),
CharacterController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> stringControllerRoutes() {
return List.of(
StringController.path(),
StringController.query(),
StringController.queryDefault(),
StringController.queryFixed(),
StringController.queryNullable(),
StringController.queryOptional(),
StringController.queryOptionalDefault(),
StringController.queryList(),
StringController.queryListDefault(),
StringController.queryListNullable(),
StringController.queryListOptional(),
StringController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> shortControllerRoutes() {
return List.of(
ShortController.path(),
ShortController.query(),
ShortController.queryDefault(),
ShortController.queryFixed(),
ShortController.queryNullable(),
ShortController.queryOptional(),
ShortController.queryOptionalDefault(),
ShortController.queryList(),
ShortController.queryListDefault(),
ShortController.queryListNullable(),
ShortController.queryListOptional(),
ShortController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> integerControllerRoutes() {
return List.of(
IntegerController.path(),
IntegerController.query(),
IntegerController.queryDefault(),
IntegerController.queryFixed(),
IntegerController.queryNullable(),
IntegerController.queryOptional(),
IntegerController.queryOptionalDefault(),
IntegerController.queryList(),
IntegerController.queryListDefault(),
IntegerController.queryListNullable(),
IntegerController.queryListOptional(),
IntegerController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> longControllerRoutes() {
return List.of(
LongController.path(),
LongController.query(),
LongController.queryDefault(),
LongController.queryFixed(),
LongController.queryNullable(),
LongController.queryOptional(),
LongController.queryOptionalDefault(),
LongController.queryList(),
LongController.queryListDefault(),
LongController.queryListNullable(),
LongController.queryListOptional(),
LongController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> doubleControllerRoutes() {
return List.of(
DoubleController.path(),
DoubleController.query(),
DoubleController.queryDefault(),
DoubleController.queryFixed(),
DoubleController.queryNullable(),
DoubleController.queryOptional(),
DoubleController.queryOptionalDefault(),
DoubleController.queryList(),
DoubleController.queryListDefault(),
DoubleController.queryListNullable(),
DoubleController.queryListOptional(),
DoubleController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> floatControllerRoutes() {
return List.of(
FloatController.path(),
FloatController.query(),
FloatController.queryDefault(),
FloatController.queryFixed(),
FloatController.queryNullable(),
FloatController.queryOptional(),
FloatController.queryOptionalDefault(),
FloatController.queryList(),
FloatController.queryListDefault(),
FloatController.queryListNullable(),
FloatController.queryListOptional(),
FloatController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> uuidControllerRoutes() {
return List.of(
UUIDController.path(),
UUIDController.query(),
UUIDController.queryDefault(),
UUIDController.queryFixed(),
UUIDController.queryNullable(),
UUIDController.queryOptional(),
UUIDController.queryOptionalDefault(),
UUIDController.queryList(),
UUIDController.queryListDefault(),
UUIDController.queryListNullable(),
UUIDController.queryListOptional(),
UUIDController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> userControllerRoutes() {
return List.of(
UserController.path(),
UserController.query(),
UserController.queryDefault(),
UserController.queryFixed(),
UserController.queryNullable(),
UserController.queryOptional(),
UserController.queryOptionalDefault(),
UserController.queryList(),
UserController.queryListDefault(),
UserController.queryListNullable(),
UserController.queryListOptional(),
UserController.queryListOptionalDefault());
}
private static List<JavaScriptReverseRoute> optionalControllerRoutes() {
return List.of(
OptionalController.queryInt(),
OptionalController.queryIntDefault(),
OptionalController.queryLong(),
OptionalController.queryLongDefault(),
OptionalController.queryDouble(),
OptionalController.queryDoubleDefault());
}
private static List<JavaScriptReverseRoute> methodControllerRoutes() {
return List.of(
MethodController.get(),
MethodController.post(),
MethodController.put(),
MethodController.patch(),
MethodController.delete(),
MethodController.head(),
MethodController.options());
}
private static List<JavaScriptReverseRoute> assetsControllerRoutes() {
return List.of(Assets.versioned());
}
private static List<JavaScriptReverseRoute> applicationControllerRoutes() {
return List.of(Application.async(), Application.reverse());
}
}
| JavaScriptRouterGenerator |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/http/HttpConfigTests.java | {
"start": 2573,
"end": 5944
} | class ____ {
private static final String CONFIG_LOCATION_PREFIX = "classpath:org/springframework/security/config/http/HttpConfigTests";
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
MockMvc mvc;
@Test
public void getWhenUsingMinimalConfigurationThenRedirectsToLogin() throws Exception {
this.spring.configLocations(this.xml("Minimal")).autowire();
// @formatter:off
this.mvc.perform(get("/"))
.andExpect(status().isFound())
.andExpect(redirectedUrl("/login"));
// @formatter:on
}
@Test
public void getWhenUsingMinimalAuthorizationManagerThenRedirectsToLogin() throws Exception {
this.spring.configLocations(this.xml("MinimalAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/"))
.andExpect(status().isFound())
.andExpect(redirectedUrl("/login"));
// @formatter:on
}
@Test
public void getWhenUsingAuthorizationManagerThenRedirectsToLogin() throws Exception {
this.spring.configLocations(this.xml("AuthorizationManager")).autowire();
AuthorizationManager<HttpServletRequest> authorizationManager = this.spring.getContext()
.getBean(AuthorizationManager.class);
given(authorizationManager.authorize(any(), any())).willReturn(new AuthorizationDecision(false));
// @formatter:off
this.mvc.perform(get("/"))
.andExpect(status().isFound())
.andExpect(redirectedUrl("/login"));
// @formatter:on
verify(authorizationManager).authorize(any(), any());
}
@Test
public void getWhenUsingMinimalConfigurationThenPreventsSessionAsUrlParameter() throws Exception {
this.spring.configLocations(this.xml("Minimal")).autowire();
MockHttpServletRequest request = new MockHttpServletRequest("GET", "/");
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChainProxy proxy = this.spring.getContext().getBean(FilterChainProxy.class);
proxy.doFilter(request, new EncodeUrlDenyingHttpServletResponseWrapper(response), (req, resp) -> {
});
assertThat(response.getStatus()).isEqualTo(HttpStatus.SC_MOVED_TEMPORARILY);
assertThat(response.getRedirectedUrl()).isEqualTo("/login");
}
@Test
public void getWhenUsingObservationRegistryThenObservesRequest() throws Exception {
this.spring.configLocations(this.xml("WithObservationRegistry")).autowire();
// @formatter:off
this.mvc.perform(get("/").with(httpBasic("user", "password")))
.andExpect(status().isNotFound());
// @formatter:on
ObservationHandler<Observation.Context> handler = this.spring.getContext().getBean(ObservationHandler.class);
ArgumentCaptor<Observation.Context> captor = ArgumentCaptor.forClass(Observation.Context.class);
verify(handler, times(5)).onStart(captor.capture());
Iterator<Observation.Context> contexts = captor.getAllValues().iterator();
assertThat(contexts.next().getContextualName()).isEqualTo("security filterchain before");
assertThat(contexts.next().getName()).isEqualTo("spring.security.authentications");
assertThat(contexts.next().getName()).isEqualTo("spring.security.authorizations");
assertThat(contexts.next().getName()).isEqualTo("spring.security.http.secured.requests");
assertThat(contexts.next().getContextualName()).isEqualTo("security filterchain after");
}
private String xml(String configName) {
return CONFIG_LOCATION_PREFIX + "-" + configName + ".xml";
}
private static | HttpConfigTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.