language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/reseasy/reactive/ResteasyReactiveProcessorNoClientFilterTest.java
|
{
"start": 1528,
"end": 1793
}
|
interface ____ {
@GET
String hello();
}
@Test
public void testSimpleSubresourceWithNoClientImportsOnClassLevel() {
given().when().get("/test/subresource")
.then()
.statusCode(200);
}
}
|
TestClient
|
java
|
apache__camel
|
core/camel-util/src/test/java/org/apache/camel/util/OrderedPropertiesTest.java
|
{
"start": 997,
"end": 2633
}
|
class ____ {
@Test
public void testOrdered() {
Properties prop = new OrderedProperties();
prop.setProperty("c", "CCC");
prop.setProperty("d", "DDD");
prop.setProperty("e", "EEE");
prop.setProperty("b", "BBB");
prop.setProperty("a", "AAA");
assertEquals(5, prop.size());
Iterator it = prop.keySet().iterator();
assertEquals("c", it.next());
assertEquals("d", it.next());
assertEquals("e", it.next());
assertEquals("b", it.next());
assertEquals("a", it.next());
it = prop.values().iterator();
assertEquals("CCC", it.next());
assertEquals("DDD", it.next());
assertEquals("EEE", it.next());
assertEquals("BBB", it.next());
assertEquals("AAA", it.next());
}
@Test
public void testOrderedLoad() throws Exception {
Properties prop = new OrderedProperties();
prop.load(OrderedPropertiesTest.class.getResourceAsStream("/application.properties"));
assertEquals(4, prop.size());
Iterator it = prop.keySet().iterator();
assertEquals("hello", it.next());
assertEquals("camel.component.seda.concurrent-consumers", it.next());
assertEquals("camel.component.seda.queueSize", it.next());
assertEquals("camel.component.direct.timeout", it.next());
// should be ordered values
it = prop.values().iterator();
assertEquals("World", it.next());
assertEquals("2", it.next());
assertEquals("500", it.next());
assertEquals("1234", it.next());
}
}
|
OrderedPropertiesTest
|
java
|
google__gson
|
gson/src/main/java/com/google/gson/internal/bind/MapTypeAdapterFactory.java
|
{
"start": 5849,
"end": 9675
}
|
class ____<K, V> extends TypeAdapter<Map<K, V>> {
private final TypeAdapter<K> keyTypeAdapter;
private final TypeAdapter<V> valueTypeAdapter;
private final ObjectConstructor<? extends Map<K, V>> constructor;
Adapter(
TypeAdapter<K> keyTypeAdapter,
TypeAdapter<V> valueTypeAdapter,
ObjectConstructor<? extends Map<K, V>> constructor) {
this.keyTypeAdapter = keyTypeAdapter;
this.valueTypeAdapter = valueTypeAdapter;
this.constructor = constructor;
}
@Override
public Map<K, V> read(JsonReader in) throws IOException {
JsonToken peek = in.peek();
if (peek == JsonToken.NULL) {
in.nextNull();
return null;
}
Map<K, V> map = constructor.construct();
if (peek == JsonToken.BEGIN_ARRAY) {
in.beginArray();
while (in.hasNext()) {
in.beginArray(); // entry array
K key = keyTypeAdapter.read(in);
V value = valueTypeAdapter.read(in);
V replaced = map.put(key, value);
if (replaced != null) {
throw new JsonSyntaxException("duplicate key: " + key);
}
in.endArray();
}
in.endArray();
} else {
in.beginObject();
while (in.hasNext()) {
JsonReaderInternalAccess.INSTANCE.promoteNameToValue(in);
K key = keyTypeAdapter.read(in);
V value = valueTypeAdapter.read(in);
V replaced = map.put(key, value);
if (replaced != null) {
throw new JsonSyntaxException("duplicate key: " + key);
}
}
in.endObject();
}
return map;
}
@Override
public void write(JsonWriter out, Map<K, V> map) throws IOException {
if (map == null) {
out.nullValue();
return;
}
if (!complexMapKeySerialization) {
out.beginObject();
for (Map.Entry<K, V> entry : map.entrySet()) {
out.name(String.valueOf(entry.getKey()));
valueTypeAdapter.write(out, entry.getValue());
}
out.endObject();
return;
}
boolean hasComplexKeys = false;
List<JsonElement> keys = new ArrayList<>(map.size());
List<V> values = new ArrayList<>(map.size());
for (Map.Entry<K, V> entry : map.entrySet()) {
JsonElement keyElement = keyTypeAdapter.toJsonTree(entry.getKey());
keys.add(keyElement);
values.add(entry.getValue());
hasComplexKeys |= keyElement.isJsonArray() || keyElement.isJsonObject();
}
if (hasComplexKeys) {
out.beginArray();
for (int i = 0, size = keys.size(); i < size; i++) {
out.beginArray(); // entry array
Streams.write(keys.get(i), out);
valueTypeAdapter.write(out, values.get(i));
out.endArray();
}
out.endArray();
} else {
out.beginObject();
for (int i = 0, size = keys.size(); i < size; i++) {
JsonElement keyElement = keys.get(i);
out.name(keyToString(keyElement));
valueTypeAdapter.write(out, values.get(i));
}
out.endObject();
}
}
private String keyToString(JsonElement keyElement) {
if (keyElement.isJsonPrimitive()) {
JsonPrimitive primitive = keyElement.getAsJsonPrimitive();
if (primitive.isNumber()) {
return String.valueOf(primitive.getAsNumber());
} else if (primitive.isBoolean()) {
return Boolean.toString(primitive.getAsBoolean());
} else if (primitive.isString()) {
return primitive.getAsString();
} else {
throw new AssertionError();
}
} else if (keyElement.isJsonNull()) {
return "null";
} else {
throw new AssertionError();
}
}
}
}
|
Adapter
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/scanner/ErrorProneInjectorTest.java
|
{
"start": 1027,
"end": 2578
}
|
class ____ {
@Test
public void retrievesPredefinedInstance() {
var injector = ErrorProneInjector.create().addBinding(Integer.class, 2);
assertThat(injector.getInstance(Integer.class))
.isSameInstanceAs(injector.getInstance(Integer.class));
}
@Test
public void noConstructor_injectable() {
var injector = ErrorProneInjector.create();
var unused = injector.getInstance(NoConstructor.class);
}
@Test
public void injectConstructor_injectable() {
var injector = ErrorProneInjector.create();
var unused = injector.getInstance(InjectConstructor.class);
}
@Test
public void bothConstructors_injectable() {
var injector = ErrorProneInjector.create().addBinding(Integer.class, 2);
var obj = injector.getInstance(InjectConstructorAndZeroArgConstructor.class);
assertThat(obj.x).isEqualTo(2);
}
@Test
public void errorProneFlags_favouredOverZeroArg() {
var injector =
ErrorProneInjector.create().addBinding(ErrorProneFlags.class, ErrorProneFlags.empty());
var obj = injector.getInstance(ErrorProneFlagsAndZeroArgsConstructor.class);
assertThat(obj.x).isEqualTo(1);
}
@Test
public void pathInError() {
var injector = ErrorProneInjector.create();
var e =
assertThrows(
ProvisionException.class,
() -> injector.getInstance(InjectConstructorAndZeroArgConstructor.class));
assertThat(e).hasMessageThat().contains("Integer <- InjectConstructorAndZeroArgConstructor");
}
public static final
|
ErrorProneInjectorTest
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/resume/Deserializable.java
|
{
"start": 895,
"end": 2969
}
|
interface ____ {
/**
* Deserializes an arbitrary resumable object within a byte buffer
*
* @param buffer the buffer containing the object
* @return the deserialized object
*/
default Object deserializeObject(ByteBuffer buffer) {
buffer.clear();
int dataType = buffer.getInt();
switch (dataType) {
case Serializable.TYPE_INTEGER: {
return buffer.getInt();
}
case Serializable.TYPE_LONG: {
return buffer.getLong();
}
case Serializable.TYPE_STRING: {
int remaining = buffer.remaining();
byte[] tmp = new byte[remaining];
buffer.get(tmp);
return new String(tmp);
}
case Serializable.TYPE_FILE: {
int remaining = buffer.remaining();
byte[] tmp = new byte[remaining];
buffer.get(tmp);
return new File(new String(tmp));
}
default: {
return null;
}
}
}
/**
* Deserializes the key data
*
* @param keyBuffer the buffer containing the key data
* @return the deserialized object
*/
default Object deserializeKey(ByteBuffer keyBuffer) {
return deserializeObject(keyBuffer);
}
/**
* Deserializes the value of resumable data
*
* @param valueBuffer the buffer containing the value data
* @return the deserialized object
*/
default Object deserializeValue(ByteBuffer valueBuffer) {
return deserializeObject(valueBuffer);
}
/**
* Deserializes resume data (invalid data may be ignored)
*
* @param keyBuffer the buffer containing the key data
* @param valueBuffer the buffer containing the value data
* @return true if successfully deserialized or false otherwise
*/
boolean deserialize(ByteBuffer keyBuffer, ByteBuffer valueBuffer);
}
|
Deserializable
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/JdkObsoleteTest.java
|
{
"start": 5048,
"end": 5320
}
|
class ____ {
String f() {
StringBuffer sb = new StringBuffer();
return sb.append(42).toString();
}
}
""")
.addOutputLines(
"out/Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
|
{
"start": 21819,
"end": 22763
}
|
class ____ implements FilterScript.LeafFactory {
private final Function<Map<String, Object>, Object> script;
private final Map<String, Object> vars;
private final SearchLookup lookup;
public MockFilterScript(SearchLookup lookup, Map<String, Object> vars, Function<Map<String, Object>, Object> script) {
this.lookup = lookup;
this.vars = vars;
this.script = script;
}
public FilterScript newInstance(DocReader docReader) throws IOException {
Map<String, Object> ctx = new HashMap<>(docReader.docAsMap());
if (vars != null) {
ctx.putAll(vars);
}
return new FilterScript(ctx, lookup, docReader) {
@Override
public boolean execute() {
return (boolean) script.apply(ctx);
}
};
}
}
public
|
MockFilterScript
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_3000/Issue3060.java
|
{
"start": 541,
"end": 580
}
|
enum ____ {
Small, Big
}
}
|
Type
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/validator/ValidatorIncludeEncodingRouteTest.java
|
{
"start": 1214,
"end": 3077
}
|
class ____ extends ContextTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(ValidatorIncludeEncodingRouteTest.class);
protected MockEndpoint validEndpoint;
protected MockEndpoint finallyEndpoint;
protected MockEndpoint invalidEndpoint;
@Test
public void testValidMessage() throws Exception {
validEndpoint.expectedMessageCount(1);
finallyEndpoint.expectedMessageCount(1);
String body = "<t:text xmlns:t=\"org.text\">\n" + " <t:sentence>J'aime les cam\u00E9lid\u00E9s</t:sentence>\n"
+ "</t:text>";
template.sendBody("direct:start", body);
MockEndpoint.assertIsSatisfied(validEndpoint, invalidEndpoint, finallyEndpoint);
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
validEndpoint = resolveMandatoryEndpoint("mock:valid", MockEndpoint.class);
invalidEndpoint = resolveMandatoryEndpoint("mock:invalid", MockEndpoint.class);
finallyEndpoint = resolveMandatoryEndpoint("mock:finally", MockEndpoint.class);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").doTry().to("validator:org/apache/camel/component/validator/text.xsd").to("mock:valid")
.doCatch(NumberFormatException.class)
.process(new Processor() {
@Override
public void process(Exchange exchange) {
LOG.error("helo", exchange.getException());
}
}).to("mock:invalid").doFinally().to("mock:finally").end();
}
};
}
}
|
ValidatorIncludeEncodingRouteTest
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/utils/CIDRUtils.java
|
{
"start": 1582,
"end": 4842
}
|
class ____ {
private final String cidr;
private InetAddress inetAddress;
private InetAddress startAddress;
private InetAddress endAddress;
private final int prefixLength;
public CIDRUtils(String cidr) throws UnknownHostException {
this.cidr = cidr;
/* split CIDR to address and prefix part */
if (this.cidr.contains("/")) {
int index = this.cidr.indexOf("/");
String addressPart = this.cidr.substring(0, index);
String networkPart = this.cidr.substring(index + 1);
inetAddress = InetAddress.getByName(addressPart);
prefixLength = Integer.parseInt(networkPart);
calculate();
} else {
throw new IllegalArgumentException("not an valid CIDR format!");
}
}
private void calculate() throws UnknownHostException {
ByteBuffer maskBuffer;
int targetSize;
if (inetAddress.getAddress().length == 4) {
maskBuffer =
ByteBuffer
.allocate(4)
.putInt(-1);
targetSize = 4;
} else {
maskBuffer = ByteBuffer.allocate(16)
.putLong(-1L)
.putLong(-1L);
targetSize = 16;
}
BigInteger mask = (new BigInteger(1, maskBuffer.array())).not().shiftRight(prefixLength);
ByteBuffer buffer = ByteBuffer.wrap(inetAddress.getAddress());
BigInteger ipVal = new BigInteger(1, buffer.array());
BigInteger startIp = ipVal.and(mask);
BigInteger endIp = startIp.add(mask.not());
byte[] startIpArr = toBytes(startIp.toByteArray(), targetSize);
byte[] endIpArr = toBytes(endIp.toByteArray(), targetSize);
this.startAddress = InetAddress.getByAddress(startIpArr);
this.endAddress = InetAddress.getByAddress(endIpArr);
}
private byte[] toBytes(byte[] array, int targetSize) {
int counter = 0;
List<Byte> newArr = new ArrayList<>();
while (counter < targetSize && (array.length - 1 - counter >= 0)) {
newArr.add(0, array[array.length - 1 - counter]);
counter++;
}
int size = newArr.size();
for (int i = 0; i < (targetSize - size); i++) {
newArr.add(0, (byte) 0);
}
byte[] ret = new byte[newArr.size()];
for (int i = 0; i < newArr.size(); i++) {
ret[i] = newArr.get(i);
}
return ret;
}
public String getNetworkAddress() {
return this.startAddress.getHostAddress();
}
public String getBroadcastAddress() {
return this.endAddress.getHostAddress();
}
public boolean isInRange(String ipAddress) throws UnknownHostException {
InetAddress address = InetAddress.getByName(ipAddress);
BigInteger start = new BigInteger(1, this.startAddress.getAddress());
BigInteger end = new BigInteger(1, this.endAddress.getAddress());
BigInteger target = new BigInteger(1, address.getAddress());
int st = start.compareTo(target);
int te = target.compareTo(end);
return (st == -1 || st == 0) && (te == -1 || te == 0);
}
}
|
CIDRUtils
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/fielddata/MultiGeoPointValues.java
|
{
"start": 1163,
"end": 1862
}
|
class ____ extends MultiPointValues<GeoPoint> {
private final GeoPoint point = new GeoPoint();
public MultiGeoPointValues(SortedNumericLongValues numericValues) {
super(numericValues);
}
@Override
public GeoPoint nextValue() throws IOException {
return point.resetFromEncoded(numericValues.nextValue());
}
/**
* Returns a single-valued view of the {@link MultiPointValues} if possible, otherwise null.
*/
protected GeoPointValues getPointValues() {
final LongValues singleton = SortedNumericLongValues.unwrapSingleton(numericValues);
return singleton != null ? new GeoPointValues(singleton) : null;
}
}
|
MultiGeoPointValues
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_withinPercentage_Test.java
|
{
"start": 813,
"end": 1140
}
|
class ____ {
@Test
void should_create_double() {
assertThat(withinPercentage(1d)).isNotNull();
}
@Test
void should_create_integer() {
assertThat(withinPercentage(1)).isNotNull();
}
@Test
void should_create_long() {
assertThat(withinPercentage(1L)).isNotNull();
}
}
|
Assertions_withinPercentage_Test
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/AsyncForEachRun.java
|
{
"start": 1702,
"end": 2556
}
|
class ____ the AsyncRun interface, which means it can be used
* in asynchronous task chains. It maintains an Iterator of elements to
* process, an asyncDoOnce to apply to each element.</p>
*
* <p>The run method initiates the asynchronous operation, and the doOnce
* method recursively applies the asyncDoOnce to each element and handles
* the results. If the shouldBreak flag is set, the operation is completed
* with the current result.</p>
*
* <p>AsyncForEachRun is used to implement the following semantics:</p>
* <pre>
* {@code
* for (I element : elements) {
* R result = asyncDoOnce(element);
* }
* return result;
* }
* </pre>
*
* @param <I> the type of the elements being iterated over
* @param <R> the type of the final result after applying the thenApply function
* @see AsyncRun
* @see AsyncBiFunction
*/
public
|
implements
|
java
|
playframework__playframework
|
persistence/play-java-jdbc/src/main/java/play/db/Databases.java
|
{
"start": 401,
"end": 538
}
|
interface ____ be declared final.
// Also, that should
// clarify why the class' constructor is private: we really don't want this
|
cannot
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/StateAssignmentOperationTest.java
|
{
"start": 63639,
"end": 79940
}
|
class ____ {
private final OperatorID operatorID;
private final int parallelism;
public OperatorID getOperatorID() {
return operatorID;
}
public int getParallelism() {
return parallelism;
}
public OperatorIdWithParallelism(OperatorID operatorID, int parallelism) {
this.operatorID = operatorID;
this.parallelism = parallelism;
}
}
private Map<OperatorID, ExecutionJobVertex> buildVertices(
List<OperatorID> operatorIds,
int parallelisms,
SubtaskStateMapper downstreamRescaler,
SubtaskStateMapper upstreamRescaler)
throws JobException, JobExecutionException {
List<OperatorIdWithParallelism> opIdsWithParallelism =
operatorIds.stream()
.map(operatorID -> new OperatorIdWithParallelism(operatorID, parallelisms))
.collect(Collectors.toList());
return buildVertices(opIdsWithParallelism, downstreamRescaler, upstreamRescaler);
}
private Map<OperatorID, ExecutionJobVertex> buildVertices(
List<OperatorIdWithParallelism> operatorIdsAndParallelism,
SubtaskStateMapper downstreamRescaler,
SubtaskStateMapper upstreamRescaler)
throws JobException, JobExecutionException {
final JobVertex[] jobVertices =
operatorIdsAndParallelism.stream()
.map(
idWithParallelism ->
createJobVertex(
idWithParallelism.getOperatorID(),
idWithParallelism.getOperatorID(),
idWithParallelism.getParallelism()))
.toArray(JobVertex[]::new);
for (int index = 1; index < jobVertices.length; index++) {
connectVertices(
jobVertices[index - 1],
jobVertices[index],
upstreamRescaler,
downstreamRescaler);
}
return toExecutionVertices(jobVertices);
}
private Map<OperatorID, ExecutionJobVertex> toExecutionVertices(JobVertex... jobVertices)
throws JobException, JobExecutionException {
JobGraph jobGraph = JobGraphTestUtils.streamingJobGraph(jobVertices);
ExecutionGraph eg =
TestingDefaultExecutionGraphBuilder.newBuilder()
.setJobGraph(jobGraph)
.build(EXECUTOR_EXTENSION.getExecutor());
return Arrays.stream(jobVertices)
.collect(
Collectors.toMap(
jobVertex ->
jobVertex.getOperatorIDs().get(0).getGeneratedOperatorID(),
jobVertex -> {
try {
return eg.getJobVertex(jobVertex.getID());
} catch (Exception e) {
throw new RuntimeException(e);
}
}));
}
private void connectVertices(
JobVertex upstream,
JobVertex downstream,
SubtaskStateMapper upstreamRescaler,
SubtaskStateMapper downstreamRescaler) {
final JobEdge jobEdge =
connectNewDataSetAsInput(
downstream,
upstream,
DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED);
jobEdge.setDownstreamSubtaskStateMapper(downstreamRescaler);
jobEdge.setUpstreamSubtaskStateMapper(upstreamRescaler);
}
private ExecutionJobVertex buildExecutionJobVertex(
OperatorID operatorID, OperatorID userDefinedOperatorId, int parallelism) {
try {
JobVertex jobVertex = createJobVertex(operatorID, userDefinedOperatorId, parallelism);
return ExecutionGraphTestUtils.getExecutionJobVertex(jobVertex);
} catch (Exception e) {
throw new AssertionError("Cannot create ExecutionJobVertex", e);
}
}
private JobVertex createJobVertex(OperatorID operatorID, int parallelism) {
return createJobVertex(operatorID, operatorID, parallelism);
}
private JobVertex createJobVertex(
OperatorID operatorID, OperatorID userDefinedOperatorId, int parallelism) {
JobVertex jobVertex =
new JobVertex(
operatorID.toHexString(),
new JobVertexID(),
singletonList(
OperatorIDPair.of(
operatorID,
userDefinedOperatorId,
"operatorName",
"operatorUid")));
jobVertex.setInvokableClass(NoOpInvokable.class);
jobVertex.setParallelism(parallelism);
return jobVertex;
}
private List<TaskStateSnapshot> getTaskStateSnapshotFromVertex(
ExecutionJobVertex executionJobVertex) {
return Arrays.stream(executionJobVertex.getTaskVertices())
.map(ExecutionVertex::getCurrentExecutionAttempt)
.map(Execution::getTaskRestore)
.map(JobManagerTaskRestore::getTaskStateSnapshot)
.collect(Collectors.toList());
}
private OperatorSubtaskState getAssignedState(
ExecutionJobVertex executionJobVertex, OperatorID operatorId, int subtaskIdx) {
return executionJobVertex
.getTaskVertices()[subtaskIdx]
.getCurrentExecutionAttempt()
.getTaskRestore()
.getTaskStateSnapshot()
.getSubtaskStateByOperatorID(operatorId);
}
@Test
void testMixedExchangesForwardAndHashNoStateOnForward()
throws JobException, JobExecutionException {
// Create topology: source -> (forward to map1, hash to map2)
JobVertex source = createJobVertex(new OperatorID(), 2);
JobVertex map1 = createJobVertex(new OperatorID(), 2);
JobVertex map2 = createJobVertex(new OperatorID(), 3);
List<OperatorID> operatorIds =
Stream.of(source, map1, map2)
.map(v -> v.getOperatorIDs().get(0).getGeneratedOperatorID())
.collect(Collectors.toList());
// Create state with output state only for hash exchange (to map2)
Map<OperatorID, OperatorState> states = new HashMap<>();
Random random = new Random();
// Source has output state only for partition 1 (hash exchange)
OperatorState sourceState = new OperatorState("", "", operatorIds.get(0), 2, MAX_P);
for (int i = 0; i < 2; i++) {
sourceState.putState(
i,
OperatorSubtaskState.builder()
.setResultSubpartitionState(
new StateObjectCollection<>(
Arrays.asList(
// No state for partition 0 (forward)
createNewResultSubpartitionStateHandle(
10, 1, random) // partition 1 (hash)
)))
.build());
}
states.put(operatorIds.get(0), sourceState);
// Map1 (forward) has no input state
OperatorState map1State = new OperatorState("", "", operatorIds.get(1), 2, MAX_P);
for (int i = 0; i < 2; i++) {
map1State.putState(i, OperatorSubtaskState.builder().build());
}
states.put(operatorIds.get(1), map1State);
// Map2 (hash) has input state
OperatorState map2State = new OperatorState("", "", operatorIds.get(2), 2, MAX_P);
for (int i = 0; i < 2; i++) {
map2State.putState(
i,
OperatorSubtaskState.builder()
.setInputChannelState(
new StateObjectCollection<>(
Arrays.asList(
createNewInputChannelStateHandle(
10, 0, random))))
.build());
}
states.put(operatorIds.get(2), map2State);
// Connect vertices
connectVertices(source, map1, RANGE, RANGE); // Forward-like connection
connectVertices(source, map2, ARBITRARY, RANGE); // Hash connection
Map<OperatorID, ExecutionJobVertex> vertices = toExecutionVertices(source, map1, map2);
// This should not throw UnsupportedOperationException
new StateAssignmentOperation(0, new HashSet<>(vertices.values()), states, false)
.assignStates();
// Verify state assignment succeeded
assertThat(getAssignedState(vertices.get(operatorIds.get(2)), operatorIds.get(2), 0))
.isNotNull();
}
@Test
void testMixedExchangesMultipleGatesWithPartialState()
throws JobException, JobExecutionException {
// Create topology with 3 upstreams connecting to 1 downstream
JobVertex upstream1 = createJobVertex(new OperatorID(), 2);
JobVertex upstream2 = createJobVertex(new OperatorID(), 2);
JobVertex upstream3 = createJobVertex(new OperatorID(), 2);
JobVertex downstream = createJobVertex(new OperatorID(), 2);
List<OperatorID> operatorIds =
Stream.of(upstream1, upstream2, upstream3, downstream)
.map(v -> v.getOperatorIDs().get(0).getGeneratedOperatorID())
.collect(Collectors.toList());
// Build state where only upstream2 has output state
Map<OperatorID, OperatorState> states = new HashMap<>();
Random random = new Random();
// Upstream1 - no output state
OperatorState upstream1State = new OperatorState("", "", operatorIds.get(0), 3, MAX_P);
for (int i = 0; i < 3; i++) {
upstream1State.putState(i, OperatorSubtaskState.builder().build());
}
states.put(operatorIds.get(0), upstream1State);
// Upstream2 - has output state
OperatorState upstream2State = new OperatorState("", "", operatorIds.get(1), 3, MAX_P);
for (int i = 0; i < 3; i++) {
upstream2State.putState(
i,
OperatorSubtaskState.builder()
.setResultSubpartitionState(
new StateObjectCollection<>(
Arrays.asList(
createNewResultSubpartitionStateHandle(
10, 0, random))))
.build());
}
states.put(operatorIds.get(1), upstream2State);
// Upstream3 - no output state
OperatorState upstream3State = new OperatorState("", "", operatorIds.get(2), 3, MAX_P);
for (int i = 0; i < 3; i++) {
upstream3State.putState(i, OperatorSubtaskState.builder().build());
}
states.put(operatorIds.get(2), upstream3State);
// Downstream - has input state only for gate 1 (from upstream2)
OperatorState downstreamState = new OperatorState("", "", operatorIds.get(3), 3, MAX_P);
for (int i = 0; i < 3; i++) {
downstreamState.putState(
i,
OperatorSubtaskState.builder()
.setInputChannelState(
new StateObjectCollection<>(
Arrays.asList(
createNewInputChannelStateHandle(
10, 1, random) // gate 1 only
)))
.build());
}
states.put(operatorIds.get(3), downstreamState);
// Connect all upstreams to downstream
connectVertices(upstream1, downstream, RANGE, RANGE); // gate 0
connectVertices(upstream2, downstream, ARBITRARY, RANGE); // gate 1
connectVertices(upstream3, downstream, ROUND_ROBIN, ROUND_ROBIN); // gate 2
Map<OperatorID, ExecutionJobVertex> vertices =
toExecutionVertices(upstream1, upstream2, upstream3, downstream);
// This should not throw UnsupportedOperationException
new StateAssignmentOperation(0, new HashSet<>(vertices.values()), states, false)
.assignStates();
// Verify downstream received state
OperatorSubtaskState downstreamAssignedState =
getAssignedState(vertices.get(operatorIds.get(3)), operatorIds.get(3), 0);
assertThat(downstreamAssignedState).isNotNull();
assertThat(downstreamAssignedState.getInputChannelState()).isNotEmpty();
}
@Test
void testMixedExchangesRescaleAndRebalanceNoStateOnRescale()
throws JobException, JobExecutionException {
// Create topology with mixed partitioner types
JobVertex source = createJobVertex(new OperatorID(), 4);
JobVertex sink = createJobVertex(new OperatorID(), 2);
List<OperatorID> operatorIds =
Stream.of(source, sink)
.map(v -> v.getOperatorIDs().get(0).getGeneratedOperatorID())
.collect(Collectors.toList());
// Create state - source has output state
Map<OperatorID, OperatorState> states = new HashMap<>();
Random random = new Random();
OperatorState sourceState = new OperatorState("", "", operatorIds.get(0), 4, MAX_P);
for (int i = 0; i < 4; i++) {
sourceState.putState(
i,
OperatorSubtaskState.builder()
.setResultSubpartitionState(
new StateObjectCollection<>(
Arrays.asList(
createNewResultSubpartitionStateHandle(
10, 0, random))))
.build());
}
states.put(operatorIds.get(0), sourceState);
// Sink has input state
OperatorState sinkState = new OperatorState("", "", operatorIds.get(1), 4, MAX_P);
for (int i = 0; i < 4; i++) {
sinkState.putState(
i,
OperatorSubtaskState.builder()
.setInputChannelState(
new StateObjectCollection<>(
Arrays.asList(
createNewInputChannelStateHandle(
10, 0, random))))
.build());
}
states.put(operatorIds.get(1), sinkState);
// Connect with RESCALE partitioner
connectVertices(source, sink, ROUND_ROBIN, ROUND_ROBIN);
Map<OperatorID, ExecutionJobVertex> vertices = toExecutionVertices(source, sink);
// This should succeed even with RESCALE partitioner when parallelism changes
new StateAssignmentOperation(0, new HashSet<>(vertices.values()), states, false)
.assignStates();
// Verify state was assigned
OperatorSubtaskState sinkAssignedState =
getAssignedState(vertices.get(operatorIds.get(1)), operatorIds.get(1), 0);
assertThat(sinkAssignedState).isNotNull();
}
}
|
OperatorIdWithParallelism
|
java
|
alibaba__nacos
|
core/src/test/java/com/alibaba/nacos/core/model/request/LookupUpdateRequestTest.java
|
{
"start": 762,
"end": 991
}
|
class ____ {
@Test
void test() {
LookupUpdateRequest request = new LookupUpdateRequest();
request.setType("type");
assertEquals("type", request.getType());
}
}
|
LookupUpdateRequestTest
|
java
|
quarkusio__quarkus
|
integration-tests/jpa-mariadb/src/main/java/io/quarkus/it/jpa/mariadb/OfflineDialectEndpoint.java
|
{
"start": 524,
"end": 1067
}
|
class ____ {
@Inject
@PersistenceUnit("offline")
SessionFactory sessionFactory;
@GET
public OfflineDialectDescriptor test() throws IOException {
return new OfflineDialectDescriptor(
(MariaDBDialect) sessionFactory.unwrap(SessionFactoryImplementor.class).getJdbcServices().getDialect(),
(String) sessionFactory.unwrap(SessionFactoryImplementor.class)
.getProperties()
.get(AvailableSettings.STORAGE_ENGINE));
}
}
|
OfflineDialectEndpoint
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/main/java/org/hibernate/spatial/dialect/oracle/OracleSpatial10gDialect.java
|
{
"start": 478,
"end": 640
}
|
class ____ extends OracleDialect implements SpatialDialect {
public OracleSpatial10gDialect() {
super( DatabaseVersion.make( 10 ) );
}
}
|
OracleSpatial10gDialect
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/tck2_2/caching/InheritedCacheableTest.java
|
{
"start": 1523,
"end": 4282
}
|
class ____ {
@Test
public void testMapping(SessionFactoryScope scope) {
assertThat(
scope.getSessionFactory().getMappingMetamodel().getEntityDescriptor( Person.class ).hasCache(),
CoreMatchers.is( true )
);
assertThat(
scope.getSessionFactory().getMappingMetamodel().getEntityDescriptor( Employee.class ).hasCache(),
CoreMatchers.is( true )
);
assertThat(
scope.getSessionFactory().getMappingMetamodel().getEntityDescriptor( Customer.class ).hasCache(),
CoreMatchers.is( false )
);
}
@Test
public void testOnlySubclassIsCached(SessionFactoryScope scope) {
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
scope.inTransaction(
s -> {
s.persist( new Employee( "1", "John Doe", "987", "engineering") );
s.persist( new Customer( "2", "Acme Corp", "123" ) );
}
);
assertTrue( scope.getSessionFactory().getCache().contains( Employee.class, "1" ) );
assertTrue( scope.getSessionFactory().getCache().contains( Person.class, "1" ) );
assertFalse( scope.getSessionFactory().getCache().contains( Customer.class, "2" ) );
assertFalse( scope.getSessionFactory().getCache().contains( Person.class, "2" ) );
scope.inTransaction(
s -> {
statistics.clear();
final Customer customer = s.find( Customer.class, "2" );
assertTrue( Hibernate.isInitialized( customer ) );
assertThat( statistics.getSecondLevelCacheHitCount(), CoreMatchers.is(0L) );
assertThat( statistics.getSecondLevelCachePutCount(), CoreMatchers.is(0L) );
statistics.clear();
final Employee emp = s.find( Employee.class, "1" );
assertTrue( Hibernate.isInitialized( emp ) );
assertThat( statistics.getSecondLevelCacheHitCount(), CoreMatchers.is(1L) );
assertThat( statistics.getSecondLevelCachePutCount(), CoreMatchers.is(0L) );
}
);
scope.inTransaction(
s -> {
statistics.clear();
final Person customer = s.find( Person.class, "2" );
assertTrue( Hibernate.isInitialized( customer ) );
assertThat( statistics.getSecondLevelCacheHitCount(), CoreMatchers.is(0L) );
assertThat( statistics.getSecondLevelCachePutCount(), CoreMatchers.is(0L) );
statistics.clear();
final Person emp = s.find( Person.class, "1" );
assertTrue( Hibernate.isInitialized( emp ) );
assertThat( statistics.getSecondLevelCacheHitCount(), CoreMatchers.is(1L) );
assertThat( statistics.getSecondLevelCachePutCount(), CoreMatchers.is(0L) );
}
);
}
@AfterEach
public void cleanupData(SessionFactoryScope scope) {
scope.dropData();
}
@Entity( name = "Person" )
@Table( name = "persons" )
@Cacheable()
@Inheritance( strategy = InheritanceType.SINGLE_TABLE )
public static
|
InheritedCacheableTest
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/Disabled.java
|
{
"start": 2791,
"end": 2861
}
|
class ____ test method is disabled.
*/
String value() default "";
}
|
or
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/aot/ReflectiveProcessorAotContributionBuilder.java
|
{
"start": 4197,
"end": 4708
}
|
class ____ implements BeanFactoryInitializationAotContribution {
private final Class<?>[] classes;
public AotContribution(Set<Class<?>> classes) {
this.classes = classes.toArray(Class<?>[]::new);
}
@Override
public void applyTo(GenerationContext generationContext, BeanFactoryInitializationCode beanFactoryInitializationCode) {
RuntimeHints runtimeHints = generationContext.getRuntimeHints();
registrar.registerRuntimeHints(runtimeHints, this.classes);
}
}
private static
|
AotContribution
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/EnabledIf.java
|
{
"start": 765,
"end": 921
}
|
class ____ test
* method is only <em>enabled</em> if the provided {@linkplain #value() condition}
* evaluates to {@code true}.
*
* <p>When applied at the
|
or
|
java
|
apache__camel
|
components/camel-azure/camel-azure-storage-queue/src/test/java/org/apache/camel/component/azure/storage/queue/integration/StorageQueueBase.java
|
{
"start": 1760,
"end": 3730
}
|
class ____ extends CamelTestSupport {
@RegisterExtension
public static AzureService service;
protected QueueServiceClient serviceClient;
protected String queueName;
protected QueueConfiguration configuration;
static {
initCredentials();
service = AzureStorageQueueServiceFactory.createService();
}
/*
* The previous behavior of the test code was such that if accessKey or accountName properties were
* set, the code would not start the azurite container and would execute against a remote environment.
* To avoid breaking tests for environments relying on this behavior, copy the old properties into the
* new and set the test as remote.
*/
private static void initCredentials() {
String accountName = System.getProperty("accountName");
String accessKey = System.getProperty("accessKey");
if (StringUtils.isNotEmpty(accountName) && StringUtils.isNotEmpty(accessKey)) {
System.setProperty(AzureConfigs.ACCOUNT_NAME, accountName);
System.setProperty(AzureConfigs.ACCOUNT_KEY, accessKey);
System.setProperty("azure.instance.type", "remote");
}
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.getRegistry().bind("serviceClient", serviceClient);
return context;
}
@BeforeAll
public void initProperties() {
queueName = RandomStringUtils.randomAlphabetic(5).toLowerCase();
configuration = new QueueConfiguration();
configuration.setCredentials(new StorageSharedKeyCredential(
service.azureCredentials().accountName(), service.azureCredentials().accountKey()));
configuration.setQueueName(queueName);
serviceClient = AzureStorageClientUtils.getClient();
serviceClient.getQueueClient(queueName).create();
}
}
|
StorageQueueBase
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/web/CacheSaml2AuthenticationRequestRepository.java
|
{
"start": 1626,
"end": 3391
}
|
class ____
implements Saml2AuthenticationRequestRepository<AbstractSaml2AuthenticationRequest> {
private Cache cache = new ConcurrentMapCache("authentication-requests");
@Override
public AbstractSaml2AuthenticationRequest loadAuthenticationRequest(HttpServletRequest request) {
String relayState = request.getParameter(Saml2ParameterNames.RELAY_STATE);
Assert.notNull(relayState, "relayState must not be null");
return this.cache.get(relayState, AbstractSaml2AuthenticationRequest.class);
}
@Override
public void saveAuthenticationRequest(AbstractSaml2AuthenticationRequest authenticationRequest,
HttpServletRequest request, HttpServletResponse response) {
String relayState = request.getParameter(Saml2ParameterNames.RELAY_STATE);
Assert.notNull(relayState, "relayState must not be null");
this.cache.put(relayState, authenticationRequest);
}
@Override
public AbstractSaml2AuthenticationRequest removeAuthenticationRequest(HttpServletRequest request,
HttpServletResponse response) {
String relayState = request.getParameter(Saml2ParameterNames.RELAY_STATE);
Assert.notNull(relayState, "relayState must not be null");
AbstractSaml2AuthenticationRequest authenticationRequest = this.cache.get(relayState,
AbstractSaml2AuthenticationRequest.class);
if (authenticationRequest == null) {
return null;
}
this.cache.evict(relayState);
return authenticationRequest;
}
/**
* Use this {@link Cache} instance. The default is an in-memory cache, which means it
* won't work in a clustered environment. Instead, replace it here with a distributed
* cache.
* @param cache the {@link Cache} instance to use
*/
public void setCache(Cache cache) {
this.cache = cache;
}
}
|
CacheSaml2AuthenticationRequestRepository
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/handler/GatewayServerResponse.java
|
{
"start": 1052,
"end": 7222
}
|
interface ____ extends ServerResponse {
void setStatusCode(HttpStatusCode statusCode);
// Static methods
/**
* Create a builder with the status code and headers of the given response.
* @param other the response to copy the status and headers from
* @return the created builder
*/
static ServerResponse.BodyBuilder from(ServerResponse other) {
return new GatewayServerResponseBuilder(other);
}
/**
* Create a {@code ServerResponse} from the given {@link ErrorResponse}.
* @param response the {@link ErrorResponse} to initialize from
* @return the built response
* @since 6.0
*/
static ServerResponse from(ErrorResponse response) {
return status(response.getStatusCode()).headers(headers -> headers.putAll(response.getHeaders()))
.body(response.getBody());
}
/**
* Create a builder with the given HTTP status.
* @param status the response status
* @return the created builder
*/
static ServerResponse.BodyBuilder status(HttpStatusCode status) {
return new GatewayServerResponseBuilder(status);
}
/**
* Create a builder with the given HTTP status.
* @param status the response status
* @return the created builder
*/
static ServerResponse.BodyBuilder status(int status) {
return new GatewayServerResponseBuilder(HttpStatusCode.valueOf(status));
}
/**
* Create a builder with the status set to {@linkplain HttpStatus#OK 200 OK}.
* @return the created builder
*/
static ServerResponse.BodyBuilder ok() {
return status(HttpStatus.OK);
}
/**
* Create a builder with a {@linkplain HttpStatus#CREATED 201 Created} status and a
* location header set to the given URI.
* @param location the location URI
* @return the created builder
*/
static ServerResponse.BodyBuilder created(URI location) {
ServerResponse.BodyBuilder builder = status(HttpStatus.CREATED);
return builder.location(location);
}
/**
* Create a builder with a {@linkplain HttpStatus#ACCEPTED 202 Accepted} status.
* @return the created builder
*/
static ServerResponse.BodyBuilder accepted() {
return status(HttpStatus.ACCEPTED);
}
/**
* Create a builder with a {@linkplain HttpStatus#NO_CONTENT 204 No Content} status.
* @return the created builder
*/
static ServerResponse.HeadersBuilder<?> noContent() {
return status(HttpStatus.NO_CONTENT);
}
/**
* Create a builder with a {@linkplain HttpStatus#SEE_OTHER 303 See Other} status and
* a location header set to the given URI.
* @param location the location URI
* @return the created builder
*/
static ServerResponse.BodyBuilder seeOther(URI location) {
ServerResponse.BodyBuilder builder = status(HttpStatus.SEE_OTHER);
return builder.location(location);
}
/**
* Create a builder with a {@linkplain HttpStatus#TEMPORARY_REDIRECT 307 Temporary
* Redirect} status and a location header set to the given URI.
* @param location the location URI
* @return the created builder
*/
static ServerResponse.BodyBuilder temporaryRedirect(URI location) {
ServerResponse.BodyBuilder builder = status(HttpStatus.TEMPORARY_REDIRECT);
return builder.location(location);
}
/**
* Create a builder with a {@linkplain HttpStatus#PERMANENT_REDIRECT 308 Permanent
* Redirect} status and a location header set to the given URI.
* @param location the location URI
* @return the created builder
*/
static ServerResponse.BodyBuilder permanentRedirect(URI location) {
ServerResponse.BodyBuilder builder = status(HttpStatus.PERMANENT_REDIRECT);
return builder.location(location);
}
/**
* Create a builder with a {@linkplain HttpStatus#BAD_REQUEST 400 Bad Request} status.
* @return the created builder
*/
static ServerResponse.BodyBuilder badRequest() {
return status(HttpStatus.BAD_REQUEST);
}
/**
* Create a builder with a {@linkplain HttpStatus#NOT_FOUND 404 Not Found} status.
* @return the created builder
*/
static ServerResponse.HeadersBuilder<?> notFound() {
return status(HttpStatus.NOT_FOUND);
}
/**
* Create a builder with a {@linkplain HttpStatus#UNPROCESSABLE_ENTITY 422
* Unprocessable Entity} status.
* @return the created builder
*/
static ServerResponse.BodyBuilder unprocessableEntity() {
return status(HttpStatus.UNPROCESSABLE_ENTITY);
}
/**
* Create a (built) response with the given asynchronous response. Parameter
* {@code asyncResponse} can be a {@link CompletableFuture
* CompletableFuture<ServerResponse>} or {@link Publisher
* Publisher<ServerResponse>} (or any asynchronous producer of a single
* {@code ServerResponse} that can be adapted via the
* {@link ReactiveAdapterRegistry}).
*
* <p>
* This method can be used to set the response status code, headers, and body based on
* an asynchronous result. If only the body is asynchronous,
* {@link ServerResponse.BodyBuilder#body(Object)} can be used instead.
* @param asyncResponse a {@code CompletableFuture<ServerResponse>} or
* {@code Publisher<ServerResponse>}
* @return the asynchronous response
* @since 5.3
*/
static ServerResponse async(Object asyncResponse) {
return GatewayAsyncServerResponse.create(asyncResponse, null);
}
/**
* Create a (built) response with the given asynchronous response. Parameter
* {@code asyncResponse} can be a {@link CompletableFuture
* CompletableFuture<ServerResponse>} or {@link Publisher
* Publisher<ServerResponse>} (or any asynchronous producer of a single
* {@code ServerResponse} that can be adapted via the
* {@link ReactiveAdapterRegistry}).
*
* <p>
* This method can be used to set the response status code, headers, and body based on
* an asynchronous result. If only the body is asynchronous,
* {@link ServerResponse.BodyBuilder#body(Object)} can be used instead.
* @param asyncResponse a {@code CompletableFuture<ServerResponse>} or
* {@code Publisher<ServerResponse>}
* @param timeout maximum time period to wait for before timing out
* @return the asynchronous response
* @since 5.3.2
*/
static ServerResponse async(Object asyncResponse, Duration timeout) {
return GatewayAsyncServerResponse.create(asyncResponse, timeout);
}
}
|
GatewayServerResponse
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/EnumSource.java
|
{
"start": 7121,
"end": 7573
}
|
enum ____ name(s) in " + enumSource + ". Valid names include: " + allNames);
}
private static void validatePatterns(EnumSource enumSource, Set<? extends Enum<?>> constants,
Set<String> names) {
try {
names.forEach(Pattern::compile);
}
catch (PatternSyntaxException e) {
throw new PreconditionViolationException(
"Pattern compilation failed for a regular expression supplied in " + enumSource, e);
}
}
private
|
constant
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/netty/PartitionRequestClientFactoryTest.java
|
{
"start": 16302,
"end": 16920
}
|
class ____
extends CreditBasedPartitionRequestClientHandler {
private final CompletableFuture<Void> inactiveFuture;
private ChannelInactiveFutureHandler(CompletableFuture<Void> inactiveFuture) {
this.inactiveFuture = inactiveFuture;
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
super.channelInactive(ctx);
inactiveFuture.complete(null);
}
public CompletableFuture<Void> getInactiveFuture() {
return inactiveFuture;
}
}
}
|
ChannelInactiveFutureHandler
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/BadImportTest.java
|
{
"start": 16406,
"end": 16890
}
|
class ____ {
void x(Iterable<?> i) {
assertThat(i).isEmpty();
}
}
""")
.doTest();
}
@Test
public void suppressed_class() {
compilationTestHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.collect.ImmutableList.of;
import com.google.common.collect.ImmutableList;
@SuppressWarnings("BadImport")
|
Test
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/model/profile/OperatingSystemProfileActivator.java
|
{
"start": 1485,
"end": 5373
}
|
class ____ implements ProfileActivator {
private static final String REGEX_PREFIX = "regex:";
@Override
public boolean isActive(Profile profile, ProfileActivationContext context, ModelProblemCollector problems) {
Activation activation = profile.getActivation();
if (activation == null) {
return false;
}
ActivationOS os = activation.getOs();
if (os == null) {
return false;
}
boolean active = ensureAtLeastOneNonNull(os);
String actualOsName = getSystemProperty(context, "os.name", Os.OS_NAME).toLowerCase(Locale.ENGLISH);
String actualOsArch = getSystemProperty(context, "os.arch", Os.OS_ARCH).toLowerCase(Locale.ENGLISH);
String actualOsVersion =
getSystemProperty(context, "os.version", Os.OS_VERSION).toLowerCase(Locale.ENGLISH);
if (active && os.getFamily() != null) {
active = determineFamilyMatch(os.getFamily(), actualOsName);
}
if (active && os.getName() != null) {
active = determineNameMatch(os.getName(), actualOsName);
}
if (active && os.getArch() != null) {
active = determineArchMatch(os.getArch(), actualOsArch);
}
if (active && os.getVersion() != null) {
active = determineVersionMatch(os.getVersion(), actualOsVersion);
}
return active;
}
private String getSystemProperty(ProfileActivationContext context, String key, String defValue) {
String val = context.getSystemProperty(key);
return val != null ? val : defValue;
}
@Override
public boolean presentInConfig(Profile profile, ProfileActivationContext context, ModelProblemCollector problems) {
Activation activation = profile.getActivation();
if (activation == null) {
return false;
}
ActivationOS os = activation.getOs();
return os != null;
}
private boolean ensureAtLeastOneNonNull(ActivationOS os) {
return os.getArch() != null || os.getFamily() != null || os.getName() != null || os.getVersion() != null;
}
private boolean determineVersionMatch(String expectedVersion, String actualVersion) {
String test = expectedVersion.toLowerCase(Locale.ENGLISH);
boolean reverse = false;
final boolean result;
if (test.startsWith(REGEX_PREFIX)) {
result = actualVersion.matches(test.substring(REGEX_PREFIX.length()));
} else {
if (test.startsWith("!")) {
reverse = true;
test = test.substring(1);
}
result = actualVersion.equals(test);
}
return reverse != result;
}
private boolean determineArchMatch(String expectedArch, String actualArch) {
String test = expectedArch.toLowerCase(Locale.ENGLISH);
boolean reverse = false;
if (test.startsWith("!")) {
reverse = true;
test = test.substring(1);
}
boolean result = actualArch.equals(test);
return reverse != result;
}
private boolean determineNameMatch(String family, String actualName) {
String test = family.toLowerCase(Locale.ENGLISH);
boolean reverse = false;
if (test.startsWith("!")) {
reverse = true;
test = test.substring(1);
}
boolean result = actualName.equals(test);
return reverse != result;
}
private boolean determineFamilyMatch(String family, String actualName) {
String test = family;
boolean reverse = false;
if (test.startsWith("!")) {
reverse = true;
test = test.substring(1);
}
boolean result = Os.isFamily(test, actualName);
return reverse != result;
}
}
|
OperatingSystemProfileActivator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/DataAttachment.java
|
{
"start": 439,
"end": 2223
}
|
class ____ implements EmailAttachmentParser.EmailAttachment {
private final String id;
private final org.elasticsearch.xpack.watcher.notification.email.DataAttachment dataAttachment;
public DataAttachment(String id, org.elasticsearch.xpack.watcher.notification.email.DataAttachment dataAttachment) {
this.id = id;
this.dataAttachment = dataAttachment;
}
public org.elasticsearch.xpack.watcher.notification.email.DataAttachment getDataAttachment() {
return dataAttachment;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(id).startObject(DataAttachmentParser.TYPE);
if (dataAttachment == org.elasticsearch.xpack.watcher.notification.email.DataAttachment.YAML) {
builder.field("format", "yaml");
} else {
builder.field("format", "json");
}
return builder.endObject().endObject();
}
@Override
public String type() {
return DataAttachmentParser.TYPE;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DataAttachment otherDataAttachment = (DataAttachment) o;
return Objects.equals(id, otherDataAttachment.id) && Objects.equals(dataAttachment, otherDataAttachment.dataAttachment);
}
@Override
public int hashCode() {
return Objects.hash(id, dataAttachment);
}
@Override
public String id() {
return id;
}
@Override
public boolean inline() {
return false;
}
public static Builder builder(String id) {
return new Builder(id);
}
public static
|
DataAttachment
|
java
|
apache__camel
|
components/camel-zipfile/src/test/java/org/apache/camel/processor/aggregate/zipfile/ZipAggregationStrategyNullBodyTest.java
|
{
"start": 1539,
"end": 5200
}
|
class ____ extends CamelTestSupport {
private static final String TEST_DIR = "target/out_ZipAggregationStrategyNullBodyTest";
public static final String MOCK_AGGREGATE_TO_ZIP_ENTRY = "mock:aggregateToZipEntry";
@BeforeEach
public void deleteTestDirs() {
deleteDirectory(TEST_DIR);
}
@Test
public void testNullBodyLast() throws Exception {
MockEndpoint mock = getMockEndpoint(MOCK_AGGREGATE_TO_ZIP_ENTRY);
mock.expectedMessageCount(1);
template.sendBody("direct:start", "Hello");
template.sendBody("direct:start", "Hello again");
template.sendBody("direct:start", null);
assertZipContainsFiles(2);
}
@Test
public void testNullBodyFirst() throws Exception {
MockEndpoint mock = getMockEndpoint(MOCK_AGGREGATE_TO_ZIP_ENTRY);
mock.expectedMessageCount(1);
template.sendBody("direct:start", null);
template.sendBody("direct:start", "Hello");
template.sendBody("direct:start", "Hello again");
assertZipContainsFiles(2);
}
@Test
public void testNullBodyMiddle() throws Exception {
MockEndpoint mock = getMockEndpoint(MOCK_AGGREGATE_TO_ZIP_ENTRY);
mock.expectedMessageCount(1);
template.sendBody("direct:start", "Hello");
template.sendBody("direct:start", null);
template.sendBody("direct:start", "Hello again");
assertZipContainsFiles(2);
}
@Test
public void testNullBodiesOnly() throws Exception {
MockEndpoint mock = getMockEndpoint(MOCK_AGGREGATE_TO_ZIP_ENTRY);
mock.expectedMessageCount(1);
template.sendBody("direct:start", null);
template.sendBody("direct:start", null);
template.sendBody("direct:start", null);
assertZipContainsFiles(0);
}
@Test
public void testTwoNullBodies() throws Exception {
MockEndpoint mock = getMockEndpoint(MOCK_AGGREGATE_TO_ZIP_ENTRY);
mock.expectedMessageCount(1);
template.sendBody("direct:start", null);
template.sendBody("direct:start", null);
template.sendBody("direct:start", "Hello");
assertZipContainsFiles(1);
}
private void assertZipContainsFiles(int expectedCount) throws InterruptedException, IOException {
MockEndpoint.assertIsSatisfied(context);
File[] files = new File(TEST_DIR).listFiles();
assertNotNull(files);
assertTrue(files.length > 0, "Should be a file in " + TEST_DIR + " directory");
File resultFile = files[0];
ZipInputStream zin = new ZipInputStream(new FileInputStream(resultFile));
try {
int fileCount = 0;
for (ZipEntry ze = zin.getNextEntry(); ze != null; ze = zin.getNextEntry()) {
fileCount++;
}
assertEquals(expectedCount, fileCount,
"Zip file should contains " + expectedCount + " files");
} finally {
IOHelper.close(zin);
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.aggregate(new ZipAggregationStrategy())
.constant(true)
.completionSize(3)
.eagerCheckCompletion()
.to("file:" + TEST_DIR)
.to(MOCK_AGGREGATE_TO_ZIP_ENTRY)
.log("Done processing zip file: ${header.CamelFileName}");
}
};
}
}
|
ZipAggregationStrategyNullBodyTest
|
java
|
apache__rocketmq
|
proxy/src/test/java/org/apache/rocketmq/proxy/grpc/v2/producer/ForwardMessageToDLQActivityTest.java
|
{
"start": 1749,
"end": 4863
}
|
class ____ extends BaseActivityTest {
private ForwardMessageToDLQActivity forwardMessageToDLQActivity;
@Before
public void before() throws Throwable {
super.before();
this.forwardMessageToDLQActivity = new ForwardMessageToDLQActivity(messagingProcessor, grpcClientSettingsManager, grpcChannelManager);
}
@Test
public void testForwardMessageToDeadLetterQueue() throws Throwable {
ArgumentCaptor<ReceiptHandle> receiptHandleCaptor = ArgumentCaptor.forClass(ReceiptHandle.class);
when(this.messagingProcessor.forwardMessageToDeadLetterQueue(any(), receiptHandleCaptor.capture(), anyString(), anyString(), anyString()))
.thenReturn(CompletableFuture.completedFuture(RemotingCommand.createResponseCommand(ResponseCode.SUCCESS, "")));
String handleStr = buildReceiptHandle("topic", System.currentTimeMillis(), 3000);
ForwardMessageToDeadLetterQueueResponse response = this.forwardMessageToDLQActivity.forwardMessageToDeadLetterQueue(
createContext(),
ForwardMessageToDeadLetterQueueRequest.newBuilder()
.setTopic(Resource.newBuilder().setName("topic").build())
.setGroup(Resource.newBuilder().setName("group").build())
.setMessageId(MessageClientIDSetter.createUniqID())
.setReceiptHandle(handleStr)
.build()
).get();
assertEquals(Code.OK, response.getStatus().getCode());
assertEquals(handleStr, receiptHandleCaptor.getValue().getReceiptHandle());
}
@Test
public void testForwardMessageToDeadLetterQueueWhenHasMappingHandle() throws Throwable {
ArgumentCaptor<ReceiptHandle> receiptHandleCaptor = ArgumentCaptor.forClass(ReceiptHandle.class);
when(this.messagingProcessor.forwardMessageToDeadLetterQueue(any(), receiptHandleCaptor.capture(), anyString(), anyString(), anyString()))
.thenReturn(CompletableFuture.completedFuture(RemotingCommand.createResponseCommand(ResponseCode.SUCCESS, "")));
String savedHandleStr = buildReceiptHandle("topic", System.currentTimeMillis(),3000);
when(messagingProcessor.removeReceiptHandle(any(), any(), anyString(), anyString(), anyString()))
.thenReturn(new MessageReceiptHandle("group", "topic", 0, savedHandleStr, "msgId", 0, 0));
ForwardMessageToDeadLetterQueueResponse response = this.forwardMessageToDLQActivity.forwardMessageToDeadLetterQueue(
createContext(),
ForwardMessageToDeadLetterQueueRequest.newBuilder()
.setTopic(Resource.newBuilder().setName("topic").build())
.setGroup(Resource.newBuilder().setName("group").build())
.setMessageId(MessageClientIDSetter.createUniqID())
.setReceiptHandle(buildReceiptHandle("topic", System.currentTimeMillis(), 3000))
.build()
).get();
assertEquals(Code.OK, response.getStatus().getCode());
assertEquals(savedHandleStr, receiptHandleCaptor.getValue().getReceiptHandle());
}
}
|
ForwardMessageToDLQActivityTest
|
java
|
apache__camel
|
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/model/Order.java
|
{
"start": 924,
"end": 958
}
|
enum ____ {
asc,
desc;
}
|
Order
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionWriter.java
|
{
"start": 61526,
"end": 198022
}
|
interface
____(defaultConstructor, false);
}
}
/**
* Finalize the bean definition to the given output stream.
*/
@SuppressWarnings("Duplicates")
@Override
public void visitBeanDefinitionEnd() {
if (executableMethodsDefinitionWriter != null) {
// Make sure the methods are written and annotation defaults are contributed
executableMethodsDefinitionWriter.visitDefinitionEnd();
}
processAllBeanElementVisitors();
evaluatedExpressionProcessor.registerExpressionForBuildTimeInit(classDefBuilder);
MethodDef getOrderMethod = getGetOrder();
if (getOrderMethod != null) {
classDefBuilder.addMethod(getOrderMethod);
}
if (interceptedType != null) {
classDefBuilder.addMethod(
getGetInterceptedType(TypeDef.of(interceptedType))
);
}
classDefBuilder.addMethod(
MethodDef.override(
LOAD_REFERENCE_METHOD
).build((aThis, methodParameters) -> aThis.type().instantiate().returning())
);
if (annotationMetadata.hasDeclaredAnnotation(Context.class)) {
classDefBuilder.addMethod(
MethodDef.override(
IS_CONTEXT_SCOPE_METHOD
).build((aThis, methodParameters) -> ExpressionDef.trueValue().returning())
);
}
if (proxiedBean || superType != TYPE_ABSTRACT_BEAN_DEFINITION_AND_REFERENCE) {
classDefBuilder.addMethod(
MethodDef.override(
IS_PROXIED_BEAN_METHOD
).build((aThis, methodParameters) -> ExpressionDef.constant(proxiedBean).returning())
);
}
if (isProxyTarget || superType != TYPE_ABSTRACT_BEAN_DEFINITION_AND_REFERENCE) {
classDefBuilder.addMethod(
MethodDef.override(
IS_PROXY_TARGET_METHOD
).build((aThis, methodParameters) -> ExpressionDef.constant(isProxyTarget).returning())
);
}
if (!annotationMetadata.hasStereotype(Requires.class)) {
classDefBuilder.addMethod(
MethodDef.override(
IS_ENABLED_METHOD
).build((aThis, methodParameters) -> ExpressionDef.trueValue().returning())
);
classDefBuilder.addMethod(
MethodDef.override(
IS_ENABLED2_METHOD
).build((aThis, methodParameters) -> ExpressionDef.trueValue().returning())
);
}
if (proxyBeanDefinitionName != null) {
classDefBuilder.addMethod(
MethodDef.override(
METHOD_PROXY_TARGET_TYPE
).build((aThis, methodParameters)
-> ExpressionDef.constant(ClassTypeDef.of(proxyBeanDefinitionName)).returning())
);
classDefBuilder.addMethod(
MethodDef.override(
METHOD_PROXY_TARGET_CLASS
).build((aThis, methodParameters)
-> ExpressionDef.constant(ClassTypeDef.of(proxyBeanTypeName)).returning())
);
}
classDefBuilder.addMethod(
getBuildMethod(buildMethodDefinition)
);
if (!injectCommands.isEmpty()) {
classDefBuilder.addMethod(
getInjectMethod(injectCommands)
);
}
if (buildMethodDefinition.postConstruct != null) {
// for "super bean definition" we only add code to trigger "initialize"
if (!superBeanDefinition || buildMethodDefinition.postConstruct.intercepted) {
classDefBuilder.addSuperinterface(TypeDef.of(InitializingBeanDefinition.class));
// Create a new method that will be invoked by the intercepted chain
MethodDef targetInitializeMethod = buildInitializeMethod(buildMethodDefinition.postConstruct, MethodDef.builder("initialize$intercepted")
.addModifiers(Modifier.PUBLIC)
.addParameters(BeanResolutionContext.class, BeanContext.class, Object.class)
.returns(Object.class));
classDefBuilder.addMethod(
targetInitializeMethod
);
// Original initialize method is invoking the interceptor chain
classDefBuilder.addMethod(
MethodDef.override(METHOD_INITIALIZE).build((aThis, methodParameters) -> {
ClassTypeDef executableMethodInterceptor = createExecutableMethodInterceptor(targetInitializeMethod, "InitializeInterceptor");
return interceptAndReturn(aThis, methodParameters, executableMethodInterceptor, INITIALIZE_INTERCEPTOR_METHOD);
})
);
}
}
if (buildMethodDefinition.preDestroy != null) {
classDefBuilder.addSuperinterface(TypeDef.of(DisposableBeanDefinition.class));
if (buildMethodDefinition.preDestroy.intercepted) {
// Create a new method that will be invoked by the intercepted chain
MethodDef targetDisposeMethod = buildDisposeMethod(buildMethodDefinition.preDestroy, MethodDef.builder("dispose$intercepted")
.addModifiers(Modifier.PUBLIC)
.addParameters(BeanResolutionContext.class, BeanContext.class, Object.class)
.returns(Object.class));
classDefBuilder.addMethod(
targetDisposeMethod
);
// Original dispose method is invoking the interceptor chain
classDefBuilder.addMethod(
MethodDef.override(METHOD_DISPOSE).build((aThis, methodParameters) -> {
ClassTypeDef executableMethodInterceptor = createExecutableMethodInterceptor(targetDisposeMethod, "DisposeInterceptor");
return interceptAndReturn(aThis, methodParameters, executableMethodInterceptor, DISPOSE_INTERCEPTOR_METHOD);
})
);
} else {
classDefBuilder.addMethod(
buildDisposeMethod(buildMethodDefinition.preDestroy, MethodDef.override(METHOD_DISPOSE))
);
}
}
StaticBlock staticBlock = getStaticInitializer();
classDefBuilder.addStaticInitializer(staticBlock.statement);
addConstructor(staticBlock);
boolean isParallel = annotationMetadata.hasStereotype(Parallel.class);
// In v6 we can assume everything was recompiled with v5 so we can modify the default method to return false and only add this one on true
classDefBuilder.addMethod(
MethodDef.override(IS_PARALLEL_METHOD).build((aThis, methodParameters) -> ExpressionDef.constant(isParallel).returning())
);
AnnotationValue<DefaultImplementation> defaultImplementationAnnotationValue = annotationMetadata.getAnnotation(DefaultImplementation.class);
if (defaultImplementationAnnotationValue != null) {
AnnotationClassValue<?> defaultImplementationClass = defaultImplementationAnnotationValue.annotationClassValue("name").orElse(null);
if (defaultImplementationClass != null) {
classDefBuilder.addMethod(MethodDef.override(METHOD_GET_DEFAULT_IMPLEMENTATION)
.build((aThis, methodParameters) ->
ExpressionDef.constant(TypeDef.of(defaultImplementationClass.getName()))
.returning()
.doTry()
.doCatch(Throwable.class, exceptionVar -> ExpressionDef.nullValue().returning())
)
);
}
}
if (annotationMetadata.hasStereotype(Infrastructure.class)) {
classDefBuilder.addMethod(
MethodDef.override(METHOD_IS_CAN_BE_REPLACED).build((aThis, methodParameters) -> ExpressionDef.constant(false).returning())
);
}
loadTypeMethods.values().forEach(classDefBuilder::addMethod);
if (requiresMethodProcessing() && executableMethodsDefinitionWriter != null) {
int methodsCount = executableMethodsDefinitionWriter.getMethodsCount();
List<ExpressionDef> expressions = new ArrayList<>(methodsCount);
for (int i = 0; i < methodsCount; i++) {
MethodElement method = executableMethodsDefinitionWriter.getMethodByIndex(i);
if (method.booleanValue(Executable.class, Executable.MEMBER_PROCESS_ON_STARTUP).orElse(false)) {
expressions.add(TypeDef.Primitive.INT.constant(i));
}
}
classDefBuilder.addMethod(MethodDef.override(BD_GET_INDEXES_OF_EXECUTABLE_METHODS_FOR_PROCESSING)
.build((aThis, methodParameters) -> TypeDef.Primitive.INT.array().instantiate(expressions).returning()));
}
output = new LinkedHashMap<>();
// Generate the bytecode in the round it's being invoked
generateFiles(classDefBuilder.build());
evaluatedExpressionProcessor.finish();
}
private void generateFiles(ObjectDef objectDef) {
output.put(objectDef.getName(), ByteCodeWriterUtils.writeByteCode(objectDef, visitorContext));
for (ObjectDef innerType : objectDef.getInnerTypes()) {
generateFiles(innerType);
}
}
private MethodDef getGetInterceptedType(TypeDef interceptedType) {
return MethodDef.override(GET_INTERCEPTED_TYPE_METHOD)
.build((aThis, methodParameters) -> ExpressionDef.constant(interceptedType).returning());
}
private MethodDef getBuildMethod(BuildMethodDefinition buildMethodDefinition) {
boolean isParametrized = isParametrized(buildMethodDefinition.getParameters());
MethodDef.MethodDefBuilder buildMethodBuilder;
if (isParametrized) {
buildMethodBuilder = MethodDef.override(DO_INSTANTIATE_METHOD);
classDefBuilder.addSuperinterface(TypeDef.of(ParametrizedInstantiatableBeanDefinition.class));
} else {
buildMethodBuilder = MethodDef.override(INSTANTIATE_METHOD);
}
return buildMethodBuilder.build((aThis, methodParameters) -> StatementDef.multi(
invokeCheckIfShouldLoadIfNecessary(aThis, methodParameters),
buildInstance(
aThis,
methodParameters,
buildMethodDefinition,
instance -> onBeanInstance(aThis, methodParameters, buildMethodDefinition, instance),
isParametrized
)
));
}
    /**
     * Creates the {@code injectBean} override that applies all recorded injection
     * commands (setter, field, method and config-builder injections) to the bean
     * instance passed as the third method parameter.
     * <p>
     * For configuration-properties beans the injection statements only run when
     * {@code containsProperties(...)} is true; otherwise only the cleanup/return
     * statements execute.
     *
     * @param injectCommands the injection commands to translate into statements
     * @return the generated {@code injectBean} method
     */
    private MethodDef getInjectMethod(List<InjectMethodCommand> injectCommands) {
        return MethodDef.override(INJECT_BEAN_METHOD)
            .build((aThis, methodParameters) -> {
                // Parameter 2 is the raw bean instance; cast it to the concrete bean type.
                return methodParameters.get(2).cast(beanTypeDef).newLocal("beanInstance", instanceVar -> {
                    InjectMethodSignature injectMethodSignature = new InjectMethodSignature(aThis, methodParameters, instanceVar);
                    List<StatementDef> statements = new ArrayList<>();
                    boolean hasInjectPoint = false;
                    for (InjectMethodCommand injectCommand : injectCommands) {
                        statements.add(getInjectStatement(injectCommand, injectMethodSignature));
                        // Remember whether any command uses inject scope so we can destroy those beans after injection.
                        hasInjectPoint |= injectCommand.hasInjectScope();
                    }
                    List<StatementDef> returnStatements = new ArrayList<>();
                    if (hasInjectPoint) {
                        returnStatements.add(destroyInjectScopeBeansIfNecessary(methodParameters));
                    }
                    returnStatements.add(instanceVar.returning());
                    statements.addAll(returnStatements);
                    if (isConfigurationProperties) {
                        // Inject only when configuration for this bean is present;
                        // the else-branch just cleans up and returns the instance unchanged.
                        return aThis.invoke(
                            CONTAINS_PROPERTIES_METHOD,
                            injectMethodSignature.beanResolutionContext,
                            injectMethodSignature.beanContext
                        ).ifTrue(
                            StatementDef.multi(statements),
                            StatementDef.multi(returnStatements)
                        );
                    }
                    return StatementDef.multi(statements);
                });
            });
    }
private StatementDef getInjectStatement(InjectMethodCommand injectionPoint, InjectMethodSignature injectMethodSignature) {
if (injectionPoint instanceof SetterInjectionInjectCommand setterInjectionInjectCommand) {
return setSetterValue(
injectMethodSignature,
setterInjectionInjectCommand.declaringType,
setterInjectionInjectCommand.methodElement,
setterInjectionInjectCommand.annotationMetadata,
setterInjectionInjectCommand.requiresReflection,
setterInjectionInjectCommand.isOptional
);
}
if (injectionPoint instanceof InjectFieldInjectCommand injectFieldInjectCommand) {
return injectField(
injectMethodSignature,
injectFieldInjectCommand.declaringType,
injectFieldInjectCommand.fieldElement,
injectFieldInjectCommand.requiresReflection
);
}
if (injectionPoint instanceof InjectMethodInjectCommand injectMethodInjectCommand) {
return injectMethod(
injectMethodInjectCommand.methodElement,
injectMethodInjectCommand.requiresReflection,
injectMethodSignature.aThis,
injectMethodSignature.methodParameters,
injectMethodSignature.instanceVar,
injectMethodInjectCommand.methodIndex
);
}
if (injectionPoint instanceof InjectFieldValueInjectCommand injectFieldValueInjectCommand) {
return setFieldValue(
injectMethodSignature,
injectFieldValueInjectCommand.declaringType,
injectFieldValueInjectCommand.fieldElement,
injectFieldValueInjectCommand.requiresReflection,
injectFieldValueInjectCommand.isOptional
);
}
if (injectionPoint instanceof ConfigBuilderInjectCommand configBuilderInjectCommand) {
ConfigurationBuilderDefinition configurationBuilderDefinition = configBuilderInjectCommand.configurationBuilderDefinition;
if (configurationBuilderDefinition instanceof ConfigurationBuilderOfPropertyDefinition definitionList) {
String factoryMethod = definitionList.builderElement().getAnnotationMetadata()
.stringValue(ConfigurationBuilder.class, "factoryMethod").orElse(null);
ClassTypeDef builderType = ClassTypeDef.of(definitionList.builderType());
PropertyElement property = definitionList.property();
Optional<? extends MemberElement> readMember = property.getReadMember();
if (readMember.isPresent()) {
MemberElement memberElement = readMember.get();
if (memberElement instanceof MethodElement method) {
return buildMethodConfigBuilderInvocation(injectMethodSignature, factoryMethod, builderType, method.getName(), definitionList.elements());
}
if (memberElement instanceof FieldElement field) {
return buildFieldConfigBuilderInvocation(injectMethodSignature, factoryMethod, builderType, field.getName(), definitionList.elements());
}
}
throw new IllegalStateException("Unexpected configuration builder injection point: " + injectMethodSignature);
} else if (configurationBuilderDefinition instanceof ConfigurationBuilderOfFieldDefinition fieldDefinition) {
String factoryMethod = fieldDefinition.fieldElement().getAnnotationMetadata()
.stringValue(ConfigurationBuilder.class, "factoryMethod").orElse(null);
String field = fieldDefinition.fieldElement().getName();
ClassTypeDef builderType = ClassTypeDef.of(fieldDefinition.builderType());
List<ConfigurationBuilderPropertyDefinition> elements = fieldDefinition.elements();
return buildFieldConfigBuilderInvocation(injectMethodSignature, factoryMethod, builderType, field, elements);
} else {
throw new IllegalStateException("Unknown configuration builder def type: " + configurationBuilderDefinition.getClass());
}
}
throw new IllegalStateException();
}
    /**
     * Generates statements applying configuration-builder properties to a builder
     * held in a bean field.
     * <p>
     * With a factory method the builder is created via the static factory,
     * configured, then written back into the field; without one the builder is
     * read from the field and configured in place.
     *
     * @param factoryMethod static factory method name on the builder type, or null/empty
     * @param field         the bean field holding (or receiving) the builder
     * @param elements      the builder properties to apply
     */
    private StatementDef buildFieldConfigBuilderInvocation(InjectMethodSignature injectMethodSignature, String factoryMethod, ClassTypeDef builderType, String field, List<ConfigurationBuilderPropertyDefinition> elements) {
        if (StringUtils.isNotEmpty(factoryMethod)) {
            return builderType.invokeStatic(factoryMethod, builderType).newLocal("builder" + NameUtils.capitalize(field), builderVar -> {
                List<StatementDef> statements = getBuilderMethodStatements(injectMethodSignature, elements, (VariableDef.Local) builderVar);
                // Assign the fully-configured builder back to the bean's field.
                statements.add(injectMethodSignature.instanceVar
                    .field(field, builderType)
                    .put(builderVar));
                return StatementDef.multi(statements);
            });
        } else {
            // No factory: the field already holds the builder; configure it in place.
            return injectMethodSignature.instanceVar
                .field(field, builderType)
                .newLocal("builder" + NameUtils.capitalize(field), builderVar -> StatementDef.multi(
                    getBuilderMethodStatements(injectMethodSignature, elements, (VariableDef.Local) builderVar)
                ));
        }
    }
    /**
     * Generates statements applying configuration-builder properties to a builder
     * accessed through bean accessor methods.
     * <p>
     * With a factory method the builder is created via the static factory,
     * configured, then passed to the setter derived from the getter name; without
     * one the builder is obtained from the getter and configured in place.
     *
     * @param factoryMethod static factory method name on the builder type, or null/empty
     * @param methodName    the getter exposing the builder
     * @param elements      the builder properties to apply
     */
    private StatementDef buildMethodConfigBuilderInvocation(InjectMethodSignature injectMethodSignature, String factoryMethod, ClassTypeDef builderType, String methodName, List<ConfigurationBuilderPropertyDefinition> elements) {
        if (StringUtils.isNotEmpty(factoryMethod)) {
            return builderType.invokeStatic(factoryMethod, builderType).newLocal("builder" + NameUtils.capitalize(methodName), builderVar -> {
                List<StatementDef> statements =
                    getBuilderMethodStatements(injectMethodSignature, elements, (VariableDef.Local) builderVar);
                // Derive setXxx from the getter name and hand the configured builder to it.
                String propertyName = NameUtils.getPropertyNameForGetter(methodName);
                String setterName = NameUtils.setterNameFor(propertyName);
                statements.add(injectMethodSignature.instanceVar
                    .invoke(setterName, TypeDef.VOID, builderVar));
                return StatementDef.multi(statements);
            });
        } else {
            // No factory: fetch the existing builder via the getter and configure it in place.
            return injectMethodSignature.instanceVar
                .invoke(methodName, builderType)
                .newLocal("builder" + NameUtils.capitalize(methodName), builderVar -> StatementDef.multi(
                    getBuilderMethodStatements(injectMethodSignature, elements, (VariableDef.Local) builderVar)
                ));
        }
    }
private List<StatementDef> getBuilderMethodStatements(InjectMethodSignature injectMethodSignature,
List<ConfigurationBuilderPropertyDefinition> points, VariableDef.Local builderVar) {
List<StatementDef> statements = new ArrayList<>();
for (ConfigurationBuilderPropertyDefinition builderPoint : points) {
statements.add(
getConfigBuilderPointStatement(injectMethodSignature, builderVar, builderPoint)
);
}
return statements;
}
    /**
     * Generates the statement applying a single configuration property to a builder:
     * look up the value for the property path, and if present invoke the matching
     * builder method with it.
     * <p>
     * Special cases:
     * <ul>
     *   <li>zero-argument builder methods — invoked only when the looked-up value is {@code true};</li>
     *   <li>{@link Duration} values on single-argument methods — expanded into
     *       {@code (millis, TimeUnit.MILLISECONDS)} arguments.</li>
     * </ul>
     * {@link NoSuchMethodError} from the builder invocation is deliberately ignored,
     * presumably to tolerate builder methods absent at runtime — TODO confirm.
     */
    private StatementDef getConfigBuilderPointStatement(InjectMethodSignature injectMethodSignature,
                                                        VariableDef.Local builderVar,
                                                        ConfigurationBuilderPropertyDefinition builderPoint) {
        boolean isDurationWithTimeUnit = builderPoint.parameter() == null && builderPoint.type().getName().equals(Duration.class.getName());
        ClassElement paramType = builderPoint.type();
        Map<String, ClassElement> generics = paramType.getTypeArguments();
        // "Zero args" = the builder method takes no parameter and is not the Duration special case.
        boolean zeroArgs = builderPoint.parameter() == null && !isDurationWithTimeUnit;
        // Optional optional = AbstractBeanDefinition.getValueForPath(...)
        String propertyPath = builderPoint.path();
        String localName = builderVar.name() + "_optional" + NameUtils.capitalize(builderPoint.name());
        return getGetValueForPathCall(injectMethodSignature, paramType, builderPoint.name(), propertyPath, zeroArgs, generics)
            .newLocal(localName, optionalVar -> {
                return optionalVar.invoke(OPTIONAL_IS_PRESENT_METHOD)
                    .ifTrue(
                        optionalVar.invoke(OPTIONAL_GET_METHOD).newLocal(localName + "_value", valueVar -> {
                            if (zeroArgs) {
                                // Boolean flag property: call the no-arg builder method only when true.
                                return valueVar.cast(boolean.class).ifTrue(
                                    StatementDef.doTry(
                                        builderVar.invoke(builderPoint.method())
                                    ).doCatch(NoSuchMethodError.class, exceptionVar -> StatementDef.multi())
                                );
                            }
                            List<ExpressionDef> values = new ArrayList<>(2);
                            if (isDurationWithTimeUnit) {
                                // Duration -> (long millis, TimeUnit.MILLISECONDS) argument pair.
                                ClassTypeDef timeInitType = ClassTypeDef.of(TimeUnit.class);
                                values.add(
                                    valueVar.cast(ClassTypeDef.of(Duration.class))
                                        .invoke(DURATION_TO_MILLIS_METHOD)
                                );
                                values.add(
                                    timeInitType.getStaticField("MILLISECONDS", timeInitType)
                                );
                            } else {
                                TypeDef paramTypeDef = TypeDef.erasure(paramType);
                                values.add(valueVar.cast(paramTypeDef));
                            }
                            return StatementDef.doTry(
                                builderVar.invoke(builderPoint.method(), values)
                            ).doCatch(NoSuchMethodError.class, exceptionVar -> StatementDef.multi());
                        })
                    );
            });
    }
private StatementDef setFieldValue(InjectMethodSignature injectMethodSignature,
TypedElement declaringType,
FieldElement fieldElement,
boolean requiresReflection,
boolean isOptional) {
StatementDef setFieldValueStatement = setFieldValue(injectMethodSignature, fieldElement, isOptional, declaringType, requiresReflection);
if (isOptional) {
return getPropertyContainsCheck(
injectMethodSignature,
fieldElement.getType(),
fieldElement.getName(),
fieldElement.getAnnotationMetadata()
).ifTrue(setFieldValueStatement);
}
return setFieldValueStatement;
}
    /**
     * Generates the assignment of a configuration value to a field.
     * <p>
     * Inner-type fields delegate to regular bean field injection. Non-config or
     * reflective fields go through the generic field-injection path with
     * {@code getValueForField}. Otherwise the field is registered as an injection
     * point and assigned directly from either its {@code @Property} name or its
     * {@code @Value} placeholder.
     */
    private StatementDef setFieldValue(InjectMethodSignature injectMethodSignature,
                                       FieldElement fieldElement,
                                       boolean isOptional,
                                       TypedElement declaringType,
                                       boolean requiresReflection) {
        if (isInnerType(fieldElement.getGenericType())) {
            // Inner configuration types are injected as regular beans, not as values.
            return injectField(injectMethodSignature, declaringType, fieldElement, requiresReflection);
        }
        if (!isConfigurationProperties || requiresReflection) {
            boolean isRequired = fieldElement
                .booleanValue(AnnotationUtil.INJECT, AnnotationUtil.MEMBER_REQUIRED)
                .orElse(true);
            return visitFieldInjectionPointInternal(
                injectMethodSignature,
                declaringType,
                fieldElement,
                requiresReflection,
                GET_VALUE_FOR_FIELD,
                isOptional,
                false,
                isRequired
            );
        }
        // Register the field so its index can be referenced by the value lookup below.
        fieldInjectionPoints.add(new FieldVisitData(declaringType, fieldElement, false));
        int fieldIndex = fieldInjectionPoints.size() - 1;
        ExpressionDef value;
        Optional<String> property = fieldElement.getAnnotationMetadata().stringValue(Property.class, "name");
        if (property.isPresent()) {
            value = getInvokeGetPropertyValueForField(injectMethodSignature, fieldElement, fieldElement.getAnnotationMetadata(), property.get(), fieldIndex);
        } else {
            Optional<String> valueValue = fieldElement.getAnnotationMetadata().stringValue(Value.class);
            if (valueValue.isPresent()) {
                value = getInvokeGetPropertyPlaceholderValueForField(injectMethodSignature, fieldElement, valueValue.get(), fieldIndex);
            } else {
                // NOTE(review): neither @Property nor @Value is present, so null is
                // injected — confirm this fallback path is actually reachable.
                value = ExpressionDef.nullValue();
            }
        }
        return injectMethodSignature.instanceVar.field(fieldElement).put(value);
    }
    /**
     * Generates the post-construction statements for a freshly built bean instance:
     * run {@code injectBean} when there are injection commands (or a super bean
     * definition), destroy inject-scope beans when build parameters use inject
     * scope, and run the initialize (post-construct) hook before returning.
     * <p>
     * When none of those apply, the instance expression is returned directly
     * without introducing a local variable.
     */
    private StatementDef onBeanInstance(VariableDef.This aThis,
                                        List<VariableDef.MethodParameter> methodParameters,
                                        BuildMethodDefinition buildMethodDefinition,
                                        ExpressionDef beanInstance) {
        boolean needsInjectMethod = !injectCommands.isEmpty() || superBeanDefinition;
        boolean needsInjectScope = hasInjectScope(buildMethodDefinition.getParameters());
        boolean needsPostConstruct = buildMethodDefinition.postConstruct != null;
        if (!needsInjectScope && !needsInjectMethod && !needsPostConstruct) {
            // Fast path: nothing to do after construction.
            return beanInstance.returning();
        }
        return beanInstance.newLocal("instance", instanceVar -> {
            List<StatementDef> statements = new ArrayList<>();
            if (needsInjectMethod) {
                statements.add(
                    aThis.invoke(INJECT_BEAN_METHOD, methodParameters.get(0), methodParameters.get(1), instanceVar)
                );
            }
            if (needsInjectScope) {
                statements.add(
                    destroyInjectScopeBeansIfNecessary(methodParameters)
                );
            }
            if (needsPostConstruct) {
                // The initialize hook returns the (possibly replaced) instance.
                statements.add(
                    aThis.invoke(METHOD_INITIALIZE,
                        methodParameters.get(0),
                        methodParameters.get(1),
                        instanceVar
                    ).returning()
                );
            } else {
                statements.add(instanceVar.returning());
            }
            return StatementDef.multi(statements);
        });
    }
private MethodDef buildDisposeMethod(BuildMethodLifecycleDefinition def, MethodDef.MethodDefBuilder override) {
return buildLifeCycleMethod(override, PRE_DESTROY_METHOD, def);
}
private MethodDef buildInitializeMethod(BuildMethodLifecycleDefinition def, MethodDef.MethodDefBuilder override) {
return buildLifeCycleMethod(override, POST_CONSTRUCT_METHOD, def);
}
    /**
     * Generates a lifecycle (post-construct / pre-destroy) method: call the super
     * implementation, cast the result to the bean type, invoke every lifecycle
     * injection point on it, clean up inject-scope beans if any injection point
     * parameter uses inject scope, and return the instance.
     *
     * @param superMethod the super-class lifecycle method to call first
     */
    private MethodDef buildLifeCycleMethod(MethodDef.MethodDefBuilder methodDefBuilder,
                                           Method superMethod,
                                           BuildMethodLifecycleDefinition lifeCycleDefinition) {
        return methodDefBuilder.build((aThis, methodParameters) -> {
            return aThis.invoke(superMethod, methodParameters).cast(beanTypeDef).newLocal("beanInstance", beanInstance -> {
                List<StatementDef> statements = new ArrayList<>();
                boolean hasInjectScope = false;
                for (InjectMethodBuildCommand injectionPoint : lifeCycleDefinition.injectionPoints) {
                    statements.add(injectMethod(injectionPoint.methodElement, injectionPoint.requiresReflection, aThis, methodParameters, beanInstance, injectionPoint.methodIndex));
                    // Only scan parameters until the first inject-scoped one is found.
                    if (!hasInjectScope) {
                        for (ParameterElement parameter : injectionPoint.methodElement.getSuspendParameters()) {
                            if (hasInjectScope(parameter)) {
                                hasInjectScope = true;
                                break;
                            }
                        }
                    }
                }
                if (hasInjectScope) {
                    statements.add(
                        destroyInjectScopeBeansIfNecessary(methodParameters)
                    );
                }
                statements.add(beanInstance.returning());
                return StatementDef.multi(statements);
            });
        });
    }
    /**
     * Generates the instance-construction statement for the bean, dispatching on
     * the kind of build definition (factory method/field, constructor, or custom).
     * <p>
     * The static {@code $CONSTRUCTOR} reference is materialised lazily through a
     * memoizing supplier: the first call creates a local define-and-assign, and
     * callers must prepend that definition before any statement that reads it —
     * hence the {@code constructorDef[0] != null} checks after argument resolution.
     *
     * @param onBeanInstance callback wrapping the raw instance expression into the
     *                       post-construction statements
     * @param isParametrized whether caller-supplied arguments are in play
     * @throws IllegalStateException if the build definition kind is unknown
     */
    private StatementDef buildInstance(VariableDef.This aThis,
                                       List<VariableDef.MethodParameter> methodParameters,
                                       BuildMethodDefinition buildMethodDefinition,
                                       Function<ExpressionDef, StatementDef> onBeanInstance,
                                       boolean isParametrized) {
        StatementDef.DefineAndAssign[] constructorDef = new StatementDef.DefineAndAssign[]{null};
        Supplier<VariableDef> constructorDefSupplier = new Supplier<VariableDef>() {
            @Override
            public VariableDef get() {
                // Memoize: create the local only once, on first use.
                if (constructorDef[0] == null) {
                    Class<?> constructorType;
                    if (constructor instanceof MethodElement) {
                        constructorType = AbstractInitializableBeanDefinition.MethodReference.class;
                    } else {
                        constructorType = AbstractInitializableBeanDefinition.FieldReference.class;
                    }
                    constructorDef[0] = aThis.type()
                        .getStaticField(FIELD_CONSTRUCTOR, ClassTypeDef.of(AbstractInitializableBeanDefinition.MethodOrFieldReference.class))
                        .cast(constructorType)
                        .newLocal("constructorDef");
                }
                return constructorDef[0].variable();
            }
        };
        if (buildMethodDefinition instanceof FactoryBuildMethodDefinition factoryBuildMethodDefinition) {
            if (factoryBuildMethodDefinition.parameters.length > 0) {
                List<? extends ExpressionDef> values = getConstructorArgumentValues(aThis, methodParameters,
                    List.of(buildMethodDefinition.getParameters()), isParametrized, constructorDefSupplier);
                StatementDef statement = buildFactoryGet(aThis, methodParameters, onBeanInstance, factoryBuildMethodDefinition, values);
                // Prepend the constructor-reference local if argument resolution used it.
                if (constructorDef[0] != null) {
                    return StatementDef.multi(
                        constructorDef[0],
                        statement
                    );
                }
                return statement;
            }
            return buildFactoryGet(aThis, methodParameters, onBeanInstance, factoryBuildMethodDefinition, List.of());
        }
        if (buildMethodDefinition instanceof ConstructorBuildMethodDefinition constructorBuildMethodDefinition) {
            if (constructorBuildMethodDefinition.constructor.hasParameters()) {
                List<? extends ExpressionDef> values = getConstructorArgumentValues(aThis, methodParameters,
                    List.of(buildMethodDefinition.getParameters()), isParametrized, constructorDefSupplier);
                StatementDef statement = buildConstructorInstantiate(aThis, methodParameters, onBeanInstance, constructorBuildMethodDefinition, values);
                // Prepend the constructor-reference local if argument resolution used it.
                if (constructorDef[0] != null) {
                    return StatementDef.multi(
                        constructorDef[0],
                        statement
                    );
                }
                return statement;
            }
            return buildConstructorInstantiate(aThis, methodParameters, onBeanInstance, constructorBuildMethodDefinition, List.of());
        }
        if (buildMethodDefinition instanceof CustomBuildMethodDefinition customBuildMethodDefinition) {
            List<? extends ExpressionDef> values = getConstructorArgumentValues(aThis, methodParameters,
                List.of(buildMethodDefinition.getParameters()), isParametrized, constructorDefSupplier);
            return buildCustomInstantiate(aThis, methodParameters, onBeanInstance, customBuildMethodDefinition, values);
        }
        throw new IllegalStateException("Unknown build method definition: " + buildMethodDefinition);
    }
    /**
     * Generates constructor-based instantiation. When the constructor is
     * intercepted, construction is routed through a generated constructor
     * interceptor and the interceptor chain; otherwise the constructor is invoked
     * directly with the resolved argument values.
     */
    private StatementDef buildConstructorInstantiate(VariableDef.This aThis,
                                                     List<VariableDef.MethodParameter> methodParameters,
                                                     Function<ExpressionDef, StatementDef> onBeanInstance,
                                                     ConstructorBuildMethodDefinition constructorBuildMethodDefinition,
                                                     List<? extends ExpressionDef> values) {
        List<ParameterElement> parameters = List.of(constructorBuildMethodDefinition.constructor.getSuspendParameters());
        if (isConstructorIntercepted(constructorBuildMethodDefinition.constructor)) {
            // Intercepted: delegate to the generated inner interceptor class and the chain,
            // packing the resolved arguments into an Object[].
            ClassTypeDef factoryInterceptor = createConstructorInterceptor(constructorBuildMethodDefinition);
            return onBeanInstance.apply(
                invokeConstructorChain(
                    aThis,
                    methodParameters,
                    factoryInterceptor.instantiate(CONSTRUCTOR_ABSTRACT_CONSTRUCTOR_IP, aThis),
                    TypeDef.OBJECT.array().instantiate(values),
                    parameters)
            );
        }
        return onBeanInstance.apply(
            initializeBean(aThis, methodParameters, constructorBuildMethodDefinition, values)
        );
    }
private StatementDef buildCustomInstantiate(VariableDef.This aThis,
List<VariableDef.MethodParameter> methodParameters,
Function<ExpressionDef, StatementDef> onBeanInstance,
CustomBuildMethodDefinition constructorBuildMethodDefinition,
List<? extends ExpressionDef> values) {
List<StatementDef> statements = new ArrayList<>();
statements.add(onBeanInstance.apply(
constructorBuildMethodDefinition.builder.build(statements, aThis, methodParameters, values)
));
return StatementDef.multi(statements);
}
    /**
     * Generates factory-based instantiation: resolve the factory bean, then either
     * route construction through a generated factory interceptor (when the factory
     * element is intercepted) or call the factory method/field directly.
     */
    private StatementDef buildFactoryGet(VariableDef.This aThis,
                                         List<VariableDef.MethodParameter> methodParameters,
                                         Function<ExpressionDef, StatementDef> onBeanInstance,
                                         FactoryBuildMethodDefinition factoryBuildMethodDefinition, List<? extends ExpressionDef> values) {
        return withGetFactoryBean(methodParameters, factoryBuildMethodDefinition, factoryVar -> {
            List<ParameterElement> parameters = List.of(factoryBuildMethodDefinition.parameters);
            if (isConstructorIntercepted(factoryBuildMethodDefinition.factoryElement)) {
                // Intercepted: the interceptor receives this bean definition plus the factory instance.
                ClassTypeDef factoryInterceptor = createFactoryInterceptor(factoryBuildMethodDefinition);
                return onBeanInstance.apply(
                    invokeConstructorChain(
                        aThis,
                        methodParameters,
                        factoryInterceptor.instantiate(
                            List.of(
                                ClassTypeDef.of(BeanDefinition.class),
                                factoryVar.type()
                            ), aThis, factoryVar),
                        TypeDef.OBJECT.array().instantiate(values),
                        parameters)
                );
            }
            return onBeanInstance.apply(
                getBeanFromFactory(factoryBuildMethodDefinition, factoryVar, values)
            );
        });
    }
    /**
     * Generates the expression that obtains the bean from its factory element.
     * <p>
     * Factory methods are invoked reflectively when inaccessible, statically when
     * static, otherwise on the factory instance. Factory fields are read with the
     * same three-way distinction.
     *
     * @param factoryVar expression yielding the factory instance (ignored for static members)
     */
    private ExpressionDef getBeanFromFactory(FactoryBuildMethodDefinition factoryBuildMethodDefinition,
                                             ExpressionDef factoryVar,
                                             List<? extends ExpressionDef> values) {
        ClassTypeDef factoryType = ClassTypeDef.of(factoryBuildMethodDefinition.factoryClass);
        Element factoryElement = factoryBuildMethodDefinition.factoryElement;
        if (factoryElement instanceof MethodElement methodElement) {
            if (methodElement.isReflectionRequired()) {
                // Inaccessible method: go through ReflectionUtils with a null target for static methods.
                return TYPE_REFLECTION_UTILS.invokeStatic(
                    METHOD_INVOKE_INACCESSIBLE_METHOD,
                    methodElement.isStatic() ? ExpressionDef.nullValue() : factoryVar,
                    DispatchWriter.getTypeUtilsGetRequiredMethod(factoryType, methodElement),
                    TypeDef.OBJECT.array().instantiate(values)
                );
            }
            if (methodElement.isStatic()) {
                return factoryType.invokeStatic(methodElement, values);
            }
            return factoryVar.invoke(methodElement, values);
        }
        // Not a method: the factory element must be a field.
        FieldElement fieldElement = (FieldElement) factoryElement;
        if (fieldElement.isReflectionRequired()) {
            return TYPE_REFLECTION_UTILS.invokeStatic(
                GET_FIELD_WITH_REFLECTION_METHOD,
                ExpressionDef.constant(factoryType),
                ExpressionDef.constant(fieldElement.getName()),
                fieldElement.isStatic() ? ExpressionDef.nullValue() : factoryVar
            );
        }
        if (fieldElement.isStatic()) {
            return factoryType.getStaticField(factoryElement.getName(), beanTypeDef);
        }
        return factoryVar.field(factoryElement.getName(), beanTypeDef);
    }
    /**
     * Generates the direct constructor invocation for the bean.
     * <p>
     * For each argument a "has value" expression is prepared: {@code @Property}
     * parameters check whether the property exists, all others check non-null.
     * When {@code values} is null no such expressions are produced.
     */
    private ExpressionDef initializeBean(VariableDef.This aThis,
                                         List<VariableDef.MethodParameter> methodParameters,
                                         ConstructorBuildMethodDefinition constructorBuildMethodDefinition,
                                         List<? extends ExpressionDef> values) {
        MethodElement constructor = constructorBuildMethodDefinition.constructor;
        List<ExpressionDef> hasValuesExpressions;
        if (values == null) {
            hasValuesExpressions = null;
        } else {
            hasValuesExpressions = new ArrayList<>();
            // values and parameters are index-aligned.
            ParameterElement[] parameters = constructorBuildMethodDefinition.getParameters();
            for (int i = 0; i < values.size(); i++) {
                ExpressionDef value = values.get(i);
                ParameterElement parameter = parameters[i];
                if (parameter.hasAnnotation(Property.class)) {
                    hasValuesExpressions.add(
                        getContainsPropertyCheck(aThis, methodParameters, parameter)
                    );
                } else {
                    hasValuesExpressions.add(value.isNonNull());
                }
            }
        }
        return MethodGenUtils.invokeBeanConstructor(constructor, constructorBuildMethodDefinition.requiresReflection, true, values, hasValuesExpressions);
    }
private ExpressionDef getContainsPropertyCheck(VariableDef.This aThis,
List<VariableDef.MethodParameter> methodParameters,
ParameterElement parameterElement) {
String propertyName = parameterElement.stringValue(Property.class, "name").orElseThrow();
return aThis.invoke(
isMultiValueProperty(parameterElement.getType()) ? CONTAINS_PROPERTIES_VALUE_METHOD : CONTAINS_PROPERTY_VALUE_METHOD,
methodParameters.get(0),
methodParameters.get(1),
ExpressionDef.constant(propertyName)
);
}
    /**
     * Resolves the factory bean instance and passes it to the supplied statement
     * factory. Static factory elements need no instance, so {@code fn} receives a
     * null expression. Otherwise the factory is looked up from the bean locator
     * (with its qualifier, if any), cast to the factory type, and the resolution
     * context is marked before continuing.
     *
     * @param fn produces the statement that consumes the factory-instance expression
     */
    private StatementDef withGetFactoryBean(List<VariableDef.MethodParameter> parameters,
                                            FactoryBuildMethodDefinition factoryBuildMethodDefinition,
                                            Function<ExpressionDef, StatementDef> fn) {
        if (factoryBuildMethodDefinition.factoryElement.isStatic()) {
            // Static factory members don't need a factory instance.
            return fn.apply(ExpressionDef.nullValue());
        }
        // for Factory beans first we need to look up the factory bean
        // before invoking the method to instantiate
        // the below code looks up the factory bean.
        TypeDef factoryTypeDef = TypeDef.erasure(factoryBuildMethodDefinition.factoryClass);
        ExpressionDef argumentExpression = ClassTypeDef.of(Argument.class).invokeStatic(ArgumentExpUtils.METHOD_CREATE_ARGUMENT_SIMPLE,
            ExpressionDef.constant(factoryTypeDef),
            ExpressionDef.constant("factory")
        );
        return StatementDef.multi(
            parameters.get(0)
                .invoke(BEAN_LOCATOR_METHOD_GET_BEAN,
                    // first argument is the bean type
                    ExpressionDef.constant(factoryTypeDef),
                    // second argument is the qualifier for the factory if any
                    getQualifier(factoryBuildMethodDefinition.factoryClass, argumentExpression)
                ).cast(factoryTypeDef).newLocal("factoryBean", factoryBeanVar -> StatementDef.multi(
                    parameters.get(0).invoke(METHOD_BEAN_RESOLUTION_CONTEXT_MARK_FACTORY),
                    fn.apply(factoryBeanVar)
                ))
        );
    }
    /**
     * Generates a synthetic inner class implementing the intercepted-constructor
     * hook: its {@code instantiate} override unpacks the {@code Object[]} of
     * resolved arguments, casts each to the constructor's parameter type and
     * invokes the real bean constructor. The inner class is registered on the
     * enclosing class builder and its type returned.
     */
    private ClassTypeDef createConstructorInterceptor(ConstructorBuildMethodDefinition constructorBuildMethodDefinition) {
        String interceptedConstructorWriterName = "ConstructorInterceptor";
        ClassDef.ClassDefBuilder innerClassBuilder = ClassDef.builder(interceptedConstructorWriterName)
            .synthetic()
            .addModifiers(Modifier.FINAL)
            .superclass(ClassTypeDef.of(AbstractBeanDefinitionBeanConstructor.class))
            .addAnnotation(Generated.class);
        innerClassBuilder.addMethod(
            MethodDef.constructor()
                .addModifiers(Modifier.PUBLIC)
                .addParameters(CONSTRUCTOR_ABSTRACT_CONSTRUCTOR_IP.getParameterTypes())
                .build((aThis, methodParameters)
                    -> aThis.superRef().invokeConstructor(CONSTRUCTOR_ABSTRACT_CONSTRUCTOR_IP, methodParameters.get(0)))
        );
        innerClassBuilder.addMethod(
            MethodDef.override(METHOD_BEAN_CONSTRUCTOR_INSTANTIATE)
                .build((aThis, methodParameters) -> {
                    // Unpack Object[] args, casting each element to its declared parameter type.
                    ParameterElement[] parameters = constructorBuildMethodDefinition.constructor.getSuspendParameters();
                    List<ExpressionDef> values = IntStream.range(0, parameters.length)
                        .<ExpressionDef>mapToObj(index -> methodParameters.get(0).arrayElement(index).cast(TypeDef.erasure(parameters[index].getType())))
                        .toList();
                    return MethodGenUtils.invokeBeanConstructor(ClassElement.of(beanDefinitionName), constructorBuildMethodDefinition.constructor, true, values)
                        .returning();
                })
        );
        classDefBuilder.addInnerType(innerClassBuilder.build());
        return ClassTypeDef.of(beanDefinitionName + "$" + interceptedConstructorWriterName);
    }
    /**
     * Generates a synthetic inner class implementing the intercepted-factory
     * hook. The factory instance is captured in a {@code $factory} field via the
     * constructor; the {@code instantiate} override unpacks the {@code Object[]}
     * of resolved arguments and invokes the factory method/field on it.
     * The inner class is registered on the enclosing class builder and its type
     * returned.
     */
    private ClassTypeDef createFactoryInterceptor(FactoryBuildMethodDefinition factoryBuildMethodDefinition) {
        String interceptedConstructorWriterName = "ConstructorInterceptor";
        ClassDef.ClassDefBuilder innerClassBuilder = ClassDef.builder(interceptedConstructorWriterName)
            .synthetic()
            .addModifiers(Modifier.FINAL)
            .superclass(ClassTypeDef.of(AbstractBeanDefinitionBeanConstructor.class))
            .addAnnotation(Generated.class);
        // for factory methods we have to store the factory instance in a field and modify the constructor pass the factory instance
        ClassTypeDef factoryType = ClassTypeDef.of(factoryBuildMethodDefinition.factoryClass);
        FieldDef factoryField = FieldDef.builder("$factory", factoryType)
            .addModifiers(Modifier.PRIVATE, Modifier.FINAL)
            .build();
        innerClassBuilder.addField(factoryField);
        innerClassBuilder.addMethod(
            MethodDef.constructor()
                .addModifiers(Modifier.PROTECTED)
                .addParameters(CONSTRUCTOR_ABSTRACT_CONSTRUCTOR_IP.getParameterTypes())
                .addParameters(factoryType)
                .build((aThis, methodParameters)
                    -> StatementDef.multi(
                    aThis.superRef().invokeConstructor(CONSTRUCTOR_ABSTRACT_CONSTRUCTOR_IP, methodParameters.get(0)),
                    aThis.field(factoryField).put(methodParameters.get(1))
                ))
        );
        // now we need to implement the invoke method to execute the actual instantiation
        innerClassBuilder.addMethod(
            MethodDef.override(METHOD_BEAN_CONSTRUCTOR_INSTANTIATE)
                .build((aThis, methodParameters) -> {
                    // Unpack Object[] args, casting each element to its declared parameter type.
                    List<ExpressionDef> values = IntStream.range(0, factoryBuildMethodDefinition.parameters.length)
                        .<ExpressionDef>mapToObj(index -> methodParameters.get(0)
                            .arrayElement(index)
                            .cast(TypeDef.erasure(factoryBuildMethodDefinition.parameters[index].getType())))
                        .toList();
                    return getBeanFromFactory(factoryBuildMethodDefinition, aThis.field(factoryField), values).returning();
                })
        );
        classDefBuilder.addInnerType(innerClassBuilder.build());
        return ClassTypeDef.of(beanDefinitionName + "$" + interceptedConstructorWriterName);
    }
private StaticBlock getStaticInitializer() {
List<StatementDef> statements = new ArrayList<>();
FieldDef annotationMetadataField = AnnotationMetadataGenUtils.createAnnotationMetadataFieldAndInitialize(annotationMetadata, loadClassValueExpressionFn);
classDefBuilder.addField(annotationMetadataField);
FieldDef failedInitializationField = FieldDef.builder(FIELD_FAILED_INITIALIZATION, Throwable.class)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(failedInitializationField);
List<StatementDef> initStatements = new ArrayList<>();
List<StatementDef> failStatements = new ArrayList<>();
FieldDef constructorRefField = FieldDef.builder(FIELD_CONSTRUCTOR, AbstractInitializableBeanDefinition.MethodOrFieldReference.class)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(constructorRefField);
initStatements.add(beanDefinitionTypeDef.getStaticField(constructorRefField).put(getConstructorRef()));
FieldDef injectionMethodsField = null;
FieldDef injectionFieldsField = null;
FieldDef annotationInjectionsFieldType = null;
FieldDef typeArgumentsField = null;
FieldDef executableMethodsField = null;
boolean hasMethodInjection = !superBeanDefinition && !allMethodVisits.isEmpty();
if (hasMethodInjection) {
TypeDef.Array methodReferenceArray = ClassTypeDef.of(AbstractInitializableBeanDefinition.MethodReference.class).array();
injectionMethodsField = FieldDef.builder(FIELD_INJECTION_METHODS, methodReferenceArray)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(injectionMethodsField);
initStatements.add(beanDefinitionTypeDef.getStaticField(injectionMethodsField)
.put(methodReferenceArray.instantiate(allMethodVisits.stream()
.map(md -> getNewMethodReference(md.beanType, md.methodElement, md.annotationMetadata, md.postConstruct, md.preDestroy))
.toList())));
failStatements.add(beanDefinitionTypeDef.getStaticField(injectionMethodsField).put(ExpressionDef.nullValue()));
}
boolean hasFieldInjection = !fieldInjectionPoints.isEmpty();
if (hasFieldInjection) {
TypeDef.Array fieldReferenceArray = ClassTypeDef.of(AbstractInitializableBeanDefinition.FieldReference.class).array();
injectionFieldsField = FieldDef.builder(FIELD_INJECTION_FIELDS, fieldReferenceArray)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(injectionFieldsField);
initStatements.add(beanDefinitionTypeDef.getStaticField(injectionFieldsField)
.put(fieldReferenceArray.instantiate(fieldInjectionPoints.stream()
.map(fd -> getNewFieldReference(fd.beanType, fd.fieldElement))
.toList())));
failStatements.add(beanDefinitionTypeDef.getStaticField(injectionFieldsField).put(ExpressionDef.nullValue()));
}
boolean hasAnnotationInjection = !annotationInjectionPoints.isEmpty();
if (hasAnnotationInjection) {
TypeDef.Array annotationInjectionsFieldArray = ClassTypeDef.of(AbstractInitializableBeanDefinition.AnnotationReference.class).array();
annotationInjectionsFieldType = FieldDef.builder(FIELD_ANNOTATION_INJECTIONS, annotationInjectionsFieldArray)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(annotationInjectionsFieldType);
initStatements.add(beanDefinitionTypeDef.getStaticField(annotationInjectionsFieldType)
.put(annotationInjectionsFieldArray.instantiate(annotationInjectionPoints.keySet().stream()
.map(this::getNewAnnotationReference)
.toList())));
failStatements.add(beanDefinitionTypeDef.getStaticField(annotationInjectionsFieldType).put(ExpressionDef.nullValue()));
}
boolean hasTypeArguments = !superBeanDefinition && hasTypeArguments();
if (hasTypeArguments) {
typeArgumentsField = FieldDef.builder(FIELD_TYPE_ARGUMENTS, Map.class)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(typeArgumentsField);
initStatements.add(beanDefinitionTypeDef.getStaticField(typeArgumentsField)
.put(GenUtils.stringMapOf(
typeArguments, true, null, el -> ArgumentExpUtils.pushTypeArgumentElements(
annotationMetadata,
beanDefinitionTypeDef,
ClassElement.of(beanDefinitionName),
el,
loadClassValueExpressionFn
))
));
failStatements.add(beanDefinitionTypeDef.getStaticField(typeArgumentsField).put(ExpressionDef.nullValue()));
}
boolean hasExecutableMethods = executableMethodsDefinitionWriter != null;
if (hasExecutableMethods) {
ClassTypeDef execType = executableMethodsDefinitionWriter.getClassTypeDef();
executableMethodsField = FieldDef.builder(FIELD_EXECUTABLE_METHODS, execType)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(executableMethodsField);
initStatements.add(beanDefinitionTypeDef.getStaticField(executableMethodsField).put(execType.instantiate()));
failStatements.add(beanDefinitionTypeDef.getStaticField(executableMethodsField).put(ExpressionDef.nullValue()));
}
ClassTypeDef precalculatedInfoType = ClassTypeDef.of(AbstractInitializableBeanDefinition.PrecalculatedInfo.class);
FieldDef precalculatedInfoField = FieldDef.builder(FIELD_PRECALCULATED_INFO, precalculatedInfoType)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(precalculatedInfoField);
String scope = annotationMetadata.getAnnotationNameByStereotype(AnnotationUtil.SCOPE).orElse(null);
statements.add(
beanDefinitionTypeDef.getStaticField(precalculatedInfoField)
.put(
precalculatedInfoType.instantiate(
PRECALCULATED_INFO_CONSTRUCTOR,
// 1: `Optional` scope
scope == null ? TYPE_OPTIONAL.invokeStatic(METHOD_OPTIONAL_EMPTY)
: TYPE_OPTIONAL.invokeStatic(METHOD_OPTIONAL_OF, ExpressionDef.constant(scope)),
// 2: `boolean` isAbstract
ExpressionDef.constant(isAbstract),
// 3: `boolean` isIterable
ExpressionDef.constant(isIterable(annotationMetadata)),
// 4: `boolean` isSingleton
ExpressionDef.constant(isSingleton(scope)),
// 5: `boolean` isPrimary
ExpressionDef.constant(annotationMetadata.hasDeclaredStereotype(Primary.class)),
// 6: `boolean` isConfigurationProperties
ExpressionDef.constant(isConfigurationProperties),
// 7: isContainerType
ExpressionDef.constant(isContainerType()),
// 8: preprocessMethods
ExpressionDef.constant(preprocessMethods),
// 9: hasEvaluatedExpressions
ExpressionDef.constant(evaluatedExpressionProcessor.hasEvaluatedExpressions())
)
)
);
AnnotationMetadata declaredAnnotationMetadata;
if (beanProducingElement instanceof MethodElement methodElement) {
declaredAnnotationMetadata = methodElement.getMethodAnnotationMetadata();
} else {
declaredAnnotationMetadata = annotationMetadata;
}
List<AnnotationValue<Indexed>> indexes = declaredAnnotationMetadata.getAnnotationValuesByType(Indexed.class);
if (!indexes.isEmpty()) {
TypeDef.Array arrayOfClasses = TypeDef.Primitive.CLASS.array();
FieldDef indexesField = FieldDef.builder("$INDEXES")
.ofType(arrayOfClasses)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
initStatements.add(
beanDefinitionTypeDef.getStaticField(indexesField).put(
arrayOfClasses.instantiate(
indexes.stream().map(av -> asClassExpression(av.stringValue().orElseThrow())).toArray(ExpressionDef[]::new)
)
)
);
classDefBuilder.addField(indexesField);
classDefBuilder.addMethod(
MethodDef.override(GET_INDEXES_METHOD).build((aThis, methodParameters) -> aThis.type().getStaticField(indexesField).returning())
);
failStatements.add(beanDefinitionTypeDef.getStaticField(indexesField).put(arrayOfClasses.instantiate()));
}
statements.add(
StatementDef.doTry(
StatementDef.multi(
initStatements
)
).doCatch(Throwable.class, exceptionVar -> StatementDef.multi(
beanDefinitionTypeDef.getStaticField(failedInitializationField).put(exceptionVar),
StatementDef.multi(failStatements)
))
);
statements.add(addInnerConfigurationMethod());
statements.add(addGetExposedTypes());
statements.add(addReplacesDefinition());
FieldDef preStartConditionsField = null;
FieldDef postStartConditionsField = null;
List<AnnotationValue<Requires>> requirements = annotationMetadata.getAnnotationValuesByType(Requires.class);
if (!requirements.isEmpty()) {
TypeDef.Array conditionsArrayType = ClassTypeDef.of(Condition.class).array();
preStartConditionsField = FieldDef.builder(FIELD_PRE_START_CONDITIONS, conditionsArrayType)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
postStartConditionsField = FieldDef.builder(FIELD_POST_START_CONDITIONS, conditionsArrayType)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
classDefBuilder.addField(preStartConditionsField);
classDefBuilder.addField(postStartConditionsField);
statements.add(addConditions(requirements, preStartConditionsField, postStartConditionsField));
}
// Defaults can be contributed by other static initializers, it should be at the end
AnnotationMetadataGenUtils.addAnnotationDefaults(statements, annotationMetadata, loadClassValueExpressionFn);
return new StaticBlock(
StatementDef.multi(statements),
annotationMetadataField,
failedInitializationField,
constructorRefField,
injectionMethodsField,
injectionFieldsField,
annotationInjectionsFieldType,
typeArgumentsField,
executableMethodsField,
precalculatedInfoField,
preStartConditionsField,
postStartConditionsField
);
}
/**
 * Builds the expression referencing the bean's instantiation point.
 * A method (constructor or factory method) produces a method reference,
 * a factory field produces a field reference; anything else is a bug.
 *
 * @return the reference expression for the bean "constructor"
 * @throws IllegalArgumentException if the constructor element kind is unsupported
 */
private ExpressionDef getConstructorRef() {
    if (constructor instanceof MethodElement ctorMethod) {
        // Apply implicit @Named qualifiers to the parameters before emitting the reference.
        applyDefaultNamedToParameters(Arrays.asList(ctorMethod.getParameters()));
        return getNewMethodReference(ctorMethod.getDeclaringType(), ctorMethod, ctorMethod.getAnnotationMetadata(), false, false);
    }
    if (constructor instanceof FieldElement ctorField) {
        return getNewFieldReference(ctorField.getDeclaringType(), ctorField);
    }
    throw new IllegalArgumentException("Unexpected constructor: " + constructor);
}
/**
 * Emits the static initialization of the pre-start and post-start {@link Condition}
 * arrays derived from the bean's {@code @Requires} annotations.
 * <p>
 * Requirements whose members contain evaluated expressions cannot be decided at
 * build time; they are deferred into a single {@code MatchesDynamicCondition}
 * appended to the post-start conditions.
 *
 * @param requirements the {@code @Requires} annotation values (never processed when empty)
 * @param preStartConditionsField the static field holding conditions checked before context start
 * @param postStartConditionsField the static field holding conditions checked after context start
 * @return the statements populating both static fields
 */
private StatementDef addConditions(List<AnnotationValue<Requires>> requirements, FieldDef preStartConditionsField, FieldDef postStartConditionsField) {
List<Condition> preConditions = new ArrayList<>();
List<Condition> postConditions = new ArrayList<>();
if (requirements.isEmpty()) {
return StatementDef.multi();
}
List<AnnotationValue<Requires>> dynamicRequirements = new ArrayList<>();
for (AnnotationValue<Requires> requirement : requirements) {
// Requirements referencing evaluated expressions are resolved at runtime, not here.
if (requirement.getValues().values().stream().anyMatch(value -> value instanceof EvaluatedExpressionReference)) {
dynamicRequirements.add(requirement);
continue;
}
MatchesConditionUtils.createConditions(requirement, preConditions, postConditions);
}
if (!dynamicRequirements.isEmpty()) {
MutableAnnotationMetadata annotationMetadata = new MutableAnnotationMetadata();
// NOTE(review): ALL requirements (not only the dynamic ones) are copied into the
// metadata handed to MatchesDynamicCondition — confirm this duplication is intentional.
for (AnnotationValue<Requires> requirement : requirements) {
annotationMetadata.addRepeatable(Requirements.class.getName(), requirement);
}
postConditions.add(new MatchesDynamicCondition(annotationMetadata));
}
// Maps each build-time Condition record onto the expression that re-instantiates it
// inside the generated bean definition class.
Function<Condition, ExpressionDef> writer = new Function<>() {
@Override
public ExpressionDef apply(Condition condition) {
if (condition instanceof MatchesPropertyCondition matchesPropertyCondition) {
return newRecord(
matchesPropertyCondition.getClass(),
ExpressionDef.constant(matchesPropertyCondition.property()),
ExpressionDef.constant(matchesPropertyCondition.value()),
ExpressionDef.constant(matchesPropertyCondition.defaultValue()),
ExpressionDef.constant(matchesPropertyCondition.condition())
);
} else if (condition instanceof MatchesAbsenceOfBeansCondition matchesAbsenceOfBeansCondition) {
return newRecord(
matchesAbsenceOfBeansCondition.getClass(),
getAnnotationClassValues(matchesAbsenceOfBeansCondition.missingBeans())
);
} else if (condition instanceof MatchesPresenceOfBeansCondition matchesPresenceOfBeansCondition) {
return newRecord(
matchesPresenceOfBeansCondition.getClass(),
getAnnotationClassValues(matchesPresenceOfBeansCondition.beans())
);
} else if (condition instanceof MatchesAbsenceOfClassesCondition matchesAbsenceOfClassesCondition) {
return newRecord(
matchesAbsenceOfClassesCondition.getClass(),
getAnnotationClassValues(matchesAbsenceOfClassesCondition.classes())
);
} else if (condition instanceof MatchesPresenceOfClassesCondition matchesPresenceOfClassesCondition) {
return newRecord(
matchesPresenceOfClassesCondition.getClass(),
getAnnotationClassValues(matchesPresenceOfClassesCondition.classes())
);
} else if (condition instanceof MatchesPresenceOfEntitiesCondition matchesPresenceOfEntitiesCondition) {
return newRecord(
matchesPresenceOfEntitiesCondition.getClass(),
getAnnotationClassValues(matchesPresenceOfEntitiesCondition.classes())
);
} else if (condition instanceof MatchesAbsenceOfClassNamesCondition matchesAbsenceOfClassNamesCondition) {
return newRecord(
matchesAbsenceOfClassNamesCondition.getClass(),
ExpressionDef.constant(matchesAbsenceOfClassNamesCondition.classes())
);
} else if (condition instanceof MatchesConfigurationCondition matchesConfigurationCondition) {
return newRecord(
matchesConfigurationCondition.getClass(),
ExpressionDef.constant(matchesConfigurationCondition.configurationName()),
ExpressionDef.constant(matchesConfigurationCondition.minimumVersion())
);
} else if (condition instanceof MatchesCurrentNotOsCondition matchesCurrentNotOsCondition) {
// OS family sets are rebuilt via CollectionUtils.enumSet(...) at runtime.
return newRecord(
matchesCurrentNotOsCondition.getClass(),
ClassTypeDef.of(CollectionUtils.class)
.invokeStatic(
COLLECTION_UTILS_ENUM_SET_METHOD,
ClassTypeDef.of(Requires.Family.class).array().instantiate(
matchesCurrentNotOsCondition.notOs().stream().map(ExpressionDef::constant).toList()
)
)
);
} else if (condition instanceof MatchesCurrentOsCondition currentOsCondition) {
return newRecord(
currentOsCondition.getClass(),
ClassTypeDef.of(CollectionUtils.class)
.invokeStatic(
COLLECTION_UTILS_ENUM_SET_METHOD,
ClassTypeDef.of(Requires.Family.class).array().instantiate(
currentOsCondition.os().stream().map(ExpressionDef::constant).toList()
)
)
);
} else if (condition instanceof MatchesCustomCondition matchesCustomCondition) {
return newRecord(
matchesCustomCondition.getClass(),
getAnnotationClassValue(matchesCustomCondition.customConditionClass())
);
} else if (condition instanceof MatchesEnvironmentCondition matchesEnvironmentCondition) {
return newRecord(
matchesEnvironmentCondition.getClass(),
ExpressionDef.constant(matchesEnvironmentCondition.env())
);
} else if (condition instanceof MatchesMissingPropertyCondition matchesMissingPropertyCondition) {
return newRecord(
matchesMissingPropertyCondition.getClass(),
ExpressionDef.constant(matchesMissingPropertyCondition.property())
);
} else if (condition instanceof MatchesNotEnvironmentCondition matchesNotEnvironmentCondition) {
return newRecord(
matchesNotEnvironmentCondition.getClass(),
ExpressionDef.constant(matchesNotEnvironmentCondition.env())
);
} else if (condition instanceof MatchesPresenceOfResourcesCondition matchesPresenceOfResourcesCondition) {
return newRecord(
matchesPresenceOfResourcesCondition.getClass(),
ExpressionDef.constant(matchesPresenceOfResourcesCondition.resourcePaths())
);
} else if (condition instanceof MatchesSdkCondition matchesSdkCondition) {
return newRecord(
matchesSdkCondition.getClass(),
ExpressionDef.constant(matchesSdkCondition.sdk()),
ExpressionDef.constant(matchesSdkCondition.version())
);
} else if (condition instanceof MatchesDynamicCondition matchesDynamicCondition) {
return newRecord(
matchesDynamicCondition.getClass(),
getAnnotationMetadataExpression(matchesDynamicCondition.annotationMetadata())
);
} else {
// New condition record types must be added to this chain explicitly.
throw new IllegalStateException("Unsupported condition type: " + condition.getClass().getName());
}
}
// Emits an AnnotationClassValue[] expression for the given class values.
private ExpressionDef getAnnotationClassValues(AnnotationClassValue<?>[] classValues) {
return ClassTypeDef.of(AnnotationClassValue.class)
.array()
.instantiate(Arrays.stream(classValues).map(this::getAnnotationClassValue).toList());
}
private ExpressionDef getAnnotationClassValue(AnnotationClassValue<?> annotationClassValue) {
return loadClassValueExpressionFn.apply(annotationClassValue.getName());
}
// Instantiates a condition record via its single canonical constructor.
private ExpressionDef newRecord(Class<?> classType, ExpressionDef... values) {
return ClassTypeDef.of(classType).instantiate(classType.getConstructors()[0], values);
}
};
TypeDef.Array conditionsArrayType = ClassTypeDef.of(Condition.class).array();
return StatementDef.multi(
beanDefinitionTypeDef.getStaticField(preStartConditionsField).put(
conditionsArrayType.instantiate(preConditions.stream().map(writer).toList())
),
beanDefinitionTypeDef.getStaticField(postStartConditionsField).put(
conditionsArrayType.instantiate(postConditions.stream().map(writer).toList())
)
);
}
/**
 * Runs every registered {@link BeanElementVisitor} against this bean definition.
 * A visitor returning {@code null} disables the bean; a visitor throwing is
 * reported as a compilation failure. Either outcome stops further processing.
 */
private void processAllBeanElementVisitors() {
    for (BeanElementVisitor<?> visitor : VISITORS) {
        if (!visitor.isEnabled() || !visitor.supports(this)) {
            continue;
        }
        try {
            // A null result from the visitor disables this bean definition entirely.
            this.disabled = visitor.visitBeanElement(this, visitorContext) == null;
        } catch (Exception e) {
            visitorContext.fail(
                "Error occurred visiting BeanElementVisitor of type [" + visitor.getClass().getName() + "]: " + e.getMessage(),
                this
            );
            break;
        }
        if (disabled) {
            break;
        }
    }
}
/**
 * For configuration-properties beans with inner classes, emits the static
 * {@code $INNER_CLASSES} set plus an {@code isInnerConfiguration} override that
 * checks membership in it.
 *
 * @return the static-field initialization statement, or an empty statement when not applicable
 */
private StatementDef addInnerConfigurationMethod() {
    if (!isConfigurationProperties || beanTypeInnerClasses.isEmpty()) {
        return StatementDef.multi();
    }
    FieldDef innerTypesField = FieldDef.builder(FIELD_INNER_CLASSES, Set.class)
        .addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
        .build();
    classDefBuilder.addField(innerTypesField);
    // isInnerConfiguration(type) == INNER_CLASSES.contains(type)
    classDefBuilder.addMethod(
        MethodDef.override(IS_INNER_CONFIGURATION_METHOD)
            .build((aThis, params) -> aThis.type().getStaticField(innerTypesField)
                .invoke(CONTAINS_METHOD, params.get(0))
                .returning())
    );
    return beanDefinitionTypeDef.getStaticField(innerTypesField).put(
        getClassesAsSetExpression(beanTypeInnerClasses.toArray(EMPTY_STRING_ARRAY))
    );
}
/**
 * Emits the static {@code $EXPOSED_TYPES} set and the {@code getExposedTypes()}
 * override, plus (when no type arguments and not a container type) an
 * {@code isCandidateBean} fast-path override.
 * <p>
 * Exposed types come from, in priority order: explicit {@code @Bean(typed=...)},
 * the intercepted proxy type, an explicit exposes list, container element types,
 * or the bean type hierarchy itself.
 *
 * @return the statements initializing the exposed-types field
 */
private StatementDef addGetExposedTypes() {
AnnotationMetadata producingAnnotationMetadata;
// For factory methods the @Bean annotation lives on the method, not the class.
if (beanProducingElement instanceof MethodElement methodElement) {
producingAnnotationMetadata = methodElement.getMethodAnnotationMetadata();
} else {
producingAnnotationMetadata = annotationMetadata;
}
String[] exposedTypes = producingAnnotationMetadata.stringValues(Bean.class.getName(), "typed");
Set<String> exposedTypeNames;
if (exposedTypes.length != 0) {
// User-declared exposed types win over any computed hierarchy.
exposedTypeNames = Set.of(exposedTypes);
} else {
exposedTypeNames = new LinkedHashSet<>();
if (interceptedType != null) {
collectExposedTypes(exposedTypeNames, visitorContext.getClassElement(interceptedType).orElseThrow(() -> new IllegalStateException("Intercepted type not found: " + interceptedType)));
exposedTypeNames.add(beanProducingElement.getName()); // Allow finding the proxy by it's name
} else if (exposes != null) {
exposes.forEach(name -> exposedTypeNames.add(name.getName()));
} else if (isContainerType()) {
// Container beans expose both the element type and the container type itself.
if (beanTypeElement.isArray()) {
collectExposedTypes(exposedTypeNames, beanTypeElement.fromArray());
} else {
collectExposedTypes(exposedTypeNames, beanTypeElement.getFirstTypeArgument()
.orElseThrow(() -> new IllegalStateException("No type argument found for array type: " + beanTypeElement.getType())));
}
collectExposedTypes(exposedTypeNames, beanTypeElement);
} else {
collectExposedTypes(exposedTypeNames, beanTypeElement);
}
}
if (exposedTypeNames.isEmpty()) {
// This should never happen
return StatementDef.multi();
}
FieldDef exposedTypesField = FieldDef.builder(FIELD_EXPOSED_TYPES, TypeDef.parameterized(Set.class, TypeDef.Primitive.CLASS))
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
List<StatementDef> statements = new ArrayList<>();
classDefBuilder.addField(exposedTypesField);
VariableDef.StaticField staticFieldExposes = beanDefinitionTypeDef.getStaticField(exposedTypesField);
// Fall back to an empty set if any referenced class fails to load at runtime.
statements.add(StatementDef.doTry(
staticFieldExposes.put(getClassesAsSetExpression(exposedTypeNames))
).doCatch(Throwable.class,
exceptionVar -> staticFieldExposes.put(GenUtils.setOf(List.of())))
);
classDefBuilder.addMethod(
MethodDef.override(GET_EXPOSED_TYPES_METHOD)
.build((aThis, methodParameters) ->
aThis.type().getStaticField(exposedTypesField).returning())
);
if (!hasTypeArguments() && !isContainerType()) {
classDefBuilder.addMethod(
MethodDef.override(IS_CANDIDATE_BEAN_METHOD)
.build((aThis, methodParameters) -> {
if (exposedTypes.length != 0) { // User-defined exposed types
if (exposedTypeNames.size() == 1) {
// Single exposed type: identity comparison is sufficient.
return methodParameters.get(0).newLocal("type", variableDef ->
variableDef.isNonNull()
.and(
ArgumentExpUtils.getTypeExp(variableDef).equalsReferentially(
ExpressionDef.constant(TypeDef.of(exposedTypeNames.iterator().next()))
)
)
.returning()
);
} else {
return methodParameters.get(0).newLocal("type", variableDef ->
variableDef.isNonNull().and(
staticFieldExposes.invoke(CONTAINS_METHOD, ArgumentExpUtils.getTypeExp(variableDef)).isTrue()
).returning()
);
}
} else {
// Computed hierarchy: fall back to an isAssignable check against the bean type.
return ArgumentExpUtils.getTypeExp(methodParameters.get(0))
.invoke(IS_ASSIGNABLE_METHOD, ExpressionDef.constant(beanTypeDef))
.returning();
}
}
)
);
}
return StatementDef.multi(statements);
}
/**
 * Emits the static {@code $REPLACES} field and {@code getReplacesDefinition()}
 * override derived from an {@code @Replaces} annotation, or a null-returning
 * override when the annotation is absent.
 *
 * @return the statements initializing the replaces field (empty when no {@code @Replaces})
 * @throws ProcessingException if both {@code named} and {@code qualifier} members are set
 */
private StatementDef addReplacesDefinition() {
AnnotationMetadata producingAnnotationMetadata = annotationMetadata;
AnnotationValue<Replaces> replacesAnnotationValue = producingAnnotationMetadata.getAnnotation(Replaces.class);
if (replacesAnnotationValue == null) {
// No @Replaces: generate a trivial override returning null.
classDefBuilder.addMethod(
MethodDef.override(METHOD_GET_REPLACES_DEFINITION)
.build((aThis, methodParameters) ->
ExpressionDef.nullValue().returning())
);
return StatementDef.multi();
}
TypeDef replacesType = TypeDef.of(ReplacesDefinition.class);
FieldDef replacesField = FieldDef.builder(FIELD_REPLACES, replacesType)
.addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.build();
List<StatementDef> statements = new ArrayList<>();
classDefBuilder.addField(replacesField);
AnnotationClassValue<?> replacesBean = replacesAnnotationValue.annotationClassValue(Replaces.MEMBER_BEAN).orElse(null);
String named = replacesAnnotationValue.stringValue(Replaces.MEMBER_NAMED).orElse(null);
AnnotationClassValue<?> qualifier = replacesAnnotationValue.annotationClassValue(Replaces.MEMBER_QUALIFIER).orElse(null);
AnnotationClassValue<?> replacesFactory = replacesAnnotationValue.annotationClassValue(Replaces.MEMBER_FACTORY).orElse(null);
// "named" and "qualifier" are mutually exclusive ways to select the replaced bean.
if (named != null && qualifier != null) {
throw new ProcessingException(beanProducingElement, "Both \"named\" and \"qualifier\" should not be present");
}
ExpressionDef qualifierExpression;
if (named != null) {
qualifierExpression = TYPE_QUALIFIERS.invokeStatic(METHOD_QUALIFIER_BY_NAME, ExpressionDef.constant(named));
} else if (qualifier != null) {
qualifierExpression = TYPE_QUALIFIERS.invokeStatic(METHOD_QUALIFIER_BY_STEREOTYPE, ExpressionDef.constant(TypeDef.of(qualifier.getName())));
} else {
qualifierExpression = ExpressionDef.nullValue();
}
VariableDef.StaticField staticFieldReplaces = beanDefinitionTypeDef.getStaticField(replacesField);
// Fall back to null if constructing the definition fails at class-init time.
statements.add(StatementDef.doTry(
staticFieldReplaces.put(
ClassTypeDef.of(DefaultReplacesDefinition.class)
.instantiate(CONSTRUCTOR_DEFAULT_REPLACES_DEFINITION,
ExpressionDef.constant(beanTypeDef),
replacesBean == null ? ExpressionDef.nullValue() : ExpressionDef.constant(TypeDef.of(replacesBean.getName())),
qualifierExpression,
replacesFactory == null ? ExpressionDef.nullValue() : ExpressionDef.constant(TypeDef.of(replacesFactory.getName()))
)
)
).doCatch(Throwable.class,
exceptionVar -> staticFieldReplaces.put(ExpressionDef.nullValue()))
);
classDefBuilder.addMethod(
MethodDef.override(METHOD_GET_REPLACES_DEFINITION)
.build((aThis, methodParameters) ->
aThis.type().getStaticField(replacesField).returning())
);
return StatementDef.multi(statements);
}
/**
 * Recursively collects the given element's name plus its full supertype and
 * interface hierarchy into {@code exposedTypeNames}. Already-seen names and
 * ignored marker interfaces terminate the recursion.
 *
 * @param exposedTypeNames the accumulator set (insertion-ordered by caller)
 * @param element the class element to walk
 */
private void collectExposedTypes(Set<String> exposedTypeNames, ClassElement element) {
    String className = getClassName(element);
    // add() runs first: even ignored interfaces are recorded, but not descended into.
    if (!exposedTypeNames.add(className) || IGNORED_EXPOSED_INTERFACES.contains(className)) {
        return;
    }
    Optional<ClassElement> superType = element.getSuperType();
    if (superType.isPresent()) {
        collectExposedTypes(exposedTypeNames, superType.get());
    }
    for (ClassElement implemented : element.getInterfaces()) {
        collectExposedTypes(exposedTypeNames, implemented);
    }
}
/**
 * Returns the element's class name, appending one {@code []} suffix per array
 * dimension (e.g. {@code java.lang.String[][]}).
 *
 * @param element the class element, possibly an (nested) array type
 * @return the name with array suffixes
 */
private String getClassName(ClassElement element) {
    int dimensions = 0;
    ClassElement component = element;
    while (component.isArray()) {
        dimensions++;
        component = component.fromArray();
    }
    return component.getName() + "[]".repeat(dimensions);
}
/**
 * Builds a {@code getOrder()} override when the bean declares a non-default order.
 *
 * @return the override method, or {@code null} when the order is 0 (default)
 */
@Nullable
private MethodDef getGetOrder() {
    int order = OrderUtil.getOrder(annotationMetadata);
    if (order == 0) {
        // Default order — the base implementation is sufficient; emit nothing.
        return null;
    }
    return MethodDef.override(GET_ORDER_METHOD)
        .build((aThis, params) -> TypeDef.Primitive.INT.constant(order).returning());
}
/**
 * Builds an expression producing a {@code Set<Class>} for the given class names:
 * {@code Collections.singleton(...)} for one name, {@code new HashSet<>(Arrays.asList(...))}
 * for several, and an empty set for none.
 *
 * @param classes the class names; may be empty
 * @return the set-construction expression
 */
private ExpressionDef getClassesAsSetExpression(String[] classes) {
    if (classes.length == 0) {
        // Fix: previously an empty array fell through to classes[0] and threw
        // ArrayIndexOutOfBoundsException; emit an empty set instead (same idiom
        // as the fallback in addGetExposedTypes).
        return GenUtils.setOf(List.of());
    }
    if (classes.length == 1) {
        return ClassTypeDef.of(Collections.class)
            .invokeStatic(
                COLLECTIONS_SINGLETON_METHOD,
                asClassExpression(classes[0])
            );
    }
    return ClassTypeDef.of(HashSet.class)
        .instantiate(
            HASH_SET_COLLECTION_CONSTRUCTOR,
            ClassTypeDef.of(Arrays.class)
                .invokeStatic(
                    ARRAYS_AS_LIST_METHOD,
                    getArrayOfClasses(classes)
                )
        );
}
/**
 * Builds an expression producing a {@code Set<Class>} for the given class names.
 *
 * @param classes the class names
 * @return the set-construction expression
 */
private ExpressionDef getClassesAsSetExpression(Collection<String> classes) {
    List<ExpressionDef> classExpressions = new ArrayList<>(classes.size());
    for (String className : classes) {
        classExpressions.add(asClassExpression(className));
    }
    return GenUtils.setOf(classExpressions);
}
/**
 * @return {@code true} when at least one declared type variable carries a
 * non-empty argument map
 */
private boolean hasTypeArguments() {
    if (typeArguments == null) {
        return false;
    }
    // anyMatch over an empty map collection is false, so no separate isEmpty check is needed.
    return typeArguments.values().stream().anyMatch(arguments -> !arguments.isEmpty());
}
/**
 * Determines whether the bean is a singleton: final factory fields always are;
 * otherwise an explicit scope is compared to {@code @Singleton}; failing that,
 * a declared {@code @DefaultScope} of {@code @Singleton} decides.
 *
 * @param scope the resolved scope annotation name, may be {@code null}
 * @return {@code true} if the bean is (effectively) a singleton
 */
private boolean isSingleton(String scope) {
    if (beanProducingElement instanceof FieldElement && beanProducingElement.isFinal()) {
        // final fields can't change so effectively singleton
        return true;
    }
    if (scope != null) {
        return Singleton.class.getName().equals(scope);
    }
    // Fall back to @DefaultScope; class-produced beans use the full metadata,
    // other producers use only their declared metadata.
    AnnotationMetadata metadata = beanProducingElement instanceof ClassElement
        ? getAnnotationMetadata()
        : beanProducingElement.getDeclaredMetadata();
    return metadata.stringValue(DefaultScope.class)
        .map(Singleton.class.getName()::equals)
        .orElse(false);
}
/**
 * Serializes the generated bean definition class to bytecode.
 *
 * @return the bytes of the generated class
 * @throws IllegalStateException if {@code visitBeanDefinitionEnd()} has not been called yet
 */
public byte[] toByteArray() {
    if (output != null) {
        return ByteCodeWriterUtils.writeByteCode(classDefBuilder.build(), visitorContext);
    }
    throw new IllegalStateException("Bean definition not finalized. Call visitBeanDefinitionEnd() first.");
}
/**
 * Writes all generated artifacts: the service descriptor, every generated class,
 * any executable-methods companion class, and evaluated-expression classes.
 * Does nothing if a visitor disabled this bean.
 *
 * @param visitor the output visitor
 * @throws IOException if writing fails (including IOExceptions unwrapped from
 *                     the executable-methods writer)
 */
@Override
public void accept(ClassWriterOutputVisitor visitor) throws IOException {
    if (disabled) {
        return;
    }
    visitor.visitServiceDescriptor(
        BeanDefinitionReference.class,
        beanDefinitionName,
        getOriginatingElement()
    );
    for (Map.Entry<String, byte[]> generated : output.entrySet()) {
        try (OutputStream stream = visitor.visitClass(generated.getKey(), getOriginatingElements())) {
            stream.write(generated.getValue());
        }
    }
    if (executableMethodsDefinitionWriter != null) {
        try {
            executableMethodsDefinitionWriter.accept(visitor);
        } catch (RuntimeException e) {
            // The nested writer wraps IOExceptions; surface the original cause.
            if (e.getCause() instanceof IOException ioException) {
                throw ioException;
            }
            throw e;
        }
    }
    evaluatedExpressionProcessor.writeEvaluatedExpressions(visitor);
}
/**
 * Queues a setter-value injection; actual statement generation is deferred
 * until the queued inject commands are processed.
 */
@Override
public void visitSetterValue(
TypedElement declaringType,
MethodElement methodElement,
AnnotationMetadata annotationMetadata,
boolean requiresReflection,
boolean isOptional) {
injectCommands.add(new SetterInjectionInjectCommand(declaringType, methodElement, annotationMetadata, requiresReflection, isOptional));
}
/**
 * Generates the injection statement for a setter value. Reflective setters are
 * only recorded as injection points (handled elsewhere); direct setters emit an
 * assignment, optionally guarded by a property-presence check.
 *
 * @return the injection statement, or an empty statement for reflective setters
 */
private StatementDef setSetterValue(InjectMethodSignature injectMethodSignature,
                                    TypedElement declaringType,
                                    MethodElement methodElement,
                                    AnnotationMetadata annotationMetadata,
                                    boolean requiresReflection,
                                    boolean isOptional) {
    if (requiresReflection) {
        // Reflection-based setters are tracked but generate no direct statement here.
        final MethodVisitData visitData = new MethodVisitData(
            declaringType,
            methodElement,
            false,
            annotationMetadata);
        methodInjectionPoints.add(visitData);
        allMethodVisits.add(visitData);
        return StatementDef.multi();
    }
    ParameterElement parameter = methodElement.getParameters()[0];
    StatementDef setValue = setSetterValue(injectMethodSignature, declaringType, methodElement, annotationMetadata, parameter);
    if (!isOptional) {
        return setValue;
    }
    // Optional values only inject when the backing property actually exists.
    return getPropertyContainsCheck(
        injectMethodSignature,
        parameter.getType(),
        parameter.getName(),
        annotationMetadata
    ).ifTrue(setValue);
}
/**
 * Generates the value-resolution statement for a single setter parameter.
 * <p>
 * For configuration-properties beans with a value-typed parameter, the value is
 * resolved (in priority order) as: an inner configuration bean lookup, a
 * {@code @Property}-named value, or a {@code @Value} placeholder. Otherwise the
 * setter is treated as a plain method injection point.
 */
private StatementDef setSetterValue(InjectMethodSignature injectMethodSignature,
TypedElement declaringType,
MethodElement methodElement,
AnnotationMetadata annotationMetadata,
ParameterElement parameter) {
ClassElement genericType = parameter.getGenericType();
if (isConfigurationProperties && isValueType(annotationMetadata)) {
int methodIndex = -1;
// Optionally keep config-property setters visible as injection points.
if (keepConfPropInjectPoints) {
final MethodVisitData methodVisitData = new MethodVisitData(
declaringType,
methodElement,
false,
annotationMetadata);
methodInjectionPoints.add(methodVisitData);
allMethodVisits.add(methodVisitData);
methodIndex = allMethodVisits.size() - 1;
}
Function<ExpressionDef, StatementDef> onValue = value -> injectMethodSignature
.instanceVar.invoke(methodElement, value);
Optional<String> valueValue = annotationMetadata.stringValue(Value.class);
if (isInnerType(genericType)) {
// Inner configuration types are resolved as beans, not as property values.
boolean isArray = genericType.isArray();
boolean isCollection = genericType.isAssignable(Collection.class);
if (isCollection || isArray) {
ClassElement typeArgument = genericType.isArray() ? genericType.fromArray() : genericType.getFirstTypeArgument().orElse(null);
if (typeArgument != null && !typeArgument.isPrimitive()) {
return getInvokeGetBeansOfTypeForSetter(injectMethodSignature, methodElement.getName(), parameter, annotationMetadata, onValue, methodIndex);
}
return onValue.apply(
getInvokeGetBeanForSetter(injectMethodSignature, methodElement.getName(), parameter, annotationMetadata, methodIndex)
);
}
return onValue.apply(
getInvokeGetBeanForSetter(injectMethodSignature, methodElement.getName(), parameter, annotationMetadata, methodIndex)
);
}
// @Property takes precedence over @Value when both are present.
Optional<String> property = annotationMetadata.stringValue(Property.class, "name");
if (property.isPresent()) {
return onValue.apply(
getInvokeGetPropertyValueForSetter(injectMethodSignature, methodElement.getName(), parameter, property.get(), annotationMetadata, methodIndex)
);
}
if (valueValue.isPresent()) {
return onValue.apply(
getInvokeGetPropertyPlaceholderValueForSetter(injectMethodSignature, methodElement.getName(), parameter, valueValue.get(), annotationMetadata, methodIndex)
);
}
// NOTE(review): reached only when isValueType(...) holds yet neither @Property
// nor @Value supplies a value — presumably impossible; confirm.
throw new IllegalStateException();
} else {
final MethodVisitData methodVisitData = new MethodVisitData(
declaringType,
methodElement,
false,
annotationMetadata);
methodInjectionPoints.add(methodVisitData);
allMethodVisits.add(methodVisitData);
return injectMethod(
methodElement,
false,
injectMethodSignature.aThis,
injectMethodSignature.methodParameters,
injectMethodSignature.instanceVar,
allMethodVisits.size() - 1
);
}
}
/**
 * Records a {@code @PostConstruct} lifecycle method. For "super bean definitions"
 * the injection point is delegated to the superclass unless the lifecycle is
 * intercepted by type.
 */
@Override
public void visitPostConstructMethod(TypedElement declaringType,
MethodElement methodElement,
boolean requiresReflection,
VisitorContext visitorContext) {
// NOTE(review): postConstruct(false) runs unconditionally here, whereas
// visitPreDestroyMethod only calls preDestroy(false) inside the guard —
// confirm this asymmetry is intentional.
buildMethodDefinition.postConstruct(false);
// for "super bean definitions" we just delegate to super
if (!superBeanDefinition || isInterceptedLifeCycleByType(this.annotationMetadata, "POST_CONSTRUCT")) {
MethodVisitData methodVisitData = new MethodVisitData(declaringType, methodElement, requiresReflection, methodElement.getAnnotationMetadata(), true, false);
postConstructMethodVisits.add(methodVisitData);
allMethodVisits.add(methodVisitData);
buildMethodDefinition.postConstruct.injectionPoints.add(new
InjectMethodBuildCommand(
declaringType,
methodElement,
requiresReflection,
allMethodVisits.size() - 1
)
);
}
}
/**
 * Records a {@code @PreDestroy} lifecycle method. For "super bean definitions"
 * the injection point is delegated to the superclass unless the lifecycle is
 * intercepted by type.
 */
@Override
public void visitPreDestroyMethod(TypedElement declaringType,
MethodElement methodElement,
boolean requiresReflection,
VisitorContext visitorContext) {
// for "super bean definitions" we just delegate to super
if (!superBeanDefinition || isInterceptedLifeCycleByType(this.annotationMetadata, "PRE_DESTROY")) {
buildMethodDefinition.preDestroy(false);
MethodVisitData methodVisitData = new MethodVisitData(declaringType, methodElement, requiresReflection, methodElement.getAnnotationMetadata(), false, true);
preDestroyMethodVisits.add(methodVisitData);
allMethodVisits.add(methodVisitData);
buildMethodDefinition.preDestroy.injectionPoints.add(new InjectMethodBuildCommand(
declaringType,
methodElement,
requiresReflection,
allMethodVisits.size() - 1
));
}
}
/**
 * Records a method injection point: registers the visit data, processes any
 * evaluated expressions in the method's annotations, and queues the inject
 * command with the index of this visit for later statement generation.
 */
@Override
public void visitMethodInjectionPoint(TypedElement declaringType,
MethodElement methodElement,
boolean requiresReflection,
VisitorContext visitorContext) {
MethodVisitData methodVisitData = new MethodVisitData(declaringType, methodElement, requiresReflection, methodElement.getAnnotationMetadata());
evaluatedExpressionProcessor.processEvaluatedExpressions(methodElement.getAnnotationMetadata(), this.beanTypeElement);
methodInjectionPoints.add(methodVisitData);
allMethodVisits.add(methodVisitData);
// The index (allMethodVisits.size() - 1) ties the queued command back to this visit.
injectCommands.add(new InjectMethodInjectCommand(
declaringType,
methodElement,
requiresReflection,
visitorContext,
allMethodVisits.size() - 1)
);
}
/**
 * Registers an executable method with no interception; delegates to the
 * four-argument overload with null proxy type and bridge method.
 *
 * @return the index of the new executable method
 */
@Override
public int visitExecutableMethod(TypedElement declaringBean,
MethodElement methodElement, VisitorContext visitorContext) {
return visitExecutableMethod(
declaringBean,
methodElement,
null,
null
);
}
/**
 * Visit a method that is to be made executable allow invocation of said method without reflection.
 *
 * @param declaringType The declaring type of the method. Either a Class or a string representing the
 * name of the type
 * @param methodElement The method element
 * @param interceptedProxyType The intercepted proxy type
 * @param interceptedProxyBridgeMethod The intercepted proxy bridge method name
 * @return The index of a new method.
 */
public int visitExecutableMethod(TypedElement declaringType,
MethodElement methodElement,
ClassTypeDef interceptedProxyType,
MethodDef interceptedProxyBridgeMethod) {
// The companion ExecutableMethodsDefinitionWriter is created lazily on the
// first executable method and shared by all subsequent ones.
if (executableMethodsDefinitionWriter == null) {
executableMethodsDefinitionWriter = new ExecutableMethodsDefinitionWriter(
evaluatedExpressionProcessor,
annotationMetadata,
beanDefinitionName,
getBeanDefinitionName(),
originatingElements,
visitorContext
);
}
return executableMethodsDefinitionWriter.visitExecutableMethod(declaringType, methodElement, interceptedProxyType, interceptedProxyBridgeMethod);
}
/**
 * @return a short diagnostic representation containing the bean's full class name
 */
@Override
public String toString() {
    return "BeanDefinitionWriter{beanFullClassName='" + beanFullClassName + "'}";
}
/**
 * @return the package name of the bean type
 */
@Override
public String getPackageName() {
return packageName;
}
/**
 * @return the simple (unqualified) name of the bean class
 */
@Override
public String getBeanSimpleName() {
return beanSimpleClassName;
}
/**
 * @return the annotation metadata of the bean definition
 */
@Override
public AnnotationMetadata getAnnotationMetadata() {
return annotationMetadata;
}
/**
 * Queues a configuration-builder definition; statement generation is deferred
 * until the queued inject commands are processed.
 */
@Override
public void visitConfigBuilder(ConfigurationBuilderDefinition builderDefinition) {
injectCommands.add(new ConfigBuilderInjectCommand(builderDefinition));
}
/**
 * Intentional no-op: config-builder handling is driven by the command queued in
 * {@link #visitConfigBuilder} — presumably processed elsewhere; confirm.
 */
@Override
public void visitConfigBuilderField(ClassElement type, String field, AnnotationMetadata annotationMetadata, boolean isInterface) {
}
/**
 * Intentional no-op: see {@link #visitConfigBuilder}.
 */
@Override
public void visitConfigBuilderMethod(ClassElement type, String methodName, AnnotationMetadata annotationMetadata, boolean isInterface) {
}
/**
 * Intentional no-op: see {@link #visitConfigBuilder}.
 */
@Override
public void visitConfigBuilderMethod(String propertyName, ClassElement returnType, String methodName, ClassElement paramType, Map<String, ClassElement> generics, String path) {
}
/**
 * Intentional no-op: see {@link #visitConfigBuilder}.
 */
@Override
public void visitConfigBuilderDurationMethod(String propertyName, ClassElement returnType, String methodName, String path) {
}
/**
 * Intentional no-op: see {@link #visitConfigBuilder}.
 */
@Override
public void visitConfigBuilderEnd() {
}
/**
 * Marks whether the bean's executable methods require post-processing at
 * runtime; the flag is baked into the generated PrecalculatedInfo.
 */
@Override
public void setRequiresMethodProcessing(boolean shouldPreProcess) {
this.preprocessMethods = shouldPreProcess;
}
/**
 * Stores the bean's generic type arguments (interface/superclass name to
 * type-variable map) for later emission as the static type-arguments field.
 */
@Override
public void visitTypeArguments(Map<String, Map<String, ClassElement>> typeArguments) {
this.typeArguments = typeArguments;
}
/**
 * @return whether the bean's executable methods require post-processing at runtime
 */
@Override
public boolean requiresMethodProcessing() {
return this.preprocessMethods;
}
/**
 * Queues a field injection point; statement generation is deferred until the
 * queued inject commands are processed.
 */
@Override
public void visitFieldInjectionPoint(
TypedElement declaringType,
FieldElement fieldElement,
boolean requiresReflection,
VisitorContext visitorContext) {
injectCommands.add(new InjectFieldInjectCommand(declaringType, fieldElement, requiresReflection));
}
/**
 * Generates the statement injecting a single field, selecting the container
 * resolution method that matches the field's generic type: injectable maps,
 * collections/arrays (of beans or BeanRegistrations), streams, optionals,
 * single BeanRegistrations, or a plain bean lookup as the fallback.
 *
 * @return the field-injection statement
 */
private StatementDef injectField(InjectMethodSignature injectMethodSignature,
                                 TypedElement declaringType,
                                 FieldElement fieldElement,
                                 boolean requiresReflection) {
    // @Inject(required = false) downgrades a missing bean from an error to a skip.
    boolean required = fieldElement
        .booleanValue(AnnotationUtil.INJECT, AnnotationUtil.MEMBER_REQUIRED)
        .orElse(true);
    final ClassElement fieldType = fieldElement.getGenericType();
    boolean array = fieldType.isArray();
    boolean collection = fieldType.isAssignable(Collection.class);
    boolean needsGenericType = true;
    Method resolutionMethod;
    if (isInjectableMap(fieldType)) {
        resolutionMethod = GET_MAP_OF_TYPE_FOR_FIELD;
    } else if (collection || array) {
        ClassElement component = array ? fieldType.fromArray() : fieldType.getFirstTypeArgument().orElse(null);
        if (component == null || component.isPrimitive()) {
            // A raw or primitive-element container falls back to a plain bean lookup.
            needsGenericType = false;
            resolutionMethod = GET_BEAN_FOR_FIELD;
        } else if (component.isAssignable(BeanRegistration.class)) {
            resolutionMethod = GET_BEAN_REGISTRATIONS_FOR_FIELD;
        } else {
            resolutionMethod = GET_BEANS_OF_TYPE_FOR_FIELD;
        }
    } else if (fieldType.isAssignable(Stream.class)) {
        resolutionMethod = GET_STREAM_OF_TYPE_FOR_FIELD;
    } else if (fieldType.isAssignable(Optional.class)) {
        resolutionMethod = FIND_BEAN_FOR_FIELD;
    } else if (fieldType.isAssignable(BeanRegistration.class)) {
        resolutionMethod = GET_BEAN_REGISTRATION_FOR_FIELD;
    } else {
        needsGenericType = false;
        resolutionMethod = GET_BEAN_FOR_FIELD;
    }
    return visitFieldInjectionPointInternal(
        injectMethodSignature,
        declaringType,
        fieldElement,
        requiresReflection,
        resolutionMethod,
        array,
        needsGenericType,
        required
    );
}
/**
 * Checks whether the given type is an injectable map: one of the supported map
 * types ({@code Map}, {@code HashMap}, {@code LinkedHashMap}, {@code TreeMap})
 * with exactly two type arguments whose key type is a {@code CharSequence}.
 *
 * @param genericType the field's generic type
 * @return {@code true} when the type qualifies for map injection
 */
private static boolean isInjectableMap(ClassElement genericType) {
    String typeName = genericType.getName();
    boolean supportedMapType = typeName.equals(Map.class.getName())
        || typeName.equals(HashMap.class.getName())
        || typeName.equals(LinkedHashMap.class.getName())
        || typeName.equals(TreeMap.class.getName());
    if (!supportedMapType) {
        return false;
    }
    Map<String, ClassElement> typeArgs = genericType.getTypeArguments();
    if (typeArgs.size() != 2) {
        return false;
    }
    // Only string-keyed maps qualify — presumably keyed by bean name; confirm.
    ClassElement keyType = typeArgs.get("K");
    return keyType != null && keyType.isAssignable(CharSequence.class);
}
/**
 * Checks whether the (unwrapped) type is an inner class of the bean type.
 * Container types are unwrapped to their first type argument, arrays to their
 * component type, before the lookup.
 *
 * @param genericType the type to test
 * @return {@code true} if the element type is a known inner class of the bean
 */
private boolean isInnerType(ClassElement genericType) {
    String typeName = genericType.isContainerType()
        ? genericType.getFirstTypeArgument().map(Element::getName).orElse("")
        : genericType.isArray()
            ? genericType.fromArray().getName()
            : genericType.getName();
    return beanTypeInnerClasses.contains(typeName);
}
/**
 * Visits an injection point driven by an annotation member that binds to a
 * bean property of the given type.
 *
 * Getter resolution order: first the declared bean properties' read
 * methods, then — honoring custom {@code @AccessorsStyle} read prefixes —
 * any accessible no-argument instance method whose name maps to the
 * property. If neither yields a getter, compilation fails.
 *
 * @param annotationMemberBeanType the bean type the member refers to
 * @param annotationMemberProperty the property name to read
 * @param requiredValue            value the property must equal, or null
 * @param notEqualsValue           value the property must not equal, or null
 */
@Override
public void visitAnnotationMemberPropertyInjectionPoint(TypedElement annotationMemberBeanType,
                                                        String annotationMemberProperty,
                                                        @Nullable String requiredValue,
                                                        @Nullable String notEqualsValue) {
    ClassElement annotationMemberClassElement = annotationMemberBeanType.getType();
    // Preferred path: a declared bean property exposing a read method.
    MethodElement memberPropertyGetter = annotationMemberClassElement.getBeanProperties()
        .stream()
        .filter(property -> property.getSimpleName().equals(annotationMemberProperty))
        .findFirst()
        .flatMap(PropertyElement::getReadMethod)
        .orElse(null);
    if (memberPropertyGetter == null) {
        // Fallback: scan accessible instance methods using the configured
        // accessor read prefixes (defaults to the standard "get" prefix).
        final String[] readPrefixes = annotationMemberBeanType.getAnnotationMetadata()
            .getValue(AccessorsStyle.class, "readPrefixes", String[].class)
            .orElse(new String[]{AccessorsStyle.DEFAULT_READ_PREFIX});
        memberPropertyGetter = annotationMemberClassElement.getEnclosedElement(
            ElementQuery.ALL_METHODS
                .onlyAccessible(beanTypeElement)
                .onlyInstance()
                .filter(m -> annotationMemberProperty.equals(NameUtils.getPropertyNameForGetter(m.getName(), readPrefixes)) && !m.hasParameters())
        ).orElse(null);
    }
    if (memberPropertyGetter == null) {
        // No way to read the property: report a compile-time failure.
        visitorContext.fail("Bean property [" + annotationMemberProperty + "] is not available on bean ["
            + annotationMemberBeanType.getName() + "]", annotationMemberBeanType);
    } else {
        // Record the injection point; code is generated in a later pass.
        annotationInjectionPoints.computeIfAbsent(annotationMemberClassElement, type -> new ArrayList<>(2))
            .add(new AnnotationVisitData(annotationMemberBeanType, annotationMemberProperty, memberPropertyGetter, requiredValue, notEqualsValue));
    }
}
/**
 * Records a field value injection as a deferred command; the actual
 * bytecode/statements are generated later when commands are replayed.
 *
 * @param declaringType      the type declaring the field
 * @param fieldElement       the field receiving the value
 * @param requiresReflection whether the field is not directly accessible
 * @param isOptional         whether a missing value is tolerated
 */
@Override
public void visitFieldValue(TypedElement declaringType,
                            FieldElement fieldElement,
                            boolean requiresReflection,
                            boolean isOptional) {
    injectCommands.add(new InjectFieldValueInjectCommand(declaringType, fieldElement, requiresReflection, isOptional));
}
/**
 * Builds the expression invoking {@code getPropertyValueForField(...)} for
 * the given field, cast to the field's (erased) type.
 * {@code @PropertySource}/{@code @Property} are stripped from the argument
 * metadata copy so they are not carried into the generated Argument.
 */
private ExpressionDef getInvokeGetPropertyValueForField(InjectMethodSignature injectMethodSignature,
                                                        FieldElement fieldElement,
                                                        AnnotationMetadata annotationMetadata,
                                                        String value,
                                                        int fieldIndex) {
    // Work on a mutable copy; the caller's metadata must stay untouched.
    annotationMetadata = MutableAnnotationMetadata.of(annotationMetadata);
    removeAnnotations(annotationMetadata, PropertySource.class.getName(), Property.class.getName());
    return injectMethodSignature.aThis
        .invoke(
            GET_PROPERTY_VALUE_FOR_FIELD,
            injectMethodSignature.beanResolutionContext,
            injectMethodSignature.beanContext,
            getFieldArgument(fieldElement, annotationMetadata, fieldIndex),
            ExpressionDef.constant(value),
            // CLI-prefixed alias of the property, or null if not applicable
            ExpressionDef.constant(getCliPrefix(fieldElement.getName()))
        ).cast(TypeDef.erasure(fieldElement.getType()));
}
/**
 * Builds the expression invoking {@code getPropertyPlaceholderValueForField(...)}
 * (i.e. a {@code ${...}} placeholder resolution) for the given field, cast
 * to the field's (erased) type. Property-related annotations are removed
 * from the metadata copy used to build the Argument.
 */
private ExpressionDef getInvokeGetPropertyPlaceholderValueForField(InjectMethodSignature injectMethodSignature,
                                                                   FieldElement fieldElement,
                                                                   String value,
                                                                   int fieldIndex) {
    AnnotationMetadata annotationMetadata = MutableAnnotationMetadata.of(fieldElement.getAnnotationMetadata());
    removeAnnotations(annotationMetadata, PropertySource.class.getName(), Property.class.getName());
    return injectMethodSignature.aThis
        .invoke(
            GET_PROPERTY_PLACEHOLDER_VALUE_FOR_FIELD,
            injectMethodSignature.beanResolutionContext,
            injectMethodSignature.beanContext,
            getFieldArgument(fieldElement, annotationMetadata, fieldIndex),
            ExpressionDef.constant(value)
        ).cast(TypeDef.erasure(fieldElement.getType()));
}
/**
 * Builds a {@code getValueForPath(...)} invocation used for configuration
 * path based injection. When {@code zeroArgs} is set, a simple synthetic
 * {@code Boolean "factory"} argument is pushed; otherwise a full Argument
 * carrying the property's generics is constructed.
 */
private ExpressionDef getGetValueForPathCall(InjectMethodSignature injectMethodSignature,
                                             ClassElement propertyType,
                                             String propertyName,
                                             String propertyPath,
                                             boolean zeroArgs,
                                             Map<String, ClassElement> generics) {
    return injectMethodSignature.aThis
        .invoke(
            GET_VALUE_FOR_PATH,
            injectMethodSignature.beanResolutionContext,
            injectMethodSignature.beanContext,
            // argument descriptor: trivial for zero-arg, full generics otherwise
            zeroArgs ? ClassTypeDef.of(Argument.class).invokeStatic(
                ArgumentExpUtils.METHOD_CREATE_ARGUMENT_SIMPLE,
                ExpressionDef.constant(TypeDef.of(Boolean.class)),
                ExpressionDef.constant("factory")
            ) : ArgumentExpUtils.buildArgumentWithGenerics(
                annotationMetadata,
                beanDefinitionTypeDef,
                propertyName,
                propertyType,
                generics,
                new HashSet<>(),
                loadClassValueExpressionFn
            ),
            ExpressionDef.constant(propertyPath)
        );
}
/**
 * Returns an expression for values that can be satisfied without a bean
 * lookup — the resolution context, the bean context, the exact
 * {@link ConversionService}, or the current ConfigurationPath — or
 * {@code null} when a regular container lookup is required.
 */
private ExpressionDef getValueBypassingBeanContext(ClassElement type, List<VariableDef.MethodParameter> methodParameters) {
    // Used in instantiate and inject methods
    if (type.isAssignable(BeanResolutionContext.class)) {
        return methodParameters.get(INSTANTIATE_METHOD_BEAN_RESOLUTION_CONTEXT_PARAM);
    }
    if (type.isAssignable(BeanContext.class)) {
        return methodParameters.get(INSTANTIATE_METHOD_BEAN_CONTEXT_PARAM);
    }
    if (visitorContext.getClassElement(ConversionService.class).orElseThrow().equals(type)) {
        // We only want to assign to exact `ConversionService` classes not to classes extending `ConversionService`
        return methodParameters.get(INSTANTIATE_METHOD_BEAN_CONTEXT_PARAM)
            .invoke(METHOD_BEAN_CONTEXT_GET_CONVERSION_SERVICE);
    }
    if (type.isAssignable(ConfigurationPath.class)) {
        return methodParameters.get(INSTANTIATE_METHOD_BEAN_RESOLUTION_CONTEXT_PARAM)
            .invoke(GET_CONFIGURATION_PATH_METHOD);
    }
    // Caller must fall back to a container lookup.
    return null;
}
/**
 * Generates the statement that injects a single field: registers the
 * injection point, resolves the value (either bypassing the container or
 * via the given {@code methodToInvoke}), and assigns it to the field.
 *
 * @param methodToInvoke      resolver method on the superclass to call
 * @param isArray             whether the field is an array type
 * @param requiresGenericType whether the resolver needs the generic type argument
 * @param isRequired          when false, null values are silently skipped
 */
private StatementDef visitFieldInjectionPointInternal(InjectMethodSignature injectMethodSignature,
                                                      TypedElement declaringType,
                                                      FieldElement fieldElement,
                                                      boolean requiresReflection,
                                                      Method methodToInvoke,
                                                      boolean isArray,
                                                      boolean requiresGenericType,
                                                      boolean isRequired) {
    evaluatedExpressionProcessor.processEvaluatedExpressions(fieldElement.getAnnotationMetadata(), null);
    autoApplyNamedIfPresent(fieldElement, fieldElement.getAnnotationMetadata());
    // Register first: the field's index in this list is referenced below.
    fieldInjectionPoints.add(new FieldVisitData(declaringType, fieldElement, requiresReflection));
    int fieldIndex = fieldInjectionPoints.size() - 1;
    // Contexts/ConversionService/ConfigurationPath skip the container lookup.
    ExpressionDef valueExpression = getValueBypassingBeanContext(fieldElement.getGenericField(), injectMethodSignature.methodParameters);
    if (valueExpression == null) {
        List<ExpressionDef> valueExpressions = new ArrayList<>(
            List.of(
                injectMethodSignature.beanResolutionContext,
                injectMethodSignature.beanContext,
                ExpressionDef.constant(fieldIndex)
            )
        );
        if (requiresGenericType) {
            valueExpressions.add(
                resolveFieldArgumentGenericType(fieldElement.getGenericType(), fieldIndex)
            );
        }
        valueExpressions.add(
            getQualifier(fieldElement, resolveFieldArgument(fieldIndex))
        );
        valueExpression = injectMethodSignature.aThis
            .invoke(methodToInvoke, valueExpressions);
        if (isArray && requiresGenericType) {
            // Resolvers return collections; convert to the field's array type.
            valueExpression = convertToArray(fieldElement.getType().fromArray(), valueExpression);
        }
        valueExpression = valueExpression.cast(TypeDef.erasure(fieldElement.getType()));
    }
    if (!isRequired) {
        // Optional injection: only assign when a non-null value was resolved.
        return valueExpression.newLocal(fieldElement.getName() + "Value", valueVar ->
            valueVar.ifNonNull(
                putField(fieldElement, requiresReflection, injectMethodSignature, valueVar, fieldIndex)
            ));
    }
    return putField(fieldElement, requiresReflection, injectMethodSignature, valueExpression, fieldIndex);
}
/**
 * Emits the field assignment: either a direct {@code instance.field = value}
 * (with the instance cast to the declaring type), or a call to the
 * reflection-based setter helper when the field is not accessible.
 */
private StatementDef putField(FieldElement fieldElement,
                              boolean requiresReflection,
                              InjectMethodSignature injectMethodSignature,
                              ExpressionDef valueExpression,
                              int fieldIndex) {
    VariableDef instanceVar = injectMethodSignature.instanceVar;
    if (requiresReflection) {
        // Inaccessible field: delegate to the runtime reflection helper.
        return injectMethodSignature.aThis
            .invoke(
                SET_FIELD_WITH_REFLECTION_METHOD,
                injectMethodSignature.beanResolutionContext,
                injectMethodSignature.beanContext,
                ExpressionDef.constant(fieldIndex),
                instanceVar,
                valueExpression
            );
    }
    return instanceVar
        .cast(TypeDef.erasure(fieldElement.getDeclaringType()))
        .field(fieldElement)
        .put(valueExpression);
}
/**
 * Builds a boolean expression checking whether the configuration contains
 * the property (using the multi-value variant for maps/collections/nested
 * configuration properties). When a CLI prefix applies, the check also
 * accepts the CLI-prefixed property name.
 */
private ExpressionDef getPropertyContainsCheck(InjectMethodSignature injectMethodSignature,
                                               ClassElement propertyType,
                                               String propertyName,
                                               AnnotationMetadata annotationMetadata) {
    // @Property(name=...) overrides the derived property name.
    String propertyValue = annotationMetadata.stringValue(Property.class, "name").orElse(propertyName);
    ExpressionDef.InvokeInstanceMethod containsProperty = injectMethodSignature.aThis.invoke(
        isMultiValueProperty(propertyType) ? CONTAINS_PROPERTIES_VALUE_METHOD : CONTAINS_PROPERTY_VALUE_METHOD,
        injectMethodSignature.beanResolutionContext,
        injectMethodSignature.beanContext,
        ExpressionDef.constant(propertyValue) // property name
    );
    String cliProperty = getCliPrefix(propertyName);
    if (cliProperty == null) {
        return containsProperty.isTrue();
    }
    // Either the regular name or the CLI-prefixed name satisfies the check.
    return containsProperty.isTrue().or(
        injectMethodSignature.aThis.invoke(
            CONTAINS_PROPERTY_VALUE_METHOD,
            injectMethodSignature.beanResolutionContext,
            injectMethodSignature.beanContext,
            ExpressionDef.constant(cliProperty) // property name
        ).isTrue()
    );
}
/**
 * Resolves the CLI-prefixed name of the given property, or {@code null}
 * when this bean is not a {@code @ConfigurationProperties} bean or declares
 * no {@code cliPrefix} member.
 */
private String getCliPrefix(String propertyName) {
    if (!isConfigurationProperties) {
        return null;
    }
    if (!this.annotationMetadata.isPresent(ConfigurationProperties.class, "cliPrefix")) {
        return null;
    }
    return this.annotationMetadata
        .stringValue(ConfigurationProperties.class, "cliPrefix")
        .map(prefix -> prefix + propertyName)
        .orElse(null);
}
/**
 * Whether the property type can hold multiple configuration entries:
 * a map, a collection, or a nested configuration-properties type.
 */
private boolean isMultiValueProperty(ClassElement type) {
    if (type.isAssignable(Map.class)) {
        return true;
    }
    if (type.isAssignable(Collection.class)) {
        return true;
    }
    return isConfigurationProperties(type);
}
/** Convenience overload taking an eagerly-built argument expression. */
private ExpressionDef getQualifier(Element element, ExpressionDef argumentExpression) {
    return getQualifier(element, () -> argumentExpression);
}
/**
 * Builds the qualifier expression for an injection point, in precedence
 * order: declared qualifier annotations (single or composite), interceptor
 * binding, {@code @Type}-restricted lookup, and finally no qualifier
 * (a null expression).
 *
 * @param argumentExpressionSupplier lazily supplies the Argument expression;
 *        only invoked when a qualifier actually needs it
 */
private ExpressionDef getQualifier(Element element, Supplier<ExpressionDef> argumentExpressionSupplier) {
    final List<String> qualifierNames = element.getAnnotationNamesByStereotype(AnnotationUtil.QUALIFIER);
    if (!qualifierNames.isEmpty()) {
        if (qualifierNames.size() == 1) {
            // simple qualifier
            final String annotationName = qualifierNames.iterator().next();
            return getQualifierForAnnotation(element, annotationName, argumentExpressionSupplier.get());
        }
        // composite qualifier
        return TYPE_QUALIFIERS.invokeStatic(
            METHOD_QUALIFIER_BY_QUALIFIERS,
            TYPE_QUALIFIER.array().instantiate(
                qualifierNames.stream().map(name -> getQualifierForAnnotation(element, name, argumentExpressionSupplier.get())).toList()
            )
        );
    }
    if (element.hasAnnotation(AnnotationUtil.ANN_INTERCEPTOR_BINDING_QUALIFIER)) {
        // Qualify by the element's interceptor binding metadata.
        return TYPE_QUALIFIERS.invokeStatic(
            METHOD_QUALIFIER_BY_INTERCEPTOR_BINDING,
            getAnnotationMetadataFromProvider(argumentExpressionSupplier.get())
        );
    }
    // @Type(...) restricts candidates to the listed classes.
    String[] byType = element.hasDeclaredAnnotation(io.micronaut.context.annotation.Type.NAME) ? element.stringValues(io.micronaut.context.annotation.Type.NAME) : null;
    if (byType != null && byType.length > 0) {
        return TYPE_QUALIFIERS.invokeStatic(
            METHOD_QUALIFIER_BY_TYPE,
            TypeDef.CLASS.array().instantiate(Arrays.stream(byType).map(this::asClassExpression).toList())
        );
    }
    // No qualifier applies.
    return ExpressionDef.nullValue();
}
/** Invokes {@code getAnnotationMetadata()} on the given provider expression. */
private ExpressionDef getAnnotationMetadataFromProvider(ExpressionDef argumentExpression) {
    return argumentExpression.invoke(PROVIDER_GET_ANNOTATION_METADATA_METHOD);
}
/**
 * Builds the qualifier expression for a single qualifier annotation:
 * {@code @Primary} means none, {@code @Named} qualifies by name (or by
 * argument when the name contains a {@code $}), {@code @Any} uses the
 * shared AnyQualifier, repeatable annotations use the repeatable variant,
 * and everything else qualifies by the annotation itself.
 */
private ExpressionDef getQualifierForAnnotation(Element element,
                                                String annotationName,
                                                ExpressionDef argumentExpression) {
    if (annotationName.equals(Primary.NAME)) {
        // primary is the same as no qualifier
        return ExpressionDef.nullValue();
    }
    if (annotationName.equals(AnnotationUtil.NAMED)) {
        final String n = element.stringValue(AnnotationUtil.NAMED).orElse(element.getName());
        if (!n.contains("$")) {
            return TYPE_QUALIFIERS.invokeStatic(METHOD_QUALIFIER_BY_NAME, ExpressionDef.constant(n));
        }
        // Synthetic/mangled name: resolve the qualifier from the Argument at runtime.
        return TYPE_QUALIFIERS.invokeStatic(METHOD_QUALIFIER_FOR_ARGUMENT, argumentExpression);
    }
    if (annotationName.equals(Any.NAME)) {
        return ClassTypeDef.of(AnyQualifier.class).getStaticField("INSTANCE", ClassTypeDef.of(AnyQualifier.class));
    }
    final String repeatableContainerName = element.findRepeatableAnnotation(annotationName).orElse(null);
    if (repeatableContainerName != null) {
        return TYPE_QUALIFIERS.invokeStatic(
            METHOD_QUALIFIER_BY_REPEATABLE_ANNOTATION,
            getAnnotationMetadataFromProvider(argumentExpression),
            ExpressionDef.constant(repeatableContainerName)
        );
    }
    return TYPE_QUALIFIERS.invokeStatic(
        METHOD_QUALIFIER_BY_ANNOTATION,
        getAnnotationMetadataFromProvider(argumentExpression),
        ExpressionDef.constant(annotationName)
    );
}
/** Varargs-friendly overload of {@link #getArrayOfClasses(Collection)}. */
private ExpressionDef getArrayOfClasses(String[] byType) {
    return getArrayOfClasses(List.of(byType));
}
/** Builds a {@code Class[]} instantiation expression from type names. */
private ExpressionDef getArrayOfClasses(Collection<String> byType) {
    return TypeDef.CLASS.array().instantiate(byType.stream().map(this::asClassExpression).toList());
}
/** Wraps a type name as a class-literal constant expression. */
private ExpressionDef.Constant asClassExpression(String type) {
    return ExpressionDef.constant(TypeDef.of(type));
}
/**
 * Converts a resolved Collection-valued expression into an array of the
 * given element type via {@code Collection.toArray(new T[0])}.
 */
private ExpressionDef convertToArray(ClassElement arrayType, ExpressionDef value) {
    return value
        .cast(TypeDef.of(Collection.class))
        .invoke(COLLECTION_TO_ARRAY, ClassTypeDef.of(arrayType).array().instantiate());
}
/**
 * Applies the default {@code @Named} value to the element when it carries
 * {@code @Named} directly or as a stereotype.
 */
private void autoApplyNamedIfPresent(Element element, AnnotationMetadata annotationMetadata) {
    boolean hasNamed = annotationMetadata.hasAnnotation(AnnotationUtil.NAMED)
        || annotationMetadata.hasStereotype(AnnotationUtil.NAMED);
    if (hasNamed) {
        autoApplyNamed(element);
    }
}
/**
 * Fills in a default {@code @Named} value when none was given explicitly:
 * classes use their decapitalized simple name, getter methods use the
 * property name they read, and everything else uses its own name.
 */
private void autoApplyNamed(Element element) {
    if (element.stringValue(AnnotationUtil.NAMED).isPresent()) {
        // An explicit name was provided; nothing to do.
        return;
    }
    element.annotate(AnnotationUtil.NAMED, builder -> {
        final String defaultName;
        if (element instanceof ClassElement) {
            defaultName = NameUtils.decapitalize(element.getSimpleName());
        } else if (element instanceof MethodElement && NameUtils.isGetterName(element.getName())) {
            defaultName = NameUtils.getPropertyNameForGetter(element.getName());
        } else {
            defaultName = element.getName();
        }
        builder.value(defaultName);
    });
}
/**
 * Generates the statement performing method injection: normalizes parameter
 * {@code @Named} defaults, registers any evaluated expressions found on the
 * parameters, then delegates to {@link #injectStatement}.
 */
private StatementDef injectMethod(MethodElement methodElement,
                                  boolean requiresReflection,
                                  VariableDef.This aThis,
                                  List<VariableDef.MethodParameter> parameters,
                                  VariableDef instanceVar,
                                  int methodIndex) {
    final List<ParameterElement> argumentTypes = Arrays.asList(methodElement.getParameters());
    applyDefaultNamedToParameters(argumentTypes);
    for (ParameterElement value : argumentTypes) {
        evaluatedExpressionProcessor.processEvaluatedExpressions(value.getAnnotationMetadata(), null);
    }
    return injectStatement(aThis, parameters, methodElement, requiresReflection, instanceVar, methodIndex);
}
/**
 * Emits the actual injected-method invocation. Three shapes:
 * optional injection (values stored in an Object[] and the call guarded by
 * {@code isMethodResolved}), a direct call, or a reflective call when the
 * method is inaccessible.
 */
private StatementDef injectStatement(VariableDef.This aThis,
                                     List<VariableDef.MethodParameter> parameters,
                                     MethodElement methodElement,
                                     boolean requiresReflection,
                                     VariableDef instanceVar,
                                     int methodIndex) {
    final List<ParameterElement> argumentTypes = Arrays.asList(methodElement.getParameters());
    boolean isRequiredInjection = InjectionPoint.isInjectionRequired(methodElement);
    // One resolution expression per parameter, in declaration order.
    List<ExpressionDef> invocationValues = IntStream.range(0, argumentTypes.size())
        .mapToObj(index -> getBeanForMethodParameter(aThis, parameters, index, argumentTypes.get(index), methodIndex))
        .toList();
    if (!isRequiredInjection && methodElement.hasParameters()) {
        // store parameter values in local object[]
        return TypeDef.OBJECT.array().instantiate(invocationValues).newLocal("values", valuesVar -> {
            // invoke isMethodResolved with method parameters
            List<? extends ExpressionDef> values = IntStream.range(0, argumentTypes.size())
                .mapToObj(index -> valuesVar.arrayElement(index).cast(TypeDef.erasure(argumentTypes.get(index).getType())))
                .toList();
            // Only call the method when all parameters could be resolved.
            return aThis.invoke(
                IS_METHOD_RESOLVED,
                ExpressionDef.constant(methodIndex),
                valuesVar
            ).ifTrue(
                instanceVar.invoke(methodElement, values)
            );
        });
    }
    if (!requiresReflection) {
        return instanceVar.invoke(methodElement, invocationValues);
    }
    // Inaccessible method: route through the reflection helper.
    return aThis.invoke(
        INVOKE_WITH_REFLECTION_METHOD,
        parameters.get(INJECT_METHOD_BEAN_RESOLUTION_CONTEXT_PARAM),
        parameters.get(INJECT_METHOD_BEAN_CONTEXT_PARAM),
        ExpressionDef.constant(methodIndex),
        instanceVar,
        TypeDef.OBJECT.array().instantiate(invocationValues)
    );
}
/**
 * Emits a call destroying beans created for {@code @InjectScope} during
 * resolution. NOTE(review): assumes parameters.get(0) is the
 * BeanResolutionContext, consistent with the other inject helpers here.
 */
private StatementDef destroyInjectScopeBeansIfNecessary(List<VariableDef.MethodParameter> parameters) {
    return parameters.get(0).invoke(DESTROY_INJECT_SCOPED_BEANS_METHOD);
}
/**
 * Resolves the expression producing the value for one method parameter at
 * an injection point. Resolution order: context/bypass values, property or
 * placeholder values for {@code @Value}-style parameters, then container
 * lookups chosen by the parameter's shape (collection/array, map, stream,
 * optional, bean registration, or plain bean).
 *
 * @param i           the parameter's index within the method
 * @param methodIndex the method's index within the bean definition
 */
private ExpressionDef getBeanForMethodParameter(VariableDef.This aThis,
                                                List<VariableDef.MethodParameter> methodParameters,
                                                int i,
                                                ParameterElement entry,
                                                int methodIndex) {
    AnnotationMetadata argMetadata = entry.getAnnotationMetadata();
    // Contexts, exact ConversionService and ConfigurationPath never hit the container.
    ExpressionDef expressionDef = getValueBypassingBeanContext(entry.getGenericType(), methodParameters);
    if (expressionDef != null) {
        return expressionDef;
    }
    boolean requiresGenericType = false;
    final ClassElement genericType = entry.getGenericType();
    Method methodToInvoke;
    boolean isCollection = genericType.isAssignable(Collection.class);
    boolean isMap = isInjectableMap(genericType);
    boolean isArray = genericType.isArray();
    if (isValueType(argMetadata) && !isInnerType(entry.getGenericType())) {
        // @Property / @Value style parameter: resolve from configuration.
        Optional<String> property = argMetadata.stringValue(Property.class, "name");
        if (property.isPresent()) {
            return getInvokeGetPropertyValueForMethod(aThis, methodParameters, i, entry, property.get(), methodIndex);
        } else {
            if (entry.getAnnotationMetadata().getValue(Value.class, EvaluatedExpressionReference.class).isPresent()) {
                return getInvokeGetEvaluatedExpressionValueForMethodArgument(aThis, i, entry, methodIndex);
            } else {
                Optional<String> valueValue = entry.getAnnotationMetadata().stringValue(Value.class);
                if (valueValue.isPresent()) {
                    return getInvokeGetPropertyPlaceholderValueForMethod(aThis, methodParameters, i, entry, valueValue.get(), methodIndex);
                }
            }
            // No resolvable value: inject null.
            return ExpressionDef.nullValue();
        }
    } else if (isCollection || isArray) {
        requiresGenericType = true;
        ClassElement typeArgument = genericType.isArray() ? genericType.fromArray() : genericType.getFirstTypeArgument().orElse(null);
        if (typeArgument != null && !typeArgument.isPrimitive()) {
            if (typeArgument.isAssignable(BeanRegistration.class)) {
                methodToInvoke = GET_BEAN_REGISTRATIONS_FOR_METHOD_ARGUMENT;
            } else {
                methodToInvoke = GET_BEANS_OF_TYPE_FOR_METHOD_ARGUMENT;
            }
        } else {
            // Primitive/unknown element type: fall back to a plain bean lookup.
            methodToInvoke = GET_BEAN_FOR_METHOD_ARGUMENT;
            requiresGenericType = false;
        }
    } else if (isMap) {
        requiresGenericType = true;
        methodToInvoke = GET_MAP_OF_TYPE_FOR_METHOD_ARGUMENT;
    } else if (genericType.isAssignable(Stream.class)) {
        requiresGenericType = true;
        methodToInvoke = GET_STREAM_OF_TYPE_FOR_METHOD_ARGUMENT;
    } else if (genericType.isAssignable(Optional.class)) {
        requiresGenericType = true;
        methodToInvoke = FIND_BEAN_FOR_METHOD_ARGUMENT;
    } else if (genericType.isAssignable(BeanRegistration.class)) {
        requiresGenericType = true;
        methodToInvoke = GET_BEAN_REGISTRATION_FOR_METHOD_ARGUMENT;
    } else {
        methodToInvoke = GET_BEAN_FOR_METHOD_ARGUMENT;
    }
    List<ExpressionDef> values = new ArrayList<>(
        List.of(
            // 1st argument load BeanResolutionContext
            methodParameters.get(0),
            // 2nd argument load BeanContext
            methodParameters.get(1),
            // 3rd argument the method index
            ExpressionDef.constant(methodIndex),
            // 4th argument the argument index
            ExpressionDef.constant(i)
        )
    );
    // invoke getBeanForField
    if (requiresGenericType) {
        values.add(
            resolveMethodArgumentGenericType(genericType, methodIndex, i)
        );
    }
    ExpressionDef argumentExpression = resolveMethodArgument(methodIndex, i);
    values.add(
        getQualifier(entry, argumentExpression)
    );
    ExpressionDef result = aThis.invoke(methodToInvoke, values);
    if (isArray && requiresGenericType) {
        result = convertToArray(genericType.fromArray(), result);
    }
    // cast the return value to the correct type
    return result.cast(TypeDef.erasure(entry.getType()));
}
/**
 * Builds the {@code getPropertyValueForMethodArgument(...)} invocation for
 * a {@code @Property}-annotated method parameter, cast to the parameter's
 * (erased) type.
 */
private ExpressionDef getInvokeGetPropertyValueForMethod(VariableDef.This aThis,
                                                         List<VariableDef.MethodParameter> methodParameters,
                                                         int i,
                                                         ParameterElement entry,
                                                         String value,
                                                         int methodIndex) {
    return aThis.invoke(
        GET_PROPERTY_VALUE_FOR_METHOD_ARGUMENT,
        // 1st argument load BeanResolutionContext
        methodParameters.get(0),
        // 2nd argument load BeanContext
        methodParameters.get(1),
        // 3rd argument the method index
        ExpressionDef.constant(methodIndex),
        // 4th argument the argument index
        ExpressionDef.constant(i),
        // 5th property value
        ExpressionDef.constant(value),
        // 6 cli property name
        ExpressionDef.constant(getCliPrefix(entry.getName()))
    ).cast(TypeDef.erasure(entry.getType()));
}
/**
 * Builds the invocation resolving an evaluated-expression
 * ({@code #{...}}) value for a method parameter, cast to the parameter's
 * (erased) type.
 */
private ExpressionDef getInvokeGetEvaluatedExpressionValueForMethodArgument(VariableDef.This aThis,
                                                                            int i,
                                                                            ParameterElement entry,
                                                                            int methodIndex) {
    return aThis.invoke(
        GET_EVALUATED_EXPRESSION_VALUE_FOR_METHOD_ARGUMENT,
        // 1st argument the method index
        ExpressionDef.constant(methodIndex),
        // 2nd argument the argument index
        ExpressionDef.constant(i)
    ).cast(TypeDef.erasure(entry.getType()));
}
/**
 * Builds the {@code getPropertyPlaceholderValueForMethodArgument(...)}
 * invocation resolving a {@code ${...}} placeholder for a method
 * parameter, cast to the parameter's (erased) type.
 */
private ExpressionDef getInvokeGetPropertyPlaceholderValueForMethod(VariableDef.This aThis,
                                                                    List<VariableDef.MethodParameter> methodParameters,
                                                                    int i,
                                                                    ParameterElement entry,
                                                                    String value,
                                                                    int methodIndex) {
    return aThis.invoke(
        GET_PROPERTY_PLACEHOLDER_VALUE_FOR_METHOD_ARGUMENT,
        // 1st argument load BeanResolutionContext
        methodParameters.get(0),
        // 2nd argument load BeanContext
        methodParameters.get(1),
        // 3rd argument the method index
        ExpressionDef.constant(methodIndex),
        // 4th argument the argument index
        ExpressionDef.constant(i),
        // 5th property value
        ExpressionDef.constant(value)
    ).cast(TypeDef.erasure(entry.getType()));
}
/**
 * Builds the {@code getPropertyValueForSetter(...)} invocation for a
 * setter-based property injection, cast to the setter parameter's (erased)
 * type. Property-related annotations are stripped from the metadata copy
 * used to build the Argument.
 */
private ExpressionDef getInvokeGetPropertyValueForSetter(InjectMethodSignature injectMethodSignature,
                                                         String setterName,
                                                         ParameterElement entry,
                                                         String value,
                                                         AnnotationMetadata annotationMetadata,
                                                         int methodIndex) {
    annotationMetadata = MutableAnnotationMetadata.of(annotationMetadata);
    removeAnnotations(annotationMetadata, PropertySource.class.getName(), Property.class.getName());
    return injectMethodSignature.aThis.invoke(
        GET_PROPERTY_VALUE_FOR_SETTER,
        // 1st argument load BeanResolutionContext
        injectMethodSignature.beanResolutionContext,
        // 2nd argument load BeanContext
        injectMethodSignature.beanContext,
        // 3rd argument the method name
        ExpressionDef.constant(setterName),
        // 4th argument the argument
        getMethodArgument(entry, annotationMetadata, methodIndex),
        // 5th property value
        ExpressionDef.constant(value),
        // 6 cli property name
        ExpressionDef.constant(getCliPrefix(entry.getName()))
    ).cast(TypeDef.erasure(entry.getType()));
}
/**
 * Builds the Argument expression for a setter/method parameter: reuses the
 * stored injection-point argument when configuration-property injection
 * points are kept, otherwise creates a fresh Argument inline.
 */
private ExpressionDef getMethodArgument(ParameterElement entry, AnnotationMetadata annotationMetadata, int methodIndex) {
    if (keepConfPropInjectPoints) {
        return resolveMethodArgument(methodIndex, 0);
    }
    return ArgumentExpUtils.pushCreateArgument(
        this.annotationMetadata,
        ClassElement.of(beanFullClassName),
        beanDefinitionTypeDef,
        entry.getName(),
        entry.getGenericType(),
        annotationMetadata,
        entry.getGenericType().getTypeArguments(),
        loadClassValueExpressionFn
    );
}
/**
 * Builds the Argument expression for a field injection point: reuses the
 * stored field argument when configuration-property injection points are
 * kept, otherwise creates a fresh Argument inline.
 */
private ExpressionDef getFieldArgument(FieldElement fieldElement, AnnotationMetadata annotationMetadata, int fieldIndex) {
    if (keepConfPropInjectPoints) {
        return resolveFieldArgument(fieldIndex);
    }
    return ArgumentExpUtils.pushCreateArgument(
        this.annotationMetadata,
        ClassElement.of(beanFullClassName),
        beanDefinitionTypeDef,
        fieldElement.getName(),
        fieldElement.getGenericType(),
        annotationMetadata,
        fieldElement.getGenericType().getTypeArguments(),
        loadClassValueExpressionFn
    );
}
/**
 * Builds the {@code getBeanForSetter(...)} invocation for a setter-based
 * bean injection, cast to the setter parameter's (erased) type.
 */
private ExpressionDef getInvokeGetBeanForSetter(InjectMethodSignature injectMethodSignature,
                                                String setterName,
                                                ParameterElement entry,
                                                AnnotationMetadata annotationMetadata,
                                                int methodIndex) {
    annotationMetadata = MutableAnnotationMetadata.of(annotationMetadata);
    removeAnnotations(annotationMetadata, PropertySource.class.getName(), Property.class.getName());
    return injectMethodSignature.aThis.invoke(
        GET_BEAN_FOR_SETTER,
        // 1st argument load BeanResolutionContext
        injectMethodSignature.beanResolutionContext,
        // 2nd argument load BeanContext
        injectMethodSignature.beanContext,
        // 3rd argument the method name
        ExpressionDef.constant(setterName),
        // 4th argument the argument
        getMethodArgument(entry, annotationMetadata, methodIndex),
        // push qualifier
        getQualifier(entry.getGenericType(), getMethodArgument(entry, annotationMetadata, methodIndex))
    ).cast(TypeDef.erasure(entry.getType()));
}
/**
 * Builds the {@code getBeansOfTypeForSetter(...)} invocation for a
 * collection-valued setter injection. The Argument is stored in a local so
 * the call, the generic-type resolution and the qualifier all share one
 * instance; the resolved value is handed to {@code onValue}.
 */
private StatementDef getInvokeGetBeansOfTypeForSetter(InjectMethodSignature injectMethodSignature,
                                                      String setterName,
                                                      ParameterElement entry,
                                                      AnnotationMetadata annotationMetadata,
                                                      Function<ExpressionDef, StatementDef> onValue,
                                                      int methodIndex) {
    annotationMetadata = MutableAnnotationMetadata.of(annotationMetadata);
    removeAnnotations(annotationMetadata, PropertySource.class.getName(), Property.class.getName());
    // 4th argument the argument
    ClassElement genericType = entry.getGenericType();
    return getMethodArgument(entry, annotationMetadata, methodIndex).newLocal("argument", argumentVar -> {
        ExpressionDef value = injectMethodSignature.aThis.invoke(
            GET_BEANS_OF_TYPE_FOR_SETTER,
            // 1st argument load BeanResolutionContext
            injectMethodSignature.beanResolutionContext,
            // 2nd argument load BeanContext
            injectMethodSignature.beanContext,
            // 3rd argument the method name
            ExpressionDef.constant(setterName),
            // 4th argument the argument
            argumentVar,
            // generic type
            resolveGenericType(argumentVar, genericType),
            // push qualifier
            getQualifier(entry.getGenericType(), argumentVar)
        ).cast(TypeDef.erasure(entry.getType()));
        return onValue.apply(value);
    });
}
/**
 * Resolves the generic element type for a container injection. Falls back
 * to reading the first type argument from the Argument variable at runtime
 * when the type cannot be resolved statically.
 */
private ExpressionDef resolveGenericType(VariableDef argumentVar, ClassElement genericType) {
    ExpressionDef argumentExpression = resolveArgumentGenericType(genericType);
    if (argumentExpression == null) {
        // Static resolution failed: derive it from the runtime Argument.
        argumentExpression = resolveFirstTypeArgument(argumentVar);
        return resolveInnerTypeArgumentIfNeeded(argumentExpression, genericType);
    }
    return argumentExpression;
}
/**
 * Builds the {@code getPropertyPlaceholderValueForSetter(...)} invocation
 * resolving a {@code ${...}} placeholder for a setter parameter, cast to
 * the setter parameter's (erased) type.
 *
 * Fix: the cast is now applied to the invocation result (the resolved
 * value), matching {@code getInvokeGetPropertyValueForSetter} and the other
 * {@code getInvoke*} helpers. Previously a misplaced parenthesis attached
 * the cast to the {@code getCliPrefix} constant argument instead, leaving
 * the returned expression uncast.
 */
private ExpressionDef getInvokeGetPropertyPlaceholderValueForSetter(InjectMethodSignature injectMethodSignature,
                                                                    String setterName,
                                                                    ParameterElement entry,
                                                                    String value,
                                                                    AnnotationMetadata annotationMetadata,
                                                                    int methodIndex) {
    annotationMetadata = MutableAnnotationMetadata.of(annotationMetadata);
    removeAnnotations(annotationMetadata, PropertySource.class.getName(), Property.class.getName());
    return injectMethodSignature.aThis
        .invoke(
            GET_PROPERTY_PLACEHOLDER_VALUE_FOR_SETTER,
            // 1st argument load BeanResolutionContext
            injectMethodSignature.beanResolutionContext,
            // 2nd argument load BeanContext
            injectMethodSignature.beanContext,
            // 3rd argument the method name
            ExpressionDef.constant(setterName),
            // 4th argument the argument
            getMethodArgument(entry, annotationMetadata, methodIndex),
            // 5th property value
            ExpressionDef.constant(value),
            // 6 cli property name
            ExpressionDef.constant(getCliPrefix(entry.getName()))
        ).cast(TypeDef.erasure(entry.getType()));
}
/**
 * Removes the named annotations from the metadata when it is mutable;
 * immutable metadata is left untouched.
 */
private void removeAnnotations(AnnotationMetadata annotationMetadata, String... annotationNames) {
    if (annotationMetadata instanceof MutableAnnotationMetadata mutableAnnotationMetadata) {
        Arrays.stream(annotationNames).forEach(mutableAnnotationMetadata::removeAnnotation);
    }
}
/** Applies default {@code @Named} values to every parameter that carries {@code @Named}. */
private void applyDefaultNamedToParameters(List<ParameterElement> argumentTypes) {
    argumentTypes.forEach(parameterElement ->
        autoApplyNamedIfPresent(parameterElement, parameterElement.getAnnotationMetadata()));
}
@SuppressWarnings("MagicNumber")
private ClassTypeDef createExecutableMethodInterceptor(MethodDef interceptMethod, String name) {
// if there is method interception in place we need to construct an inner executable method
|
visitBuildConstructorDefinition
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/ScopeModelUtil.java
|
{
"start": 951,
"end": 4987
}
|
/**
 * Utilities for resolving Dubbo scope models (framework / application /
 * module), falling back to the process-wide default models when no scope
 * model is supplied.
 */
class ____ {
    /** Returns the given scope model, or the default model for the SPI scope of {@code type}. */
    public static <T> ScopeModel getOrDefault(ScopeModel scopeModel, Class<T> type) {
        if (scopeModel != null) {
            return scopeModel;
        }
        return getDefaultScopeModel(type);
    }
    /**
     * Resolves the default scope model from the {@code @SPI} annotation's
     * declared scope. Fails when the type is not an SPI or declares an
     * unsupported scope.
     */
    private static <T> ScopeModel getDefaultScopeModel(Class<T> type) {
        SPI spi = type.getAnnotation(SPI.class);
        if (spi == null) {
            throw new IllegalArgumentException("SPI annotation not found for class: " + type.getName());
        }
        switch (spi.scope()) {
            case FRAMEWORK:
                return FrameworkModel.defaultModel();
            case APPLICATION:
                return ApplicationModel.defaultModel();
            case MODULE:
                return ApplicationModel.defaultModel().getDefaultModule();
            default:
                throw new IllegalStateException("Unable to get default scope model for type: " + type.getName());
        }
    }
    /**
     * Narrows to a {@link ModuleModel}; null falls back to the default
     * module, any other scope model kind is rejected.
     */
    public static ModuleModel getModuleModel(ScopeModel scopeModel) {
        if (scopeModel == null) {
            return ApplicationModel.defaultModel().getDefaultModule();
        }
        if (scopeModel instanceof ModuleModel) {
            return (ModuleModel) scopeModel;
        } else {
            throw new IllegalArgumentException("Unable to get ModuleModel from " + scopeModel);
        }
    }
    /** Alias of {@link #getOrDefaultApplicationModel(ScopeModel)}. */
    public static ApplicationModel getApplicationModel(ScopeModel scopeModel) {
        return getOrDefaultApplicationModel(scopeModel);
    }
    /** Resolves the owning ApplicationModel, defaulting when null is given. */
    public static ApplicationModel getOrDefaultApplicationModel(ScopeModel scopeModel) {
        if (scopeModel == null) {
            return ApplicationModel.defaultModel();
        }
        return getOrNullApplicationModel(scopeModel);
    }
    /**
     * Resolves the owning ApplicationModel (a module's parent counts), or
     * null when null is given; framework models are rejected.
     */
    public static ApplicationModel getOrNullApplicationModel(ScopeModel scopeModel) {
        if (scopeModel == null) {
            return null;
        }
        if (scopeModel instanceof ApplicationModel) {
            return (ApplicationModel) scopeModel;
        } else if (scopeModel instanceof ModuleModel) {
            ModuleModel moduleModel = (ModuleModel) scopeModel;
            return moduleModel.getApplicationModel();
        } else {
            throw new IllegalArgumentException("Unable to get FrameworkModel from " + scopeModel);
        }
    }
    /** Resolves the owning FrameworkModel for any scope model kind, defaulting when null. */
    public static FrameworkModel getFrameworkModel(ScopeModel scopeModel) {
        if (scopeModel == null) {
            return FrameworkModel.defaultModel();
        }
        if (scopeModel instanceof ApplicationModel) {
            return ((ApplicationModel) scopeModel).getFrameworkModel();
        } else if (scopeModel instanceof ModuleModel) {
            ModuleModel moduleModel = (ModuleModel) scopeModel;
            return moduleModel.getApplicationModel().getFrameworkModel();
        } else if (scopeModel instanceof FrameworkModel) {
            return (FrameworkModel) scopeModel;
        } else {
            throw new IllegalArgumentException("Unable to get FrameworkModel from " + scopeModel);
        }
    }
    /**
     * Returns the extension loader for {@code type} from the given scope
     * model, or from the default model matching the type's SPI scope when
     * no scope model is supplied.
     */
    public static <T> ExtensionLoader<T> getExtensionLoader(Class<T> type, ScopeModel scopeModel) {
        if (scopeModel != null) {
            return scopeModel.getExtensionLoader(type);
        } else {
            SPI spi = type.getAnnotation(SPI.class);
            if (spi == null) {
                throw new IllegalArgumentException("SPI annotation not found for class: " + type.getName());
            }
            switch (spi.scope()) {
                case FRAMEWORK:
                    return FrameworkModel.defaultModel().getExtensionLoader(type);
                case APPLICATION:
                    return ApplicationModel.defaultModel().getExtensionLoader(type);
                case MODULE:
                    return ApplicationModel.defaultModel().getDefaultModule().getExtensionLoader(type);
                default:
                    throw new IllegalArgumentException("Unable to get ExtensionLoader for type: " + type.getName());
            }
        }
    }
}
|
ScopeModelUtil
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NarrowingCompoundAssignmentTest.java
|
{
"start": 5068,
"end": 5970
}
|
class ____ {
void m() {
short s = 0;
byte b = 0;
s &= ~1;
s |= 1;
s ^= 1;
b &= ~1;
b |= 1;
b ^= 1;
b |= 128;
b &= 128;
b ^= 128;
b |= 1L;
b &= 1L;
b ^= 1L;
// BUG: Diagnostic contains: b = (byte) (b | 256)
b |= 256;
// BUG: Diagnostic contains: b = (byte) (b & ~256)
b &= ~256;
// BUG: Diagnostic contains: b = (byte) (b ^ 256)
b ^= 256;
}
}
""")
.doTest();
}
@Test
public void allowsBinopsOfDeficientTypes() {
compilationHelper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/shard/ShardToolCli.java
|
{
"start": 663,
"end": 903
}
|
/** CLI entry point grouping the shard-repair subcommands. */
class ____ extends MultiCommand {
    ____() {
        super("A CLI tool to remove corrupted parts of unrecoverable shards");
        // Only subcommand: drops corrupted segment/translog data from a shard.
        subcommands.put("remove-corrupted-data", new RemoveCorruptedShardDataCommand());
    }
}
|
ShardToolCli
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java
|
{
"start": 874,
"end": 2368
}
|
/**
 * Request to retrieve application privileges, optionally scoped to a
 * single application and a set of privilege names. An empty privileges
 * array means "all privileges".
 */
class ____ extends LegacyActionRequest implements ApplicationPrivilegesRequest {
    // Null means "any application".
    @Nullable
    private String application;
    // Never null after construction; empty array means "all".
    private String[] privileges;
    /** Deserializes the request from the transport stream. */
    public ____(StreamInput in) throws IOException {
        super(in);
        application = in.readOptionalString();
        privileges = in.readStringArray();
    }
    public ____() {
        privileges = Strings.EMPTY_ARRAY;
    }
    /** Rejects requests whose privileges array was explicitly set to null. */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if (privileges == null) {
            validationException = addValidationError("privileges cannot be null", validationException);
        }
        return validationException;
    }
    public void application(String application) {
        this.application = application;
    }
    public String application() {
        return this.application;
    }
    /** Empty set when no application filter is set, singleton otherwise. */
    @Override
    public Collection<String> getApplicationNames() {
        return application == null ? Collections.emptySet() : Collections.singleton(application);
    }
    public void privileges(String... privileges) {
        this.privileges = privileges;
    }
    public String[] privileges() {
        return this.privileges;
    }
    /** Serializes in the same order the stream constructor reads. */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeOptionalString(application);
        out.writeStringArray(privileges);
    }
}
|
GetPrivilegesRequest
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/nestedresulthandler/Person.java
|
{
"start": 796,
"end": 1472
}
|
class ____ {
private Integer id;
private String name;
private final List<Item> items = new ArrayList<>();
@Override
public String toString() {
return "Person(" + id + ", " + name + ", " + items + " )";
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Collection<Item> getItems() {
return items;
}
public boolean owns(String name) {
for (Item item : getItems()) {
if (item.getName().equals(name)) {
return true;
}
}
return false;
}
}
|
Person
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestFailureToReadEdits.java
|
{
"start": 3508,
"end": 12371
}
|
enum ____ {
SHARED_DIR_HA,
QJM_HA;
};
/**
* Run this suite of tests for {QJM-based, file-based HA} x {async
* edit logging enabled, disabled}.
*
* TODO: Enable the test cases with async edit logging on. See HDFS-12603
* and HDFS-12660.
*/
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[][]{
{TestType.SHARED_DIR_HA, Boolean.FALSE},
//{TestType.SHARED_DIR_HA, Boolean.TRUE},
{TestType.QJM_HA, Boolean.FALSE},
//{TestType.QJM_HA, Boolean.TRUE},
});
}
public TestFailureToReadEdits(TestType clusterType, Boolean
useAsyncEditLogging) {
this.clusterType = clusterType;
this.useAsyncEditLogging = useAsyncEditLogging;
}
@BeforeEach
public void setUpCluster() throws Exception {
conf = new Configuration();
conf.setInt(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_CHECK_PERIOD_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_NUM_CHECKPOINTS_RETAINED_KEY, 10);
conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_ASYNC_LOGGING,
useAsyncEditLogging);
HAUtil.setAllowStandbyReads(conf, true);
if (clusterType == TestType.SHARED_DIR_HA) {
int basePort = 10000;
int retryCount = 0;
while (true) {
try {
basePort = 10000 + RANDOM.nextInt(1000) * 4;
LOG.info("Set SHARED_DIR_HA cluster's basePort to " + basePort);
MiniDFSNNTopology topology =
MiniQJMHACluster.createDefaultTopology(basePort);
cluster = new MiniDFSCluster.Builder(conf)
.nnTopology(topology)
.numDataNodes(0)
.checkExitOnShutdown(false)
.build();
break;
} catch (BindException e) {
if (cluster != null) {
cluster.shutdown(true);
cluster = null;
}
++retryCount;
LOG.info("SHARED_DIR_HA: MiniQJMHACluster port conflicts, retried " +
retryCount + " times " + e);
}
}
} else {
Builder builder = new MiniQJMHACluster.Builder(conf);
builder.getDfsBuilder().numDataNodes(0).checkExitOnShutdown(false);
miniQjmHaCluster = builder.build();
cluster = miniQjmHaCluster.getDfsCluster();
}
cluster.waitActive();
nn0 = cluster.getNameNode(0);
nn1 = cluster.getNameNode(1);
cluster.transitionToActive(0);
fs = HATestUtil.configureFailoverFs(cluster, conf);
}
@AfterEach
public void tearDownCluster() throws Exception {
if (fs != null) {
fs.close();
fs = null;
}
if (clusterType == TestType.SHARED_DIR_HA) {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
} else {
if (miniQjmHaCluster != null) {
miniQjmHaCluster.shutdown();
miniQjmHaCluster = null;
}
}
}
/**
* Test that the standby NN won't double-replay earlier edits if it encounters
* a failure to read a later edit.
*/
@Test
public void testFailuretoReadEdits() throws Exception {
assertTrue(fs.mkdirs(new Path(TEST_DIR1)));
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
// If these two ops are applied twice, the first op will throw an
// exception the second time its replayed.
fs.setOwner(new Path(TEST_DIR1), "foo", "bar");
assertTrue(fs.delete(new Path(TEST_DIR1), true));
// This op should get applied just fine.
assertTrue(fs.mkdirs(new Path(TEST_DIR2)));
// This is the op the mocking will cause to fail to be read.
assertTrue(fs.mkdirs(new Path(TEST_DIR3)));
LimitedEditLogAnswer answer = causeFailureOnEditLogRead();
try {
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
fail("Standby fully caught up, but should not have been able to");
} catch (HATestUtil.CouldNotCatchUpException e) {
// Expected. The NN did not exit.
}
// Null because it was deleted.
assertNull(NameNodeAdapter.getFileInfo(nn1,
TEST_DIR1, false, false, false));
// Should have been successfully created.
assertTrue(NameNodeAdapter.getFileInfo(nn1,
TEST_DIR2, false, false, false).isDirectory());
// Null because it hasn't been created yet.
assertNull(NameNodeAdapter.getFileInfo(nn1,
TEST_DIR3, false, false, false));
// Now let the standby read ALL the edits.
answer.setThrowExceptionOnRead(false);
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
// Null because it was deleted.
assertNull(NameNodeAdapter.getFileInfo(nn1,
TEST_DIR1, false, false, false));
// Should have been successfully created.
assertTrue(NameNodeAdapter.getFileInfo(nn1,
TEST_DIR2, false, false, false).isDirectory());
// Should now have been successfully created.
assertTrue(NameNodeAdapter.getFileInfo(nn1,
TEST_DIR3, false, false, false).isDirectory());
}
/**
* Test the following case:
* 1. SBN is reading a finalized edits file when NFS disappears halfway
* through (or some intermittent error happens)
* 2. SBN performs a checkpoint and uploads it to the NN
* 3. NN receives a checkpoint that doesn't correspond to the end of any log
* segment
* 4. Both NN and SBN should be able to restart at this point.
*
* This is a regression test for HDFS-2766.
*/
@Test
public void testCheckpointStartingMidEditsFile() throws Exception {
assertTrue(fs.mkdirs(new Path(TEST_DIR1)));
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
// Once the standby catches up, it should notice that it needs to
// do a checkpoint and save one to its local directories.
HATestUtil.waitForCheckpoint(cluster, 1, ImmutableList.of(0, 5));
// It should also upload it back to the active.
HATestUtil.waitForCheckpoint(cluster, 0, ImmutableList.of(0, 5));
causeFailureOnEditLogRead();
assertTrue(fs.mkdirs(new Path(TEST_DIR2)));
assertTrue(fs.mkdirs(new Path(TEST_DIR3)));
try {
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
fail("Standby fully caught up, but should not have been able to");
} catch (HATestUtil.CouldNotCatchUpException e) {
// Expected. The NN did not exit.
}
// 5 because we should get OP_START_LOG_SEGMENT and one successful OP_MKDIR
HATestUtil.waitForCheckpoint(cluster, 1, ImmutableList.of(0, 5, 7));
// It should also upload it back to the active.
HATestUtil.waitForCheckpoint(cluster, 0, ImmutableList.of(0, 5, 7));
// Restart the active NN
cluster.restartNameNode(0);
HATestUtil.waitForCheckpoint(cluster, 0, ImmutableList.of(0, 5, 7));
FileSystem fs0 = null;
try {
// Make sure that when the active restarts, it loads all the edits.
fs0 = FileSystem.get(DFSUtilClient.getNNUri(nn0.getNameNodeAddress()),
conf);
assertTrue(fs0.exists(new Path(TEST_DIR1)));
assertTrue(fs0.exists(new Path(TEST_DIR2)));
assertTrue(fs0.exists(new Path(TEST_DIR3)));
} finally {
if (fs0 != null)
fs0.close();
}
}
/**
* Ensure that the standby fails to become active if it cannot read all
* available edits in the shared edits dir when it is transitioning to active
* state.
*/
@Test
public void testFailureToReadEditsOnTransitionToActive() throws Exception {
assertTrue(fs.mkdirs(new Path(TEST_DIR1)));
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
// It should also upload it back to the active.
HATestUtil.waitForCheckpoint(cluster, 0, ImmutableList.of(0, 5));
causeFailureOnEditLogRead();
assertTrue(fs.mkdirs(new Path(TEST_DIR2)));
assertTrue(fs.mkdirs(new Path(TEST_DIR3)));
try {
HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
fail("Standby fully caught up, but should not have been able to");
} catch (HATestUtil.CouldNotCatchUpException e) {
// Expected. The NN did not exit.
}
// Shutdown the active NN.
cluster.shutdownNameNode(0);
try {
// Transition the standby to active.
cluster.transitionToActive(1);
fail("Standby transitioned to active, but should not have been able to");
} catch (ExitException ee) {
GenericTestUtils.assertExceptionContains("Error replaying edit log", ee);
}
}
private LimitedEditLogAnswer causeFailureOnEditLogRead() throws IOException {
FSEditLog spyEditLog = NameNodeAdapterMockitoUtil.spyOnEditLog(nn1);
LimitedEditLogAnswer answer = new LimitedEditLogAnswer();
doAnswer(answer).when(spyEditLog).selectInputStreams(
anyLong(), anyLong(), any(), anyBoolean(), anyBoolean());
return answer;
}
private static
|
TestType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/PostgreSQLMultipleSchemaSequenceTest.java
|
{
"start": 6183,
"end": 6384
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "TEST")
@SequenceGenerator(name = "TEST", sequenceName = "SEQ_TEST", allocationSize=1)
public Integer id;
}
}
|
Box
|
java
|
netty__netty
|
handler/src/main/java/io/netty/handler/ssl/OpenSslX509KeyManagerFactory.java
|
{
"start": 4149,
"end": 6322
}
|
class ____ extends KeyManagerFactorySpi {
final KeyManagerFactory kmf;
private volatile ProviderFactory providerFactory;
OpenSslKeyManagerFactorySpi(KeyManagerFactory kmf) {
this.kmf = ObjectUtil.checkNotNull(kmf, "kmf");
}
@Override
protected synchronized void engineInit(KeyStore keyStore, char[] chars)
throws KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException {
if (providerFactory != null) {
throw new KeyStoreException("Already initialized");
}
if (!keyStore.aliases().hasMoreElements()) {
throw new KeyStoreException("No aliases found");
}
kmf.init(keyStore, chars);
providerFactory = new ProviderFactory(ReferenceCountedOpenSslContext.chooseX509KeyManager(
kmf.getKeyManagers()), password(chars), Collections.list(keyStore.aliases()));
}
private static String password(char[] password) {
if (password == null || password.length == 0) {
return null;
}
return new String(password);
}
@Override
protected void engineInit(ManagerFactoryParameters managerFactoryParameters)
throws InvalidAlgorithmParameterException {
throw new InvalidAlgorithmParameterException("Not supported");
}
@Override
protected KeyManager[] engineGetKeyManagers() {
ProviderFactory providerFactory = this.providerFactory;
if (providerFactory == null) {
throw new IllegalStateException("engineInit(...) not called yet");
}
return new KeyManager[] { providerFactory.keyManager };
}
OpenSslKeyMaterialProvider newProvider() {
ProviderFactory providerFactory = this.providerFactory;
if (providerFactory == null) {
throw new IllegalStateException("engineInit(...) not called yet");
}
return providerFactory.newProvider();
}
private static final
|
OpenSslKeyManagerFactorySpi
|
java
|
google__guice
|
core/test/com/google/inject/internal/ProxyFactoryTest.java
|
{
"start": 4996,
"end": 5830
}
|
class ____ {
final int i;
@Inject
public A(int i) {
this.i = i;
}
public void a() {}
}
@Test
public void testMultipleInterceptors()
throws NoSuchMethodException, InvocationTargetException, ErrorsException {
DoubleInterceptor doubleInterceptor = new DoubleInterceptor();
CountingInterceptor countingInterceptor = new CountingInterceptor();
aspects.add(new MethodAspect(any(), any(), doubleInterceptor, countingInterceptor));
ProxyFactory<Counter> factory =
new ProxyFactory<Counter>(InjectionPoint.forConstructorOf(Counter.class), aspects);
ConstructionProxy<Counter> constructor = factory.create();
Counter counter = constructor.newInstance();
counter.inc();
assertEquals(2, counter.count);
assertEquals(2, countingInterceptor.count);
}
static
|
A
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchFetchInstantiationTest.java
|
{
"start": 2646,
"end": 2952
}
|
class ____ {
@Id
private Long id;
@ManyToOne
private EntityB entityB;
public EntityA() {
}
public EntityA(Long id, EntityB entityB) {
this.id = id;
this.entityB = entityB;
}
public EntityB getEntityB() {
return entityB;
}
}
@Entity( name = "EntityB" )
public static
|
EntityA
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClient.java
|
{
"start": 73213,
"end": 82689
}
|
class ____ extends TimerTask {
TimerTaskImpl() {
runningTimerTask = this;
}
@Override
public void run() {
try {
if (timerOrchestrator(TimerFunctionality.SUSPEND, this)) {
try {
getMetricCall(getMetricTracingContext());
} finally {
abfsCounters.initializeMetrics(metricFormat);
}
}
} catch (IOException e) {
}
}
}
/**
* Creates an AbfsRestOperation with additional parameters for buffer and SAS token.
*
* @param operationType The type of the operation.
* @param httpMethod The HTTP method of the operation.
* @param url The URL associated with the operation.
* @param requestHeaders The list of HTTP headers for the request.
* @param buffer The byte buffer containing data for the operation.
* @param bufferOffset The offset within the buffer where the data starts.
* @param bufferLength The length of the data within the buffer.
* @param sasTokenForReuse The SAS token for reusing authentication.
* @return An AbfsRestOperation instance.
*/
AbfsRestOperation getAbfsRestOperation(final AbfsRestOperationType operationType,
final String httpMethod,
final URL url,
final List<AbfsHttpHeader> requestHeaders,
final byte[] buffer,
final int bufferOffset,
final int bufferLength,
final String sasTokenForReuse) {
return new AbfsRestOperation(
operationType,
this,
httpMethod,
url,
requestHeaders,
buffer,
bufferOffset,
bufferLength,
sasTokenForReuse,
abfsConfiguration);
}
/**
* Creates an AbfsRestOperation with basic parameters and no buffer or SAS token.
*
* @param operationType The type of the operation.
* @param httpMethod The HTTP method of the operation.
* @param url The URL associated with the operation.
* @param requestHeaders The list of HTTP headers for the request.
* @return An AbfsRestOperation instance.
*/
@VisibleForTesting
public AbfsRestOperation getAbfsRestOperation(final AbfsRestOperationType operationType,
final String httpMethod,
final URL url,
final List<AbfsHttpHeader> requestHeaders) {
return new AbfsRestOperation(
operationType,
this,
httpMethod,
url,
requestHeaders,
abfsConfiguration
);
}
/**
* Creates an AbfsRestOperation with parameters including request headers and SAS token.
*
* @param operationType The type of the operation.
* @param httpMethod The HTTP method of the operation.
* @param url The URL associated with the operation.
* @param requestHeaders The list of HTTP headers for the request.
* @param sasTokenForReuse The SAS token for reusing authentication.
* @return An AbfsRestOperation instance.
*/
AbfsRestOperation getAbfsRestOperation(final AbfsRestOperationType operationType,
final String httpMethod,
final URL url,
final List<AbfsHttpHeader> requestHeaders,
final String sasTokenForReuse) {
return new AbfsRestOperation(
operationType,
this,
httpMethod,
url,
requestHeaders, sasTokenForReuse, abfsConfiguration);
}
@VisibleForTesting
AbfsApacheHttpClient getAbfsApacheHttpClient() {
return abfsApacheHttpClient;
}
@VisibleForTesting
KeepAliveCache getKeepAliveCache() {
return keepAliveCache;
}
@VisibleForTesting
protected Timer getTimer() {
return timer;
}
protected String getUserAgent() {
return userAgent;
}
/**
* Checks if the namespace is enabled.
* Filesystem init will fail if namespace is not correctly configured,
* so instead of swallowing the exception, we should throw the exception
* in case namespace is not configured correctly.
*
* @return True if the namespace is enabled, false otherwise.
* @throws AzureBlobFileSystemException if the conversion fails.
*/
public boolean getIsNamespaceEnabled() throws AzureBlobFileSystemException {
try {
return getAbfsConfiguration().getIsNamespaceEnabledAccount().toBoolean();
} catch (TrileanConversionException ex) {
LOG.error("Failed to convert namespace enabled account property to boolean", ex);
throw new InvalidConfigurationValueException("Failed to determine account type", ex);
}
}
protected boolean isRenameResilience() {
return renameResilience;
}
/**
* Parses response of Listing API from server based on Endpoint used.
* @param result AbfsHttpOperation of list Operation.
* @param uri to be used for the path conversion.
* @return {@link ListResponseData} containing the list of entries.
* @throws IOException if parsing fails
*/
public abstract ListResponseData parseListPathResults(AbfsHttpOperation result, URI uri) throws IOException;
/**
* Parses response of Get Block List from server based on Endpoint used.
* @param stream InputStream of the response
* @return List of block IDs
* @throws IOException if parsing fails
*/
public abstract List<String> parseBlockListResponse(InputStream stream) throws IOException;
/**
* Parses response from ErrorStream returned by server based on Endpoint used.
* @param stream InputStream of the response
* @return StorageErrorResponseSchema
* @throws IOException if parsing fails
*/
public abstract StorageErrorResponseSchema processStorageErrorResponse(InputStream stream) throws IOException;
/**
* Returns user-defined metadata from server response based on Endpoint used.
* @param result response from server
* @return user-defined metadata key-value pairs
* @throws InvalidFileSystemPropertyException if parsing fails
* @throws InvalidAbfsRestOperationException if parsing fails
*/
public abstract Hashtable<String, String> getXMSProperties(AbfsHttpOperation result)
throws InvalidFileSystemPropertyException,
InvalidAbfsRestOperationException;
/**
* Encode attribute with encoding based on Endpoint used.
* @param value to be encoded
* @return encoded value
* @throws UnsupportedEncodingException if encoding fails
*/
public abstract byte[] encodeAttribute(String value) throws UnsupportedEncodingException;
/**
* Decode attribute with decoding based on Endpoint used.
* @param value to be decoded
* @return decoded value
* @throws UnsupportedEncodingException if decoding fails
*/
public abstract String decodeAttribute(byte[] value) throws UnsupportedEncodingException;
/**
* Get the dummy success operation.
* @param operationType type of the operation
* @param httpMethod http method
* @param url url to be used
* @param requestHeaders list of headers to be sent with the request
* @return success operation
*/
protected AbfsRestOperation getSuccessOp(final AbfsRestOperationType operationType,
final String httpMethod, final URL url,
final List<AbfsHttpHeader> requestHeaders) {
final AbfsRestOperation successOp = getAbfsRestOperation(
operationType, httpMethod, url, requestHeaders);
successOp.hardSetResult(HttpURLConnection.HTTP_OK);
return successOp;
}
/**
* Creates a VersionedFileStatus object from the ListResultEntrySchema.
* @param entry ListResultEntrySchema object.
* @param uri to be used for the path conversion.
* @return VersionedFileStatus object.
* @throws AzureBlobFileSystemException if transformation fails.
*/
protected VersionedFileStatus getVersionedFileStatusFromEntry(
ListResultEntrySchema entry, URI uri) throws AzureBlobFileSystemException {
long blockSize = abfsConfiguration.getAzureBlockSize();
String owner = null, group = null;
try{
if (identityTransformer != null) {
owner = identityTransformer.transformIdentityForGetRequest(entry.owner(),
true, userName);
group = identityTransformer.transformIdentityForGetRequest(entry.group(),
false, primaryUserGroup);
}
} catch (IOException ex) {
LOG.error("Failed to get owner/group for path {}", entry.name(), ex);
throw new AbfsDriverException(ex);
}
final String encryptionContext = entry.getXMsEncryptionContext();
final FsPermission fsPermission = entry.permissions() == null
? new AbfsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)
: AbfsPermission.valueOf(entry.permissions());
final boolean hasAcl = AbfsPermission.isExtendedAcl(entry.permissions());
long lastModifiedMillis = 0;
long contentLength = entry.contentLength() == null ? 0 : entry.contentLength();
boolean isDirectory = entry.isDirectory() != null && entry.isDirectory();
if (entry.lastModified() != null && !entry.lastModified().isEmpty()) {
lastModifiedMillis = DateTimeUtils.parseLastModifiedTime(
entry.lastModified());
}
Path entryPath = new Path(File.separator + entry.name());
if (uri != null) {
entryPath = entryPath.makeQualified(uri, entryPath);
}
return new VersionedFileStatus(
owner,
group,
fsPermission,
hasAcl,
contentLength,
isDirectory,
1,
blockSize,
lastModifiedMillis,
entryPath,
entry.eTag(),
encryptionContext);
}
}
|
TimerTaskImpl
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/socket/ServerSocketChannel.java
|
{
"start": 838,
"end": 1053
}
|
interface ____ extends ServerChannel {
@Override
ServerSocketChannelConfig config();
@Override
InetSocketAddress localAddress();
@Override
InetSocketAddress remoteAddress();
}
|
ServerSocketChannel
|
java
|
alibaba__nacos
|
client/src/test/java/com/alibaba/nacos/client/config/filter/impl/ConfigFilterChainTest.java
|
{
"start": 824,
"end": 1615
}
|
class ____ {
@Test
void testConfigFilterChain() {
ConfigFilterChainManager configFilterChainManager = new ConfigFilterChainManager(null);
configFilterChainManager.addFilter(new DemoFilter1());
configFilterChainManager.addFilter(new DemoFilter2());
ConfigRequest configRequest = new ConfigRequest();
ConfigResponse configResponse = new ConfigResponse();
try {
configFilterChainManager.doFilter(configRequest, configResponse);
assertEquals(DemoFilter1.class.getName(), configRequest.getParameter("filter1"));
assertEquals(DemoFilter2.class.getName(), configRequest.getParameter("filter2"));
} catch (NacosException e) {
e.printStackTrace();
}
}
}
|
ConfigFilterChainTest
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/DefaultQueryUtilsUnitTests.java
|
{
"start": 1459,
"end": 19701
}
|
class ____ {
private static final String QUERY = "select u from User u";
private static final String FQ_QUERY = "select u from org.acme.domain.User$Foo_Bar u";
private static final String SIMPLE_QUERY = "from User u";
private static final String COUNT_QUERY = "select count(u) from User u";
private static final String QUERY_WITH_AS = "select u from User as u where u.username = ?";
@Test
void createsCountQueryCorrectly() {
assertCountQuery(QUERY, COUNT_QUERY);
}
@Test
void createsCountQueriesCorrectlyForCapitalLetterJPQL() {
assertCountQuery("FROM User u WHERE u.foo.bar = ?", "select count(u) FROM User u WHERE u.foo.bar = ?");
assertCountQuery("SELECT u FROM User u where u.foo.bar = ?", "select count(u) FROM User u where u.foo.bar = ?");
}
@Test
void createsCountQueryForDistinctQueries() {
assertCountQuery("select distinct u from User u where u.foo = ?",
"select count(distinct u) from User u where u.foo = ?");
}
@Test // GH-2812
void createsCountQueryForDeleteQuery() {
String result = createCountQueryFor("delete from some_table where id in :ids", null, true);
// ح(•̀ж•́)ง †
assertThat(result).isEqualTo("deleteselect count(where) from some_table where id in :ids");
}
@Test
void createsCountQueryForConstructorQueries() {
assertCountQuery("select distinct new User(u.name) from User u where u.foo = ?",
"select count(distinct u) from User u where u.foo = ?");
}
@Test
void createsCountQueryForJoins() {
assertCountQuery("select distinct new User(u.name) from User u left outer join u.roles r WHERE r = ?",
"select count(distinct u) from User u left outer join u.roles r WHERE r = ?");
}
@Test // GH-1869
void createsCountQueryForJoinsWithTwoArgs() {
assertCountQuery("select distinct new User(u.name, u.age) from User u left outer join u.roles r WHERE r = ?",
"select count(distinct u) from User u left outer join u.roles r WHERE r = ?");
}
@Test // GH-1869
void createsCountQueryForDtoWithOneArg() {
assertCountQuery(
"SELECT new org.springframework.data.jpa.repository.sample.FirstNameDto(u.firstname) from User u where u.firstname = ?",
"select count(u) from User u where u.firstname = ?");
}
@Test // GH-1869
void createsCountQueryForDtoWithTwoArgs() {
assertCountQuery(
"SELECT new org.springframework.data.jpa.repository.sample.NameOnlyDto(u.firstname, u.lastname) from User u where u.firstname = ?",
"select count(u) from User u where u.firstname = ?");
}
@Test
void createsCountQueryForQueriesWithSubSelects() {
assertCountQuery("select u from User u left outer join u.roles r where r in (select r from Role)",
"select count(u) from User u left outer join u.roles r where r in (select r from Role)");
}
@Test
void createsCountQueryForAliasesCorrectly() {
assertCountQuery("select u from User as u", "select count(u) from User as u");
}
@Test
void allowsShortJpaSyntax() {
assertCountQuery(SIMPLE_QUERY, COUNT_QUERY);
}
@Test
void detectsAliasCorrectly() {
assertThat(detectAlias(QUERY)).isEqualTo("u");
assertThat(detectAlias(SIMPLE_QUERY)).isEqualTo("u");
assertThat(detectAlias(COUNT_QUERY)).isEqualTo("u");
assertThat(detectAlias(QUERY_WITH_AS)).isEqualTo("u");
assertThat(detectAlias("SELECT FROM USER U")).isEqualTo("U");
assertThat(detectAlias("select u from User u")).isEqualTo("u");
assertThat(detectAlias("select u from com.acme.User u")).isEqualTo("u");
assertThat(detectAlias("select u from T05User u")).isEqualTo("u");
}
@Test
void allowsFullyQualifiedEntityNamesInQuery() {
assertThat(detectAlias(FQ_QUERY)).isEqualTo("u");
assertCountQuery(FQ_QUERY, "select count(u) from org.acme.domain.User$Foo_Bar u");
}
@Test // DATAJPA-252
void detectsJoinAliasesCorrectly() {
Set<String> aliases = getOuterJoinAliases("select p from Person p left outer join x.foo b2_$ar where …");
assertThat(aliases).hasSize(1);
assertThat(aliases).contains("b2_$ar");
aliases = getOuterJoinAliases("select p from Person p left join x.foo b2_$ar where …");
assertThat(aliases).hasSize(1);
assertThat(aliases).contains("b2_$ar");
aliases = getOuterJoinAliases(
"select p from Person p left outer join x.foo as b2_$ar, left join x.bar as foo where …");
assertThat(aliases).hasSize(2);
assertThat(aliases).contains("b2_$ar", "foo");
aliases = getOuterJoinAliases(
"select p from Person p left join x.foo as b2_$ar, left outer join x.bar foo where …");
assertThat(aliases).hasSize(2);
assertThat(aliases).contains("b2_$ar", "foo");
}
@Test // DATAJPA-252
void doesNotPrefixOrderReferenceIfOuterJoinAliasDetected() {
String query = "select p from Person p left join p.address address";
assertThat(applySorting(query, Sort.by("address.city"))).endsWith("order by address.city asc");
assertThat(applySorting(query, Sort.by("address.city", "lastname"), "p"))
.endsWith("order by address.city asc, p.lastname asc");
}
@Test // DATAJPA-252
void extendsExistingOrderByClausesCorrectly() {
String query = "select p from Person p order by p.lastname asc";
assertThat(applySorting(query, Sort.by("firstname"), "p")).endsWith("order by p.lastname asc, p.firstname asc");
}
@Test // DATAJPA-296
void appliesIgnoreCaseOrderingCorrectly() {
Sort sort = Sort.by(Order.by("firstname").ignoreCase());
String query = "select p from Person p";
assertThat(applySorting(query, sort, "p")).endsWith("order by lower(p.firstname) asc");
}
@Test // DATAJPA-296
void appendsIgnoreCaseOrderingCorrectly() {
Sort sort = Sort.by(Order.by("firstname").ignoreCase());
String query = "select p from Person p order by p.lastname asc";
assertThat(applySorting(query, sort, "p")).endsWith("order by p.lastname asc, lower(p.firstname) asc");
}
@Test // DATAJPA-342
void usesReturnedVariableInCOuntProjectionIfSet() {
assertCountQuery("select distinct m.genre from Media m where m.user = ?1 order by m.genre asc",
"select count(distinct m.genre) from Media m where m.user = ?1");
}
@Test // DATAJPA-343
void projectsCOuntQueriesForQueriesWithSubselects() {
assertCountQuery("select o from Foo o where cb.id in (select b from Bar b)",
"select count(o) from Foo o where cb.id in (select b from Bar b)");
}
@Test // DATAJPA-148
void doesNotPrefixSortsIfFunction() {
Sort sort = Sort.by("sum(foo)");
assertThatExceptionOfType(InvalidDataAccessApiUsageException.class)
.isThrownBy(() -> applySorting("select p from Person p", sort, "p"));
}
@Test // DATAJPA-377
void removesOrderByInGeneratedCountQueryFromOriginalQueryIfPresent() {
assertCountQuery("select distinct m.genre from Media m where m.user = ?1 OrDer By m.genre ASC",
"select count(distinct m.genre) from Media m where m.user = ?1");
}
@Test // DATAJPA-375
void findsExistingOrderByIndependentOfCase() {
Sort sort = Sort.by("lastname");
String query = applySorting("select p from Person p ORDER BY p.firstname", sort, "p");
assertThat(query).endsWith("ORDER BY p.firstname, p.lastname asc");
}
@Test // DATAJPA-409
void createsCountQueryForNestedReferenceCorrectly() {
assertCountQuery("select a.b from A a", "select count(a.b) from A a");
}
@Test // DATAJPA-420
void createsCountQueryForScalarSelects() {
assertCountQuery("select p.lastname,p.firstname from Person p", "select count(p) from Person p");
}
@Test // DATAJPA-456
void createCountQueryFromTheGivenCountProjection() {
assertThat(createCountQueryFor("select p.lastname,p.firstname from Person p", "p.lastname"))
.isEqualTo("select count(p.lastname) from Person p");
}
@Test // DATAJPA-726
void detectsAliassesInPlainJoins() {
String query = "select p from Customer c join c.productOrder p where p.delayed = true";
Sort sort = Sort.by("p.lineItems");
assertThat(applySorting(query, sort, "c")).endsWith("order by p.lineItems asc");
}
@Test // DATAJPA-736
void supportsNonAsciiCharactersInEntityNames() {
assertThat(createCountQueryFor("select u from Usèr u")).isEqualTo("select count(u) from Usèr u");
}
@Test // DATAJPA-798
void detectsAliasInQueryContainingLineBreaks() {
assertThat(detectAlias("select \n u \n from \n User \nu")).isEqualTo("u");
}
@Test // DATAJPA-815
void doesPrefixPropertyWith() {
String query = "from Cat c join Dog d";
Sort sort = Sort.by("dPropertyStartingWithJoinAlias");
assertThat(applySorting(query, sort, "c")).endsWith("order by c.dPropertyStartingWithJoinAlias asc");
}
@Test // DATAJPA-938
void detectsConstructorExpressionInDistinctQuery() {
assertThat(hasConstructorExpression("select distinct new Foo() from Bar b")).isTrue();
}
@Test // DATAJPA-938
void detectsComplexConstructorExpression() {
assertThat(hasConstructorExpression("select new foo.bar.Foo(ip.id, ip.name, sum(lp.amount)) " //
+ "from Bar lp join lp.investmentProduct ip " //
+ "where (lp.toDate is null and lp.fromDate <= :now and lp.fromDate is not null) and lp.accountId = :accountId " //
+ "group by ip.id, ip.name, lp.accountId " //
+ "order by ip.name ASC")).isTrue();
}
@Test // DATAJPA-938
void detectsConstructorExpressionWithLineBreaks() {
assertThat(hasConstructorExpression("select new foo.bar.FooBar(\na.id) from DtoA a ")).isTrue();
}
@Test // DATAJPA-960
void doesNotQualifySortIfNoAliasDetected() {
assertThat(applySorting("from mytable where ?1 is null", Sort.by("firstname"))).endsWith("order by firstname asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotAllowWhitespaceInSort() {
Sort sort = Sort.by("case when foo then bar");
assertThatExceptionOfType(InvalidDataAccessApiUsageException.class)
.isThrownBy(() -> applySorting("select p from Person p", sort, "p"));
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixUnsageJpaSortFunctionCalls() {
JpaSort sort = JpaSort.unsafe("sum(foo)");
assertThat(applySorting("select p from Person p", sort, "p")).endsWith("order by sum(foo) asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixMultipleAliasedFunctionCalls() {
String query = "SELECT AVG(m.price) AS avgPrice, SUM(m.stocks) AS sumStocks FROM Magazine m";
Sort sort = Sort.by("avgPrice", "sumStocks");
assertThat(applySorting(query, sort, "m")).endsWith("order by avgPrice asc, sumStocks asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixSingleAliasedFunctionCalls() {
String query = "SELECT AVG(m.price) AS avgPrice FROM Magazine m";
Sort sort = Sort.by("avgPrice");
assertThat(applySorting(query, sort, "m")).endsWith("order by avgPrice asc");
}
@Test // DATAJPA-965, DATAJPA-970
void prefixesSingleNonAliasedFunctionCallRelatedSortProperty() {
String query = "SELECT AVG(m.price) AS avgPrice FROM Magazine m";
Sort sort = Sort.by("someOtherProperty");
assertThat(applySorting(query, sort, "m")).endsWith("order by m.someOtherProperty asc");
}
@Test // DATAJPA-965, DATAJPA-970
void prefixesNonAliasedFunctionCallRelatedSortPropertyWhenSelectClauseContainsAliasedFunctionForDifferentProperty() {
String query = "SELECT m.name, AVG(m.price) AS avgPrice FROM Magazine m";
Sort sort = Sort.by("name", "avgPrice");
assertThat(applySorting(query, sort, "m")).endsWith("order by m.name asc, avgPrice asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixAliasedFunctionCallNameWithMultipleNumericParameters() {
String query = "SELECT SUBSTRING(m.name, 2, 5) AS trimmedName FROM Magazine m";
Sort sort = Sort.by("trimmedName");
assertThat(applySorting(query, sort, "m")).endsWith("order by trimmedName asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixAliasedFunctionCallNameWithMultipleStringParameters() {
String query = "SELECT CONCAT(m.name, 'foo') AS extendedName FROM Magazine m";
Sort sort = Sort.by("extendedName");
assertThat(applySorting(query, sort, "m")).endsWith("order by extendedName asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixAliasedFunctionCallNameWithUnderscores() {
String query = "SELECT AVG(m.price) AS avg_price FROM Magazine m";
Sort sort = Sort.by("avg_price");
assertThat(applySorting(query, sort, "m")).endsWith("order by avg_price asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixAliasedFunctionCallNameWithDots() {
String query = "SELECT AVG(m.price) AS m.avg FROM Magazine m";
Sort sort = Sort.by("m.avg");
assertThat(applySorting(query, sort, "m")).endsWith("order by m.avg asc");
}
@Test // DATAJPA-965, DATAJPA-970
void doesNotPrefixAliasedFunctionCallNameWhenQueryStringContainsMultipleWhiteSpaces() {
String query = "SELECT AVG( m.price ) AS avgPrice FROM Magazine m";
Sort sort = Sort.by("avgPrice");
assertThat(applySorting(query, sort, "m")).endsWith("order by avgPrice asc");
}
@Test // DATAJPA-1000
void discoversCorrectAliasForJoinFetch() {
Set<String> aliases = QueryUtils
.getOuterJoinAliases("SELECT DISTINCT user FROM User user LEFT JOIN FETCH user.authorities AS authority");
assertThat(aliases).containsExactly("authority");
}
@Test // DATAJPA-1171
void doesNotContainStaticClauseInExistsQuery() {
assertThat(QueryUtils.getExistsQueryString("entity", "x", Collections.singleton("id"))) //
.endsWith("WHERE x.id = :id");
}
@Test // DATAJPA-1363
void discoversAliasWithComplexFunction() {
assertThat(
QueryUtils.getFunctionAliases("select new MyDto(sum(case when myEntity.prop3=0 then 1 else 0 end) as myAlias")) //
.contains("myAlias");
}
@Test // DATAJPA-1506
void detectsAliasWithGroupAndOrderBy() {
assertThat(detectAlias("select * from User group by name")).isNull();
assertThat(detectAlias("select * from User order by name")).isNull();
assertThat(detectAlias("select * from User u group by name")).isEqualTo("u");
assertThat(detectAlias("select * from User u order by name")).isEqualTo("u");
}
@Test // DATAJPA-1500
void createCountQuerySupportsWhitespaceCharacters() {
assertThat(createCountQueryFor("select * from User user\n" + //
" where user.age = 18\n" + //
" order by user.name\n ")).isEqualTo("select count(user) from User user\n" + //
" where user.age = 18");
}
@Test // GH-3329
void createCountQuerySupportsNewLineCharacters() {
assertThat(createCountQueryFor("select * from User user\n" + //
" where user.age = 18\n" + //
" order by user.name,\n user.age DESC")).isEqualTo("select count(user) from User user\n" + //
" where user.age = 18");
}
@Test
void createCountQuerySupportsLineBreaksInSelectClause() {
assertThat(createCountQueryFor("select user.age,\n" + //
" user.name\n" + //
" from User user\n" + //
" where user.age = 18\n" + //
" order\nby\nuser.name\n ")).isEqualTo("select count(user) from User user\n" + //
" where user.age = 18");
}
@Test // DATAJPA-1061
void appliesSortCorrectlyForFieldAliases() {
String query = "SELECT m.price, lower(m.title) AS title, a.name as authorName FROM Magazine m INNER JOIN m.author a";
Sort sort = Sort.by("authorName");
String fullQuery = applySorting(query, sort);
assertThat(fullQuery).endsWith("order by authorName asc");
}
@Test // GH-2280
void appliesOrderingCorrectlyForFieldAliasWithIgnoreCase() {
String query = "SELECT customer.id as id, customer.name as name FROM CustomerEntity customer";
Sort sort = Sort.by(Order.by("name").ignoreCase());
String fullQuery = applySorting(query, sort);
assertThat(fullQuery).isEqualTo(
"SELECT customer.id as id, customer.name as name FROM CustomerEntity customer order by lower(name) asc");
}
@Test // DATAJPA-1061
void appliesSortCorrectlyForFunctionAliases() {
String query = "SELECT m.price, lower(m.title) AS title, a.name as authorName FROM Magazine m INNER JOIN m.author a";
Sort sort = Sort.by("title");
String fullQuery = applySorting(query, sort);
assertThat(fullQuery).endsWith("order by title asc");
}
@Test // DATAJPA-1061
void appliesSortCorrectlyForSimpleField() {
String query = "SELECT m.price, lower(m.title) AS title, a.name as authorName FROM Magazine m INNER JOIN m.author a";
Sort sort = Sort.by("price");
String fullQuery = applySorting(query, sort);
assertThat(fullQuery).endsWith("order by m.price asc");
}
@Test
void createCountQuerySupportsLineBreakRightAfterDistinct() {
assertThat(createCountQueryFor("select\ndistinct\nuser.age,\n" + //
"user.name\n" + //
"from\nUser\nuser")).isEqualTo(createCountQueryFor("select\ndistinct user.age,\n" + //
"user.name\n" + //
"from\nUser\nuser"));
}
@Test
void detectsAliasWithGroupAndOrderByWithLineBreaks() {
assertThat(detectAlias("select * from User group\nby name")).isNull();
assertThat(detectAlias("select * from User order\nby name")).isNull();
assertThat(detectAlias("select * from User u group\nby name")).isEqualTo("u");
assertThat(detectAlias("select * from User u order\nby name")).isEqualTo("u");
assertThat(detectAlias("select * from User\nu\norder \n by name")).isEqualTo("u");
}
@Test // DATAJPA-1679
void findProjectionClauseWithDistinct() {
SoftAssertions.assertSoftly(sofly -> {
sofly.assertThat(QueryUtils.getProjection("select * from x")).isEqualTo("*");
sofly.assertThat(QueryUtils.getProjection("select a, b, c from x")).isEqualTo("a, b, c");
sofly.assertThat(QueryUtils.getProjection("select distinct a, b, c from x")).isEqualTo("a, b, c");
sofly.assertThat(QueryUtils.getProjection("select DISTINCT a, b, c from x")).isEqualTo("a, b, c");
});
}
@Test // DATAJPA-1696
void findProjectionClauseWithSubselect() {
// This is not a required behavior, in fact the opposite is,
// but it documents a current limitation.
// to fix this without breaking findProjectionClauseWithIncludedFrom we need a more sophisticated parser.
assertThat(QueryUtils.getProjection("select * from (select x from y)")).isNotEqualTo("*");
}
@Test // DATAJPA-1696
void findProjectionClauseWithIncludedFrom() {
assertThat(QueryUtils.getProjection("select x, frommage, y from t")).isEqualTo("x, frommage, y");
}
private static void assertCountQuery(String originalQuery, String countQuery) {
assertThat(createCountQueryFor(originalQuery)).isEqualTo(countQuery);
}
}
|
DefaultQueryUtilsUnitTests
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/dependencyinjection/code/javaguide/di/field/MyComponent.java
|
{
"start": 251,
"end": 316
}
|
class ____ {
@Inject WSClient ws;
// ...
}
// #field
|
MyComponent
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/IntervalSchedule.java
|
{
"start": 2197,
"end": 4012
}
|
class ____ implements Schedule.Parser<IntervalSchedule> {
@Override
public String type() {
return TYPE;
}
@Override
public IntervalSchedule parse(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
try {
if (token == XContentParser.Token.VALUE_NUMBER) {
return new IntervalSchedule(Interval.seconds(parser.longValue()));
}
if (token == XContentParser.Token.VALUE_STRING) {
String value = parser.text();
return new IntervalSchedule(Interval.parse(value));
}
} catch (Exception e) {
throw new ElasticsearchParseException("could not parse schedule: {}", e, e.getMessage());
}
throw new ElasticsearchParseException(
"could not parse [{}] schedule. expected either a numeric value "
+ "(millis) or a string value representing time value (e.g. '5s'), but found [{}]",
TYPE,
token
);
}
}
/**
* Represents a time interval. Ideally we would have used TimeValue here, but we don't because:
* 1. We should limit the time values that the user can configure (we don't want to support nanos & millis
* 2. TimeValue formatting & parsing is inconsistent (it doesn't format to a value that it can parse)
* 3. The equals of TimeValue is odd - it will only equate two time values that have the exact same unit & duration,
* this interval on the other hand, equates based on the millis value.
* 4. We have the advantage of making this interval construct a ToXContent
*/
public static
|
Parser
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnPropertyTests.java
|
{
"start": 11096,
"end": 11307
}
|
class ____ {
@Bean
String foo() {
return "foo";
}
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnProperty(prefix = "spring", name = "property")
static
|
RelaxedPropertiesRequiredConfiguration
|
java
|
quarkusio__quarkus
|
extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ImplicitSimpleKeyCacheTest.java
|
{
"start": 513,
"end": 3454
}
|
class ____ {
private static final Object KEY_1 = new Object();
private static final Object KEY_2 = new Object();
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest().withApplicationRoot(jar -> jar.addClass(CachedService.class));
@Inject
CachedService cachedService;
@Test
public void testAllCacheAnnotations() {
// STEP 1
// Action: @CacheResult-annotated method call.
// Expected effect: method invoked and result cached.
// Verified by: STEP 2.
String value1 = cachedService.cachedMethod(KEY_1);
// STEP 2
// Action: same call as STEP 1.
// Expected effect: method not invoked and result coming from the cache.
// Verified by: same object reference between STEPS 1 and 2 results.
String value2 = cachedService.cachedMethod(KEY_1);
assertTrue(value1 == value2);
// STEP 3
// Action: same call as STEP 2 with a new key.
// Expected effect: method invoked and result cached.
// Verified by: different objects references between STEPS 2 and 3 results.
String value3 = cachedService.cachedMethod(KEY_2);
assertTrue(value2 != value3);
// STEP 4
// Action: cache entry invalidation.
// Expected effect: STEP 2 cache entry removed.
// Verified by: STEP 5.
cachedService.invalidate(KEY_1);
// STEP 5
// Action: same call as STEP 2.
// Expected effect: method invoked because of STEP 4 and result cached.
// Verified by: different objects references between STEPS 2 and 5 results.
String value5 = cachedService.cachedMethod(KEY_1);
assertTrue(value2 != value5);
// STEP 6
// Action: same call as STEP 3.
// Expected effect: method not invoked and result coming from the cache.
// Verified by: same object reference between STEPS 3 and 6 results.
String value6 = cachedService.cachedMethod(KEY_2);
assertTrue(value3 == value6);
// STEP 7
// Action: full cache invalidation.
// Expected effect: empty cache.
// Verified by: STEPS 8 and 9.
cachedService.invalidateAll();
// STEP 8
// Action: same call as STEP 5.
// Expected effect: method invoked because of STEP 7 and result cached.
// Verified by: different objects references between STEPS 5 and 8 results.
String value8 = cachedService.cachedMethod(KEY_1);
assertTrue(value5 != value8);
// STEP 9
// Action: same call as STEP 6.
// Expected effect: method invoked because of STEP 7 and result cached.
// Verified by: different objects references between STEPS 6 and 9 results.
String value9 = cachedService.cachedMethod(KEY_2);
assertTrue(value6 != value9);
}
@ApplicationScoped
static
|
ImplicitSimpleKeyCacheTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/context/annotation/AutowiredMixedCglibConfiguration.java
|
{
"start": 939,
"end": 1282
}
|
class ____ {
@Value("${world:World}")
private String world;
private final Environment environment;
public AutowiredMixedCglibConfiguration(Environment environment) {
this.environment = environment;
}
@Bean
public String text() {
return this.environment.getProperty("hello") + " " + this.world;
}
}
|
AutowiredMixedCglibConfiguration
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/InterfaceTest.java
|
{
"start": 752,
"end": 826
}
|
interface ____ {
@JSONField(name="ID")
int getId();
}
public static
|
IA
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateSerializationTests.java
|
{
"start": 567,
"end": 1636
}
|
class ____ extends AbstractExpressionSerializationTests<Locate> {
@Override
protected Locate createTestInstance() {
Source source = randomSource();
Expression str = randomChild();
Expression substr = randomChild();
Expression start = randomChild();
return new Locate(source, str, substr, start);
}
@Override
protected Locate mutateInstance(Locate instance) throws IOException {
Source source = instance.source();
Expression str = instance.str();
Expression substr = instance.substr();
Expression start = instance.start();
switch (between(0, 2)) {
case 0 -> str = randomValueOtherThan(str, AbstractExpressionSerializationTests::randomChild);
case 1 -> substr = randomValueOtherThan(substr, AbstractExpressionSerializationTests::randomChild);
case 2 -> start = randomValueOtherThan(start, AbstractExpressionSerializationTests::randomChild);
}
return new Locate(source, str, substr, start);
}
}
|
LocateSerializationTests
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/CallableStatmentTest.java
|
{
"start": 1054,
"end": 3124
}
|
class ____ extends PoolTestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
super.setUp();
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setTestOnBorrow(false);
dataSource.setPoolPreparedStatements(true);
dataSource.setMaxOpenPreparedStatements(100);
dataSource.setFilters("log4j");
dataSource.setDriver(new MockDriver() {
public MockCallableStatement createMockCallableStatement(MockConnection conn, String sql) {
return new MyMockCallableStatement(conn, sql);
}
});
}
protected void tearDown() throws Exception {
dataSource.close();
super.tearDown();
}
public void test_connect() throws Exception {
MockCallableStatement rawStmt = null;
MockResultSet rawRs = null;
{
Connection conn = dataSource.getConnection();
CallableStatement stmt = conn.prepareCall("select 1");
stmt.execute();
rawStmt = stmt.unwrap(MockCallableStatement.class);
ResultSet rs = (ResultSet) stmt.getObject(0);
rawRs = rs.unwrap(MockResultSet.class);
rs.next();
rs.close();
stmt.close();
assertFalse(rawStmt.isClosed());
assertTrue(rawRs.isClosed());
rawRs = rs.unwrap(MockResultSet.class);
assertNotNull(rawRs);
conn.close();
}
{
Connection conn = dataSource.getConnection();
CallableStatement stmt = conn.prepareCall("select 1");
stmt.execute();
assertSame(rawStmt, stmt.unwrap(MockCallableStatement.class));
assertFalse(rawStmt.isClosed());
stmt.getObject(0);
ResultSet rs = (ResultSet) stmt.getObject(0);
rs.next();
rs.close();
stmt.close();
conn.close();
}
}
public static
|
CallableStatmentTest
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java
|
{
"start": 10206,
"end": 10600
}
|
enum ____ implements Function<Completable, Mono<?>> {
INSTANCE;
@Override
public Mono<?> apply(Completable source) {
return Mono.from(RxJava1CompletableToPublisherAdapter.INSTANCE.apply(source));
}
}
/**
* An adapter {@link Function} to adopt an {@link Observable} to {@link Publisher}.
*/
public
|
RxJava1CompletableToMonoAdapter
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java
|
{
"start": 3274,
"end": 14524
}
|
class ____ {
private static final String MEMORY_ID = "MEMORY";
private static final String CPU_ID = "CPU";
private NMTimelinePublisher publisher;
private DummyTimelineClient timelineClient;
private Configuration conf;
private DrainDispatcher dispatcher;
@BeforeEach
public void setup() throws Exception {
conf = new Configuration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.setLong(YarnConfiguration.ATS_APP_COLLECTOR_LINGER_PERIOD_IN_MS,
3000L);
conf.setBoolean(YarnConfiguration.NM_PUBLISH_CONTAINER_EVENTS_ENABLED,
true);
timelineClient = new DummyTimelineClient(null);
Context context = createMockContext();
dispatcher = new DrainDispatcher();
publisher = new NMTimelinePublisher(context) {
public void createTimelineClient(ApplicationId appId) {
if (!getAppToClientMap().containsKey(appId)) {
timelineClient.init(getConfig());
timelineClient.start();
getAppToClientMap().put(appId, timelineClient);
}
}
@Override protected AsyncDispatcher createDispatcher() {
return dispatcher;
}
};
publisher.init(conf);
publisher.start();
}
private Context createMockContext() {
Context context = mock(Context.class);
when(context.getNodeId()).thenReturn(NodeId.newInstance("localhost", 0));
ConcurrentMap<ContainerId, Container> containers =
new ConcurrentHashMap<>();
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
Container container = mock(Container.class);
when(container.getContainerStartTime())
.thenReturn(System.currentTimeMillis());
containers.putIfAbsent(cId, container);
when(context.getContainers()).thenReturn(containers);
return context;
}
@AfterEach
public void tearDown() throws Exception {
if (publisher != null) {
publisher.stop();
}
if (timelineClient != null) {
timelineClient.stop();
}
}
@Test public void testPublishContainerFinish() throws Exception {
ApplicationId appId = ApplicationId.newInstance(0, 2);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
String diag = "test-diagnostics";
int exitStatus = 0;
ContainerStatus cStatus = mock(ContainerStatus.class);
when(cStatus.getContainerId()).thenReturn(cId);
when(cStatus.getDiagnostics()).thenReturn(diag);
when(cStatus.getExitStatus()).thenReturn(exitStatus);
long timeStamp = System.currentTimeMillis();
ApplicationContainerFinishedEvent finishedEvent =
new ApplicationContainerFinishedEvent(cStatus, timeStamp);
publisher.createTimelineClient(appId);
publisher.publishApplicationEvent(finishedEvent);
publisher.stopTimelineClient(appId);
dispatcher.await();
ContainerEntity cEntity = new ContainerEntity();
cEntity.setId(cId.toString());
TimelineEntity[] lastPublishedEntities =
timelineClient.getLastPublishedEntities();
assertNotNull(lastPublishedEntities);
assertEquals(1, lastPublishedEntities.length);
TimelineEntity entity = lastPublishedEntities[0];
assertTrue(cEntity.equals(entity));
assertEquals(diag,
entity.getInfo().get(ContainerMetricsConstants.DIAGNOSTICS_INFO));
assertEquals(exitStatus,
entity.getInfo().get(ContainerMetricsConstants.EXIT_STATUS_INFO));
assertEquals(TimelineServiceHelper.invertLong(
cId.getContainerId()), entity.getIdPrefix());
}
@Test
public void testPublishContainerPausedEvent() {
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
ContainerEvent containerEvent =
new ContainerPauseEvent(cId, "test pause");
publisher.createTimelineClient(appId);
publisher.publishContainerEvent(containerEvent);
publisher.stopTimelineClient(appId);
dispatcher.await();
ContainerEntity cEntity = new ContainerEntity();
cEntity.setId(cId.toString());
TimelineEntity[] lastPublishedEntities =
timelineClient.getLastPublishedEntities();
assertNotNull(lastPublishedEntities);
assertEquals(1, lastPublishedEntities.length);
TimelineEntity entity = lastPublishedEntities[0];
assertEquals(cEntity, entity);
NavigableSet<TimelineEvent> events = entity.getEvents();
assertEquals(1, events.size());
assertEquals(ContainerMetricsConstants.PAUSED_EVENT_TYPE,
events.iterator().next().getId());
Map<String, Object> info = entity.getInfo();
assertTrue(
info.containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO));
assertEquals("test pause",
info.get(ContainerMetricsConstants.DIAGNOSTICS_INFO));
}
@Test
public void testPublishContainerResumedEvent() {
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
ContainerEvent containerEvent =
new ContainerResumeEvent(cId, "test resume");
publisher.createTimelineClient(appId);
publisher.publishContainerEvent(containerEvent);
publisher.stopTimelineClient(appId);
dispatcher.await();
ContainerEntity cEntity = new ContainerEntity();
cEntity.setId(cId.toString());
TimelineEntity[] lastPublishedEntities =
timelineClient.getLastPublishedEntities();
assertNotNull(lastPublishedEntities);
assertEquals(1, lastPublishedEntities.length);
TimelineEntity entity = lastPublishedEntities[0];
assertEquals(cEntity, entity);
NavigableSet<TimelineEvent> events = entity.getEvents();
assertEquals(1, events.size());
assertEquals(ContainerMetricsConstants.RESUMED_EVENT_TYPE,
events.iterator().next().getId());
Map<String, Object> info = entity.getInfo();
assertTrue(
info.containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO));
assertEquals("test resume",
info.get(ContainerMetricsConstants.DIAGNOSTICS_INFO));
}
@Test
public void testPublishContainerKilledEvent() {
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
ContainerEvent containerEvent =
new ContainerKillEvent(cId, 1, "test kill");
publisher.createTimelineClient(appId);
publisher.publishContainerEvent(containerEvent);
publisher.stopTimelineClient(appId);
dispatcher.await();
ContainerEntity cEntity = new ContainerEntity();
cEntity.setId(cId.toString());
TimelineEntity[] lastPublishedEntities =
timelineClient.getLastPublishedEntities();
assertNotNull(lastPublishedEntities);
assertEquals(1, lastPublishedEntities.length);
TimelineEntity entity = lastPublishedEntities[0];
assertEquals(cEntity, entity);
NavigableSet<TimelineEvent> events = entity.getEvents();
assertEquals(1, events.size());
assertEquals(ContainerMetricsConstants.KILLED_EVENT_TYPE,
events.iterator().next().getId());
Map<String, Object> info = entity.getInfo();
assertTrue(
info.containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO));
assertEquals("test kill",
info.get(ContainerMetricsConstants.DIAGNOSTICS_INFO));
assertTrue(
info.containsKey(ContainerMetricsConstants.EXIT_STATUS_INFO));
assertEquals(1,
info.get(ContainerMetricsConstants.EXIT_STATUS_INFO));
}
@Test public void testContainerResourceUsage() {
ApplicationId appId = ApplicationId.newInstance(0, 1);
publisher.createTimelineClient(appId);
Container aContainer = mock(Container.class);
when(aContainer.getContainerId()).thenReturn(ContainerId
.newContainerId(ApplicationAttemptId.newInstance(appId, 1), 0L));
long idPrefix = TimelineServiceHelper.invertLong(
aContainer.getContainerId().getContainerId());
publisher.reportContainerResourceUsage(aContainer, 1024L, 8F);
verifyPublishedResourceUsageMetrics(timelineClient, 1024L, 8, idPrefix);
timelineClient.reset();
publisher.reportContainerResourceUsage(aContainer, 1024L, 0.8F);
verifyPublishedResourceUsageMetrics(timelineClient, 1024L, 1, idPrefix);
timelineClient.reset();
publisher.reportContainerResourceUsage(aContainer, 1024L, 0.49F);
verifyPublishedResourceUsageMetrics(timelineClient, 1024L, 0, idPrefix);
timelineClient.reset();
publisher.reportContainerResourceUsage(aContainer, 1024L,
(float) ResourceCalculatorProcessTree.UNAVAILABLE);
verifyPublishedResourceUsageMetrics(timelineClient, 1024L,
ResourceCalculatorProcessTree.UNAVAILABLE, idPrefix);
}
private void verifyPublishedResourceUsageMetrics(DummyTimelineClient
dummyTimelineClient, long memoryUsage, int cpuUsage, long idPrefix) {
TimelineEntity[] entities = null;
for (int i = 0; i < 10; i++) {
entities = dummyTimelineClient.getLastPublishedEntities();
if (entities != null) {
break;
}
try {
Thread.sleep(150L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
int numberOfResourceMetrics = 0;
numberOfResourceMetrics +=
(memoryUsage == ResourceCalculatorProcessTree.UNAVAILABLE) ? 0 : 1;
numberOfResourceMetrics +=
(cpuUsage == ResourceCalculatorProcessTree.UNAVAILABLE) ? 0 : 1;
assertNotNull(entities, "entities are expected to be published");
assertEquals(numberOfResourceMetrics, entities[0].getMetrics().size(),
"Expected number of metrics notpublished");
assertEquals(idPrefix, entities[0].getIdPrefix());
Iterator<TimelineMetric> metrics = entities[0].getMetrics().iterator();
while (metrics.hasNext()) {
TimelineMetric metric = metrics.next();
Iterator<Entry<Long, Number>> entrySet;
switch (metric.getId()) {
case CPU_ID:
if (cpuUsage == ResourceCalculatorProcessTree.UNAVAILABLE) {
fail("Not Expecting CPU Metric to be published");
}
entrySet = metric.getValues().entrySet().iterator();
assertEquals(cpuUsage, entrySet.next().getValue(),
"CPU usage metric not matching");
break;
case MEMORY_ID:
if (memoryUsage == ResourceCalculatorProcessTree.UNAVAILABLE) {
fail("Not Expecting Memory Metric to be published");
}
entrySet = metric.getValues().entrySet().iterator();
assertEquals(memoryUsage, entrySet.next().getValue(),
"Memory usage metric not matching");
break;
default:
fail("Invalid Resource Usage metric");
break;
}
}
}
protected static
|
TestNMTimelinePublisher
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bulkid/OracleInlineMutationStrategyIdTest.java
|
{
"start": 706,
"end": 849
}
|
class ____ extends InlineMutationStrategyIdTest {
@Override
protected int entityCount() {
return 1100;
}
}
|
OracleInlineMutationStrategyIdTest
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/FastReaderBuilder.java
|
{
"start": 19488,
"end": 20520
}
|
enum ____ {
NEW, INITIALIZING, INITIALIZED
}
private ExecutionStep[] readSteps;
private InstanceSupplier supplier;
private Schema schema;
private Stage stage = Stage.NEW;
public Stage getInitializationStage() {
return this.stage;
}
public void reset() {
this.stage = Stage.NEW;
}
public void startInitialization() {
this.stage = Stage.INITIALIZING;
}
public void finishInitialization(ExecutionStep[] readSteps, Schema schema, InstanceSupplier supp) {
this.readSteps = readSteps;
this.schema = schema;
this.supplier = supp;
this.stage = Stage.INITIALIZED;
}
@Override
public boolean canReuse() {
return true;
}
@Override
public Object read(Object reuse, Decoder decoder) throws IOException {
Object object = supplier.newInstance(reuse, schema);
for (ExecutionStep thisStep : readSteps) {
thisStep.execute(object, decoder);
}
return object;
}
}
public static
|
Stage
|
java
|
apache__camel
|
components/camel-netty/src/main/java/org/apache/camel/component/netty/codec/DatagramPacketObjectEncoder.java
|
{
"start": 1291,
"end": 2426
}
|
class ____
extends
MessageToMessageEncoder<AddressedEnvelope<Object, InetSocketAddress>> {
private static final Logger LOG = LoggerFactory.getLogger(DatagramPacketObjectEncoder.class);
private ObjectEncoder delegateObjectEncoder;
public DatagramPacketObjectEncoder() {
delegateObjectEncoder = new ObjectEncoder();
}
@Override
protected void encode(
ChannelHandlerContext ctx, AddressedEnvelope<Object, InetSocketAddress> msg,
List<Object> out)
throws Exception {
if (msg.content() instanceof Serializable) {
Serializable payload = (Serializable) msg.content();
ByteBuf buf = ctx.alloc().buffer();
delegateObjectEncoder.encode(ctx, payload, buf);
AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop
= new DefaultAddressedEnvelope<>(buf, msg.recipient(), msg.sender());
out.add(addressedEnvelop);
} else {
LOG.debug("Ignoring message content as it is not a java.io.Serializable instance.");
}
}
}
|
DatagramPacketObjectEncoder
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java
|
{
"start": 28720,
"end": 28796
}
|
interface ____<T, P> {
T get(P id);
}
public abstract static
|
DaoInterface
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
|
{
"start": 1419,
"end": 1838
}
|
class ____ {
public static List<Object[]> cases() throws IOException {
return CaseFinder.find(data(), "canonical", new ArrayList<>());
}
@ParameterizedTest
@MethodSource("cases")
void canonicalization(String input, String expectedOutput) {
assertEquals(SchemaNormalization.toParsingForm(new Schema.Parser().parse(input)), expectedOutput);
}
}
@Nested
public static
|
TestCanonical
|
java
|
google__guice
|
core/test/com/google/inject/InjectorTest.java
|
{
"start": 10442,
"end": 11402
}
|
class ____ implements Provider<Chicken> {
@Override
public Chicken get() {
return this;
}
}
public void testJitBindingFromAnotherThreadDuringInjection() {
final ExecutorService executorService = Executors.newSingleThreadExecutor();
final AtomicReference<JustInTime> got = new AtomicReference<>();
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
requestInjection(
new Object() {
@Inject
void initialize(final Injector injector)
throws ExecutionException, InterruptedException {
Future<JustInTime> future =
executorService.submit(() -> injector.getInstance(JustInTime.class));
got.set(future.get());
}
});
}
});
assertNotNull(got.get());
}
static
|
Chicken
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestReflectionUtils.java
|
{
"start": 6715,
"end": 6873
}
|
class ____ {
private int parentField;
@SuppressWarnings("unused")
public int getParentField() { return parentField; }
}
private static
|
Parent
|
java
|
apache__flink
|
flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/dsv2/wordcount/WordCount.java
|
{
"start": 12486,
"end": 13855
}
|
class ____
implements OneInputStreamProcessFunction<
Tuple2<String, Integer>, Tuple2<String, Integer>> {
// uses a ValueState to store the count of each word
private final ValueStateDeclaration<Integer> countStateDeclaration =
StateDeclarations.valueState("count", TypeDescriptors.INT);
@Override
public Set<StateDeclaration> usesStates() {
// declare a ValueState to store the count of each word
return Set.of(countStateDeclaration);
}
@Override
public void processRecord(
Tuple2<String, Integer> record,
Collector<Tuple2<String, Integer>> output,
PartitionedContext<Tuple2<String, Integer>> ctx)
throws Exception {
// calculate the new count of the word
String word = record.f0;
Integer count = record.f1;
Integer previousCount = ctx.getStateManager().getState(countStateDeclaration).value();
Integer newlyCount = previousCount == null ? count : previousCount + count;
// update the count of the word
ctx.getStateManager().getState(countStateDeclaration).update(newlyCount);
// output the result
output.collect(Tuple2.of(word, newlyCount));
}
}
}
|
Counter
|
java
|
apache__flink
|
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/utils/TestSqlStatement.java
|
{
"start": 950,
"end": 1282
}
|
class ____ {
private final String comment;
private final String sql;
public TestSqlStatement(String comment, String sql) {
this.comment = comment;
this.sql = sql;
}
public String getComment() {
return comment;
}
public String getSql() {
return sql;
}
}
|
TestSqlStatement
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/cache/annotation/ReactiveCachingTests.java
|
{
"start": 14074,
"end": 14369
}
|
class ____ {
@Bean
CacheManager cacheManager() {
ConcurrentMapCacheManager cm = new ConcurrentMapCacheManager("first");
cm.setAllowNullValues(false);
return cm;
}
}
@Configuration(proxyBeanMethods = false)
@EnableCaching
static
|
EarlyCacheHitDeterminationWithoutNullValuesConfig
|
java
|
spring-projects__spring-boot
|
module/spring-boot-cache/src/main/java/org/springframework/boot/cache/autoconfigure/JCachePropertiesCustomizer.java
|
{
"start": 801,
"end": 1091
}
|
interface ____ can be implemented by beans wishing to customize the properties
* used by the {@link CachingProvider} to create the {@link CacheManager}.
*
* @author Stephane Nicoll
* @since 4.0.0
* @see CachingProvider#getCacheManager(java.net.URI, ClassLoader, Properties)
*/
public
|
that
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy-common/deployment/src/main/java/io/quarkus/resteasy/common/deployment/RestClientBuildItem.java
|
{
"start": 406,
"end": 1234
}
|
class ____ extends MultiBuildItem {
private final ClassInfo classInfo;
private final Optional<String> configKey;
private final Optional<String> defaultBaseUri;
public RestClientBuildItem(ClassInfo classInfo, Optional<String> configKey, Optional<String> defaultBaseUri) {
this.classInfo = Objects.requireNonNull(classInfo);
this.configKey = Objects.requireNonNull(configKey);
this.defaultBaseUri = Objects.requireNonNull(defaultBaseUri);
}
public String getInterfaceName() {
return classInfo.name().toString();
}
public ClassInfo getClassInfo() {
return classInfo;
}
public Optional<String> getConfigKey() {
return configKey;
}
public Optional<String> getDefaultBaseUri() {
return defaultBaseUri;
}
}
|
RestClientBuildItem
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/FormListTest.java
|
{
"start": 536,
"end": 1223
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(VoidReturnTypeTest.Resource.class));
@TestHTTPResource
URI baseUri;
@Test
void test() {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri).build(Client.class);
Holder holder = new Holder();
holder.input2 = List.of("1", "2");
assertThat(client.call(List.of("first", "second", "third"), holder)).isEqualTo("first-second-third/1-2");
assertThat(client.call(List.of("first"), holder)).isEqualTo("first/1-2");
}
@Path("/test")
public static
|
FormListTest
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/AbstractDerivedByteBuf.java
|
{
"start": 855,
"end": 2815
}
|
class ____ extends AbstractByteBuf {
protected AbstractDerivedByteBuf(int maxCapacity) {
super(maxCapacity);
}
@Override
final boolean isAccessible() {
return isAccessible0();
}
boolean isAccessible0() {
return unwrap().isAccessible();
}
@Override
public final int refCnt() {
return refCnt0();
}
int refCnt0() {
return unwrap().refCnt();
}
@Override
public final ByteBuf retain() {
return retain0();
}
ByteBuf retain0() {
unwrap().retain();
return this;
}
@Override
public final ByteBuf retain(int increment) {
return retain0(increment);
}
ByteBuf retain0(int increment) {
unwrap().retain(increment);
return this;
}
@Override
public final ByteBuf touch() {
return touch0();
}
ByteBuf touch0() {
unwrap().touch();
return this;
}
@Override
public final ByteBuf touch(Object hint) {
return touch0(hint);
}
ByteBuf touch0(Object hint) {
unwrap().touch(hint);
return this;
}
@Override
public final boolean release() {
return release0();
}
boolean release0() {
return unwrap().release();
}
@Override
public final boolean release(int decrement) {
return release0(decrement);
}
boolean release0(int decrement) {
return unwrap().release(decrement);
}
@Override
public boolean isReadOnly() {
return unwrap().isReadOnly();
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
return nioBuffer(index, length);
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
return unwrap().nioBuffer(index, length);
}
@Override
public boolean isContiguous() {
return unwrap().isContiguous();
}
}
|
AbstractDerivedByteBuf
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/config/OneRouteRefReverseOnExceptionAndDLCErrorHandlerTest.java
|
{
"start": 1086,
"end": 2208
}
|
class ____ extends SpringTestSupport {
@Test
public void testOneRouteRefReverseNoOnExceptionAndDLCErrorHandler() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(1);
getMockEndpoint("mock:dead").expectedMessageCount(1);
getMockEndpoint("mock:handled").expectedMessageCount(0);
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testOneRouteRefReverseOnExceptionAndDLCErrorHandler() throws Exception {
getMockEndpoint("mock:bar").expectedMessageCount(1);
getMockEndpoint("mock:dead").expectedMessageCount(0);
getMockEndpoint("mock:handled").expectedMessageCount(1);
template.sendBody("direct:bar", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/spring/config/OneRouteRefReverseOnExceptionAndDLCErrorHandler.xml");
}
}
|
OneRouteRefReverseOnExceptionAndDLCErrorHandlerTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java
|
{
"start": 14261,
"end": 14728
}
|
class ____ {
private final int x = 0;
private final int y = 1;
@Override
public boolean equals(Object o) {
return o instanceof Test test && test.x == this.x && test.y == this.y;
}
}
""")
.doTest();
}
@Test
public void withinIfCondition_andUsedAfter() {
helper
.addInputLines(
"Test.java",
"""
|
Test
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/WireTapMDCTest.java
|
{
"start": 1238,
"end": 3182
}
|
class ____ extends ContextTestSupport {
@Test
public void testMdcPreserved() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:end");
mock.expectedMessageCount(2);
template.sendBody("seda:a", "A");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// enable MDC and breadcrumb
context.setUseMDCLogging(true);
context.setUseBreadcrumb(true);
context.setMDCLoggingKeysPattern("custom*,my*");
MdcCheckerProcessor checker = new MdcCheckerProcessor("route-a", "World", "MyValue");
MdcCheckerProcessor checker2 = new MdcCheckerProcessor("route-b", "Moon", "MyValue2");
from("seda:a").routeId("route-a")
.process(e -> {
MDC.put("custom.hello", "World");
MDC.put("foo", "Bar");
MDC.put("myKey", "MyValue");
})
.process(checker)
.to("log:a")
.wireTap("direct:b")
.process(checker)
.to("mock:end");
from("direct:b").routeId("route-b")
.process(e -> {
MDC.put("custom.hello", "Moon");
MDC.put("foo", "Bar2");
MDC.put("myKey", "MyValue2");
})
.process(checker2)
.to("log:b")
.to("mock:end");
}
};
}
/**
* Stores values from the first invocation to compare them with the second invocation later.
*/
private static
|
WireTapMDCTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/results/Dto2.java
|
{
"start": 236,
"end": 377
}
|
class ____ {
private final String text;
public Dto2(String text) {
this.text = text;
}
public String getText() {
return text;
}
}
|
Dto2
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotDiskThresholdIntegTests.java
|
{
"start": 3939,
"end": 18847
}
|
class ____ extends DiskUsageIntegTestCase {
private static final long WATERMARK_BYTES = ByteSizeValue.of(10, ByteSizeUnit.KB).getBytes();
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), WATERMARK_BYTES + "b")
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), WATERMARK_BYTES + "b")
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "0b")
.put(SELF_GENERATED_LICENSE_TYPE.getKey(), "trial")
// we want to control the refresh of cluster info updates
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), "60m")
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Stream.concat(super.nodePlugins().stream(), Stream.of(LocalStateSearchableSnapshots.class, CustomMockRepositoryPlugin.class))
.toList();
}
@Override
protected boolean addMockInternalEngine() {
return false;
}
private int createIndices() throws InterruptedException {
final int nbIndices = randomIntBetween(1, 5);
final CountDownLatch latch = new CountDownLatch(nbIndices);
for (int i = 0; i < nbIndices; i++) {
final String index = "index-" + i;
var thread = new Thread(() -> {
try {
createIndex(
index,
indexSettings(1, 0).put(DataTier.TIER_PREFERENCE, DataTier.DATA_HOT)
.put(INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), "0ms")
.put(DataTier.TIER_PREFERENCE_SETTING.getKey(), DataTier.DATA_HOT)
// Disable merges. A merge can cause discrepancy between the size we detect and the size in the snapshot,
// which could make room for more shards.
.put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
.build()
);
int nbDocs = 100;
try (BackgroundIndexer indexer = new BackgroundIndexer(index, client(), nbDocs)) {
while (true) {
waitForDocs(nbDocs, indexer);
indexer.assertNoFailures();
assertNoFailures(
indicesAdmin().prepareForceMerge().setFlush(true).setIndices(index).setMaxNumSegments(1).get()
);
Map<String, Long> storeSize = sizeOfShardsStores(index);
if (storeSize.get(index) > WATERMARK_BYTES) {
break;
}
int moreDocs = scaledRandomIntBetween(100, 1_000);
indexer.continueIndexing(moreDocs);
nbDocs += moreDocs;
}
} catch (Exception e) {
throw new AssertionError(e);
}
} finally {
latch.countDown();
}
});
thread.start();
}
latch.await();
return nbIndices;
}
private void createRepository(String name, String type) {
assertAcked(
clusterAdmin().preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, name)
.setType(type)
.setSettings(Settings.builder().put("location", randomRepoPath()).build())
);
}
private void createSnapshot(String repository, String snapshot, int nbIndices) {
var snapshotInfo = clusterAdmin().prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repository, snapshot)
.setIndices("index-*")
.setIncludeGlobalState(false)
.setWaitForCompletion(true)
.get()
.getSnapshotInfo();
assertThat(snapshotInfo.state(), is(SnapshotState.SUCCESS));
assertThat(snapshotInfo.successfulShards(), equalTo(nbIndices));
assertThat(snapshotInfo.failedShards(), equalTo(0));
}
private void mountIndices(Collection<String> indices, String prefix, String repositoryName, String snapshotName, Storage storage)
throws InterruptedException {
CountDownLatch mountLatch = new CountDownLatch(indices.size());
logger.info("--> mounting [{}] indices with [{}] prefix", indices.size(), prefix);
for (String index : indices) {
logger.info("Mounting index {}", index);
client().execute(
MountSearchableSnapshotAction.INSTANCE,
new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
prefix + index,
repositoryName,
snapshotName,
index,
Settings.EMPTY,
Strings.EMPTY_ARRAY,
false,
storage
),
ActionListener.wrap(response -> mountLatch.countDown(), e -> mountLatch.countDown())
);
}
mountLatch.await();
}
public void testHighWatermarkCanNotBeExceededOnColdNode() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startNode(onlyRole(DATA_HOT_NODE_ROLE));
final int nbIndices = createIndices();
final String repositoryName = "repository";
createRepository(repositoryName, FsRepository.TYPE);
final String snapshot = "snapshot";
createSnapshot(repositoryName, snapshot, nbIndices);
final Map<String, Long> indicesStoresSizes = sizeOfShardsStores("index-*");
assertAcked(indicesAdmin().prepareDelete("index-*"));
// The test completes reliably successfully only when we do a full copy, we can overcommit on SHARED_CACHE
final Storage storage = FULL_COPY;
logger.info("--> using storage [{}]", storage);
final Settings.Builder otherDataNodeSettings = Settings.builder();
if (storage == FULL_COPY) {
otherDataNodeSettings.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.DATA_COLD_NODE_ROLE.roleName());
} else {
otherDataNodeSettings.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE.roleName())
.put(
SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(),
ByteSizeValue.ofBytes(Math.min(indicesStoresSizes.values().stream().mapToLong(value -> value).sum(), 5 * 1024L * 1024L))
);
}
final String otherDataNode = internalCluster().startNode(otherDataNodeSettings.build());
ensureStableCluster(3);
final String otherDataNodeId = internalCluster().getInstance(NodeEnvironment.class, otherDataNode).nodeId();
logger.info("--> reducing disk size of node [{}/{}] so that all shards can fit on the node", otherDataNode, otherDataNodeId);
final long totalSpace = indicesStoresSizes.values().stream().mapToLong(size -> size).sum() + WATERMARK_BYTES + 1024L;
getTestFileStore(otherDataNode).setTotalSpace(totalSpace);
logger.info("--> refreshing cluster info");
final var masterInfoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance(ClusterInfoService.class);
ClusterInfoServiceUtils.refresh(masterInfoService);
DiskUsage usage = masterInfoService.getClusterInfo().getNodeMostAvailableDiskUsages().get(otherDataNodeId);
assertThat(usage.totalBytes(), equalTo(totalSpace));
mountIndices(indicesStoresSizes.keySet(), "mounted-", repositoryName, snapshot, storage);
// The cold/frozen data node has enough disk space to hold all the shards
assertBusy(() -> {
var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setRoutingTable(true).get().getState();
assertThat(
state.routingTable()
.allShards()
.filter(shardRouting -> state.metadata().getProject().index(shardRouting.shardId().getIndex()).isSearchableSnapshot())
.allMatch(
shardRouting -> shardRouting.state() == ShardRoutingState.STARTED
&& otherDataNodeId.equals(shardRouting.currentNodeId())
),
equalTo(true)
);
});
mountIndices(indicesStoresSizes.keySet(), "extra-", repositoryName, snapshot, storage);
assertBusy(() -> {
var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setRoutingTable(true).get().getState();
assertThat(
state.routingTable()
.allShards()
.filter(
shardRouting -> shardRouting.shardId().getIndexName().startsWith("extra-")
&& state.metadata().getProject().index(shardRouting.shardId().getIndex()).isSearchableSnapshot()
)
.noneMatch(
shardRouting -> shardRouting.state() == ShardRoutingState.STARTED
&& otherDataNodeId.equals(shardRouting.currentNodeId())
),
equalTo(true)
);
});
}
public void testHighWatermarkCanNotBeExceededWithInitializingSearchableSnapshots() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startNode(onlyRole(DATA_HOT_NODE_ROLE));
int nbIndices = createIndices();
String repositoryName = "repository";
createRepository(repositoryName, CustomMockRepositoryPlugin.TYPE);
String snapshotName = "snapshot";
createSnapshot(repositoryName, snapshotName, nbIndices);
Map<String, Long> indicesStoresSizes = sizeOfShardsStores("index-*");
assertAcked(indicesAdmin().prepareDelete("index-*"));
String coldNodeName = internalCluster().startNode(
Settings.builder().put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.DATA_COLD_NODE_ROLE.roleName()).build()
);
ensureStableCluster(3);
String coldNodeId = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().nodes().resolveNode(coldNodeName).getId();
logger.info("--> reducing disk size of node [{}/{}] so that all shards except one can fit on the node", coldNodeName, coldNodeId);
String indexToSkip = randomFrom(indicesStoresSizes.keySet());
Map<String, Long> indicesToBeMounted = indicesStoresSizes.entrySet()
.stream()
.filter(e -> e.getKey().equals(indexToSkip) == false)
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
long totalSpace = indicesToBeMounted.values().stream().mapToLong(e -> e).sum() + WATERMARK_BYTES + 1024L;
getTestFileStore(coldNodeName).setTotalSpace(totalSpace);
logger.info("--> refreshing cluster info");
InternalClusterInfoService masterInfoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance(
ClusterInfoService.class
);
ClusterInfoServiceUtils.refresh(masterInfoService);
DiskUsage usage = masterInfoService.getClusterInfo().getNodeMostAvailableDiskUsages().get(coldNodeId);
assertThat(usage.totalBytes(), equalTo(totalSpace));
String prefix = "mounted-";
mountIndices(indicesToBeMounted.keySet(), prefix, repositoryName, snapshotName, FULL_COPY);
assertBusy(() -> {
var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setRoutingTable(true).get().getState();
assertThat(
state.routingTable()
.allShards()
.filter(s -> indicesToBeMounted.containsKey(s.shardId().getIndexName().replace(prefix, "")))
.filter(s -> state.metadata().getProject().index(s.shardId().getIndex()).isSearchableSnapshot())
.filter(s -> coldNodeId.equals(s.currentNodeId()))
.filter(s -> s.state() == ShardRoutingState.INITIALIZING)
.count(),
equalTo((long) indicesToBeMounted.size())
);
});
logger.info("--> All shards are being initialized, attempt to mount an extra index");
mountIndices(List.of(indexToSkip), prefix, repositoryName, snapshotName, FULL_COPY);
assertBusy(() -> {
var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setRoutingTable(true).get().getState();
assertThat(state.routingTable().index(prefix + indexToSkip).shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1));
});
logger.info("--> Unlocking the initialized shards");
var mockRepository = (CustomMockRepository) internalCluster().getCurrentMasterNodeInstance(RepositoriesService.class)
.repository(repositoryName);
mockRepository.unlockRestore();
assertBusy(() -> {
var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).setRoutingTable(true).get().getState();
assertThat(state.routingTable().index(prefix + indexToSkip).shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1));
assertThat(
state.routingTable()
.allShards()
.filter(s -> indicesToBeMounted.containsKey(s.shardId().getIndexName().replace(prefix, "")))
.filter(s -> state.metadata().getProject().index(s.shardId().getIndex()).isSearchableSnapshot())
.filter(s -> coldNodeId.equals(s.currentNodeId()))
.filter(s -> s.state() == ShardRoutingState.STARTED)
.count(),
equalTo((long) indicesToBeMounted.size())
);
});
}
private static Map<String, Long> sizeOfShardsStores(String indexPattern) {
return Arrays.stream(indicesAdmin().prepareStats(indexPattern).clear().setStore(true).get().getShards())
.collect(
Collectors.toUnmodifiableMap(s -> s.getShardRouting().getIndexName(), s -> s.getStats().getStore().sizeInBytes(), Long::sum)
);
}
public static
|
SearchableSnapshotDiskThresholdIntegTests
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-jdbc-instrumentation/src/test/java/io/quarkus/it/opentelemetry/Db2LifecycleManager.java
|
{
"start": 1777,
"end": 2235
}
|
class ____ extends Db2Container {
public StartedDb2Container() {
super(DockerImageName
.parse(DB2_IMAGE)
.asCompatibleSubstituteFor(DockerImageName.parse("ibmcom/db2")));
withDatabaseName(QUARKUS);
withUsername(QUARKUS);
withPassword(QUARKUS);
addExposedPort(5000);
acceptLicense();
start();
}
}
}
|
StartedDb2Container
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/DevContextBuildItem.java
|
{
"start": 149,
"end": 443
}
|
class ____ extends SimpleBuildItem {
private final String devUIContextRoot;
public DevContextBuildItem(String devUIContextRoot) {
this.devUIContextRoot = devUIContextRoot;
}
public String getDevUIContextRoot() {
return devUIContextRoot;
}
}
|
DevContextBuildItem
|
java
|
elastic__elasticsearch
|
modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java
|
{
"start": 2242,
"end": 14099
}
|
class ____ extends Plugin implements NetworkPlugin {
public static final String NETTY_TRANSPORT_NAME = "netty4";
public static final String NETTY_HTTP_TRANSPORT_NAME = "netty4";
public static final Setting<Integer> SETTING_HTTP_WORKER_COUNT = Setting.intSetting(
"http.netty.worker_count",
0,
Setting.Property.NodeScope
);
public static final Setting<ByteSizeValue> SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = byteSizeSetting(
"http.netty.receive_predictor_size",
ByteSizeValue.of(64, ByteSizeUnit.KB),
Setting.Property.NodeScope
);
public static final Setting<Integer> WORKER_COUNT = new Setting<>(
"transport.netty.worker_count",
(s) -> Integer.toString(EsExecutors.allocatedProcessors(s)),
(s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"),
Setting.Property.NodeScope
);
private static final Setting<ByteSizeValue> NETTY_RECEIVE_PREDICTOR_SIZE = byteSizeSetting(
"transport.netty.receive_predictor_size",
ByteSizeValue.of(64, ByteSizeUnit.KB),
Setting.Property.NodeScope
);
public static final Setting<ByteSizeValue> NETTY_RECEIVE_PREDICTOR_MAX = byteSizeSetting(
"transport.netty.receive_predictor_max",
NETTY_RECEIVE_PREDICTOR_SIZE,
Setting.Property.NodeScope
);
public static final Setting<ByteSizeValue> NETTY_RECEIVE_PREDICTOR_MIN = byteSizeSetting(
"transport.netty.receive_predictor_min",
NETTY_RECEIVE_PREDICTOR_SIZE,
Setting.Property.NodeScope
);
public static final Setting<Integer> NETTY_BOSS_COUNT = intSetting("transport.netty.boss_count", 1, 1, Setting.Property.NodeScope);
/*
* Size in bytes of an individual message received by io.netty.handler.codec.MessageAggregator which accumulates the content for an
* HTTP request. This number is used for estimating the maximum number of allowed buffers before the MessageAggregator's internal
* collection of buffers is resized.
*
* By default we assume the Ethernet MTU (1500 bytes) but users can override it with a system property.
*/
private static final ByteSizeValue MTU = ByteSizeValue.ofBytes(Long.parseLong(System.getProperty("es.net.mtu", "1500")));
private static final String SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = "http.netty.max_composite_buffer_components";
public static final Setting<Integer> SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = new Setting<>(
SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS,
(s) -> {
ByteSizeValue maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(s);
/*
* Netty accumulates buffers containing data from all incoming network packets that make up one HTTP request in an instance of
* io.netty.buffer.CompositeByteBuf (think of it as a buffer of buffers). Once its capacity is reached, the buffer will iterate
* over its individual entries and put them into larger buffers (see io.netty.buffer.CompositeByteBuf#consolidateIfNeeded()
* for implementation details). We want to to resize that buffer because this leads to additional garbage on the heap and also
* increases the application's native memory footprint (as direct byte buffers hold their contents off-heap).
*
* With this setting we control the CompositeByteBuf's capacity (which is by default 1024, see
* io.netty.handler.codec.MessageAggregator#DEFAULT_MAX_COMPOSITEBUFFER_COMPONENTS). To determine a proper default capacity for
* that buffer, we need to consider that the upper bound for the size of HTTP requests is determined by `maxContentLength`. The
* number of buffers that are needed depend on how often Netty reads network packets which depends on the network type (MTU).
* We assume here that Elasticsearch receives HTTP requests via an Ethernet connection which has a MTU of 1500 bytes.
*
* Note that we are *not* pre-allocating any memory based on this setting but rather determine the CompositeByteBuf's capacity.
* The tradeoff is between less (but larger) buffers that are contained in the CompositeByteBuf and more (but smaller) buffers.
* With the default max content length of 100MB and a MTU of 1500 bytes we would allow 69905 entries.
*/
long maxBufferComponentsEstimate = Math.round((double) (maxContentLength.getBytes() / MTU.getBytes()));
// clamp value to the allowed range
long maxBufferComponents = Math.max(2, Math.min(maxBufferComponentsEstimate, Integer.MAX_VALUE));
return String.valueOf(maxBufferComponents);
// Netty's CompositeByteBuf implementation does not allow less than two components.
},
s -> Setting.parseInt(s, 2, Integer.MAX_VALUE, SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS),
Setting.Property.NodeScope
);
/*
* [NOTE: TLS Handshake Throttling]
*
* Each TLS handshake takes around 2.5ms of CPU to process, so each transport worker thread can process up to 400 handshakes per second.
* This is much slower than the rate at which we can accept new connections, so the handshakes can form a backlog of work. Clients
* typically impose a 10s timeout on TLS handshakes, so if we fall behind by more than 4000 handshakes then (even without any other
* CPU-bound work) each new client's handshake will take more than 10s to reach the head of the queue, and yet we will still attempt to
* complete it, delaying yet more client's handshake attempts, and ending up in a state where egregiously few new clients will be able
* to connect.
*
* We prevent this by restricting the number of handshakes in progress at once: by default we permit a backlog of up to 2000 handshakes
* per worker. This represents 5s of CPU time, half of the usual client timeout of 10s, which should be enough margin that we can work
* through this backlog before any of them time out (even in the -- likely -- situation that the CPU has something other than TLS
* handshakes to do).
*
* By default, the permitted 2000 handshakes are further divided into 200 in-flight handshake tasks (500ms of CPU time) enqueued on the
* Netty event loop as normal, and 1800 more delayed handshake tasks which are held in a separate queue and processed in LIFO order. The
* LIFO order yields better behaviour than FIFO in the situation that we cannot even spend 50% of CPU time on TLS handshakes, because in
* that case some of the enqueued handshakes will still hit the client timeout, so there's more value in focussing our limited attention
* on younger handshakes which we're more likely to complete before timing out. As long as we can devote at least 5% (200/4000) of a CPU
* to this work, we'll be dealing only with handshakes that we can actually complete within the 10s timeout.
*
* In future we may decide to adjust this division of work dynamically based on available CPU time, rather than relying on constant
* limits as described above.
*/
/**
* Maximum number of in-flight TLS handshakes to permit on each event loop.
*/
public static final Setting<Integer> SETTING_HTTP_NETTY_TLS_HANDSHAKES_MAX_IN_PROGRESS = intSetting(
"http.netty.tls_handshakes.max_in_progress",
200, // See [NOTE: TLS Handshake Throttling] above
0,
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
/**
* Maximum number of TLS handshakes to delay by holding in a queue on each event loop.
*/
public static final Setting<Integer> SETTING_HTTP_NETTY_TLS_HANDSHAKES_MAX_DELAYED = intSetting(
"http.netty.tls_handshakes.max_delayed",
1800, // See [NOTE: TLS Handshake Throttling] above
0,
Setting.Property.NodeScope,
Setting.Property.Dynamic
);
private final SetOnce<SharedGroupFactory> groupFactory = new SetOnce<>();
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(
SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS,
SETTING_HTTP_WORKER_COUNT,
SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE,
SETTING_HTTP_NETTY_TLS_HANDSHAKES_MAX_IN_PROGRESS,
SETTING_HTTP_NETTY_TLS_HANDSHAKES_MAX_DELAYED,
WORKER_COUNT,
NETTY_RECEIVE_PREDICTOR_SIZE,
NETTY_RECEIVE_PREDICTOR_MIN,
NETTY_RECEIVE_PREDICTOR_MAX,
NETTY_BOSS_COUNT
);
}
@Override
public Settings additionalSettings() {
return Settings.builder()
// here we set the netty4 transport and http transport as the default. This is a set once setting
// ie. if another plugin does that as well the server will fail - only one default network can exist!
.put(NetworkModule.HTTP_DEFAULT_TYPE_SETTING.getKey(), NETTY_HTTP_TRANSPORT_NAME)
.put(NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.getKey(), NETTY_TRANSPORT_NAME)
.build();
}
@Override
public Map<String, Supplier<Transport>> getTransports(
Settings settings,
ThreadPool threadPool,
PageCacheRecycler pageCacheRecycler,
CircuitBreakerService circuitBreakerService,
NamedWriteableRegistry namedWriteableRegistry,
NetworkService networkService
) {
return Collections.singletonMap(
NETTY_TRANSPORT_NAME,
() -> new Netty4Transport(
settings,
TransportVersion.current(),
threadPool,
networkService,
pageCacheRecycler,
namedWriteableRegistry,
circuitBreakerService,
getSharedGroupFactory(settings)
)
);
}
@Override
public Map<String, Supplier<HttpServerTransport>> getHttpTransports(
Settings settings,
ThreadPool threadPool,
BigArrays bigArrays,
PageCacheRecycler pageCacheRecycler,
CircuitBreakerService circuitBreakerService,
NamedXContentRegistry xContentRegistry,
NetworkService networkService,
HttpServerTransport.Dispatcher dispatcher,
BiConsumer<HttpPreRequest, ThreadContext> perRequestThreadContext,
ClusterSettings clusterSettings,
TelemetryProvider telemetryProvider
) {
return Collections.singletonMap(
NETTY_HTTP_TRANSPORT_NAME,
() -> new Netty4HttpServerTransport(
settings,
networkService,
threadPool,
xContentRegistry,
dispatcher,
clusterSettings,
getSharedGroupFactory(settings),
telemetryProvider,
TLSConfig.noTLS(),
null,
null
) {
@Override
protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadContext threadContext) {
perRequestThreadContext.accept(restRequest.getHttpRequest(), threadContext);
}
}
);
}
private SharedGroupFactory getSharedGroupFactory(Settings settings) {
SharedGroupFactory factory = this.groupFactory.get();
if (factory != null) {
assert factory.getSettings().equals(settings) : "Different settings than originally provided";
return factory;
} else {
this.groupFactory.set(new SharedGroupFactory(settings));
return this.groupFactory.get();
}
}
}
|
Netty4Plugin
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnusedVariableTest.java
|
{
"start": 22099,
"end": 22519
}
|
interface ____ {}
""")
.setArgs(
ImmutableList.of("-XepOpt:Unused:methodAnnotationsExemptingParameters=unusedvars.B"))
.doTest();
}
@Test
public void usedUnaryExpression() {
helper
.addSourceLines(
"Test.java",
"""
package unusedvars;
import java.util.Map;
import java.util.HashMap;
public
|
B
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/AdviceWithUrlIssueTest.java
|
{
"start": 2289,
"end": 2506
}
|
class ____ extends AdviceWithRouteBuilder {
@Override
public void configure() {
interceptSendToEndpoint("mock:target").skipSendToOriginalEndpoint().to("mock:target2");
}
}
}
|
Advice
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/CollectionBinder.java
|
{
"start": 16114,
"end": 85632
}
|
class ____ is contained in an '@ElementCollection' and may not be a "
+ annotationName( oneToMany, manyToMany, elementCollection ));
}
if ( oneToMany != null && property.hasDirectAnnotationUsage( SoftDelete.class ) ) {
throw new UnsupportedMappingException(
"@SoftDelete cannot be applied to @OneToMany - " +
property.getDeclaringType().getName() + "." + property.getName()
);
}
if ( property.hasDirectAnnotationUsage( OrderColumn.class )
&& manyToMany != null
&& isNotBlank( manyToMany.mappedBy() ) ) {
throw new AnnotationException("Collection '" + getPath( propertyHolder, inferredData ) +
"' is the unowned side of a bidirectional '@ManyToMany' and may not have an '@OrderColumn'");
}
if ( manyToMany != null || elementCollection != null ) {
if ( property.hasDirectAnnotationUsage( JoinColumn.class )
|| property.hasDirectAnnotationUsage( JoinColumns.class ) ) {
throw new AnnotationException( "Property '" + getPath( propertyHolder, inferredData )
+ "' is a " + annotationName( oneToMany, manyToMany, elementCollection )
+ " and is directly annotated '@JoinColumn'"
+ " (specify '@JoinColumn' inside '@JoinTable' or '@CollectionTable')" );
}
}
}
/**
 * Renders the association annotation present on a property as a quoted name
 * for use in error messages. When neither {@code @OneToMany} nor
 * {@code @ManyToMany} is present the property is reported as an
 * {@code @ElementCollection}.
 */
private static String annotationName(
		OneToMany oneToMany,
		ManyToMany manyToMany,
		ElementCollection elementCollection) {
	if ( oneToMany != null ) {
		return "'@OneToMany'";
	}
	if ( manyToMany != null ) {
		return "'@ManyToMany'";
	}
	return "'@ElementCollection'";
}
/**
 * Resolves the list-index column for an indexed collection from the
 * {@code @OrderColumn} and {@code @ListIndexBase} annotations on the
 * attribute (either may be absent).
 */
private static IndexColumn getIndexColumn(
		PropertyHolder propertyHolder,
		PropertyData inferredData,
		EntityBinder entityBinder,
		MetadataBuildingContext context) {
	final var memberDetails = inferredData.getAttributeMember();
	return IndexColumn.fromAnnotations(
			memberDetails.getDirectAnnotationUsage( OrderColumn.class ),
			memberDetails.getDirectAnnotationUsage( ListIndexBase.class ),
			propertyHolder,
			inferredData,
			entityBinder.getSecondaryTables(),
			context
	);
}
/**
 * Configures the binder's target entity, cascade strategy, and one-to-many
 * flag according to which association annotation is present, and returns the
 * {@code mappedBy} value (null for the owning side or for element
 * collections / many-to-any).
 *
 * @return the normalized {@code mappedBy} attribute, or null
 */
private static String handleTargetEntity(
		PropertyHolder propertyHolder,
		PropertyData inferredData,
		MetadataBuildingContext context,
		MemberDetails property,
		AnnotatedJoinColumns joinColumns,
		OneToMany oneToManyAnn,
		ManyToMany manyToManyAnn,
		ElementCollection elementCollectionAnn,
		CollectionBinder collectionBinder,
		Cascade hibernateCascade) {
	//TODO enhance exception with @ManyToAny and @CollectionOfElements
	if ( oneToManyAnn != null && manyToManyAnn != null ) {
		throw new AnnotationException( "Property '" + getPath( propertyHolder, inferredData )
				+ "' is annotated both '@OneToMany' and '@ManyToMany'" );
	}
	final String mappedBy;
	if ( oneToManyAnn != null ) {
		// the foreign key of a @OneToMany must live in the owner's primary table
		if ( joinColumns.isSecondary() ) {
			throw new AnnotationException( "Collection '" + getPath( propertyHolder, inferredData )
					+ "' has foreign key in secondary table" );
		}
		collectionBinder.setFkJoinColumns( joinColumns );
		mappedBy = nullIfEmpty( oneToManyAnn.mappedBy() );
		collectionBinder.setTargetEntity( oneToManyAnn.targetEntity() );
		collectionBinder.setCascadeStrategy(
				aggregateCascadeTypes( oneToManyAnn.cascade(), hibernateCascade,
						oneToManyAnn.orphanRemoval(), context ) );
		collectionBinder.setOneToMany( true );
	}
	else if ( elementCollectionAnn != null ) {
		if ( joinColumns.isSecondary() ) {
			throw new AnnotationException( "Collection '" + getPath( propertyHolder, inferredData )
					+ "' has foreign key in secondary table" );
		}
		collectionBinder.setFkJoinColumns( joinColumns );
		// element collections are always owned and have no mappedBy
		mappedBy = null;
		collectionBinder.setTargetEntity( elementCollectionAnn.targetClass() );
		collectionBinder.setOneToMany( false );
	}
	else if ( manyToManyAnn != null ) {
		mappedBy = nullIfEmpty( manyToManyAnn.mappedBy() );
		collectionBinder.setTargetEntity( manyToManyAnn.targetEntity() );
		collectionBinder.setCascadeStrategy(
				aggregateCascadeTypes( manyToManyAnn.cascade(), hibernateCascade, false, context ) );
		collectionBinder.setOneToMany( false );
	}
	else if ( property.hasDirectAnnotationUsage( ManyToAny.class ) ) {
		mappedBy = null;
		// @ManyToAny has no statically-known target entity
		collectionBinder.setTargetEntity( ClassDetails.VOID_CLASS_DETAILS );
		collectionBinder.setCascadeStrategy(
				aggregateCascadeTypes( null, hibernateCascade, false, context ) );
		collectionBinder.setOneToMany( false );
	}
	else {
		mappedBy = null;
	}
	collectionBinder.setMappedBy( mappedBy );
	return mappedBy;
}
// Does the property carry any annotation that customizes the map key
// (type, JDBC mapping, mutability, or @MapKey itself)?
private static boolean hasMapKeyAnnotation(MemberDetails property) {
	return property.hasDirectAnnotationUsage(MapKeyJavaType.class)
		|| property.hasDirectAnnotationUsage(MapKeyJdbcType.class)
		|| property.hasDirectAnnotationUsage(MapKeyJdbcTypeCode.class)
		|| property.hasDirectAnnotationUsage(MapKeyMutability.class)
		|| property.hasDirectAnnotationUsage(MapKey.class)
		|| property.hasDirectAnnotationUsage(MapKeyType.class);
}
/**
 * Is the given holder an embeddable component that itself lives inside an
 * {@code @ElementCollection}?
 */
private static boolean isToManyAssociationWithinEmbeddableCollection(PropertyHolder propertyHolder) {
	if ( propertyHolder instanceof ComponentPropertyHolder componentHolder ) {
		return componentHolder.isWithinElementCollection();
	}
	return false;
}
/**
 * Builds the column mapping for the collection element, honoring (in
 * priority order) {@code @Column}, {@code @Formula}, {@code @Columns},
 * or falling back to an implicit column.
 */
private static AnnotatedColumns elementColumns(
		PropertyHolder propertyHolder,
		Nullability nullability,
		EntityBinder entityBinder,
		MetadataBuildingContext context,
		MemberDetails property,
		PropertyData virtualProperty) {
//			Comment comment) {
	if ( property.hasDirectAnnotationUsage( jakarta.persistence.Column.class ) ) {
		return buildColumnFromAnnotation(
				property.getDirectAnnotationUsage( jakarta.persistence.Column.class ),
				null,
//					comment,
				nullability,
				propertyHolder,
				virtualProperty,
				entityBinder.getSecondaryTables(),
				context
		);
	}
	else if ( property.hasDirectAnnotationUsage( Formula.class ) ) {
		// a formula element has no physical column; use an overridable lookup
		// so dialect-specific @Formula overrides are respected
		return buildFormulaFromAnnotation(
				getOverridableAnnotation(property, Formula.class, context),
//					comment,
				nullability,
				propertyHolder,
				virtualProperty,
				entityBinder.getSecondaryTables(),
				context
		);
	}
	else if ( property.hasDirectAnnotationUsage( Columns.class ) ) {
		return buildColumnsFromAnnotations(
				property.getDirectAnnotationUsage( Columns.class ).columns(),
				null,
//					comment,
				nullability,
				propertyHolder,
				virtualProperty,
				entityBinder.getSecondaryTables(),
				context
		);
	}
	else {
		// no explicit mapping: derive an implicit column
		return buildColumnFromNoAnnotation(
				null,
//					comment,
				nullability,
				propertyHolder,
				virtualProperty,
				entityBinder.getSecondaryTables(),
				context
		);
	}
}
/**
 * Converts the (possibly repeated) {@code @MapKeyJoinColumn} annotations on
 * the property into equivalent {@code @JoinColumn} instances.
 *
 * @return the converted join columns, or null when none are declared
 */
private static JoinColumn[] mapKeyJoinColumnAnnotations(
		MemberDetails property,
		MetadataBuildingContext context) {
	final var modelsContext = context.getBootstrapContext().getModelsContext();
	final var mapKeyJoinColumns =
			property.getRepeatedAnnotationUsages( JpaAnnotations.MAP_KEY_JOIN_COLUMN, modelsContext );
	if ( isEmpty( mapKeyJoinColumns ) ) {
		return null;
	}
	else {
		final var joinColumns = new JoinColumn[mapKeyJoinColumns.length];
		for ( int i = 0; i < mapKeyJoinColumns.length; i++ ) {
			joinColumns[i] = toJoinColumn( mapKeyJoinColumns[i], modelsContext );
		}
		return joinColumns;
	}
}
/**
 * Builds the column mapping for a map key from {@code @MapKeyColumn} (if
 * present), translating it to a regular {@code @Column} and applying the
 * JPA-mandated "_KEY" suffix for the implicit name. Map keys are always
 * forced non-null.
 */
private static AnnotatedColumns mapKeyColumns(
		PropertyHolder propertyHolder,
		PropertyData inferredData,
		EntityBinder entityBinder,
		MetadataBuildingContext context,
		MemberDetails property) {
//			Comment comment) {
	return buildColumnFromAnnotations(
			property.hasDirectAnnotationUsage( MapKeyColumn.class )
					? MapKeyColumnJpaAnnotation.toColumnAnnotation(
							property.getDirectAnnotationUsage( MapKeyColumn.class ),
							context.getBootstrapContext().getModelsContext()
					)
					: null,
//				comment,
			Nullability.FORCED_NOT_NULL,
			propertyHolder,
			inferredData,
			"_KEY",
			entityBinder.getSecondaryTables(),
			context
	);
}
/**
 * Configures the association/collection table for the binder from
 * {@code @JoinTable} or {@code @CollectionTable} (JPA annotation wins),
 * including its name, schema/catalog, constraints, indexes, and both sides'
 * join columns.
 */
private static void bindJoinedTableAssociation(
		MemberDetails property,
		MetadataBuildingContext buildingContext,
		EntityBinder entityBinder,
		CollectionBinder collectionBinder,
		PropertyHolder propertyHolder,
		PropertyData inferredData,
		String mappedBy) {
	final var associationTableBinder = new TableBinder();
	final var assocTable = propertyHolder.getJoinTable( property );
	final var collectionTable = property.getDirectAnnotationUsage( CollectionTable.class );
	final JoinColumn[] annJoins;
	final JoinColumn[] annInverseJoins;
	if ( assocTable != null || collectionTable != null ) {
		final String catalog;
		final String schema;
		final String tableName;
		final UniqueConstraint[] uniqueConstraints;
		final JoinColumn[] joins;
		final JoinColumn[] inverseJoins;
		final Index[] jpaIndexes;
		final String options;
		//JPA 2 has priority
		if ( collectionTable != null ) {
			catalog = collectionTable.catalog();
			schema = collectionTable.schema();
			tableName = collectionTable.name();
			uniqueConstraints = collectionTable.uniqueConstraints();
			joins = collectionTable.joinColumns();
			// @CollectionTable has no inverse side
			inverseJoins = null;
			jpaIndexes = collectionTable.indexes();
			options = collectionTable.options();
		}
		else {
			catalog = assocTable.catalog();
			schema = assocTable.schema();
			tableName = assocTable.name();
			uniqueConstraints = assocTable.uniqueConstraints();
			joins = assocTable.joinColumns();
			inverseJoins = assocTable.inverseJoinColumns();
			jpaIndexes = assocTable.indexes();
			options = assocTable.options();
		}
		collectionBinder.setExplicitAssociationTable( true );
		if ( isNotEmpty( jpaIndexes ) ) {
			associationTableBinder.setJpaIndex( jpaIndexes );
		}
		// blank values mean "use the default", so only set when provided
		if ( !schema.isBlank() ) {
			associationTableBinder.setSchema( schema );
		}
		if ( !catalog.isBlank() ) {
			associationTableBinder.setCatalog( catalog );
		}
		if ( !tableName.isBlank() ) {
			associationTableBinder.setName( tableName );
		}
		associationTableBinder.setUniqueConstraints( uniqueConstraints );
		associationTableBinder.setJpaIndex( jpaIndexes );
		associationTableBinder.setOptions( options );
		//set check constraint in the second pass
		annJoins = ArrayHelper.isEmpty( joins ) ? null : joins;
		annInverseJoins = inverseJoins == null || ArrayHelper.isEmpty( inverseJoins ) ? null : inverseJoins;
	}
	else {
		// no explicit table annotation: defaults will be applied downstream
		annJoins = null;
		annInverseJoins = null;
	}
	associationTableBinder.setBuildingContext( buildingContext );
	collectionBinder.setTableBinder( associationTableBinder );
	collectionBinder.setJoinColumns( buildJoinTableJoinColumns(
			annJoins,
			entityBinder.getSecondaryTables(),
			propertyHolder,
			inferredData,
			mappedBy,
			buildingContext
	) );
	collectionBinder.setInverseJoinColumns( buildJoinTableJoinColumns(
			annInverseJoins,
			entityBinder.getSecondaryTables(),
			propertyHolder,
			inferredData,
			mappedBy,
			buildingContext
	) );
}
// Accessor for the metadata-building context this binder was created with.
protected MetadataBuildingContext getBuildingContext() {
	return buildingContext;
}
// Supplier for the custom UserCollectionType bean, or null when none was configured.
Supplier<ManagedBean<? extends UserCollectionType>> getCustomTypeBeanResolver() {
	return customTypeBeanResolver;
}
// Overridden by MapBinder; all other collection binders are not maps.
boolean isMap() {
	return false;
}
// Marks whether this mapping originates from a Hibernate extension annotation.
protected void setIsHibernateExtensionMapping(boolean hibernateExtensionMapping) {
	this.hibernateExtensionMapping = hibernateExtensionMapping;
}
// Whether this mapping originates from a Hibernate extension annotation.
protected boolean isHibernateExtensionMapping() {
	return hibernateExtensionMapping;
}
// Collector-style setter used while assembling the binder.
private void setUpdatable(boolean updatable) {
	this.updatable = updatable;
}
// Inheritance state map is required before bind(); see scheduleSecondPass().
private void setInheritanceStatePerClass(Map<ClassDetails, InheritanceState> inheritanceStatePerClass) {
	this.inheritanceStatePerClass = inheritanceStatePerClass;
}
// Collector-style setter used while assembling the binder.
private void setInsertable(boolean insertable) {
	this.insertable = insertable;
}
// Cascade types aggregated from JPA and Hibernate cascade annotations.
private void setCascadeStrategy(EnumSet<CascadeType> cascadeTypes) {
	this.cascadeTypes = cascadeTypes;
}
// Field vs. property access for the bound attribute.
private void setAccessType(AccessType accessType) {
	this.accessType = accessType;
}
// Join columns of the target-entity side of the association table.
private void setInverseJoinColumns(AnnotatedJoinColumns inverseJoinColumns) {
	this.inverseJoinColumns = inverseJoinColumns;
}
// Join columns of the owning side of the association table.
private void setJoinColumns(AnnotatedJoinColumns joinColumns) {
	this.joinColumns = joinColumns;
}
// Holder (entity/component) that owns the collection attribute.
private void setPropertyHolder(PropertyHolder propertyHolder) {
	this.propertyHolder = propertyHolder;
}
// JPA @OrderBy, applied during the second pass (see applySortingAndOrdering).
private void setJpaOrderBy(jakarta.persistence.OrderBy jpaOrderBy) {
	this.jpaOrderBy = jpaOrderBy;
}
// Hibernate @SQLOrder, applied up front (see applySortingAndOrdering).
private void setSqlOrder(SQLOrder sqlOrder) {
	this.sqlOrder = sqlOrder;
}
// @SortNatural: sorted collection using the element's natural ordering.
private void setNaturalSort(SortNatural naturalSort) {
	this.naturalSort = naturalSort;
}
// @SortComparator: sorted collection using an explicit Comparator class.
private void setComparatorSort(SortComparator comparatorSort) {
	this.comparatorSort = comparatorSort;
}
/**
 * Entry point for choosing the right CollectionBinder subtype for a
 * property: an explicit {@code @CollectionType} wins; otherwise the binder
 * is derived from the attribute's Java type and annotations.
 */
private static CollectionBinder getCollectionBinder(
		MemberDetails property,
		boolean isHibernateExtensionMapping,
		MetadataBuildingContext buildingContext) {
	final var typeAnnotation =
			property.getAnnotationUsage( CollectionType.class,
					buildingContext.getBootstrapContext().getModelsContext() );
	final var binder = typeAnnotation != null
			? createBinderFromCustomTypeAnnotation( property, typeAnnotation, buildingContext )
			: createBinderAutomatically( property, buildingContext );
	binder.setIsHibernateExtensionMapping( isHibernateExtensionMapping );
	return binder;
}
/**
 * Creates a binder from the attribute's inferred collection classification,
 * preferring a {@code @CollectionTypeRegistration} registered for that
 * classification when one exists.
 */
private static CollectionBinder createBinderAutomatically(MemberDetails property, MetadataBuildingContext context) {
	final var classification = determineCollectionClassification( property, context );
	final var typeRegistration =
			context.getMetadataCollector().findCollectionTypeRegistration( classification );
	return typeRegistration != null
			? createBinderFromTypeRegistration( property, classification, typeRegistration, context )
			: createBinderFromProperty( property, context );
}
/**
 * Creates a binder whose custom UserCollectionType comes from a
 * {@code @CollectionTypeRegistration} matching the attribute's
 * classification.
 */
private static CollectionBinder createBinderFromTypeRegistration(
		MemberDetails property,
		CollectionClassification classification,
		CollectionTypeRegistrationDescriptor typeRegistration,
		MetadataBuildingContext context) {
	return createBinder(
			property,
			() -> createUserTypeBean(
					// role name used for bean identification / diagnostics
					property.getDeclaringType().getName() + "#" + property.getName(),
					typeRegistration.implementation(),
					typeRegistration.parameters(),
					context.getBootstrapContext(),
					context.getMetadataCollector().getMetadataBuildingOptions().isAllowExtensionsInCdi()
			),
			classification,
			context
	);
}
// Creates a plain binder (no custom type) for the attribute's classification.
private static CollectionBinder createBinderFromProperty(MemberDetails property, MetadataBuildingContext context) {
	final var classification = determineCollectionClassification( property, context );
	return createBinder( property, null, classification, context );
}
/**
 * Creates a binder driven by an explicit {@code @CollectionType} annotation;
 * the classification is taken from the instantiated UserCollectionType bean.
 */
private static CollectionBinder createBinderFromCustomTypeAnnotation(
		MemberDetails property,
		CollectionType typeAnnotation,
		MetadataBuildingContext buildingContext) {
	// validates the attribute is actually plural (throws otherwise)
	determineSemanticJavaType( property );
	final var customTypeBean = resolveCustomType( property, typeAnnotation, buildingContext );
	return createBinder(
			property,
			() -> customTypeBean,
			customTypeBean.getBeanInstance().getClassification(),
			buildingContext
	);
}
/**
 * Instantiates (possibly via CDI) the UserCollectionType named by a
 * {@code @CollectionType} annotation, passing along its declared parameters.
 */
private static ManagedBean<? extends UserCollectionType> resolveCustomType(
		MemberDetails property,
		CollectionType typeAnnotation,
		MetadataBuildingContext context) {
	return createUserTypeBean(
			property.getDeclaringType().getName() + "." + property.getName(),
			typeAnnotation.type(),
			PropertiesHelper.map( extractParameters( typeAnnotation ) ),
			context.getBootstrapContext(),
			context.getMetadataCollector().getMetadataBuildingOptions().isAllowExtensionsInCdi()
	);
}
/**
 * Copies the name/value pairs declared on a {@code @CollectionType}
 * annotation into a {@link Properties} instance.
 */
private static Properties extractParameters(CollectionType typeAnnotation) {
	final var declaredParameters = typeAnnotation.parameters();
	final var configParams = new Properties( declaredParameters.length );
	for ( int i = 0; i < declaredParameters.length; i++ ) {
		configParams.put( declaredParameters[i].name(), declaredParameters[i].value() );
	}
	return configParams;
}
/**
 * Maps a collection classification to the concrete binder subtype,
 * distinguishing primitive arrays and sorted vs. unsorted maps/sets.
 */
private static CollectionBinder createBinder(
		MemberDetails property,
		Supplier<ManagedBean<? extends UserCollectionType>> customTypeBeanAccess,
		CollectionClassification classification,
		MetadataBuildingContext buildingContext) {
	return switch ( classification ) {
		case ARRAY -> property.getElementType().getTypeKind() == TypeDetails.Kind.PRIMITIVE
				? new PrimitiveArrayBinder( customTypeBeanAccess, buildingContext )
				: new ArrayBinder( customTypeBeanAccess, buildingContext );
		case BAG -> new BagBinder( customTypeBeanAccess, buildingContext );
		case ID_BAG -> new IdBagBinder( customTypeBeanAccess, buildingContext );
		case LIST -> new ListBinder( customTypeBeanAccess, buildingContext );
		case MAP, ORDERED_MAP -> new MapBinder( customTypeBeanAccess, false, buildingContext );
		case SORTED_MAP -> new MapBinder( customTypeBeanAccess, true, buildingContext );
		case SET, ORDERED_SET -> new SetBinder( customTypeBeanAccess, false, buildingContext );
		case SORTED_SET -> new SetBinder( customTypeBeanAccess, true, buildingContext );
	};
}
/**
 * Determines the collection classification for an attribute, giving special
 * treatment to arrays and to the {@code @Bag} annotation (which forces BAG
 * semantics and is incompatible with list-index annotations).
 */
private static CollectionClassification determineCollectionClassification(
		MemberDetails property,
		MetadataBuildingContext buildingContext) {
	if ( property.isArray() ) {
		return CollectionClassification.ARRAY;
	}
	final var modelsContext = buildingContext.getBootstrapContext().getModelsContext();
	if ( !property.hasAnnotationUsage( Bag.class, modelsContext ) ) {
		// common path: classify from the attribute's semantic Java type
		return determineCollectionClassification( determineSemanticJavaType( property ), property, buildingContext );
	}
	// @Bag forbids any list-index configuration
	if ( property.hasAnnotationUsage( OrderColumn.class, modelsContext ) ) {
		throw new AnnotationException( "Attribute '"
				+ qualify( property.getDeclaringType().getName(), property.getName() )
				+ "' is annotated '@Bag' and may not also be annotated '@OrderColumn'" );
	}
	if ( property.hasAnnotationUsage( ListIndexBase.class, modelsContext ) ) {
		throw new AnnotationException( "Attribute '"
				+ qualify( property.getDeclaringType().getName(), property.getName() )
				+ "' is annotated '@Bag' and may not also be annotated '@ListIndexBase'" );
	}
	// @Bag only makes sense on List or Collection attributes
	final var collectionJavaType = property.getType().determineRawClass().toJavaClass();
	if ( java.util.List.class.equals( collectionJavaType )
			|| java.util.Collection.class.equals( collectionJavaType ) ) {
		return CollectionClassification.BAG;
	}
	else {
		throw new AnnotationException(
				String.format(
						Locale.ROOT,
						"Attribute '%s.%s' of type '%s' is annotated '@Bag' (bags are of type '%s' or '%s')",
						property.getDeclaringType().getName(),
						property.getName(),
						collectionJavaType.getName(),
						java.util.List.class.getName(),
						java.util.Collection.class.getName()
				)
		);
	}
}
/**
 * Classifies a collection from its semantic Java type plus annotations:
 * explicit collection-id annotations force ID_BAG; List attributes are LIST
 * only with explicit index configuration (otherwise BAG or the configured
 * implicit-list default); sorted interfaces map to their SORTED_* variants.
 *
 * @return the classification, or null when the type is not a supported collection
 */
private static CollectionClassification determineCollectionClassification(
		Class<?> semanticJavaType,
		MemberDetails property,
		MetadataBuildingContext buildingContext) {
	if ( semanticJavaType.isArray() ) {
		return CollectionClassification.ARRAY;
	}
	if ( property.hasDirectAnnotationUsage( CollectionId.class )
			|| property.hasDirectAnnotationUsage( CollectionIdJavaClass.class )
			|| property.hasDirectAnnotationUsage( CollectionIdJdbcType.class )
			|| property.hasDirectAnnotationUsage( CollectionIdJdbcTypeCode.class )
			|| property.hasDirectAnnotationUsage( CollectionIdJavaType.class ) ) {
		// explicitly an ID_BAG
		return CollectionClassification.ID_BAG;
	}
	if ( java.util.List.class.isAssignableFrom( semanticJavaType ) ) {
		if ( property.hasDirectAnnotationUsage( OrderColumn.class )
				|| property.hasDirectAnnotationUsage( ListIndexBase.class )
				|| property.hasDirectAnnotationUsage( ListIndexJdbcType.class )
				|| property.hasDirectAnnotationUsage( ListIndexJdbcTypeCode.class )
				|| property.hasDirectAnnotationUsage( ListIndexJavaType.class ) ) {
			// it is implicitly a LIST because of presence of explicit List index config
			return CollectionClassification.LIST;
		}
		if ( property.hasDirectAnnotationUsage( jakarta.persistence.OrderBy.class )
				|| property.hasDirectAnnotationUsage( org.hibernate.annotations.SQLOrder.class ) ) {
			// ordered-at-load collections are bags, not indexed lists
			return CollectionClassification.BAG;
		}
		final var modelsContext = buildingContext.getBootstrapContext().getModelsContext();
		final var manyToMany = property.getAnnotationUsage( ManyToMany.class, modelsContext );
		if ( manyToMany != null && !manyToMany.mappedBy().isBlank() ) {
			// We don't support @OrderColumn on the non-owning side of a many-to-many association.
			return CollectionClassification.BAG;
		}
		final var oneToMany = property.getAnnotationUsage( OneToMany.class, modelsContext );
		if ( oneToMany != null && !oneToMany.mappedBy().isBlank() ) {
			// Unowned to-many mappings are always considered BAG by default
			return CollectionClassification.BAG;
		}
		// otherwise, return the implicit classification for List attributes
		return buildingContext.getBuildingOptions().getMappingDefaults().getImplicitListClassification();
	}
	// order matters: check sorted subinterfaces before their supertypes
	if ( java.util.SortedSet.class.isAssignableFrom( semanticJavaType ) ) {
		return CollectionClassification.SORTED_SET;
	}
	if ( java.util.Set.class.isAssignableFrom( semanticJavaType ) ) {
		return CollectionClassification.SET;
	}
	if ( java.util.SortedMap.class.isAssignableFrom( semanticJavaType ) ) {
		return CollectionClassification.SORTED_MAP;
	}
	if ( java.util.Map.class.isAssignableFrom( semanticJavaType ) ) {
		return CollectionClassification.MAP;
	}
	if ( java.util.Collection.class.isAssignableFrom( semanticJavaType ) ) {
		return property.hasDirectAnnotationUsage( CollectionId.class )
				? CollectionClassification.ID_BAG
				: CollectionClassification.BAG;
	}
	return null;
}
/**
 * Resolves the collection interface that governs the attribute's semantics,
 * rejecting non-plural attributes with a descriptive error.
 *
 * @throws AnnotationException if the attribute is not a collection
 */
private static Class<?> determineSemanticJavaType(MemberDetails property) {
	if ( property.isPlural() ) {
		return inferCollectionClassFromSubclass(
				property.getType().determineRawClass().toJavaClass() );
	}
	else {
		throw new AnnotationException(
				String.format(
						Locale.ROOT,
						"Property '%s.%s' is not a collection and may not be a '@OneToMany', '@ManyToMany', or '@ElementCollection'",
						property.getDeclaringType().getName(),
						property.resolveAttributeName()
				)
		);
	}
}
/**
 * Maps a concrete collection class to the first matching interface in
 * {@code INFERRED_CLASS_PRIORITY} (most specific first), or null when the
 * class is not a supported collection type.
 */
private static Class<?> inferCollectionClassFromSubclass(Class<?> clazz) {
	for ( int i = 0; i < INFERRED_CLASS_PRIORITY.size(); i++ ) {
		final Class<?> candidate = INFERRED_CLASS_PRIORITY.get( i );
		if ( candidate.isAssignableFrom( clazz ) ) {
			return candidate;
		}
	}
	return null;
}
// Normalizes empty mappedBy to null so "unowned" checks can test for null.
private void setMappedBy(String mappedBy) {
	this.mappedBy = nullIfEmpty( mappedBy );
}
// Binder for the association/collection table (see bindJoinedTableAssociation).
private void setTableBinder(TableBinder tableBinder) {
	this.tableBinder = tableBinder;
}
// Type of the collection element (value side).
private void setElementType(TypeDetails collectionElementType) {
	this.collectionElementType = collectionElementType;
}
// Convenience overload: resolve a Java class to ClassDetails, then delegate.
private void setTargetEntity(Class<?> targetEntity) {
	setTargetEntity( modelsContext().getClassDetailsRegistry()
			.resolveClassDetails( targetEntity.getName() ) );
}
// Convenience overload: wrap ClassDetails in a TypeDetails, then delegate.
private void setTargetEntity(ClassDetails targetEntity) {
	setTargetEntity( new ClassTypeDetailsImpl( targetEntity, TypeDetails.Kind.CLASS ) );
}
// Canonical setter for the association's target entity type.
private void setTargetEntity(TypeDetails targetEntity) {
	this.targetEntity = targetEntity;
}
protected abstract Collection createCollection(PersistentClass persistentClass);
// The collection mapping being assembled; non-null only after bind() starts.
private Collection getCollection() {
	return collection;
}
// Name of the collection attribute on the owner.
private void setPropertyName(String propertyName) {
	this.propertyName = propertyName;
}
// Records the declaring class; the flag lets bindProperty() assert it was set.
private void setDeclaringClass(ClassDetails declaringClass) {
	this.declaringClass = declaringClass;
	this.declaringClassSet = true;
}
/**
 * Main binding sequence: creates the mapping model object, assigns its role,
 * applies fetching/sorting/caching/loader configuration, registers second
 * passes, and finally attaches the Property to its holder. Statement order
 * is significant — later steps read state established by earlier ones.
 */
private void bind() {
	if ( property != null ) {
		final EmbeddedTable misplaced = property.getDirectAnnotationUsage( EmbeddedTable.class );
		if ( misplaced != null ) {
			// not allowed
			throw new AnnotationPlacementException( "@EmbeddedTable only supported for use on entity or mapped-superclass" );
		}
	}
	collection = createCollection( propertyHolder.getPersistentClass() );
	// role = qualified path uniquely identifying this collection in the model
	final String role = qualify( propertyHolder.getPath(), propertyName );
	if ( BOOT_LOGGER.isTraceEnabled() ) {
		BOOT_LOGGER.bindingCollectionRole( role );
	}
	collection.setRole( role );
	collection.setMappedByProperty( mappedBy );
	checkMapKeyColumn();
	//set laziness
	defineFetchingStrategy();
	collection.setMutable( isMutable() );
	//work on association
	final boolean isUnowned = isUnownedCollection();
	bindOptimisticLock( isUnowned );
	applySortingAndOrdering();
	bindCache();
	bindLoader();
	detectMappedByProblem( isUnowned );
	// the unowned ("mappedBy") side is the inverse side
	collection.setInverse( isUnowned );
	//TODO reduce tableBinder != null and oneToMany
	scheduleSecondPass( isUnowned );
	getMetadataCollector().addCollectionBinding( collection );
	bindProperty();
}
// A collection with a mappedBy is the unowned (inverse) side.
private boolean isUnownedCollection() {
	return mappedBy != null;
}
// Mutable unless the attribute is annotated @Immutable.
private boolean isMutable() {
	return !property.hasDirectAnnotationUsage( Immutable.class );
}
// @MapKey and @MapKeyColumn are mutually exclusive on the same attribute.
private void checkMapKeyColumn() {
	if ( property.hasDirectAnnotationUsage( MapKeyColumn.class ) && hasMapKeyProperty ) {
		throw new AnnotationException( "Collection '" + qualify( propertyHolder.getPath(), propertyName )
				+ "' is annotated both '@MapKey' and '@MapKeyColumn'" );
	}
}
/**
 * Registers the collection's second pass; for an unowned many-to-many it
 * also records the mappedBy relation so the owning side can be resolved.
 */
private void scheduleSecondPass(boolean isMappedBy) {
	final var metadataCollector = getMetadataCollector();
	//many to many may need some second pass information
	if ( !oneToMany && isMappedBy ) {
		metadataCollector.addMappedBy( getElementType().getName(), mappedBy, propertyName );
	}
	// second pass relies on the inheritance state map having been injected
	if ( inheritanceStatePerClass == null) {
		throw new AssertionFailure( "inheritanceStatePerClass not set" );
	}
	metadataCollector.addSecondPass( getSecondPass(), !isMappedBy );
}
// Explicit @OptimisticLock wins; otherwise owned collections participate in
// version checks and unowned (mappedBy) collections do not.
private void bindOptimisticLock(boolean isMappedBy) {
	final var lockAnn = property.getDirectAnnotationUsage( OptimisticLock.class );
	final boolean includeInOptimisticLockChecks = lockAnn != null ? !lockAnn.excluded() : !isMappedBy;
	collection.setOptimisticLocked( includeInOptimisticLockChecks );
}
// Applies second-level cache settings; the query-cache layout is always set.
private void bindCache() {
	//set cache
	if ( isNotBlank( cacheConcurrencyStrategy ) ) {
		collection.setCacheConcurrencyStrategy( cacheConcurrencyStrategy );
		collection.setCacheRegionName( cacheRegionName );
	}
	collection.setQueryCacheLayout( queryCacheLayout );
}
/**
 * Validates annotation combinations around {@code mappedBy}: the unowned
 * side may not declare join columns/tables; map-key and order columns on the
 * unowned side are errors for non-one-to-many but only warnings for
 * one-to-many (legacy leniency); a unidirectional one-to-many with
 * {@code @OnDelete} must declare an explicit join column.
 */
private void detectMappedByProblem(boolean isMappedBy) {
	if ( isMappedBy ) {
		if ( property.hasDirectAnnotationUsage( JoinColumn.class )
				|| property.hasDirectAnnotationUsage( JoinColumns.class ) ) {
			throw new AnnotationException( "Association '"
					+ qualify( propertyHolder.getPath(), propertyName )
					+ "' is 'mappedBy' another entity and may not specify the '@JoinColumn'" );
		}
		if ( propertyHolder.getJoinTable( property ) != null ) {
			throw new AnnotationException( "Association '"
					+ qualify( propertyHolder.getPath(), propertyName )
					+ "' is 'mappedBy' another entity and may not specify the '@JoinTable'" );
		}
		if ( oneToMany ) {
			// lenient for one-to-many: warn rather than fail
			if ( property.hasDirectAnnotationUsage( MapKeyColumn.class ) ) {
				BOOT_LOGGER.warn( "Association '"
						+ qualify( propertyHolder.getPath(), propertyName )
						+ "' is 'mappedBy' another entity and should not specify a '@MapKeyColumn'"
						+ " (use '@MapKey' instead)" );
			}
			if ( property.hasDirectAnnotationUsage( OrderColumn.class ) ) {
				BOOT_LOGGER.warn( "Association '"
						+ qualify( propertyHolder.getPath(), propertyName )
						+ "' is 'mappedBy' another entity and should not specify an '@OrderColumn'"
						+ " (use '@OrderBy' instead)" );
			}
		}
		else {
			if ( property.hasDirectAnnotationUsage( MapKeyColumn.class ) ) {
				throw new AnnotationException( "Association '"
						+ qualify( propertyHolder.getPath(), propertyName )
						+ "' is 'mappedBy' another entity and may not specify a '@MapKeyColumn'"
						+ " (use '@MapKey' instead)" );
			}
			if ( property.hasDirectAnnotationUsage( OrderColumn.class ) ) {
				throw new AnnotationException( "Association '"
						+ qualify( propertyHolder.getPath(), propertyName )
						+ "' is 'mappedBy' another entity and may not specify an '@OrderColumn'"
						+ " (use '@OrderBy' instead)" );
			}
		}
	}
	else if ( oneToMany
			&& property.hasDirectAnnotationUsage( OnDelete.class )
			&& !hasExplicitJoinColumn() ) {
		throw new AnnotationException( "Unidirectional '@OneToMany' association '"
				+ qualify( propertyHolder.getPath(), propertyName )
				+ "' is annotated '@OnDelete' and must explicitly specify a '@JoinColumn'" );
	}
}
/**
 * Whether the property explicitly declares foreign-key join columns, either
 * directly via {@code @JoinColumn}/{@code @JoinColumns} or through a
 * {@code @JoinTable} listing at least one join column. (The original relied
 * on {@code &&} binding tighter than {@code ||}; the grouping is made
 * explicit here.)
 */
private boolean hasExplicitJoinColumn() {
	if ( property.hasDirectAnnotationUsage( JoinColumn.class ) ) {
		return true;
	}
	if ( property.hasDirectAnnotationUsage( JoinColumns.class ) ) {
		return true;
	}
	return property.hasDirectAnnotationUsage( JoinTable.class )
		&& property.getDirectAnnotationUsage( JoinTable.class ).joinColumns().length > 0;
}
/**
 * Builds the Property model object for the collection attribute and attaches
 * it to the property holder; also propagates orphan-delete from the cascade
 * set and schedules attribute binders for the second pass.
 */
private void bindProperty() {
	//property building
	final var binder = new PropertyBinder();
	binder.setName( propertyName );
	binder.setValue( collection );
	binder.setCascade( cascadeTypes );
	if ( cascadeTypes != null && cascadeTypes.contains( DELETE_ORPHAN ) ) {
		collection.setOrphanDelete( true );
	}
	binder.setLazy( collection.isLazy() );
	final var lazyGroupAnnotation = property.getDirectAnnotationUsage( LazyGroup.class );
	if ( lazyGroupAnnotation != null ) {
		binder.setLazyGroup( lazyGroupAnnotation.value() );
	}
	binder.setAccessType( accessType );
	binder.setMemberDetails( property );
	binder.setInsertable( insertable );
	binder.setUpdatable( updatable );
	binder.setBuildingContext( buildingContext );
	binder.setHolder( propertyHolder );
	Property prop = binder.makeProperty();
	//we don't care about the join stuffs because the column is on the association table.
	if ( !declaringClassSet ) {
		throw new AssertionFailure( "DeclaringClass is not set in CollectionBinder while binding" );
	}
	propertyHolder.addProperty( prop, property, declaringClass );
	binder.callAttributeBindersInSecondPass( prop );
}
/**
 * Applies custom SQL/HQL overrides ('@SQLInsert', '@SQLUpdate', '@SQLDelete',
 * '@SQLDeleteAll', '@SQLSelect', '@HQLSelect') to the collection mapping,
 * registering named loader queries for the select variants.
 */
private void bindLoader() {
	//SQL overriding
	final var sqlInsert = property.getDirectAnnotationUsage( SQLInsert.class );
	if ( sqlInsert != null ) {
		collection.setCustomSQLInsert(
				sqlInsert.sql().trim(),
				sqlInsert.callable(),
				fromResultCheckStyle( sqlInsert.check() )
		);
		// a custom Expectation class (other than the marker default) overrides
		// the result-check style
		final var verifier = sqlInsert.verify();
		if ( verifier != Expectation.class ) {
			collection.setInsertExpectation( getDefaultSupplier( verifier ) );
		}
	}
	final var sqlUpdate = property.getDirectAnnotationUsage( SQLUpdate.class );
	if ( sqlUpdate != null ) {
		collection.setCustomSQLUpdate(
				sqlUpdate.sql().trim(),
				sqlUpdate.callable(),
				fromResultCheckStyle( sqlUpdate.check() )
		);
		final var verifier = sqlUpdate.verify();
		if ( verifier != Expectation.class ) {
			collection.setUpdateExpectation( getDefaultSupplier( verifier ) );
		}
	}
	final var sqlDelete = property.getDirectAnnotationUsage( SQLDelete.class );
	if ( sqlDelete != null ) {
		collection.setCustomSQLDelete(
				sqlDelete.sql().trim(),
				sqlDelete.callable(),
				fromResultCheckStyle( sqlDelete.check() )
		);
		final var verifier = sqlDelete.verify();
		if ( verifier != Expectation.class ) {
			collection.setDeleteExpectation( getDefaultSupplier( verifier ) );
		}
	}
	final var sqlDeleteAll = property.getDirectAnnotationUsage( SQLDeleteAll.class );
	if ( sqlDeleteAll != null ) {
		collection.setCustomSQLDeleteAll(
				sqlDeleteAll.sql().trim(),
				sqlDeleteAll.callable(),
				fromResultCheckStyle( sqlDeleteAll.check() )
		);
		final var verifier = sqlDeleteAll.verify();
		if ( verifier != Expectation.class ) {
			collection.setDeleteAllExpectation( getDefaultSupplier( verifier ) );
		}
	}
	final var sqlSelect = property.getDirectAnnotationUsage( SQLSelect.class );
	if ( sqlSelect != null ) {
		// register a synthetic named query and point the loader at it
		final String loaderName = getRole() + "$SQLSelect";
		collection.setLoaderName( loaderName );
		// TODO: pass in the collection element type here
		bindNativeQuery( loaderName, sqlSelect, null, buildingContext );
	}
	final var hqlSelect = property.getDirectAnnotationUsage( HQLSelect.class );
	if ( hqlSelect != null ) {
		final String loaderName = getRole() + "$HQLSelect";
		collection.setLoaderName( loaderName );
		bindQuery( loaderName, hqlSelect, buildingContext );
	}
}
/**
 * Validates and applies sorting ('@SortNatural'/'@SortComparator') and
 * ordering ('@OrderBy'/'@SQLOrder') to the collection. A collection may be
 * sorted or ordered, but not both, and may not carry two competing
 * annotations of the same family.
 */
private void applySortingAndOrdering() {
	if ( naturalSort != null && comparatorSort != null ) {
		throw buildIllegalSortCombination();
	}
	final boolean sorted = naturalSort != null || comparatorSort != null;
	// '@SortNatural' means "sorted with no explicit comparator"; only
	// '@SortComparator' contributes a comparator class (the original had a
	// redundant branch assigning null for the natural-sort case)
	final Class<? extends Comparator<?>> comparatorClass =
			comparatorSort != null ? comparatorSort.value() : null;
	if ( jpaOrderBy != null && sqlOrder != null ) {
		throw buildIllegalOrderCombination();
	}
	final boolean ordered = jpaOrderBy != null || sqlOrder != null;
	if ( ordered ) {
		// we can only apply the sql-based order by up front. The jpa order by has to wait for second pass
		if ( sqlOrder != null ) {
			collection.setOrderBy( sqlOrder.value() );
		}
	}
	final boolean isSorted = isSortedCollection || sorted;
	if ( isSorted && ordered ) {
		throw buildIllegalOrderAndSortCombination();
	}
	collection.setSorted( isSorted );
	instantiateComparator( collection, comparatorClass );
}
/**
 * Instantiates the given comparator class (if any) via its no-arg
 * constructor and attaches it to the collection mapping.
 *
 * @throws AnnotationException if the comparator cannot be instantiated
 */
private void instantiateComparator(Collection collection, Class<? extends Comparator<?>> comparatorClass) {
	if ( comparatorClass != null ) {
		try {
			// Class.newInstance() is deprecated and propagates undeclared
			// checked exceptions; go through the declared no-arg constructor
			collection.setComparator( comparatorClass.getDeclaredConstructor().newInstance() );
		}
		catch (Exception e) {
			throw new AnnotationException(
					String.format(
							"Could not instantiate comparator class '%s' for collection '%s'",
							comparatorClass.getName(),
							safeCollectionRole()
					),
					e
			);
		}
	}
}
// Error for a collection annotated with both JPA @OrderBy and Hibernate @SQLOrder.
private AnnotationException buildIllegalOrderCombination() {
	return new AnnotationException(
			String.format(
					Locale.ROOT,
					"Collection '%s' is annotated both '@%s' and '@%s'",
					safeCollectionRole(),
					jakarta.persistence.OrderBy.class.getName(),
					org.hibernate.annotations.SQLOrder.class.getName()
			)
	);
}
/**
 * Error for a collection that is both sorted and ordered. Returns the
 * exception (for the caller to throw) rather than throwing it, for
 * consistency with the sibling {@code buildIllegal*} factories — the
 * original {@code throw} made the declared return type unreachable.
 */
private AnnotationException buildIllegalOrderAndSortCombination() {
	return new AnnotationException(
			String.format(
					Locale.ROOT,
					"Collection '%s' is both sorted and ordered (only one of '@%s', '@%s', '@%s', and '@%s' may be used)",
					safeCollectionRole(),
					jakarta.persistence.OrderBy.class.getName(),
					org.hibernate.annotations.SQLOrder.class.getName(),
					SortComparator.class.getName(),
					SortNatural.class.getName()
			)
	);
}
/**
 * Error for a collection carrying both {@code @SortNatural} and
 * {@code @SortComparator}. Uses {@link Locale#ROOT} for the format call,
 * consistently with the sibling message builders above.
 */
private AnnotationException buildIllegalSortCombination() {
	return new AnnotationException(
			String.format(
					Locale.ROOT,
					"Collection '%s' is annotated both '@%s' and '@%s'",
					safeCollectionRole(),
					SortNatural.class.getName(),
					SortComparator.class.getName()
			)
	);
}
// Resolves laziness and fetch style for the collection from its annotations;
// fetch-profile overrides are registered for a later second pass.
private void defineFetchingStrategy() {
	handleLazy();
	handleFetch();
	handleFetchProfileOverrides();
}
// Shortcut to the hibernate-models context used for annotation-usage lookups.
private ModelsContext modelsContext() {
	return buildingContext.getBootstrapContext().getModelsContext();
}
// Each @FetchProfileOverride on the property is resolved in its own second pass,
// once the rest of the model is known.
private void handleFetchProfileOverrides() {
	property.forEachAnnotationUsage( FetchProfileOverride.class, modelsContext(),
			override -> getMetadataCollector().addSecondPass(
					new FetchSecondPass( override, propertyHolder, propertyName, buildingContext ) ) );
}
// Resolves the collection's fetch mode: an explicit Hibernate @Fetch wins;
// otherwise the mode is derived from the JPA fetch type.
private void handleFetch() {
	final var fetchAnnotation = property.getDirectAnnotationUsage( Fetch.class );
	if ( fetchAnnotation == null ) {
		collection.setFetchMode( getFetchMode( getJpaFetchType() ) );
	}
	else {
		// Hibernate @Fetch annotation takes precedence
		setHibernateFetchMode( fetchAnnotation.value() );
	}
}
// Translates the Hibernate @Fetch mode into the mapping-model fetch settings.
private void setHibernateFetchMode(org.hibernate.annotations.FetchMode fetchMode) {
	if ( fetchMode == org.hibernate.annotations.FetchMode.JOIN ) {
		// join fetching implies eager loading
		collection.setFetchMode( FetchMode.JOIN );
		collection.setLazy( false );
	}
	else if ( fetchMode == org.hibernate.annotations.FetchMode.SELECT ) {
		collection.setFetchMode( FetchMode.SELECT );
	}
	else if ( fetchMode == org.hibernate.annotations.FetchMode.SUBSELECT ) {
		// subselect is SELECT-mode loading plus subselect-loadable flags on
		// both the collection and its owner
		collection.setFetchMode( FetchMode.SELECT );
		collection.setSubselectLoadable( true );
		collection.getOwner().setSubselectLoadableCollections( true );
	}
	else {
		throw new AssertionFailure( "unknown fetch type" );
	}
}
// A collection is lazy exactly when its JPA fetch type is LAZY;
// "extra lazy" is never enabled from annotations here.
private void handleLazy() {
	collection.setLazy( getJpaFetchType() == LAZY );
	collection.setExtraLazy( false );
}
// Reads the JPA fetch type from whichever association annotation is present
// on the property; exactly one of the four is expected.
private FetchType getJpaFetchType() {
	final var oneToMany = property.getDirectAnnotationUsage( OneToMany.class );
	if ( oneToMany != null ) {
		return oneToMany.fetch();
	}
	final var manyToMany = property.getDirectAnnotationUsage( ManyToMany.class );
	if ( manyToMany != null ) {
		return manyToMany.fetch();
	}
	final var elementCollection = property.getDirectAnnotationUsage( ElementCollection.class );
	if ( elementCollection != null ) {
		return elementCollection.fetch();
	}
	final var manyToAny = property.getDirectAnnotationUsage( ManyToAny.class );
	if ( manyToAny != null ) {
		return manyToAny.fetch();
	}
	throw new AssertionFailure(
			"Define fetch strategy for collection not annotated @ManyToMany, @OneToMany, nor @ElementCollection"
	);
}
/**
 * The element type of the collection: the explicit {@code targetEntity} if one
 * was given, otherwise the generic element type inferred from the declaration.
 *
 * @throws AnnotationException if neither is available (raw type, no target)
 */
TypeDetails getElementType() {
	if ( isDefault( targetEntity ) ) {
		if ( collectionElementType != null ) {
			return collectionElementType;
		}
		else {
			// this branch means targetEntity was NOT specified, so the element
			// type is unknowable — the previous message claimed the opposite
			throw new AnnotationException( "Collection '" + safeCollectionRole()
					+ "' is declared with a raw type and has no explicit 'targetEntity'" );
		}
	}
	else {
		return targetEntity;
	}
}
// The second pass that completes this collection binding once every entity
// mapping is known; delegates to bindStarToManySecondPass().
SecondPass getSecondPass() {
	return new CollectionSecondPass( collection ) {
		@Override
		public void secondPass(Map<String, PersistentClass> persistentClasses) {
			bindStarToManySecondPass( persistentClasses );
		}
	};
}
/**
 * @return true if it's a foreign key, false if it's an association table
 */
protected boolean bindStarToManySecondPass(Map<String, PersistentClass> persistentClasses) {
	final boolean foreignKeyMapping = noAssociationTable( persistentClasses );
	if ( foreignKeyMapping ) {
		// plain foreign key on the target table
		bindOneToManySecondPass( persistentClasses );
	}
	else {
		// mapped through an association (join) table
		bindManyToManySecondPass( persistentClasses );
	}
	return foreignKeyMapping;
}
// Whether the owning ('mappedBy') property lives on a secondary-table join of
// the target entity. Only meaningful for the unowned side of a known entity.
private boolean isReversePropertyInJoin(
		TypeDetails elementType,
		PersistentClass persistentClass,
		Map<String, PersistentClass> persistentClasses) {
	if ( persistentClass == null || !isUnownedCollection() ) {
		return false;
	}
	final var mappedByProperty = getMappedByProperty( elementType, persistentClass );
	checkMappedByType( mappedBy, mappedByProperty.getValue(), propertyName, propertyHolder, persistentClasses );
	// join number 0 is the primary table; anything else is a secondary-table join
	return persistentClass.getJoinNumber( mappedByProperty ) != 0;
}
/**
 * Resolves the {@code mappedBy} property on the target entity.
 *
 * @throws AnnotationException if no such property exists on the target;
 *         the underlying {@link MappingException} is preserved as the cause
 */
private Property getMappedByProperty(TypeDetails elementType, PersistentClass persistentClass) {
	try {
		return persistentClass.getRecursiveProperty( mappedBy );
	}
	catch (MappingException e) {
		// keep the original failure as the cause instead of discarding it
		throw new AnnotationException(
				"Collection '" + safeCollectionRole()
						+ "' is 'mappedBy' a property named '" + mappedBy
						+ "' which does not exist in the target entity '" + elementType.getName() + "'",
				e
		);
	}
}
// True when this mapping needs no association table: the element is a known
// entity, its owning property is not on a secondary join, the mapping is a
// one-to-many without an explicit join table, and the join column is either
// implicit (unowned side) or an explicit foreign @JoinColumn.
private boolean noAssociationTable(Map<String, PersistentClass> persistentClasses) {
	final var elementType = getElementType();
	final var persistentClass = persistentClasses.get( elementType.getName() );
	if ( persistentClass == null ) {
		return false;
	}
	// NOTE: evaluation order preserved — isReversePropertyInJoin() may throw
	// for an invalid 'mappedBy' and must run before the cheaper flag checks
	return !isReversePropertyInJoin( elementType, persistentClass, persistentClasses )
			&& oneToMany
			&& !isExplicitAssociationTable
			&& ( implicitJoinColumn() || explicitForeignJoinColumn() );
}
// True when no @JoinColumn was written and this is the unowned ('mappedBy')
// side — i.e. the join column is entirely implicit.
private boolean implicitJoinColumn() {
	return joinColumns.getJoinColumns().get(0).isImplicit()
			&& isUnownedCollection(); //implicit @JoinColumn
}
// True when the foreign-key side declares an explicit @JoinColumn.
private boolean explicitForeignJoinColumn() {
	return !foreignJoinColumns.getJoinColumns().get(0).isImplicit(); //this is an explicit @JoinColumn
}
/**
 * Bind a {@link OneToMany} association.
 * <p>
 * Creates the {@code OneToMany} element, validates that the referenced entity
 * is mapped, resolves the collection table from the {@code mappedBy} property
 * or the foreign join columns, and binds key, filters and restrictions.
 */
protected void bindOneToManySecondPass(Map<String, PersistentClass> persistentClasses) {
	if ( property == null ) {
		throw new AssertionFailure( "Null property" );
	}
	logOneToManySecondPass();
	final var oneToMany =
			new org.hibernate.mapping.OneToMany( buildingContext, getCollection().getOwner() );
	collection.setElement( oneToMany );
	oneToMany.setReferencedEntityName( getElementType().getName() );
	oneToMany.setNotFoundAction( notFoundAction );
	final String referencedEntityName = oneToMany.getReferencedEntityName();
	final var associatedClass = persistentClasses.get( referencedEntityName );
	// check for an unmapped target BEFORE handleJpaOrderBy(): building an
	// id-based order-by fragment dereferences associatedClass, so the old
	// ordering could fail with an NPE instead of this explicit error
	if ( associatedClass == null ) {
		throw new MappingException(
				String.format( "Association [%s] for entity [%s] references unmapped class [%s]",
						propertyName, propertyHolder.getClassName(), referencedEntityName )
		);
	}
	handleJpaOrderBy( collection, associatedClass );
	oneToMany.setAssociatedClass( associatedClass );
	final var joins = getMetadataCollector().getJoins( referencedEntityName );
	foreignJoinColumns.setPropertyHolder( buildPropertyHolder(
			associatedClass,
			joins,
			foreignJoinColumns.getBuildingContext(),
			inheritanceStatePerClass
	) );
	foreignJoinColumns.setJoins( joins );
	// the collection table is the table of the 'mappedBy' property if unowned,
	// otherwise the table of the foreign join columns
	final var collectionTable =
			foreignJoinColumns.hasMappedBy()
					? associatedClass.getRecursiveProperty( foreignJoinColumns.getMappedBy() )
							.getValue().getTable()
					: foreignJoinColumns.getTable();
	collection.setCollectionTable( collectionTable );
	bindSynchronize();
	bindFilters( false );
	handleWhere( false );
	// the element type IS the referenced entity, already resolved above
	bindCollectionSecondPass( associatedClass, foreignJoinColumns );
	if ( !collection.isInverse() && !collection.getKey().isNullable() ) {
		createOneToManyBackref( oneToMany );
	}
}
// A non-inverse one-to-many with a non-nullable foreign key needs a synthetic
// "backref" property on the target entity so the key column can be written.
private void createOneToManyBackref(org.hibernate.mapping.OneToMany oneToMany) {
	final String entityName = oneToMany.getReferencedEntityName();
	final var referencedEntity = getMetadataCollector().getEntityBinding( entityName );
	final var firstColumn = foreignJoinColumns.getColumns().get( 0 );
	final var backref = new Backref();
	backref.setName( '_' + foreignJoinColumns.getPropertyName()
			+ '_' + firstColumn.getLogicalColumnName()
			+ "Backref" );
	backref.setOptional( true );
	backref.setUpdatable( false );
	backref.setSelectable( false );
	backref.setCollectionRole( getRole() );
	backref.setEntityName( collection.getOwner().getEntityName() );
	backref.setValue( collection.getKey() );
	referencedEntity.addProperty( backref );
}
// Translates the JPA @OrderBy annotation (if present) into an order-by
// fragment on the collection mapping.
private void handleJpaOrderBy(Collection collection, PersistentClass associatedClass) {
	final String hqlOrderBy = extractHqlOrderBy( jpaOrderBy );
	if ( hqlOrderBy == null ) {
		return;
	}
	final String fragment = buildOrderByClauseFromHql( hqlOrderBy, associatedClass );
	if ( isNotBlank( fragment ) ) {
		collection.setOrderBy( fragment );
	}
}
// @Synchronize lists extra tables whose state this collection depends on;
// logical names are translated through the physical naming strategy first.
private void bindSynchronize() {
	final var synchronizeAnnotation = property.getDirectAnnotationUsage( Synchronize.class );
	if ( synchronizeAnnotation != null ) {
		final boolean logical = synchronizeAnnotation.logical();
		for ( String tableName : synchronizeAnnotation.value() ) {
			collection.addSynchronizedTable( logical ? toPhysicalName( tableName ) : tableName );
		}
	}
}
// Applies the configured physical naming strategy to a logical table name and
// renders the result for the target dialect.
private String toPhysicalName(String logicalName) {
	final var jdbcEnvironment = getMetadataCollector().getDatabase().getJdbcEnvironment();
	final var logicalIdentifier = jdbcEnvironment.getIdentifierHelper().toIdentifier( logicalName );
	final var physicalIdentifier =
			buildingContext.getBuildingOptions().getPhysicalNamingStrategy()
					.toPhysicalTableName( logicalIdentifier, jdbcEnvironment );
	return physicalIdentifier.render( jdbcEnvironment.getDialect() );
}
// Populates alias->table and alias->entity lookup maps from @SqlFragmentAlias
// usages attached to a filter.
private static void fillAliasMaps(
		SqlFragmentAlias[] aliases,
		Map<String, String> aliasTableMap,
		Map<String, String> aliasEntityMap) {
	for ( var fragmentAlias : aliases ) {
		final String alias = fragmentAlias.alias();
		final String tableName = fragmentAlias.table();
		if ( isNotBlank( tableName ) ) {
			aliasTableMap.put( alias, tableName );
		}
		final var entity = fragmentAlias.entity();
		// 'void.class' is the annotation's "not specified" marker
		if ( entity != void.class ) {
			aliasEntityMap.put( alias, entity.getName() );
		}
	}
}
// Applies every @Filter and @FilterJoinTable declared on the property.
private void bindFilters(boolean hasAssociationTable) {
	final var context = modelsContext();
	property.forEachAnnotationUsage( Filter.class, context,
			filter -> addFilter( hasAssociationTable, filter ) );
	property.forEachAnnotationUsage( FilterJoinTable.class, context,
			filter -> addFilterJoinTable( hasAssociationTable, filter ) );
}
// Resolves the alias maps and condition for one @Filter, then attaches it
// either as a many-to-many filter (target entity table) or a plain
// collection filter.
private void addFilter(boolean hasAssociationTable, Filter filter) {
	final Map<String, String> aliasTableMap = new HashMap<>();
	final Map<String, String> aliasEntityMap = new HashMap<>();
	fillAliasMaps( filter.aliases(), aliasTableMap, aliasEntityMap );
	final String name = filter.name();
	final String condition = getFilterCondition( filter );
	final boolean autoAliasInjection = filter.deduceAliasInjectionPoints();
	if ( hasAssociationTable ) {
		// applies to the associated entity table of a many-to-many, not the join table
		collection.addManyToManyFilter( name, condition, autoAliasInjection, aliasTableMap, aliasEntityMap );
	}
	else {
		collection.addFilter( name, condition, autoAliasInjection, aliasTableMap, aliasEntityMap );
	}
}
// Applies @SQLRestriction / @SQLJoinTableRestriction "where" fragments to the
// appropriate table(s), depending on whether there is an association table.
private void handleWhere(boolean hasAssociationTable) {
	final String whereClause = getWhereClause();
	if ( hasAssociationTable ) {
		// A many-to-many association has an association (join) table
		// Collection#setManytoManyWhere is used to set the "where" clause that applies
		// to the many-to-many associated entity table (not the join table).
		collection.setManyToManyWhere( whereClause );
	}
	else {
		// A one-to-many association does not have an association (join) table.
		// Collection#setWhere is used to set the "where" clause that applies to the collection table
		// (which is the associated entity table for a one-to-many association).
		collection.setWhere( whereClause );
	}
	final String whereJoinTableClause = getWhereJoinTableClause();
	if ( isNotBlank( whereJoinTableClause ) ) {
		if ( hasAssociationTable ) {
			// This is a many-to-many association.
			// Collection#setWhere is used to set the "where" clause that applies to the collection table
			// (which is the join table for a many-to-many association).
			collection.setWhere( whereJoinTableClause );
		}
		else {
			// @SQLJoinTableRestriction is meaningless without a join table
			throw new AnnotationException(
					"Collection '" + qualify( propertyHolder.getPath(), propertyName )
							+ "' is an association with no join table and may not have a 'WhereJoinTable'"
			);
		}
	}
}
// The join-table "where" fragment from @SQLJoinTableRestriction, or null.
private String getWhereJoinTableClause() {
	final var restriction = property.getDirectAnnotationUsage( SQLJoinTableRestriction.class );
	return restriction == null ? null : restriction.value();
}
private String getWhereClause() {
	// There are 2 possible sources of "where" clauses that apply to the associated entity table:
	// 1) from the associated entity mapping; i.e., @Entity @Where(clause="...")
	//    (ignored if useEntityWhereClauseForCollections == false)
	// 2) from the collection mapping;
	//    for one-to-many, e.g., @OneToMany @JoinColumn @Where(clause="...") public Set<Rating> getRatings();
	//    for many-to-many e.g., @ManyToMany @Where(clause="...") public Set<Rating> getRatings();
	// When both are present they are combined with AND.
	return getNonEmptyOrConjunctionIfBothNonEmpty( getWhereOnClassClause(), getWhereOnCollectionClause() );
}
// The @SQLRestriction fragment declared on the collection attribute, or null.
private String getWhereOnCollectionClause() {
	final var restriction = getOverridableAnnotation( property, SQLRestriction.class, getBuildingContext() );
	return restriction == null ? null : restriction.value();
}
// The @SQLRestriction fragment declared on the associated entity class, or null.
private String getWhereOnClassClause() {
	final var rawElementClass = property.getAssociatedType().determineRawClass();
	final var restriction = getOverridableAnnotation( rawElementClass, SQLRestriction.class, buildingContext );
	return restriction == null ? null : restriction.value();
}
// Attaches one @FilterJoinTable to the collection (join) table; it is an
// error to use it on a mapping without a join table.
private void addFilterJoinTable(boolean hasAssociationTable, FilterJoinTable filter) {
	if ( !hasAssociationTable ) {
		throw new AnnotationException( "Collection '" + qualify( propertyHolder.getPath(), propertyName )
				+ "' is an association with no join table and may not have a '@FilterJoinTable'" );
	}
	final Map<String, String> aliasTableMap = new HashMap<>();
	final Map<String, String> aliasEntityMap = new HashMap<>();
	fillAliasMaps( filter.aliases(), aliasTableMap, aliasEntityMap );
	collection.addFilter(
			filter.name(),
			getFilterConditionForJoinTable( filter ),
			filter.deduceAliasInjectionPoints(),
			aliasTableMap,
			aliasEntityMap
	);
}
// The join-table filter condition: the explicit one, or the @FilterDef default.
private String getFilterConditionForJoinTable(FilterJoinTable filterJoinTableAnnotation) {
	final String condition = filterJoinTableAnnotation.condition();
	if ( condition.isBlank() ) {
		return getDefaultFilterCondition( filterJoinTableAnnotation.name(), filterJoinTableAnnotation );
	}
	return condition;
}
// The filter condition: the explicit one, or the @FilterDef default.
private String getFilterCondition(Filter filter) {
	final String condition = filter.condition();
	if ( condition.isBlank() ) {
		return getDefaultFilterCondition( filter.name(), filter );
	}
	return condition;
}
// Falls back to the @FilterDef registered under 'name'; the filter usage is
// invalid if no such definition exists or it supplies no default condition.
private String getDefaultFilterCondition(String name, Annotation annotation) {
	final String role = qualify( propertyHolder.getPath(), propertyName );
	final String annotationName = annotation.annotationType().getSimpleName();
	final var definition = getMetadataCollector().getFilterDefinition( name );
	if ( definition == null ) {
		throw new AnnotationException( "Collection '" + role
				+ "' has a '@" + annotationName
				+ "' for an undefined filter named '" + name + "'" );
	}
	final String defaultCondition = definition.getDefaultFilterCondition();
	if ( isBlank( defaultCondition ) ) {
		throw new AnnotationException( "Collection '" + role +
				"' has a '@" + annotationName
				+ "' with no 'condition' and no default condition was given by the '@FilterDef' named '"
				+ name + "'" );
	}
	return defaultCondition;
}
// Captures region name and concurrency strategy from @Cache, or clears both.
private void setCache(Cache cache) {
	if ( cache == null ) {
		cacheConcurrencyStrategy = null;
		cacheRegionName = null;
	}
	else {
		cacheRegionName = nullIfEmpty( cache.region() );
		cacheConcurrencyStrategy = EntityBinder.getCacheConcurrencyStrategy( cache.usage() );
	}
}
// Captures the requested query-cache layout from @QueryCacheLayout, if any.
private void setQueryCacheLayout(QueryCacheLayout queryCacheLayout) {
	this.queryCacheLayout = queryCacheLayout == null ? null : queryCacheLayout.layout();
}
// Marks whether this binder is processing a one-to-many style mapping.
private void setOneToMany(boolean oneToMany) {
	this.oneToMany = oneToMany;
}
// The column holding the index of an indexed collection (list/array), if any.
private void setIndexColumn(IndexColumn indexColumn) {
	this.indexColumn = indexColumn;
}
// Records the JPA @MapKey; a missing name means "use the primary key of the
// associated entity" per the JPA specification.
private void setMapKey(MapKey key) {
	if ( key == null ) {
		hasMapKeyProperty = false;
	}
	else {
		hasMapKeyProperty = true;
		mapKeyPropertyName = nullIfEmpty( key.name() );
	}
}
// Interprets a JPA @OrderBy fragment:
//   null  -> no ordering; blank -> ascending by identifier;
//   "desc" -> descending by identifier; anything else passes through verbatim.
private static String buildOrderByClauseFromHql(String orderByFragment, PersistentClass associatedClass) {
	if ( orderByFragment == null ) {
		return null;
	}
	if ( orderByFragment.isBlank() ) {
		return buildOrderById( associatedClass, " asc" );
	}
	if ( "desc".equalsIgnoreCase( orderByFragment ) ) {
		return buildOrderById( associatedClass, " desc" );
	}
	return orderByFragment;
}
/**
 * Builds an order-by fragment over the identifier columns of the associated
 * entity: {@code "col1 <dir>, col2 <dir>, ..."}.
 * <p>
 * Uses a separator-prefix build instead of appending and truncating, so an
 * identifier with no selectables yields {@code ""} rather than a
 * {@code StringIndexOutOfBoundsException} from {@code setLength(-2)}.
 */
private static String buildOrderById(PersistentClass associatedClass, String direction) {
	final var order = new StringBuilder();
	String separator = "";
	for ( var selectable : associatedClass.getIdentifier().getSelectables() ) {
		order.append( separator ).append( selectable.getText() ).append( direction );
		separator = ", ";
	}
	return order.toString();
}
// JPA treats an empty @OrderBy (or a bare direction keyword) on an element
// collection / association as natural ordering of the elements: the id for
// entity associations, or the value(s) for element collections. Such
// fragments are normalized to '$element$' ordering; other fragments are
// returned trimmed.
private static String adjustUserSuppliedValueCollectionOrderingFragment(String orderByFragment) {
	if ( orderByFragment == null ) {
		return null;
	}
	final String trimmed = orderByFragment.trim();
	if ( trimmed.isBlank() || trimmed.equalsIgnoreCase( "asc" ) ) {
		// `@OrderBy()` or `@OrderBy("asc")` -> ascending natural order
		return "$element$ asc";
	}
	if ( trimmed.equalsIgnoreCase( "desc" ) ) {
		// `@OrderBy("desc")` -> descending natural order
		return "$element$ desc";
	}
	return trimmed;
}
// Creates and installs the DependantValue representing the collection's
// foreign key, then resolves which annotation controls the FK constraint:
// @CollectionTable, @JoinTable, an overridden @ForeignKey, a 'mappedBy'
// one-to-many, or a plain @JoinColumn. Returns the key.
private DependantValue buildCollectionKey(AnnotatedJoinColumns joinColumns, OnDeleteAction onDeleteAction) {
	final boolean noConstraintByDefault = buildingContext.getBuildingOptions().isNoConstraintByDefault();
	// give a chance to override the referenced property name
	// has to do that here because the referencedProperty creation happens in a FKSecondPass for ManyToOne yuk!
	overrideReferencedPropertyName( collection, joinColumns );
	final String referencedPropertyName = collection.getReferencedPropertyName();
	// binding key reference using column
	final var owner = collection.getOwner();
	// the key targets the owner's identifier unless a specific property was referenced
	final var keyValue =
			referencedPropertyName == null
					? owner.getIdentifier()
					: (KeyValue) owner.getReferencedProperty( referencedPropertyName ).getValue();
	final var key = new DependantValue( buildingContext, collection.getCollectionTable(), keyValue );
	key.setTypeName( null );
	joinColumns.checkPropertyConsistency();
	final var columns = joinColumns.getColumns();
	// with no explicit columns, default to nullable and updatable
	key.setNullable( columns.isEmpty() || columns.get(0).isNullable() );
	key.setUpdateable( columns.isEmpty() || columns.get(0).isUpdatable() );
	key.setOnDeleteAction( onDeleteAction );
	collection.setKey( key );
	if ( property != null ) {
		final var collectionTable = property.getDirectAnnotationUsage( CollectionTable.class );
		if ( collectionTable != null ) {
			// @ElementCollection with an explicit @CollectionTable
			final var foreignKey = collectionTable.foreignKey();
			final var constraintMode = foreignKey.value();
			if ( constraintMode == NO_CONSTRAINT
					|| constraintMode == PROVIDER_DEFAULT && noConstraintByDefault ) {
				key.disableForeignKey();
			}
			else {
				key.setForeignKeyName( nullIfEmpty( foreignKey.name() ) );
				key.setForeignKeyDefinition( nullIfEmpty( foreignKey.foreignKeyDefinition() ) );
				key.setForeignKeyOptions( foreignKey.options() );
				// a single nested @JoinColumn may carry the FK settings instead
				if ( key.getForeignKeyName() == null
						&& key.getForeignKeyDefinition() == null
						&& collectionTable.joinColumns().length == 1 ) {
					final var joinColumn = collectionTable.joinColumns()[0];
					final var nestedForeignKey = joinColumn.foreignKey();
					key.setForeignKeyName( nullIfEmpty( nestedForeignKey.name() ) );
					key.setForeignKeyDefinition( nullIfEmpty( nestedForeignKey.foreignKeyDefinition() ) );
					key.setForeignKeyOptions( nestedForeignKey.options() );
				}
			}
		}
		else {
			final var joinTable = property.getDirectAnnotationUsage( JoinTable.class );
			if ( joinTable != null ) {
				// association mapped through an explicit @JoinTable
				final var foreignKey = joinTable.foreignKey();
				String foreignKeyName = foreignKey.name();
				String foreignKeyDefinition = foreignKey.foreignKeyDefinition();
				String foreignKeyOptions = foreignKey.options();
				ConstraintMode foreignKeyValue = foreignKey.value();
				final var joinColumnAnnotations = joinTable.joinColumns();
				if ( !ArrayHelper.isEmpty( joinColumnAnnotations ) ) {
					// the first @JoinColumn may override the table-level FK settings
					final var joinColumnAnn = joinColumnAnnotations[0];
					final var joinColumnForeignKey = joinColumnAnn.foreignKey();
					if ( foreignKeyName.isBlank() ) {
						foreignKeyName = joinColumnForeignKey.name();
						foreignKeyDefinition = joinColumnForeignKey.foreignKeyDefinition();
						foreignKeyOptions = joinColumnForeignKey.options();
					}
					if ( foreignKeyValue != NO_CONSTRAINT ) {
						foreignKeyValue = joinColumnForeignKey.value();
					}
				}
				if ( foreignKeyValue == NO_CONSTRAINT
						|| foreignKeyValue == PROVIDER_DEFAULT && noConstraintByDefault ) {
					key.disableForeignKey();
				}
				else {
					key.setForeignKeyName( nullIfEmpty( foreignKeyName ) );
					key.setForeignKeyDefinition( nullIfEmpty( foreignKeyDefinition ) );
					key.setForeignKeyOptions( foreignKeyOptions );
				}
			}
			else {
				final String propertyPath = qualify( propertyHolder.getPath(), property.getName() );
				final var foreignKey = propertyHolder.getOverriddenForeignKey( propertyPath );
				if ( foreignKey != null ) {
					// an overridden @ForeignKey on the property path takes precedence
					handleForeignKeyConstraint( noConstraintByDefault, key, foreignKey );
				}
				else {
					final var oneToMany = property.getDirectAnnotationUsage( OneToMany.class );
					final var onDelete = property.getDirectAnnotationUsage( OnDelete.class );
					if ( oneToMany != null
							&& !oneToMany.mappedBy().isBlank()
							&& ( onDelete == null || onDelete.action() != OnDeleteAction.CASCADE ) ) {
						// foreign key should be up to @ManyToOne side
						// @OnDelete generate "on delete cascade" foreign key
						key.disableForeignKey();
					}
					else {
						final var joinColumn = property.getDirectAnnotationUsage( JoinColumn.class );
						if ( joinColumn != null ) {
							handleForeignKeyConstraint( noConstraintByDefault, key, joinColumn.foreignKey() );
						}
					}
				}
			}
		}
	}
	return key;
}
// NO_CONSTRAINT (or PROVIDER_DEFAULT when the build is configured for no
// constraints by default) disables DDL for this key; otherwise the @ForeignKey
// settings are copied onto it.
private static void handleForeignKeyConstraint(
		boolean noConstraintByDefault,
		DependantValue key,
		ForeignKey foreignKey) {
	final var constraintMode = foreignKey.value();
	final boolean disabled =
			constraintMode == NO_CONSTRAINT
					|| constraintMode == PROVIDER_DEFAULT && noConstraintByDefault;
	if ( disabled ) {
		key.disableForeignKey();
	}
	else {
		key.setForeignKeyName( nullIfEmpty( foreignKey.name() ) );
		key.setForeignKeyDefinition( nullIfEmpty( foreignKey.foreignKeyDefinition() ) );
		key.setForeignKeyOptions( foreignKey.options() );
	}
}
// For the unowned side, a registered property-reference association (e.g. a
// @JoinColumn targeting a non-PK property) overrides the referenced property
// name on the collection.
private void overrideReferencedPropertyName(Collection collection, AnnotatedJoinColumns joinColumns) {
	if ( !isUnownedCollection() || joinColumns.getColumns().isEmpty() ) {
		return;
	}
	final String ownerSideEntityName = joinColumns.getManyToManyOwnerSideEntityName();
	final String entityName =
			ownerSideEntityName == null
					? joinColumns.getPropertyHolder().getEntityName()
					: "inverse__" + ownerSideEntityName;
	final var collector = getMetadataCollector();
	final String referencedProperty = collector.getPropertyReferencedAssociation( entityName, mappedBy );
	if ( referencedProperty != null ) {
		collection.setReferencedPropertyName( referencedProperty );
		collector.addPropertyReference( collection.getOwnerEntityName(), referencedProperty );
	}
}
/**
 * Bind a {@link ManyToMany} association or {@link ElementCollection}.
 */
private void bindManyToManySecondPass(Map<String, PersistentClass> persistentClasses) throws MappingException {
	if ( property == null ) {
		throw new AssertionFailure( "Null property" );
	}
	final var elementTypeDetails = getElementType();
	final var targetEntity = persistentClasses.get( elementTypeDetails.getName() ); //null if this is an @ElementCollection
	final String hqlOrderBy = extractHqlOrderBy( jpaOrderBy );
	final boolean isCollectionOfEntities = targetEntity != null;
	final boolean isManyToAny = property.hasDirectAnnotationUsage( ManyToAny.class );
	logManyToManySecondPass( oneToMany, isCollectionOfEntities, isManyToAny );
	//check for user error
	detectManyToManyProblems( elementTypeDetails, isCollectionOfEntities, isManyToAny );
	// the unowned ('mappedBy') side reuses the owner's join table
	if ( isUnownedCollection() ) {
		handleUnownedManyToMany( elementTypeDetails, targetEntity, isCollectionOfEntities );
	}
	else {
		handleOwnedManyToMany( targetEntity, isCollectionOfEntities );
	}
	bindSynchronize();
	bindFilters( isCollectionOfEntities );
	handleWhere( isCollectionOfEntities );
	bindCollectionSecondPass( targetEntity, joinColumns );
	// bind the element side: entity, any-discriminated, or basic/embeddable value
	if ( isCollectionOfEntities ) {
		final ManyToOne element = handleCollectionOfEntities( elementTypeDetails, targetEntity, hqlOrderBy );
		bindManyToManyInverseForeignKey( targetEntity, inverseJoinColumns, element, oneToMany );
	}
	else if ( isManyToAny ) {
		handleManyToAny();
	}
	else {
		handleElementCollection( elementTypeDetails, hqlOrderBy );
	}
	checkFilterConditions( collection );
	checkConsistentColumnMutability( collection );
}
// Binds the element side of an @ElementCollection: either a composite
// (embeddable / composite user type) element or a basic value element.
private void handleElementCollection(TypeDetails elementType, String hqlOrderBy) {
	// 'propertyHolder' is the PropertyHolder for the owner of the collection
	// 'holder' is the CollectionPropertyHolder.
	// 'property' is the collection XProperty
	final boolean isPrimitive = isPrimitive( elementType.getName() );
	final var elementClass = isPrimitive ? null : elementType.determineRawClass();
	final var classType = annotatedElementType( isEmbedded, isPrimitive, property, elementClass );
	if ( !isPrimitive ) {
		propertyHolder.startingProperty( property );
	}
	final var holder =
			buildPropertyHolder( collection, getRole(), elementClass, property, propertyHolder, buildingContext );
	final var compositeUserType = resolveCompositeUserType( property, elementClass, buildingContext );
	// a composite user type forces composite treatment even without @Embeddable
	final boolean isComposite = classType == EMBEDDABLE || compositeUserType != null;
	holder.prepare( property, isComposite );
	if ( isComposite ) {
		handleCompositeCollectionElement( hqlOrderBy, elementType, holder, compositeUserType );
	}
	else {
		handleCollectionElement( elementType, hqlOrderBy, elementClass, holder );
	}
}
// Binds a basic-typed collection element: creates the element column(s),
// resolves the basic type (including any attribute converter), and applies
// the user-supplied @OrderBy fragment if present.
private void handleCollectionElement(
		TypeDetails elementType,
		String hqlOrderBy,
		ClassDetails elementClass,
		CollectionPropertyHolder holder) {
	final var elementBinder = new BasicValueBinder( COLLECTION_ELEMENT, buildingContext );
	elementBinder.setReturnedClassName( elementType.getName() );
	// fall back to the default element column if none was mapped explicitly
	final var actualColumns = createElementColumnsIfNecessary(
			collection,
			elementColumns,
			Collection.DEFAULT_ELEMENT_COLUMN_NAME,
			null,
			buildingContext
	);
	elementBinder.setColumns( actualColumns );
	elementBinder.setType(
			property,
			elementType,
			collection.getOwnerEntityName(),
			holder.resolveElementAttributeConverterDescriptor( property, elementClass )
	);
	elementBinder.setPersistentClassName( propertyHolder.getEntityName() );
	elementBinder.setAccessType( accessType );
	collection.setElement( elementBinder.make() );
	// blank/asc/desc @OrderBy fragments are normalized to '$element$' ordering
	final String orderBy = adjustUserSuppliedValueCollectionOrderingFragment( hqlOrderBy );
	if ( orderBy != null ) {
		collection.setOrderBy( orderBy );
	}
}
// Binds an embeddable (or composite-user-type) collection element by filling
// a component from the element class.
private void handleCompositeCollectionElement(
		String hqlOrderBy,
		TypeDetails elementType,
		CollectionPropertyHolder holder,
		Class<? extends CompositeUserType<?>> compositeUserType) {
	//TODO be smart with isNullable
	final var accessType = accessType( property, collection.getOwner() );
	// We create a new entity binder here because it's needed for processing the embeddable
	// Since this is an element collection, there is no real entity binder though,
	// so we just create an "empty shell" for the purpose of avoiding null checks in the fillEmbeddable() method etc.
	final var entityBinder = new EntityBinder( buildingContext );
	// Copy over the access type that we resolve for the element collection,
	// so that nested components use the same access type. This fixes HHH-15966
	entityBinder.setPropertyAccessType( accessType );
	final var embeddable = fillEmbeddable(
			holder,
			getSpecialMembers( elementType ),
			accessType,
			true,
			entityBinder,
			false,
			false,
			true,
			resolveCustomInstantiator( property, elementType, buildingContext ),
			compositeUserType,
			null,
			buildingContext,
			inheritanceStatePerClass
	);
	collection.setElement( embeddable );
	// blank/asc/desc @OrderBy fragments are normalized to '$element$' ordering
	if ( isNotBlank( hqlOrderBy ) ) {
		final String orderBy = adjustUserSuppliedValueCollectionOrderingFragment( hqlOrderBy );
		if ( orderBy != null ) {
			collection.setOrderBy( orderBy );
		}
	}
}
// Determines property access for a collection element: an explicit @Access on
// the attribute wins; otherwise the access type of the owning entity's id
// attribute (or of its "id mapper" for composite ids) is mirrored.
static AccessType accessType(MemberDetails property, PersistentClass owner) {
	final var access = property.getDirectAnnotationUsage( Access.class );
	if ( access != null ) {
		// the attribute is locally annotated with `@Access`, use that
		return access.value() == PROPERTY ? AccessType.PROPERTY : AccessType.FIELD;
	}
	final var identifierProperty = owner.getIdentifierProperty();
	if ( identifierProperty != null ) {
		// mirror the access of the owning entity's id attribute
		return accessTypeFromAccessorName( identifierProperty.getPropertyAccessorName() );
	}
	final var identifierMapper = owner.getIdentifierMapper();
	if ( identifierMapper != null && identifierMapper.getPropertySpan() > 0 ) {
		// composite id with an "id mapper": mirror its first property's access
		final Property first = identifierMapper.getProperties().get( 0 );
		return accessTypeFromAccessorName( first.getPropertyAccessorName() );
	}
	throw new AssertionFailure( "Unable to guess collection property accessor name" );
}

// A BASIC accessor name means getter/setter access; anything else is field access.
private static AccessType accessTypeFromAccessorName(String accessorName) {
	return accessorName.equals( BASIC.getExternalName() ) ? AccessType.PROPERTY : AccessType.FIELD;
}
// Classifies the element type of the collection: NONE for primitives,
// EMBEDDABLE when forced by embedding or an attribute override, otherwise
// whatever class type the metadata collector reports.
private AnnotatedClassType annotatedElementType(
		boolean isEmbedded,
		boolean isPrimitive,
		MemberDetails property,
		ClassDetails elementClass) {
	if ( isPrimitive ) {
		return NONE;
	}
	// force in case of attribute override
	// todo : force in the case of Convert annotation(s) with embedded paths (beyond key/value prefixes)?
	if ( isEmbedded || mappingDefinedAttributeOverrideOnElement( property ) ) {
		return EMBEDDABLE;
	}
	return getMetadataCollector().getClassType( elementClass );
}
// Whether the collection attribute itself carries @AttributeOverride(s),
// which forces the element to be treated as embeddable.
protected boolean mappingDefinedAttributeOverrideOnElement(MemberDetails property) {
	return property.hasDirectAnnotationUsage( AttributeOverride.class )
			|| property.hasDirectAnnotationUsage( AttributeOverrides.class );
}
// Synthesizes a single default element column when none was mapped
// explicitly, then (re)targets the column set at the collection table.
static AnnotatedColumns createElementColumnsIfNecessary(
		Collection collection,
		AnnotatedColumns elementColumns,
		String defaultName,
		Long defaultLength,
		MetadataBuildingContext context) {
	final AnnotatedColumns result;
	if ( elementColumns == null || elementColumns.getColumns().isEmpty() ) {
		final var columns = new AnnotatedColumns();
		columns.setBuildingContext( context );
		final var column = new AnnotatedColumn();
		column.setLogicalColumnName( defaultName );
		if ( defaultLength != null ) {
			column.setLength( defaultLength );
		}
		column.setImplicit( false );
		// not following the spec, but nullable is cleaner for element columns
		column.setNullable( true );
		column.setParent( columns );
		column.bind();
		result = columns;
	}
	else {
		result = elementColumns;
	}
	// in every case the column set must live on the collection table
	result.setTable( collection.getCollectionTable() );
	return result;
}
// Binds the element of a collection of entities (many-to-many target) as a
// ManyToOne, applying ordering and the inverse foreign-key settings from
// @JoinTable / its inverse @JoinColumns. Returns the bound element.
private ManyToOne handleCollectionOfEntities(
		TypeDetails elementType,
		PersistentClass collectionEntity,
		String hqlOrderBy) {
	final var element = new ManyToOne( buildingContext, collection.getCollectionTable() );
	collection.setElement( element );
	element.setReferencedEntityName( elementType.getName() );
	//element.setFetchMode( fetchMode );
	//element.setLazy( fetchMode != FetchMode.JOIN );
	//make the second join non-lazy
	element.setFetchMode( FetchMode.JOIN );
	element.setLazy( false );
	element.setNotFoundAction( notFoundAction );
	// as per 11.1.38 of JPA 2.0 spec, default to primary key if no column is specified by @OrderBy.
	if ( hqlOrderBy != null ) {
		collection.setManyToManyOrdering( buildOrderByClauseFromHql( hqlOrderBy, collectionEntity ) );
	}
	final var joinTableAnn = property.getDirectAnnotationUsage( JoinTable.class );
	if ( joinTableAnn != null ) {
		final var inverseForeignKey = joinTableAnn.inverseForeignKey();
		String foreignKeyName = inverseForeignKey.name();
		String foreignKeyDefinition = inverseForeignKey.foreignKeyDefinition();
		String foreignKeyOptions = inverseForeignKey.options();
		final var inverseJoinColumns = joinTableAnn.inverseJoinColumns();
		if ( !ArrayHelper.isEmpty( inverseJoinColumns ) ) {
			// the first inverse @JoinColumn may carry the FK settings instead
			final var joinColumnAnn = inverseJoinColumns[0];
			if ( foreignKeyName.isBlank() ) {
				final var inverseJoinColumnForeignKey = joinColumnAnn.foreignKey();
				foreignKeyName = inverseJoinColumnForeignKey.name();
				foreignKeyDefinition = inverseJoinColumnForeignKey.foreignKeyDefinition();
				foreignKeyOptions = inverseJoinColumnForeignKey.options();
			}
		}
		final var constraintMode = inverseForeignKey.value();
		if ( constraintMode == NO_CONSTRAINT
				|| constraintMode == PROVIDER_DEFAULT
						&& buildingContext.getBuildingOptions().isNoConstraintByDefault() ) {
			element.disableForeignKey();
		}
		else {
			element.setForeignKeyName( nullIfEmpty( foreignKeyName ) );
			element.setForeignKeyDefinition( nullIfEmpty( foreignKeyDefinition ) );
			element.setForeignKeyOptions( foreignKeyOptions );
		}
	}
	return element;
}
private void handleManyToAny() {
//@ManyToAny
//Make sure that collTyp is never used during the @ManyToAny branch: it will be set to void.
|
that
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java
|
{
"start": 3613,
"end": 3758
}
|
class ____ do not contain term
frequencies.N00 = supersetSize - supersetFreq - (subsetSize - subsetFreq);
// documents in
|
and
|
java
|
square__moshi
|
moshi/src/test/java/com/squareup/moshi/JsonReaderPathTest.java
|
{
"start": 1021,
"end": 7871
}
|
class ____ {
@Parameter public JsonCodecFactory factory;
@Parameters(name = "{0}")
public static List<Object[]> parameters() {
return JsonCodecFactory.factories();
}
@SuppressWarnings("CheckReturnValue")
@Test
public void path() throws IOException {
JsonReader reader = factory.newReader("{\"a\":[2,true,false,null,\"b\",{\"c\":\"d\"},[3]]}");
assertThat(reader.getPath()).isEqualTo("$");
reader.beginObject();
assertThat(reader.getPath()).isEqualTo("$.");
reader.nextName();
assertThat(reader.getPath()).isEqualTo("$.a");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$.a[0]");
reader.nextInt();
assertThat(reader.getPath()).isEqualTo("$.a[1]");
reader.nextBoolean();
assertThat(reader.getPath()).isEqualTo("$.a[2]");
reader.nextBoolean();
assertThat(reader.getPath()).isEqualTo("$.a[3]");
reader.nextNull();
assertThat(reader.getPath()).isEqualTo("$.a[4]");
reader.nextString();
assertThat(reader.getPath()).isEqualTo("$.a[5]");
reader.beginObject();
assertThat(reader.getPath()).isEqualTo("$.a[5].");
reader.nextName();
assertThat(reader.getPath()).isEqualTo("$.a[5].c");
reader.nextString();
assertThat(reader.getPath()).isEqualTo("$.a[5].c");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$.a[6]");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$.a[6][0]");
reader.nextInt();
assertThat(reader.getPath()).isEqualTo("$.a[6][1]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$.a[7]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$.a");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void arrayOfObjects() throws IOException {
JsonReader reader = factory.newReader("[{},{},{}]");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$[0]");
reader.beginObject();
assertThat(reader.getPath()).isEqualTo("$[0].");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$[1]");
reader.beginObject();
assertThat(reader.getPath()).isEqualTo("$[1].");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$[2]");
reader.beginObject();
assertThat(reader.getPath()).isEqualTo("$[2].");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$[3]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void arrayOfArrays() throws IOException {
JsonReader reader = factory.newReader("[[],[],[]]");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$[0]");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$[0][0]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$[1]");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$[1][0]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$[2]");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$[2][0]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$[3]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$");
}
@SuppressWarnings("CheckReturnValue")
@Test
public void objectPath() throws IOException {
JsonReader reader = factory.newReader("{\"a\":1,\"b\":2}");
assertThat(reader.getPath()).isEqualTo("$");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$");
reader.beginObject();
assertThat(reader.getPath()).isEqualTo("$.");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$.");
reader.nextName();
assertThat(reader.getPath()).isEqualTo("$.a");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$.a");
reader.nextInt();
assertThat(reader.getPath()).isEqualTo("$.a");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$.a");
reader.nextName();
assertThat(reader.getPath()).isEqualTo("$.b");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$.b");
reader.nextInt();
assertThat(reader.getPath()).isEqualTo("$.b");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$.b");
reader.endObject();
assertThat(reader.getPath()).isEqualTo("$");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$");
reader.close();
assertThat(reader.getPath()).isEqualTo("$");
}
@SuppressWarnings("CheckReturnValue")
@Test
public void arrayPath() throws IOException {
JsonReader reader = factory.newReader("[1,2]");
assertThat(reader.getPath()).isEqualTo("$");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$");
reader.beginArray();
assertThat(reader.getPath()).isEqualTo("$[0]");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$[0]");
reader.nextInt();
assertThat(reader.getPath()).isEqualTo("$[1]");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$[1]");
reader.nextInt();
assertThat(reader.getPath()).isEqualTo("$[2]");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$[2]");
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$");
reader.peek();
assertThat(reader.getPath()).isEqualTo("$");
reader.close();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void multipleTopLevelValuesInOneDocument() throws IOException {
assumeTrue(factory.encodesToBytes());
JsonReader reader = factory.newReader("[][]");
reader.setLenient(true);
reader.beginArray();
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$");
reader.beginArray();
reader.endArray();
assertThat(reader.getPath()).isEqualTo("$");
}
@Test
public void skipArrayElements() throws IOException {
JsonReader reader = factory.newReader("[1,2,3]");
reader.beginArray();
reader.skipValue();
reader.skipValue();
assertThat(reader.getPath()).isEqualTo("$[2]");
}
@Test
public void skipObjectNames() throws IOException {
JsonReader reader = factory.newReader("{\"a\":1}");
reader.beginObject();
reader.skipValue();
assertThat(reader.getPath()).isEqualTo("$.null");
}
@SuppressWarnings("CheckReturnValue")
@Test
public void skipObjectValues() throws IOException {
JsonReader reader = factory.newReader("{\"a\":1,\"b\":2}");
reader.beginObject();
reader.nextName();
reader.skipValue();
assertThat(reader.getPath()).isEqualTo("$.null");
reader.nextName();
assertThat(reader.getPath()).isEqualTo("$.b");
}
@Test
public void skipNestedStructures() throws IOException {
JsonReader reader = factory.newReader("[[1,2,3],4]");
reader.beginArray();
reader.skipValue();
assertThat(reader.getPath()).isEqualTo("$[1]");
}
}
|
JsonReaderPathTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/matchers/AnnotationHasArgumentWithValueTest.java
|
{
"start": 1793,
"end": 1945
}
|
interface ____ {
String value();
}
""");
writeFile(
"A.java",
"""
@Thing2(("y"))
public
|
Thing2
|
java
|
apache__camel
|
components/camel-joor/src/test/java/org/apache/camel/language/joor/CompilationUnitTest.java
|
{
"start": 2104,
"end": 2406
}
|
class ____ {
}
}
""",
"InnerClass");
}
@Test
void shouldSupportMethodLocalInnerClass() {
compile(
"""
package com.foo;
|
InnerClass
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/testfixture/TestGroupParsingTests.java
|
{
"start": 1027,
"end": 2614
}
|
class ____ {
@Test
void parseNull() {
assertThat(TestGroup.parse(null)).isEqualTo(Collections.emptySet());
}
@Test
void parseEmptyString() {
assertThat(TestGroup.parse("")).isEqualTo(Collections.emptySet());
}
@Test
void parseBlankString() {
assertThat(TestGroup.parse(" ")).isEqualTo(Collections.emptySet());
}
@Test
void parseWithSpaces() {
assertThat(TestGroup.parse(" LONG_RUNNING, LONG_RUNNING ")).containsOnly(TestGroup.LONG_RUNNING);
}
@Test
void parseInMixedCase() {
assertThat(TestGroup.parse("long_running, LonG_RunnING")).containsOnly(TestGroup.LONG_RUNNING);
}
@Test
void parseMissing() {
assertThatIllegalArgumentException()
.isThrownBy(() -> TestGroup.parse("long_running, missing"))
.withMessageContaining("Unable to find test group 'missing' when parsing " +
"testGroups value: 'long_running, missing'. Available groups include: " +
"[LONG_RUNNING]");
}
@Test
void parseAll() {
assertThat(TestGroup.parse("all")).isEqualTo(EnumSet.allOf(TestGroup.class));
}
@Test
void parseAllExceptLongRunning() {
Set<TestGroup> expected = EnumSet.allOf(TestGroup.class);
expected.remove(TestGroup.LONG_RUNNING);
assertThat(TestGroup.parse("all-long_running")).isEqualTo(expected);
}
@Test
void parseAllExceptMissing() {
assertThatIllegalArgumentException()
.isThrownBy(() -> TestGroup.parse("all-missing"))
.withMessageContaining("Unable to find test group 'missing' when parsing " +
"testGroups value: 'all-missing'. Available groups include: " +
"[LONG_RUNNING]");
}
}
|
TestGroupParsingTests
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ZooKeeperMasterEndpointBuilderFactory.java
|
{
"start": 10003,
"end": 10365
}
|
class ____ extends AbstractEndpointBuilder implements ZooKeeperMasterEndpointBuilder, AdvancedZooKeeperMasterEndpointBuilder {
public ZooKeeperMasterEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new ZooKeeperMasterEndpointBuilderImpl(path);
}
}
|
ZooKeeperMasterEndpointBuilderImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericAssociationOrderColumnTest.java
|
{
"start": 1181,
"end": 2242
}
|
class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final ParentEntity p1 = new ParentEntity( 1L );
final ChildEntity c1 = new ChildEntity( 2L, p1 );
p1.getChildren().add( c1 );
final ChildEntity c2 = new ChildEntity( 3L, p1 );
p1.getChildren().add( c2 );
final ChildEntity c3 = new ChildEntity( 4L, p1 );
p1.getChildren().add( c3 );
session.persist( p1 );
} );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createMutationQuery( "delete from ChildEntity" ).executeUpdate();
session.createMutationQuery( "delete from ParentEntity" ).executeUpdate();
} );
}
@Test
public void test(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final ParentEntity parent = session.find( ParentEntity.class, 1L );
assertThat( parent.getChildren().stream().map( ChildEntity::getId ) ).containsExactly( 2L, 3L, 4L );
} );
}
@MappedSuperclass
public static abstract
|
GenericAssociationOrderColumnTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
|
{
"start": 46633,
"end": 48303
}
|
class ____ {
final List<String> missingIds = new ArrayList<String>();
long missingSize = 0L;
long corruptFiles = 0L;
long corruptBlocks = 0L;
long corruptSize = 0L;
long excessiveReplicas = 0L;
long missingReplicas = 0L;
long decommissionedReplicas = 0L;
long decommissioningReplicas = 0L;
long enteringMaintenanceReplicas = 0L;
long inMaintenanceReplicas = 0L;
long numUnderMinReplicatedBlocks = 0L;
long numOverReplicatedBlocks = 0L;
long numUnderReplicatedBlocks = 0L;
long numMisReplicatedBlocks = 0L; // blocks that do not satisfy block placement policy
long numMinReplicatedBlocks = 0L; // minimally replicatedblocks
long totalBlocks = 0L;
long numExpectedReplicas = 0L;
long totalOpenFilesBlocks = 0L;
long totalFiles = 0L;
long totalOpenFiles = 0L;
long totalSize = 0L;
long totalOpenFilesSize = 0L;
long totalReplicas = 0L;
long numBlocksQueuedForReplication = 0L;
/**
* DFS is considered healthy if there are no missing blocks.
*/
boolean isHealthy() {
return ((missingIds.size() == 0) && (corruptBlocks == 0));
}
/** Add a missing block name, plus its size. */
void addMissing(String id, long size) {
missingIds.add(id);
missingSize += size;
}
/** Add a corrupt block. */
void addCorrupt(long size) {
corruptBlocks++;
corruptSize += size;
}
/** Return the actual replication factor. */
float getReplicationFactor() {
if (totalBlocks == 0)
return 0.0f;
return (float) (totalReplicas) / (float) totalBlocks;
}
}
@VisibleForTesting
static
|
Result
|
java
|
quarkusio__quarkus
|
integration-tests/jpa/src/main/java/io/quarkus/it/jpa/generics/IntermediateAbstractMapped.java
|
{
"start": 542,
"end": 1282
}
|
class ____ implements Serializable {
private String keyOne;
private String keyTwo;
private String keyThree;
public PK() {
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PK pk = (PK) o;
return Objects.equals(keyOne, pk.keyOne) &&
Objects.equals(keyTwo, pk.keyTwo) &&
Objects.equals(keyThree, pk.keyThree);
}
@Override
public int hashCode() {
return Objects.hash(keyOne, keyTwo, keyThree);
}
}
}
|
PK
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/discriminator/joined/DiscriminatorQueryUsageTests.java
|
{
"start": 806,
"end": 3045
}
|
class ____ {
@Test
public void testUsageAsSelection(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Tuple resultTuple = session
.createQuery( "select p.id as id, type(p) as type from ParentEntity p", Tuple.class )
.uniqueResult();
Assertions.assertThat( resultTuple.get( "id" ) ).isEqualTo( 1 );
Assertions.assertThat( resultTuple.get( "type" ) ).isEqualTo( ChildEntity.class );
} );
}
@Test
public void testUsageAsPredicate(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Integer id = session.createQuery( "select p.id from ParentEntity p where type(p) = ChildEntity", Integer.class ).uniqueResult();
Assertions.assertThat( id ).isEqualTo( 1 );
} );
}
@Test
public void testUsageAsPredicateOfUnderlyingType(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Integer id = session.createQuery( "select p.id from ParentEntity p where type(p) = 'ce'", Integer.class ).uniqueResult();
Assertions.assertThat( id ).isEqualTo( 1 );
} );
}
@Test
public void testUsageAsPredicateWithParam(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Integer id = session.createQuery( "select p.id from ParentEntity p where type(p) = :type", Integer.class )
.setParameter( "type", ChildEntity.class )
.uniqueResult();
Assertions.assertThat( id ).isEqualTo( 1 );
} );
}
@Test
public void testUsageAsPredicateWithParamOfUnderlyingType(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
Query<Integer> query = session.createQuery(
"select p.id from ParentEntity p where type(p) = :type",
Integer.class
);
try {
query.setParameter( "type", "ce" );
fail( "Expected that setting the underlying type for a parameter of type Class<?> to fail!" );
}
catch (IllegalArgumentException ex) {
// We expect this to fail
}
} );
}
@BeforeEach
public void createTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> session.persist( new ChildEntity( 1, "Child" ) ) );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
|
DiscriminatorQueryUsageTests
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/launcher/MethodFilterTests.java
|
{
"start": 1226,
"end": 7179
}
|
class ____ {
private static final String CLASS1_TEST1_NAME = "org.junit.platform.launcher.MethodFilterTests$Class1#test1";
private static final String CLASS1_TEST2_NAME = "org.junit.platform.launcher.MethodFilterTests$Class1#test2";
private static final String CLASS2_TEST1_NAME = "org.junit.platform.launcher.MethodFilterTests$Class2#test1";
private static final String CLASS2_TEST2_NAME = "org.junit.platform.launcher.MethodFilterTests$Class2#test2";
private static final TestDescriptor CLASS1_TEST1 = methodTestDescriptor("class1", Class1.class, "test1");
private static final TestDescriptor CLASS1_TEST2 = methodTestDescriptor("class1", Class1.class, "test2");
private static final TestDescriptor CLASS2_TEST1 = methodTestDescriptor("class2", Class2.class, "test1");
private static final TestDescriptor CLASS2_TEST2 = methodTestDescriptor("class2", Class2.class, "test2");
@SuppressWarnings("DataFlowIssue")
@Test
void includeMethodNamePatternsChecksPreconditions() {
assertPreconditionViolationNotNullOrEmptyFor("patterns array",
() -> includeMethodNamePatterns((String[]) null));
assertPreconditionViolationNotNullOrEmptyFor("patterns array", () -> includeMethodNamePatterns(new String[0]));
assertPreconditionViolationFor(() -> includeMethodNamePatterns(new String[] { null }))//
.withMessage("patterns array must not contain null elements");
}
@Test
void includeSingleMethodNamePattern() {
var regex = "^org\\.junit\\.platform\\.launcher\\.MethodFilterTests\\$Class1#test.*";
var filter = includeMethodNamePatterns(regex);
assertIncluded(filter.apply(CLASS1_TEST1),
"Method name [%s] matches included pattern: '%s'".formatted(CLASS1_TEST1_NAME, regex));
assertIncluded(filter.apply(CLASS1_TEST2),
"Method name [%s] matches included pattern: '%s'".formatted(CLASS1_TEST2_NAME, regex));
assertExcluded(filter.apply(CLASS2_TEST1),
"Method name [%s] does not match any included pattern: '%s'".formatted(CLASS2_TEST1_NAME, regex));
assertExcluded(filter.apply(CLASS2_TEST2),
"Method name [%s] does not match any included pattern: '%s'".formatted(CLASS2_TEST2_NAME, regex));
}
@Test
void includeMultipleMethodNamePatterns() {
var firstRegex = "^org\\.junit\\.platform\\.launcher\\.MethodFilterTests\\$Class1#test.*";
var secondRegex = ".+Class.+#test1";
var filter = includeMethodNamePatterns(firstRegex, secondRegex);
assertIncluded(filter.apply(CLASS1_TEST1),
"Method name [%s] matches included pattern: '%s'".formatted(CLASS1_TEST1_NAME, firstRegex));
assertIncluded(filter.apply(CLASS1_TEST2),
"Method name [%s] matches included pattern: '%s'".formatted(CLASS1_TEST2_NAME, firstRegex));
assertIncluded(filter.apply(CLASS2_TEST1),
"Method name [%s] matches included pattern: '%s'".formatted(CLASS2_TEST1_NAME, secondRegex));
assertExcluded(filter.apply(CLASS2_TEST2),
"Method name [%s] does not match any included pattern: '%s' OR '%s'".formatted(CLASS2_TEST2_NAME,
firstRegex, secondRegex));
}
@SuppressWarnings("DataFlowIssue")
@Test
void excludeMethodNamePatternsChecksPreconditions() {
assertPreconditionViolationNotNullOrEmptyFor("patterns array",
() -> excludeMethodNamePatterns((String[]) null));
assertPreconditionViolationNotNullOrEmptyFor("patterns array", () -> excludeMethodNamePatterns(new String[0]));
assertPreconditionViolationFor(() -> excludeMethodNamePatterns(new String[] { null }))//
.withMessage("patterns array must not contain null elements");
}
@Test
void excludeSingleMethodNamePattern() {
var regex = "^org\\.junit\\.platform\\.launcher\\.MethodFilterTests\\$Class1#test.*";
var filter = excludeMethodNamePatterns(regex);
assertExcluded(filter.apply(CLASS1_TEST1),
"Method name [%s] matches excluded pattern: '%s'".formatted(CLASS1_TEST1_NAME, regex));
assertExcluded(filter.apply(CLASS1_TEST2),
"Method name [%s] matches excluded pattern: '%s'".formatted(CLASS1_TEST2_NAME, regex));
assertIncluded(filter.apply(CLASS2_TEST1),
"Method name [%s] does not match any excluded pattern: '%s'".formatted(CLASS2_TEST1_NAME, regex));
assertIncluded(filter.apply(CLASS2_TEST2),
"Method name [%s] does not match any excluded pattern: '%s'".formatted(CLASS2_TEST2_NAME, regex));
}
@Test
void excludeMultipleMethodNamePatterns() {
var firstRegex = "^org\\.junit\\.platform\\.launcher\\.MethodFilterTests\\$Class1#test.*";
var secondRegex = ".+Class.+#test1";
var filter = excludeMethodNamePatterns(firstRegex, secondRegex);
assertExcluded(filter.apply(CLASS1_TEST1),
"Method name [%s] matches excluded pattern: '%s'".formatted(CLASS1_TEST1_NAME, firstRegex));
assertExcluded(filter.apply(CLASS1_TEST2),
"Method name [%s] matches excluded pattern: '%s'".formatted(CLASS1_TEST2_NAME, firstRegex));
assertExcluded(filter.apply(CLASS2_TEST1),
"Method name [%s] matches excluded pattern: '%s'".formatted(CLASS2_TEST1_NAME, secondRegex));
assertIncluded(filter.apply(CLASS2_TEST2),
"Method name [%s] does not match any excluded pattern: '%s' OR '%s'".formatted(CLASS2_TEST2_NAME,
firstRegex, secondRegex));
}
private void assertIncluded(FilterResult filterResult, String expectedReason) {
assertTrue(filterResult.included());
assertThat(filterResult.getReason()).isPresent().contains(expectedReason);
}
private void assertExcluded(FilterResult filterResult, String excludedPattern) {
assertTrue(filterResult.excluded());
assertThat(filterResult.getReason()).isPresent().contains(excludedPattern);
}
private static TestDescriptor methodTestDescriptor(String uniqueId, Class<?> testClass, String methodName) {
var method = ReflectionUtils.findMethod(testClass, methodName, new Class<?>[0]).orElseThrow();
return new DemoMethodTestDescriptor(UniqueId.root("method", uniqueId), method);
}
// -------------------------------------------------------------------------
@SuppressWarnings("JUnitMalformedDeclaration")
private static
|
MethodFilterTests
|
java
|
google__dagger
|
javatests/dagger/functional/binds/RecursiveBindsTest.java
|
{
"start": 1169,
"end": 1299
}
|
class ____ implements Foo {
@Inject FooImpl(@SuppressWarnings("unused") Provider<Foo> provider) {}
}
@Module
public
|
FooImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/array/ArrayTests.java
|
{
"start": 4046,
"end": 4653
}
|
class ____ {
private Integer id;
private String name;
private String[] toDoList;
public Employee() {
}
public Employee(Integer id, String name) {
this.id = id;
this.name = name;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@ElementCollection
@OrderColumn
public String[] getToDoList() {
return toDoList;
}
public void setToDoList(String[] toDoList) {
this.toDoList = toDoList;
}
}
}
|
Employee
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/spi/AdditionalMappingContributor.java
|
{
"start": 486,
"end": 1262
}
|
interface ____ {
/**
* The name of this contributor. May be {@code null}.
*
* @see org.hibernate.mapping.Contributable
*/
default String getContributorName() {
return null;
}
/**
* Contribute the additional mappings
*
* @param contributions Collector of the contributions.
* @param metadata Current (live) metadata. Can be used to access already known mappings.
* @param resourceStreamLocator Delegate for locating XML resources via class-path lookup.
* @param buildingContext Access to useful contextual references.
*/
void contribute(
AdditionalMappingContributions contributions,
InFlightMetadataCollector metadata,
ResourceStreamLocator resourceStreamLocator,
MetadataBuildingContext buildingContext);
}
|
AdditionalMappingContributor
|
java
|
google__gson
|
extras/src/test/java/com/google/gson/typeadapters/RuntimeTypeAdapterFactoryTest.java
|
{
"start": 930,
"end": 1887
}
|
class ____ {
@Test
public void testRuntimeTypeAdapter() {
RuntimeTypeAdapterFactory<BillingInstrument> rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class).registerSubtype(CreditCard.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(rta).create();
CreditCard original = new CreditCard("Jesse", 234);
assertThat(gson.toJson(original, BillingInstrument.class))
.isEqualTo("{\"type\":\"CreditCard\",\"cvv\":234,\"ownerName\":\"Jesse\"}");
BillingInstrument deserialized =
gson.fromJson("{type:'CreditCard',cvv:234,ownerName:'Jesse'}", BillingInstrument.class);
assertThat(deserialized.ownerName).isEqualTo("Jesse");
assertThat(deserialized).isInstanceOf(CreditCard.class);
}
@Test
public void testRuntimeTypeAdapterRecognizeSubtypes() {
// We don't have an explicit factory for CreditCard.class, but we do have one for
// BillingInstrument.
|
RuntimeTypeAdapterFactoryTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/indices/SystemIndices.java
|
{
"start": 6094,
"end": 6958
}
|
class ____ checks for system index “access levels” (see {@link #getSystemIndexAccessLevel(ThreadContext)}).
* If a request has the wrong access level for a system index it is targeting, then we will issue a deprecation warning. In the future,
* we will block access. The non-deprecated way to access certain external system indices is to use the correct request headers. This
* behavior is already in place in {@link SystemDataStreamDescriptor} and “net-new” system indices (see
* {@link SystemIndexDescriptor#isNetNew()}).
*
* <p>The implementation of the system index name checks makes heavy use of the Lucene {@link Automaton} class. At a high level, an
* automaton is a kind of matcher that can be created from a regex. Lucene Automata give us the ability to check for overlapping
* patterns, and to create efficient unions of patterns.
*/
public
|
provides
|
java
|
apache__avro
|
lang/java/tools/src/test/compiler/output/NoSettersTest.java
|
{
"start": 407,
"end": 1840
}
|
class ____ extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 8604146783520861700L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"NoSettersTest\",\"namespace\":\"avro.examples.baseball\",\"doc\":\"Test that setters are omitted\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":[\"int\",\"null\"]}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<NoSettersTest> ENCODER =
new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<NoSettersTest> DECODER =
new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder<NoSettersTest> getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<NoSettersTest> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this
|
NoSettersTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.