language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/metrics/ReporterSetupBuilder.java | {
"start": 7858,
"end": 8029
} | interface ____<REPORTER_FACTORY, REPORTER> {
REPORTER invoke(REPORTER_FACTORY factory, MetricConfig metricConfig);
}
/**
* Factory | ReporterFactoryAdapter |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/message/TestBinaryMessageEncoding.java | {
"start": 1352,
"end": 10178
} | class ____ {
private static final Schema SCHEMA_V1 = SchemaBuilder.record("TestRecord").fields().requiredInt("id")
.optionalString("msg").endRecord();
private static final GenericRecordBuilder V1_BUILDER = new GenericRecordBuilder(SCHEMA_V1);
private static final List<Record> V1_RECORDS = Arrays.asList(V1_BUILDER.set("id", 1).set("msg", "m-1").build(),
V1_BUILDER.set("id", 2).set("msg", "m-2").build(), V1_BUILDER.set("id", 4).set("msg", "m-4").build(),
V1_BUILDER.set("id", 6).set("msg", "m-6").build());
private static final Schema SCHEMA_V2 = SchemaBuilder.record("TestRecord").fields().requiredLong("id").name("message")
.aliases("msg").type().optional().stringType().optionalDouble("data").endRecord();
private static final GenericRecordBuilder V2_BUILDER = new GenericRecordBuilder(SCHEMA_V2);
private static final List<Record> V2_RECORDS = Arrays.asList(
V2_BUILDER.set("id", 3L).set("message", "m-3").set("data", 12.3).build(),
V2_BUILDER.set("id", 5L).set("message", "m-5").set("data", 23.4).build(),
V2_BUILDER.set("id", 7L).set("message", "m-7").set("data", 34.5).build(),
V2_BUILDER.set("id", 8L).set("message", "m-8").set("data", 35.6).build());
@Test
void byteBufferRoundTrip() throws Exception {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
Record copy = decoder.decode(encoder.encode(V2_RECORDS.get(0)));
assertNotSame(copy, V2_RECORDS.get(0), "Copy should not be the same object");
assertEquals(V2_RECORDS.get(0), copy, "Record should be identical after round-trip");
}
@Test
void schemaEvolution() throws Exception {
List<ByteBuffer> buffers = new ArrayList<>();
List<Record> records = new ArrayList<>();
records.addAll(V1_RECORDS);
records.addAll(V2_RECORDS);
MessageEncoder<Record> v1Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1);
MessageEncoder<Record> v2Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
for (Record record : records) {
if (record.getSchema().equals(SCHEMA_V1)) {
buffers.add(v1Encoder.encode(record));
} else {
buffers.add(v2Encoder.encode(record));
}
}
Set<Record> allAsV2 = new HashSet<>(V2_RECORDS);
allAsV2.add(V2_BUILDER.set("id", 1L).set("message", "m-1").clear("data").build());
allAsV2.add(V2_BUILDER.set("id", 2L).set("message", "m-2").clear("data").build());
allAsV2.add(V2_BUILDER.set("id", 4L).set("message", "m-4").clear("data").build());
allAsV2.add(V2_BUILDER.set("id", 6L).set("message", "m-6").clear("data").build());
BinaryMessageDecoder<Record> v2Decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
v2Decoder.addSchema(SCHEMA_V1);
Set<Record> decodedUsingV2 = new HashSet<>();
for (ByteBuffer buffer : buffers) {
decodedUsingV2.add(v2Decoder.decode(buffer));
}
assertEquals(allAsV2, decodedUsingV2);
}
@Test
void compatibleReadFailsWithoutSchema() throws Exception {
assertThrows(MissingSchemaException.class, () -> {
MessageEncoder<Record> v1Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1);
BinaryMessageDecoder<Record> v2Decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
ByteBuffer v1Buffer = v1Encoder.encode(V1_RECORDS.get(3));
v2Decoder.decode(v1Buffer);
});
}
@Test
void compatibleReadWithSchema() throws Exception {
MessageEncoder<Record> v1Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1);
BinaryMessageDecoder<Record> v2Decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
v2Decoder.addSchema(SCHEMA_V1);
ByteBuffer v1Buffer = v1Encoder.encode(V1_RECORDS.get(3));
Record record = v2Decoder.decode(v1Buffer);
assertEquals(V2_BUILDER.set("id", 6L).set("message", "m-6").clear("data").build(), record);
}
@Test
void compatibleReadWithSchemaFromLookup() throws Exception {
MessageEncoder<Record> v1Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1);
SchemaStore.Cache schemaCache = new SchemaStore.Cache();
schemaCache.addSchema(SCHEMA_V1);
BinaryMessageDecoder<Record> v2Decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2, schemaCache);
ByteBuffer v1Buffer = v1Encoder.encode(V1_RECORDS.get(2));
Record record = v2Decoder.decode(v1Buffer);
assertEquals(V2_BUILDER.set("id", 4L).set("message", "m-4").clear("data").build(), record);
}
@Test
void identicalReadWithSchemaFromLookup() throws Exception {
MessageEncoder<Record> v1Encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1);
SchemaStore.Cache schemaCache = new SchemaStore.Cache();
schemaCache.addSchema(SCHEMA_V1);
// The null readSchema should not throw an NPE, but trigger the
// BinaryMessageEncoder to use the write schema as read schema
BinaryMessageDecoder<Record> genericDecoder = new BinaryMessageDecoder<>(GenericData.get(), null, schemaCache);
ByteBuffer v1Buffer = v1Encoder.encode(V1_RECORDS.get(2));
Record record = genericDecoder.decode(v1Buffer);
assertEquals(V1_RECORDS.get(2), record);
}
@Test
void bufferReuse() throws Exception {
// This test depends on the serialized version of record 1 being smaller or
// the same size as record 0 so that the reused ByteArrayOutputStream won't
// expand its internal buffer.
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1, false);
ByteBuffer b0 = encoder.encode(V1_RECORDS.get(0));
ByteBuffer b1 = encoder.encode(V1_RECORDS.get(1));
assertEquals(b0.array(), b1.array());
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V1);
assertEquals(V1_RECORDS.get(1), decoder.decode(b0), "Buffer was reused, decode(b0) should be record 1");
}
@Test
void bufferCopy() throws Exception {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V1);
ByteBuffer b0 = encoder.encode(V1_RECORDS.get(0));
ByteBuffer b1 = encoder.encode(V1_RECORDS.get(1));
assertNotEquals(b0.array(), b1.array());
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V1);
// bytes are not changed by reusing the encoder
assertEquals(V1_RECORDS.get(0), decoder.decode(b0), "Buffer was copied, decode(b0) should be record 0");
}
@Test
void byteBufferMissingPayload() throws Exception {
assertThrows(AvroRuntimeException.class, () -> {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.limit(12);
decoder.decode(buffer);
});
}
@Test
void byteBufferMissingFullHeader() throws Exception {
assertThrows(BadHeaderException.class, () -> {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.limit(8);
decoder.decode(buffer);
});
}
@Test
void byteBufferBadMarkerByte() throws Exception {
assertThrows(BadHeaderException.class, () -> {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.array()[0] = 0x00;
decoder.decode(buffer);
});
}
@Test
void byteBufferBadVersionByte() throws Exception {
assertThrows(BadHeaderException.class, () -> {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.array()[1] = 0x00;
decoder.decode(buffer);
});
}
@Test
void byteBufferUnknownSchema() throws Exception {
assertThrows(MissingSchemaException.class, () -> {
MessageEncoder<Record> encoder = new BinaryMessageEncoder<>(GenericData.get(), SCHEMA_V2);
MessageDecoder<Record> decoder = new BinaryMessageDecoder<>(GenericData.get(), SCHEMA_V2);
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.array()[4] = 0x00;
decoder.decode(buffer);
});
}
}
| TestBinaryMessageEncoding |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestRegistrySecurityHelper.java | {
"start": 1514,
"end": 6890
} | class ____ extends Assertions {
private static final Logger LOG =
LoggerFactory.getLogger(TestRegistrySecurityHelper.class);
public static final String YARN_EXAMPLE_COM = "yarn@example.com";
public static final String SASL_YARN_EXAMPLE_COM =
"sasl:" + YARN_EXAMPLE_COM;
public static final String MAPRED_EXAMPLE_COM = "mapred@example.com";
public static final String SASL_MAPRED_EXAMPLE_COM =
"sasl:" + MAPRED_EXAMPLE_COM;
public static final String SASL_MAPRED_APACHE = "sasl:mapred@APACHE";
public static final String DIGEST_F0AF = "digest:f0afbeeb00baa";
public static final String SASL_YARN_SHORT = "sasl:yarn@";
public static final String SASL_MAPRED_SHORT = "sasl:mapred@";
public static final String REALM_EXAMPLE_COM = "example.com";
private static RegistrySecurity registrySecurity;
@BeforeAll
public static void setupTestRegistrySecurityHelper() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(KEY_REGISTRY_SECURE, true);
conf.set(KEY_REGISTRY_KERBEROS_REALM, "KERBEROS");
registrySecurity = new RegistrySecurity("");
// init the ACLs OUTSIDE A KERBEROS CLUSTER
registrySecurity.init(conf);
}
@Test
public void testACLSplitRealmed() throws Throwable {
List<String> pairs =
registrySecurity.splitAclPairs(
SASL_YARN_EXAMPLE_COM +
", " +
SASL_MAPRED_EXAMPLE_COM,
"");
assertEquals(SASL_YARN_EXAMPLE_COM, pairs.get(0));
assertEquals(SASL_MAPRED_EXAMPLE_COM, pairs.get(1));
}
@Test
public void testBuildAclsRealmed() throws Throwable {
List<ACL> acls = registrySecurity.buildACLs(
SASL_YARN_EXAMPLE_COM +
", " +
SASL_MAPRED_EXAMPLE_COM,
"",
ZooDefs.Perms.ALL);
assertEquals(YARN_EXAMPLE_COM, acls.get(0).getId().getId());
assertEquals(MAPRED_EXAMPLE_COM, acls.get(1).getId().getId());
}
@Test
public void testACLDefaultRealm() throws Throwable {
List<String> pairs =
registrySecurity.splitAclPairs(
SASL_YARN_SHORT +
", " +
SASL_MAPRED_SHORT,
REALM_EXAMPLE_COM);
assertEquals(SASL_YARN_EXAMPLE_COM, pairs.get(0));
assertEquals(SASL_MAPRED_EXAMPLE_COM, pairs.get(1));
}
@Test
public void testBuildAclsDefaultRealm() throws Throwable {
List<ACL> acls = registrySecurity.buildACLs(
SASL_YARN_SHORT +
", " +
SASL_MAPRED_SHORT,
REALM_EXAMPLE_COM, ZooDefs.Perms.ALL);
assertEquals(YARN_EXAMPLE_COM, acls.get(0).getId().getId());
assertEquals(MAPRED_EXAMPLE_COM, acls.get(1).getId().getId());
}
@Test
public void testACLSplitNullRealm() throws Throwable {
List<String> pairs =
registrySecurity.splitAclPairs(
SASL_YARN_SHORT +
", " +
SASL_MAPRED_SHORT,
"");
assertEquals(SASL_YARN_SHORT, pairs.get(0));
assertEquals(SASL_MAPRED_SHORT, pairs.get(1));
}
@Test
public void testBuildAclsNullRealm() throws Throwable {
assertThrows(IllegalArgumentException.class, () -> {
registrySecurity.buildACLs(
SASL_YARN_SHORT +
", " +
SASL_MAPRED_SHORT,
"", ZooDefs.Perms.ALL);
fail("");
});
}
@Test
public void testACLDefaultRealmOnlySASL() throws Throwable {
List<String> pairs =
registrySecurity.splitAclPairs(
SASL_YARN_SHORT +
", " +
DIGEST_F0AF,
REALM_EXAMPLE_COM);
assertEquals(SASL_YARN_EXAMPLE_COM, pairs.get(0));
assertEquals(DIGEST_F0AF, pairs.get(1));
}
@Test
public void testACLSplitMixed() throws Throwable {
List<String> pairs =
registrySecurity.splitAclPairs(
SASL_YARN_SHORT +
", " +
SASL_MAPRED_APACHE +
", ,," +
DIGEST_F0AF,
REALM_EXAMPLE_COM);
assertEquals(SASL_YARN_EXAMPLE_COM, pairs.get(0));
assertEquals(SASL_MAPRED_APACHE, pairs.get(1));
assertEquals(DIGEST_F0AF, pairs.get(2));
}
@Test
public void testDefaultAClsValid() throws Throwable {
registrySecurity.buildACLs(
RegistryConstants.DEFAULT_REGISTRY_SYSTEM_ACCOUNTS,
REALM_EXAMPLE_COM, ZooDefs.Perms.ALL);
}
@Test
public void testDefaultRealm() throws Throwable {
String realm = RegistrySecurity.getDefaultRealmInJVM();
LOG.info("Realm {}", realm);
}
@Test
public void testUGIProperties() throws Throwable {
UserGroupInformation user = UserGroupInformation.getCurrentUser();
ACL acl = registrySecurity.createACLForUser(user, ZooDefs.Perms.ALL);
assertFalse(RegistrySecurity.ALL_READWRITE_ACCESS.equals(acl));
LOG.info("User {} has ACL {}", user, acl);
}
@Test
public void testSecurityImpliesKerberos() throws Throwable {
Configuration conf = new Configuration();
conf.setBoolean("hadoop.security.authentication", true);
conf.setBoolean(KEY_REGISTRY_SECURE, true);
conf.set(KEY_REGISTRY_KERBEROS_REALM, "KERBEROS");
RegistrySecurity security = new RegistrySecurity("registry security");
try {
security.init(conf);
} catch (Exception e) {
assertTrue(e.toString().contains(RegistrySecurity.E_NO_KERBEROS),
"did not find "+ RegistrySecurity.E_NO_KERBEROS + " in " + e);
}
}
}
| TestRegistrySecurityHelper |
java | spring-projects__spring-boot | module/spring-boot-data-neo4j-test/src/dockerTest/java/org/springframework/boot/data/neo4j/test/autoconfigure/DataNeo4jTestReactiveIntegrationTests.java | {
"start": 3268,
"end": 3551
} | class ____ {
@Bean
ReactiveNeo4jTransactionManager reactiveTransactionManager(Driver driver,
ReactiveDatabaseSelectionProvider databaseNameProvider) {
return new ReactiveNeo4jTransactionManager(driver, databaseNameProvider);
}
}
}
| ReactiveTransactionManagerConfiguration |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java | {
"start": 1087,
"end": 3141
} | class ____ extends ESTestCase {
public void testSerialization() throws Exception {
InetAddress[] inetAddresses = InetAddress.getAllByName("0.0.0.0");
List<TransportAddress> transportAddressList = new ArrayList<>();
for (InetAddress address : inetAddresses) {
transportAddressList.add(new TransportAddress(address, randomIntBetween(9200, 9299)));
}
final BoundTransportAddress transportAddress = new BoundTransportAddress(
transportAddressList.toArray(new TransportAddress[0]),
transportAddressList.get(0)
);
assertThat(transportAddress.boundAddresses().length, equalTo(transportAddressList.size()));
// serialize
BytesStreamOutput streamOutput = new BytesStreamOutput();
transportAddress.writeTo(streamOutput);
StreamInput in = streamOutput.bytes().streamInput();
BoundTransportAddress serializedAddress = new BoundTransportAddress(in);
assertThat(serializedAddress, not(sameInstance(transportAddress)));
assertThat(serializedAddress.boundAddresses().length, equalTo(transportAddress.boundAddresses().length));
assertThat(serializedAddress.publishAddress(), equalTo(transportAddress.publishAddress()));
TransportAddress[] serializedBoundAddresses = serializedAddress.boundAddresses();
TransportAddress[] boundAddresses = transportAddress.boundAddresses();
for (int i = 0; i < serializedBoundAddresses.length; i++) {
assertThat(serializedBoundAddresses[i], equalTo(boundAddresses[i]));
}
}
public void testBadBoundAddressArray() {
try {
TransportAddress[] badArray = randomBoolean() ? null : new TransportAddress[0];
new BoundTransportAddress(badArray, new TransportAddress(InetAddress.getLoopbackAddress(), 80));
fail("expected an exception to be thrown due to no bound address");
} catch (IllegalArgumentException e) {
// expected
}
}
}
| BoundTransportAddressTests |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/main/java/org/apache/camel/spring/xml/CamelRedeliveryPolicyFactoryBean.java | {
"start": 1678,
"end": 2264
} | class ____ extends AbstractCamelRedeliveryPolicyFactoryBean
implements FactoryBean<RedeliveryPolicy>, ApplicationContextAware {
@XmlTransient
private ApplicationContext applicationContext;
@Override
protected CamelContext getCamelContextWithId(String camelContextId) {
return CamelContextResolverHelper.getCamelContextWithId(applicationContext, camelContextId);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
}
| CamelRedeliveryPolicyFactoryBean |
java | apache__camel | components/camel-opentelemetry-metrics/src/generated/java/org/apache/camel/opentelemetry/metrics/OpenTelemetryComponentConfigurer.java | {
"start": 738,
"end": 2583
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
OpenTelemetryComponent target = (OpenTelemetryComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "meter": target.setMeter(property(camelContext, io.opentelemetry.api.metrics.Meter.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "meter": return io.opentelemetry.api.metrics.Meter.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
OpenTelemetryComponent target = (OpenTelemetryComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "meter": return target.getMeter();
default: return null;
}
}
}
| OpenTelemetryComponentConfigurer |
java | quarkusio__quarkus | extensions/smallrye-openapi/spi/src/main/java/io/quarkus/smallrye/openapi/deployment/spi/IgnoreStaticDocumentBuildItem.java | {
"start": 264,
"end": 699
} | class ____ extends MultiBuildItem {
private Pattern urlIgnorePattern = null;
/**
* @param urlIgnorePattern pattern to ignore when scanning static documents
*/
public IgnoreStaticDocumentBuildItem(String urlIgnorePattern) {
this.urlIgnorePattern = Pattern.compile(urlIgnorePattern);
}
public Pattern getUrlIgnorePattern() {
return this.urlIgnorePattern;
}
}
| IgnoreStaticDocumentBuildItem |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ConvertFunction.java | {
"start": 541,
"end": 770
} | interface ____ {
/**
* Expression containing the values to be converted.
*/
Expression field();
/**
* The types that {@link #field()} can have.
*/
Set<DataType> supportedTypes();
}
| ConvertFunction |
java | google__gson | test-shrinker/src/test/java/com/google/gson/it/ShrinkingIT.java | {
"start": 1481,
"end": 1574
} | class ____ must end with 'IT' for Maven Failsafe Plugin
@RunWith(Parameterized.class)
public | name |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/shuffle/TieredInternalShuffleMaster.java | {
"start": 2658,
"end": 7914
} | class ____ {
private final TieredStorageMasterClient tieredStorageMasterClient;
private final ShuffleMasterContext shuffleMasterContext;
private final boolean useOnlyExternalTier;
public TieredInternalShuffleMaster(
ShuffleMasterContext shuffleMasterContext,
ShuffleDescriptorRetriever shuffleDescriptorRetriever) {
this.shuffleMasterContext = shuffleMasterContext;
Configuration conf = shuffleMasterContext.getConfiguration();
String externalTierFactoryClass =
conf.get(
NettyShuffleEnvironmentOptions
.NETWORK_HYBRID_SHUFFLE_EXTERNAL_REMOTE_TIER_FACTORY_CLASS_NAME);
this.useOnlyExternalTier = externalTierFactoryClass != null;
TieredStorageConfiguration tieredStorageConfiguration =
TieredStorageConfiguration.fromConfiguration(conf);
TieredStorageResourceRegistry resourceRegistry = new TieredStorageResourceRegistry();
List<Tuple2<String, TierMasterAgent>> tierFactories =
tieredStorageConfiguration.getTierFactories().stream()
.map(
tierFactory ->
Tuple2.of(
tierFactory.identifier(),
tierFactory.createMasterAgent(resourceRegistry)))
.collect(Collectors.toList());
this.tieredStorageMasterClient =
new TieredStorageMasterClient(tierFactories, shuffleDescriptorRetriever);
}
public boolean supportsBatchSnapshot() {
return useOnlyExternalTier;
}
public void snapshotState(
CompletableFuture<AllTieredShuffleMasterSnapshots> snapshotFuture,
ShuffleMasterSnapshotContext context,
JobID jobId) {
// only external tier supports snapshot for now.
if (useOnlyExternalTier) {
tieredStorageMasterClient.snapshotState(snapshotFuture, context, jobId);
}
}
public void snapshotState(CompletableFuture<AllTieredShuffleMasterSnapshots> snapshotFuture) {
if (useOnlyExternalTier) {
tieredStorageMasterClient.snapshotState(snapshotFuture);
}
}
public void restoreState(List<TieredInternalShuffleMasterSnapshot> snapshots, JobID jobId) {
if (useOnlyExternalTier) {
tieredStorageMasterClient.restoreState(snapshots, jobId);
}
}
public void restoreState(TieredInternalShuffleMasterSnapshot clusterSnapshot) {
if (useOnlyExternalTier) {
tieredStorageMasterClient.restoreState(clusterSnapshot);
}
}
public CompletableFuture<Collection<PartitionWithMetrics>> getPartitionWithMetrics(
JobShuffleContext jobShuffleContext,
Duration timeout,
Set<ResultPartitionID> expectedPartitions) {
if (useOnlyExternalTier) {
return tieredStorageMasterClient.getPartitionWithMetrics(
jobShuffleContext, timeout, expectedPartitions);
} else {
return CompletableFuture.completedFuture(Collections.emptyList());
}
}
/**
* Registers the target job together with the corresponding {@link JobShuffleContext} to this
* shuffle master.
*/
public void registerJob(JobShuffleContext context) {
tieredStorageMasterClient.registerJob(context.getJobId(), getTierShuffleHandler(context));
}
/**
* Unregisters the target job from this shuffle master, which means the corresponding job has
* reached a global termination state and all the allocated resources except for the cluster
* partitions can be cleared.
*
* @param jobID ID of the target job to be unregistered.
*/
public void unregisterJob(JobID jobID) {
tieredStorageMasterClient.unregisterJob(jobID);
}
public List<TierShuffleDescriptor> addPartitionAndGetShuffleDescriptor(
JobID jobID, int numSubpartitions, ResultPartitionID resultPartitionID) {
return tieredStorageMasterClient.addPartitionAndGetShuffleDescriptor(
jobID, numSubpartitions, resultPartitionID);
}
public void releasePartition(ShuffleDescriptor shuffleDescriptor) {
tieredStorageMasterClient.releasePartition(shuffleDescriptor);
}
public void close() {
tieredStorageMasterClient.close();
}
private TierShuffleHandler getTierShuffleHandler(JobShuffleContext context) {
return new TierShuffleHandler() {
@Override
public CompletableFuture<?> onReleasePartitions(
Collection<TieredStoragePartitionId> partitionIds) {
return context.stopTrackingAndReleasePartitions(
partitionIds.stream()
.map(TieredStorageIdMappingUtils::convertId)
.collect(Collectors.toList()));
}
@Override
public void onFatalError(Throwable throwable) {
shuffleMasterContext.onFatalError(throwable);
}
};
}
}
| TieredInternalShuffleMaster |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/std/ReferenceTypeSerializer.java | {
"start": 843,
"end": 17316
} | class ____<T>
extends StdDynamicSerializer<T>
{
public final static Object MARKER_FOR_EMPTY = JsonInclude.Include.NON_EMPTY;
/**
* Value type
*/
protected final JavaType _referredType;
/**
* In case of unwrapping, need name transformer.
*/
protected final NameTransformer _unwrapper;
/*
/**********************************************************************
/* Config settings, filtering
/**********************************************************************
*/
/**
* Value that indicates suppression mechanism to use for <b>values contained</b>;
* either "filter" (of which <code>equals()</code> is called), or marker
* value of {@link #MARKER_FOR_EMPTY}, or null to indicate no filtering for
* non-null values.
* Note that inclusion value for Map instance itself is handled by caller (POJO
* property that refers to the Map value).
*/
protected final Object _suppressableValue;
/**
* Flag that indicates what to do with `null` values, distinct from
* handling of {@link #_suppressableValue}
*/
protected final boolean _suppressNulls;
/*
/**********************************************************************
/* Constructors, factory methods
/**********************************************************************
*/
public ReferenceTypeSerializer(ReferenceType fullType, boolean staticTyping,
TypeSerializer vts, ValueSerializer<Object> ser)
{
super(fullType, null, vts, ser);
_referredType = fullType.getReferencedType();
_unwrapper = null;
_suppressableValue = null;
_suppressNulls = false;
}
protected ReferenceTypeSerializer(ReferenceTypeSerializer<?> base, BeanProperty property,
TypeSerializer vts, ValueSerializer<?> valueSer,
NameTransformer unwrapper,
Object suppressableValue, boolean suppressNulls)
{
super(base, property, vts, valueSer);
_referredType = base._referredType;
_unwrapper = unwrapper;
_suppressableValue = suppressableValue;
_suppressNulls = suppressNulls;
}
@Override
public ValueSerializer<T> unwrappingSerializer(NameTransformer transformer) {
ValueSerializer<Object> valueSer = _valueSerializer;
if (valueSer != null) {
// 09-Dec-2019, tatu: [databind#2565] Cannot assume that serializer in
// question actually can unwrap
valueSer = valueSer.unwrappingSerializer(transformer);
if (valueSer == _valueSerializer) {
return this;
}
}
NameTransformer unwrapper = (_unwrapper == null) ? transformer
: NameTransformer.chainedTransformer(transformer, _unwrapper);
if ((_valueSerializer == valueSer) && (_unwrapper == unwrapper)) {
return this;
}
return withResolved(_property, _valueTypeSerializer, valueSer, unwrapper);
}
/*
/**********************************************************************
/* Abstract methods to implement
/**********************************************************************
*/
/**
* Mutant factory method called when changes are needed; should construct
* newly configured instance with new values as indicated.
*<p>
* NOTE: caller has verified that there are changes, so implementations
* need NOT check if a new instance is needed.
*/
protected abstract ReferenceTypeSerializer<T> withResolved(BeanProperty prop,
TypeSerializer vts, ValueSerializer<?> valueSer,
NameTransformer unwrapper);
/**
* Mutant factory method called to create a differently constructed instance,
* specifically with different exclusion rules for contained value.
*<p>
* NOTE: caller has verified that there are changes, so implementations
* need NOT check if a new instance is needed.
*/
public abstract ReferenceTypeSerializer<T> withContentInclusion(Object suppressableValue,
boolean suppressNulls);
/**
* Method called to see if there is a value present or not.
* Note that value itself may still be `null`, even if present,
* if referential type allows three states (absent, present-null,
* present-non-null); some only allow two (absent, present-non-null).
*/
protected abstract boolean _isValuePresent(T value);
protected abstract Object _getReferenced(T value);
protected abstract Object _getReferencedIfPresent(T value);
/*
/**********************************************************************
/* Contextualization (support for property annotations)
/**********************************************************************
*/
@Override
public ValueSerializer<?> createContextual(SerializationContext ctxt,
BeanProperty property)
{
TypeSerializer typeSer = _valueTypeSerializer;
if (typeSer != null) {
typeSer = typeSer.forProperty(ctxt, property);
}
// First: do we have an annotation override from property?
ValueSerializer<?> ser = findAnnotatedContentSerializer(ctxt, property);
if (ser == null) {
// If not, use whatever was configured by type
ser = _valueSerializer;
if (ser == null) {
// A few conditions needed to be able to fetch serializer here:
if (_useStatic(ctxt, property, _referredType)) {
ser = _findSerializer(ctxt, _referredType, property);
}
} else {
ser = ctxt.handlePrimaryContextualization(ser, property);
}
}
// 23-Jan-2024, tatu: [databind#4337]: May have a content converter
ser = findContextualConvertingSerializer(ctxt, property, ser);
// First, resolve wrt property, resolved serializers
ReferenceTypeSerializer<?> refSer;
if ((_property == property)
&& (_valueTypeSerializer == typeSer) && (_valueSerializer == ser)) {
refSer = this;
} else {
refSer = withResolved(property, typeSer, ser, _unwrapper);
}
// and then see if we have property-inclusion overrides
if (property != null) {
JsonInclude.Value inclV = property.findPropertyInclusion(ctxt.getConfig(), handledType());
if (inclV != null) {
JsonInclude.Include incl = inclV.getContentInclusion();
if (incl != JsonInclude.Include.USE_DEFAULTS) {
Object valueToSuppress;
boolean suppressNulls;
switch (incl) {
case NON_DEFAULT:
valueToSuppress = BeanUtil.getDefaultValue(_referredType);
suppressNulls = true;
if (valueToSuppress != null) {
if (valueToSuppress.getClass().isArray()) {
valueToSuppress = ArrayBuilders.getArrayComparator(valueToSuppress);
}
}
break;
case NON_ABSENT:
suppressNulls = true;
valueToSuppress = _referredType.isReferenceType() ? MARKER_FOR_EMPTY : null;
break;
case NON_EMPTY:
suppressNulls = true;
valueToSuppress = MARKER_FOR_EMPTY;
break;
case CUSTOM:
valueToSuppress = ctxt.includeFilterInstance(null, inclV.getContentFilter());
if (valueToSuppress == null) { // is this legal?
suppressNulls = true;
} else {
suppressNulls = ctxt.includeFilterSuppressNulls(valueToSuppress);
}
break;
case NON_NULL:
valueToSuppress = null;
suppressNulls = true;
break;
case ALWAYS: // default
default:
valueToSuppress = null;
suppressNulls = false;
break;
}
if ((_suppressableValue != valueToSuppress)
|| (_suppressNulls != suppressNulls)) {
refSer = refSer.withContentInclusion(valueToSuppress, suppressNulls);
}
}
}
}
return refSer;
}
protected boolean _useStatic(SerializationContext serializers, BeanProperty property,
JavaType referredType)
{
// First: no serializer for `Object.class`, must be dynamic
if (referredType.isJavaLangObject()) {
return false;
}
// but if type is final, might as well fetch
if (referredType.isFinal()) { // or should we allow annotation override? (only if requested...)
return true;
}
// also: if indicated by typing, should be considered static
if (referredType.useStaticType()) {
return true;
}
// if neither, maybe explicit annotation?
AnnotationIntrospector intr = serializers.getAnnotationIntrospector();
if ((intr != null) && (property != null)) {
Annotated ann = property.getMember();
if (ann != null) {
JsonSerialize.Typing t = intr.findSerializationTyping(serializers.getConfig(),
property.getMember());
if (t == JsonSerialize.Typing.STATIC) {
return true;
}
if (t == JsonSerialize.Typing.DYNAMIC) {
return false;
}
}
}
// and finally, may be forced by global static typing (unlikely...)
return serializers.isEnabled(MapperFeature.USE_STATIC_TYPING);
}
/*
/**********************************************************************
/* Accessors
/**********************************************************************
*/
/**
 * Checks whether given reference value should be considered "empty" for
 * inclusion purposes: absent references are always empty; contained nulls
 * are empty only when null-suppression is active; otherwise emptiness is
 * decided by the configured suppression marker or value.
 */
@Override
public boolean isEmpty(SerializationContext provider, T value) throws JacksonException
{
    // Absent value (null check included as sanity check) is always empty
    if (!_isValuePresent(value)) {
        return true;
    }
    final Object contents = _getReferenced(value);
    // Explicitly contained null: empty only if nulls are being suppressed
    if (contents == null) {
        return _suppressNulls;
    }
    // No content suppression configured: present non-null value is never empty
    if (_suppressableValue == null) {
        return false;
    }
    ValueSerializer<Object> ser = _valueSerializer;
    if (ser == null) {
        ser = _findCachedSerializer(provider, contents.getClass());
    }
    return (_suppressableValue == MARKER_FOR_EMPTY)
            ? ser.isEmpty(provider, contents)
            : _suppressableValue.equals(contents);
}
/**
 * A reference serializer counts as "unwrapping" exactly when a name
 * transformer has been attached.
 */
@Override
public boolean isUnwrappingSerializer() {
    return _unwrapper != null;
}
/**
 * Accessor for the declared type of the value this reference wraps.
 *
 * @return type of the referenced (contained) value
 */
public JavaType getReferredType() {
    return _referredType;
}
/*
/**********************************************************************
/* Serialization methods
/**********************************************************************
*/
/**
 * Serializes given reference value: absent or null contents are written as
 * JSON null (or nothing at all when unwrapping); otherwise the referenced
 * value is delegated to the resolved content serializer, with polymorphic
 * type handling if a value type serializer is configured.
 */
@Override
public void serialize(T ref, JsonGenerator g, SerializationContext provider)
    throws JacksonException
{
    final Object contents = _getReferencedIfPresent(ref);
    if (contents == null) {
        // Unwrapping serializers write nothing for absent values
        if (_unwrapper == null) {
            provider.defaultSerializeNullValue(g);
        }
        return;
    }
    ValueSerializer<Object> contentSer = _valueSerializer;
    if (contentSer == null) {
        // Dynamically resolve (and cache) serializer for actual content type
        contentSer = _findCachedSerializer(provider, contents.getClass());
    }
    if (_valueTypeSerializer == null) {
        contentSer.serialize(contents, g, provider);
    } else {
        contentSer.serializeWithType(contents, g, provider, _valueTypeSerializer);
    }
}
/**
 * Serializes given reference value with polymorphic type information.
 * Absent/null contents are written as JSON null (or skipped entirely when
 * unwrapping); otherwise the call is delegated straight to the content
 * serializer with the given type serializer, effectively skipping the
 * wrapper level itself.
 */
@Override
public void serializeWithType(T ref, JsonGenerator g, SerializationContext provider,
        TypeSerializer typeSer)
    throws JacksonException
{
    Object value = _getReferencedIfPresent(ref);
    if (value == null) {
        // Unwrapping serializers write nothing for absent values; otherwise write null
        if (_unwrapper == null) {
            provider.defaultSerializeNullValue(g);
        }
        return;
    }
    // 19-Apr-2016, tatu: In order to basically "skip" the whole wrapper level
    //    (which is what non-polymorphic serialization does too), we will need
    //    to simply delegate call, I think, and NOT try to use it here.

    // Otherwise apply type-prefix/suffix, then std serialize:
    /*
    typeSer.writeTypePrefixForScalar(ref, g);
    serialize(ref, g, provider);
    typeSer.writeTypeSuffixForScalar(ref, g);
    */
    ValueSerializer<Object> ser = _valueSerializer;
    if (ser == null) {
        // Dynamically resolve (and cache) serializer for actual content type
        ser = _findCachedSerializer(provider, value.getClass());
    }
    ser.serializeWithType(value, g, provider, typeSer);
}
/*
/**********************************************************************
/* Introspection support
/**********************************************************************
*/
/**
 * Visits the referenced type's format (for schema/introspection purposes),
 * resolving the content serializer on demand and applying the configured
 * unwrapping transformer, if any.
 */
@Override
public void acceptJsonFormatVisitor(JsonFormatVisitorWrapper visitor, JavaType typeHint)
{
    ValueSerializer<?> contentSer = _valueSerializer;
    if (contentSer == null) {
        contentSer = _findSerializer(visitor.getContext(), _referredType, _property);
        if (_unwrapper != null) {
            contentSer = contentSer.unwrappingSerializer(_unwrapper);
        }
    }
    contentSer.acceptJsonFormatVisitor(visitor, _referredType);
}
/*
/**********************************************************************
/* Helper methods
/**********************************************************************
*/
/**
* Helper method that encapsulates logic of retrieving and caching required
* serializer.
*/
/**
 * Helper method that encapsulates logic of retrieving and caching required
 * serializer. Resolved serializers are stored in {@code _dynamicValueSerializers},
 * keyed by raw content class, so repeated lookups for the same runtime type
 * avoid re-resolution. The unwrapping transformer (if any) is applied before
 * caching so cached entries are the final, ready-to-use serializers.
 *
 * @param provider context for serializer resolution
 * @param rawType raw runtime class of the referenced value
 * @return serializer to use for contents of given type
 */
private final ValueSerializer<Object> _findCachedSerializer(SerializationContext provider,
        Class<?> rawType)
{
    ValueSerializer<Object> ser = _dynamicValueSerializers.serializerFor(rawType);
    if (ser == null) {
        // NOTE: call this instead of `map._findAndAddDynamic(...)` (which in turn calls
        // `findAndAddSecondarySerializer`) since we may need to apply unwrapper
        // too, before caching. But calls made are the same
        if (_referredType.hasGenericTypes()) {
            // [databind#1673] Must ensure we will resolve all available type information
            // so as not to miss generic declaration of, say, `List<GenericPojo>`...
            JavaType fullType = provider.constructSpecializedType(_referredType, rawType);
            // 23-Oct-2019, tatu: I _think_ we actually need to consider referenced
            // type as "primary" to allow applying various handlers -- done since 2.11
            ser = provider.findPrimaryPropertySerializer(fullType, _property);
        } else {
            ser = provider.findPrimaryPropertySerializer(rawType, _property);
        }
        // Apply unwrapping before caching so the cached entry is final
        if (_unwrapper != null) {
            ser = ser.unwrappingSerializer(_unwrapper);
        }
        _dynamicValueSerializers = _dynamicValueSerializers.newWith(rawType, ser);
    }
    return ser;
}
/**
 * Resolves serializer to use for the referenced value during format
 * visiting; resolution goes through "primary property" lookup so that
 * property-related handling applies to the contained value.
 *
 * @param provider context for serializer resolution
 * @param type type of the referenced value
 * @param prop property the reference is accessed through (may be {@code null})
 * @return serializer for the referenced value
 */
private final ValueSerializer<Object> _findSerializer(SerializationContext provider,
        JavaType type, BeanProperty prop)
{
    // 13-Mar-2017, tatu: Used to call `findTypeValueSerializer()`, but contextualization
    //    not working for that case for some reason
    // 15-Jan-2017, tatu: ... possibly because we need to access "secondary" serializer,
    //    not primary (primary being one for Reference type itself, not value)
    // return provider.findTypedValueSerializer(type, true, prop);
    return provider.findPrimaryPropertySerializer(type, prop);
}
}
| ReferenceTypeSerializer |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/tags/form/Country.java | {
"start": 837,
"end": 2106
} | class ____ {
public static final Country COUNTRY_AT = new Country("AT", "Austria");
public static final Country COUNTRY_NL = new Country("NL", "Netherlands");
public static final Country COUNTRY_UK = new Country("UK", "United Kingdom");
public static final Country COUNTRY_US = new Country("US", "United States");
private final String isoCode;
private final String name;
public Country(String isoCode, String name) {
this.isoCode = isoCode;
this.name = name;
}
public String getIsoCode() {
return this.isoCode;
}
public String getName() {
return this.name;
}
@Override
public String toString() {
return this.name + "(" + this.isoCode + ")";
}
public static Country getCountryWithIsoCode(final String isoCode) {
if (COUNTRY_AT.isoCode.equals(isoCode)) {
return COUNTRY_AT;
}
if (COUNTRY_NL.isoCode.equals(isoCode)) {
return COUNTRY_NL;
}
if (COUNTRY_UK.isoCode.equals(isoCode)) {
return COUNTRY_UK;
}
if (COUNTRY_US.isoCode.equals(isoCode)) {
return COUNTRY_US;
}
return null;
}
public static List getCountries() {
List countries = new ArrayList();
countries.add(COUNTRY_AT);
countries.add(COUNTRY_NL);
countries.add(COUNTRY_UK);
countries.add(COUNTRY_US);
return countries;
}
}
| Country |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanDefinitionPropertyValueCodeGeneratorDelegatesTests.java | {
"start": 9617,
"end": 10138
} | class ____ {
@Test
void generateWhenStringManagedSet() {
ManagedSet<String> set = new ManagedSet<>();
set.add("a");
set.add("b");
set.add("c");
compile(set, (instance, compiler) -> assertThat(instance).isEqualTo(set)
.isInstanceOf(ManagedSet.class));
}
@Test
void generateWhenEmptyManagedSet() {
ManagedSet<String> set = new ManagedSet<>();
compile(set, (instance, compiler) -> assertThat(instance).isEqualTo(set)
.isInstanceOf(ManagedSet.class));
}
}
@Nested
| ManagedSetTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleDropFunctionTest.java | {
"start": 1014,
"end": 2110
} | class ____ extends OracleTest {
public void testDropFunction() {
String sql = "DROP FUNCTION oe.SecondMax";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statement = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("DROP FUNCTION oe.SecondMax", SQLUtils.toSQLString(statement, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
statement.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("conditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
}
}
| OracleDropFunctionTest |
java | apache__spark | common/utils-java/src/main/java/org/apache/spark/api/java/function/ForeachPartitionFunction.java | {
"start": 916,
"end": 1018
} | interface ____ a function used in Dataset's foreachPartition function.
*/
@FunctionalInterface
public | for |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/DefaultQueryEnhancerUnitTests.java | {
"start": 1094,
"end": 2399
} | class ____ extends QueryEnhancerTckTests {
@Override
QueryEnhancer createQueryEnhancer(DeclaredQuery query) {
return new DefaultQueryEnhancer(query);
}
@Override
@Test // GH-2511, GH-2773
@Disabled("Not properly supported by QueryUtils")
void shouldDeriveNativeCountQueryWithVariable(String query, String expected) {}
@Test // GH-3546
void shouldApplySorting() {
QueryEnhancer enhancer = createQueryEnhancer(DeclaredQuery.nativeQuery("SELECT e FROM Employee e"));
String sql = enhancer.rewrite(new DefaultQueryRewriteInformation(Sort.by("foo", "bar"),
ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory())));
assertThat(sql).isEqualTo("SELECT e FROM Employee e order by e.foo asc, e.bar asc");
}
@Test // GH-3811
void shouldApplySortingWithNullHandling() {
QueryEnhancer enhancer = createQueryEnhancer(DeclaredQuery.nativeQuery("SELECT e FROM Employee e"));
String sql = enhancer.rewrite(new DefaultQueryRewriteInformation(
Sort.by(Sort.Order.asc("foo").nullsFirst(), Sort.Order.asc("bar").nullsLast()),
ReturnedType.of(Object.class, Object.class, new SpelAwareProxyProjectionFactory())));
assertThat(sql).isEqualTo("SELECT e FROM Employee e order by e.foo asc nulls first, e.bar asc nulls last");
}
}
| DefaultQueryEnhancerUnitTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/internal/ParameterizedTypeImpl.java | {
"start": 475,
"end": 3475
} | class ____ implements ParameterizedType {
private final Type[] substTypeArgs;
private final Type rawType;
private final Type ownerType;
public ParameterizedTypeImpl(Type rawType, Type[] substTypeArgs, Type ownerType) {
this.substTypeArgs = substTypeArgs;
this.rawType = rawType;
this.ownerType = ownerType;
}
public static ParameterizedTypeImpl from(ParameterizedTypeDetails typeDetails) {
final java.lang.reflect.Type attributeType = typeDetails.determineRawClass().toJavaClass();
final List<TypeDetails> arguments = typeDetails.asParameterizedType().getArguments();
final int argumentsSize = arguments.size();
final java.lang.reflect.Type[] argumentTypes = new java.lang.reflect.Type[argumentsSize];
for ( int i = 0; i < argumentsSize; i++ ) {
TypeDetails argument = arguments.get( i );
if ( argument.getTypeKind() == TypeDetails.Kind.PARAMETERIZED_TYPE ) {
argumentTypes[i] = from( argument.asParameterizedType() );
}
else {
argumentTypes[i] = argument.determineRawClass().toJavaClass();
}
}
final TypeVariableScope owner = typeDetails.asParameterizedType().getOwner();
final java.lang.reflect.Type ownerType;
if ( owner != null ) {
ownerType = owner.determineRawClass().toJavaClass();
}
else {
ownerType = null;
}
return new ParameterizedTypeImpl( attributeType, argumentTypes, ownerType );
}
public Type[] getActualTypeArguments() {
return substTypeArgs;
}
public Type getRawType() {
return rawType;
}
public Type getOwnerType() {
return ownerType;
}
@Override
public boolean equals(Object obj) {
if ( !(obj instanceof ParameterizedType other) ) {
return false;
}
return Objects.equals( getOwnerType(), other.getOwnerType() )
&& Objects.equals( getRawType(), other.getRawType() )
&& Arrays.equals( getActualTypeArguments(), other.getActualTypeArguments() );
}
@Override
public int hashCode() {
return Arrays.hashCode( getActualTypeArguments() )
^ Objects.hashCode( getOwnerType() )
^ Objects.hashCode( getRawType() );
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
if ( ownerType != null ) {
sb.append( ownerType.getTypeName() );
sb.append( "$" );
if ( ownerType instanceof ParameterizedType parameterizedType ) {
// Find simple name of nested type by removing the
// shared prefix with owner.
sb.append(
rawType.getTypeName().replace(
parameterizedType.getRawType().getTypeName() + "$",
""
)
);
}
else if ( rawType instanceof Class<?> clazz ) {
sb.append( clazz.getSimpleName() );
}
else {
sb.append( rawType.getTypeName() );
}
}
else {
sb.append( rawType.getTypeName() );
}
if ( substTypeArgs != null ) {
final StringJoiner sj = new StringJoiner( ", ", "<", ">" );
sj.setEmptyValue( "" );
for ( Type t : substTypeArgs ) {
sj.add( t.getTypeName() );
}
sb.append( sj );
}
return sb.toString();
}
}
| ParameterizedTypeImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sql/internal/NativeQueryImpl.java | {
"start": 18890,
"end": 61784
} | interface ____ {
boolean resolveResultSetMapping(
ResultSetMapping resultSetMapping,
Consumer<String> querySpaceConsumer,
ResultSetMappingResolutionContext context);
}
private static ResultSetMapping buildResultSetMapping(
String registeredName,
boolean isDynamic,
SharedSessionContractImplementor session) {
return resolveResultSetMapping( registeredName, isDynamic, session.getFactory() );
}
public List<ParameterOccurrence> getParameterOccurrences() {
return parameterOccurrences;
}
private ParameterInterpretation resolveParameterInterpretation(
String sqlString, SharedSessionContractImplementor session) {
return getInterpretationCache( session )
.resolveNativeQueryParameters( sqlString,
s -> parameterInterpretation( sqlString ) );
}
private ParameterInterpretationImpl parameterInterpretation(String sqlString) {
final var parameterRecognizer = new ParameterRecognizerImpl();
getNativeQueryInterpreter().recognizeParameters( sqlString, parameterRecognizer );
return new ParameterInterpretationImpl( parameterRecognizer );
}
protected void applyOptions(NamedNativeQueryMemento<?> memento) {
super.applyOptions( memento );
if ( memento.getMaxResults() != null ) {
setMaxResults( memento.getMaxResults() );
}
if ( memento.getFirstResult() != null ) {
setFirstResult( memento.getFirstResult() );
}
final var mementoQuerySpaces = memento.getQuerySpaces();
if ( mementoQuerySpaces != null ) {
querySpaces = makeCopy( mementoQuerySpaces );
}
// todo (6.0) : query returns
}
private IllegalArgumentException buildIncompatibleException(Class<?> resultClass, Class<?> actualResultClass) {
final String resultClassName = resultClass.getName();
final String actualResultClassName = actualResultClass.getName();
if ( resultClassName.equals( actualResultClassName ) ) {
return new IllegalArgumentException(
"Type specified for TypedQuery [" + resultClassName +
"] is incompatible with the query return type of the same name." +
" Both classes have the same name but are different as they have been loaded respectively by Classloaders " +
resultClass.getClassLoader().toString() + ", " + actualResultClass.getClassLoader().toString() +
". This suggests a classloader bug in the Runtime executing Hibernate ORM, or in the integration code."
);
}
else {
return new IllegalArgumentException(
"Type specified for TypedQuery [" + resultClassName +
"] is incompatible with query return type [" + actualResultClass + "]"
);
}
}
@Override
public String getQueryString() {
return sqlString;
}
@Override
public ParameterMetadataImplementor getParameterMetadata() {
return parameterMetadata;
}
@Override
public MutableQueryOptions getQueryOptions() {
return queryOptions;
}
@Override
public Callback getCallback() {
if ( callback == null ) {
callback = new CallbackImpl();
}
return callback;
}
@Override
public boolean hasCallbackActions() {
return callback != null && callback.hasAfterLoadActions();
}
@Override
public QueryParameterBindings getQueryParameterBindings() {
return parameterBindings;
}
@Override
public QueryParameterBindings getParameterBindings() {
return getQueryParameterBindings();
}
@Override
public Class<R> getResultType() {
return resultType;
}
@Override
public NamedNativeQueryMemento<R> toMemento(String name) {
final QueryOptions options = getQueryOptions();
return new NamedNativeQueryMementoImpl<>(
name,
resultType == null
? extractResultClass( resultSetMapping )
: resultType,
sqlString,
originalSqlString,
resultSetMapping.getMappingIdentifier(),
querySpaces,
isCacheable(),
getCacheRegion(),
getCacheMode(),
options.getFlushMode(),
isReadOnly(),
getTimeout(),
getFetchSize(),
getComment(),
options.getLimit().getFirstRow(),
options.getLimit().getMaxRows(),
getHints()
);
}
private Class<R> extractResultClass(ResultSetMapping resultSetMapping) {
final List<ResultBuilder> resultBuilders = resultSetMapping.getResultBuilders();
if ( resultBuilders.size() == 1 ) {
final ResultBuilder resultBuilder = resultBuilders.get( 0 );
if ( resultBuilder instanceof ImplicitResultClassBuilder
|| resultBuilder instanceof ImplicitModelPartResultBuilderEntity
|| resultBuilder instanceof DynamicResultBuilderEntityCalculated ) {
return (Class<R>) resultBuilder.getJavaType();
}
}
return null;
}
@Override
public LockModeType getLockMode() {
// the JPA spec requires IllegalStateException here, even
// though it's logically an UnsupportedOperationException
throw new IllegalStateException( "Illegal attempt to get lock mode on a native-query" );
}
@Override @Deprecated
public NativeQueryImplementor<R> setLockOptions(LockOptions lockOptions) {
super.setLockOptions( lockOptions );
return this;
}
@Override
public NativeQueryImplementor<R> setHibernateLockMode(LockMode lockMode) {
super.setHibernateLockMode( lockMode );
return this;
}
@Override
public NativeQueryImplementor<R> setTimeout(Timeout timeout) {
super.setTimeout( timeout );
return this;
}
@Override
public NativeQueryImplementor<R> setLockScope(PessimisticLockScope lockScope) {
super.setLockScope( lockScope );
return this;
}
@Override
public QueryImplementor<R> setLockScope(Locking.Scope lockScope) {
super.setLockScope( lockScope );
return this;
}
@Override
public NativeQueryImplementor<R> setLockMode(LockModeType lockModeType) {
// the JPA spec requires IllegalStateException here, even
// though it's logically an UnsupportedOperationException
throw new IllegalStateException( "Illegal attempt to set lock mode for a native query" );
}
@Override
protected void applyGraph(String graphString, GraphSemantic graphSemantic) {
throw new HibernateException( "A native SQL query cannot use EntityGraphs" );
}
@Override
protected void applyGraph(RootGraphImplementor<?> entityGraph, GraphSemantic graphSemantic) {
throw new HibernateException( "A native SQL query cannot use EntityGraphs" );
}
@Override
public Query<R> applyGraph(@SuppressWarnings("rawtypes") RootGraph graph, GraphSemantic semantic) {
throw new HibernateException( "A native SQL query cannot use EntityGraphs" );
}
@Override
protected void applyEntityGraphHint(GraphSemantic graphSemantic, Object value, String hintName) {
super.applyEntityGraphHint( graphSemantic, value, hintName );
}
@Override
public <T> NativeQueryImplementor<T> setTupleTransformer(TupleTransformer<T> transformer) {
super.setTupleTransformer( transformer );
//TODO: this is bad, we should really return a new instance
return (NativeQueryImplementor<T>) this;
}
@Override
public NativeQueryImplementor<R> setResultListTransformer(ResultListTransformer<R> transformer) {
super.setResultListTransformer( transformer );
return this;
}
@Override
public Boolean isSelectQuery() {
if ( resultMappingSuppliedToCtor
|| resultSetMapping.getNumberOfResultBuilders() > 0
|| isReadOnly()
// as a last resort, see if the SQL starts with "select"
|| startsWithSelect() ) {
return true;
}
else {
return null;
}
}
private boolean startsWithSelect() {
if ( startsWithSelect == null ) {
startsWithSelect = sqlString.toLowerCase( Locale.ROOT ).startsWith( "select " );
}
return startsWithSelect;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Execution
@Override
protected void prepareForExecution() {
final var spaces = getSynchronizedQuerySpaces();
if ( spaces == null || spaces.isEmpty() ) {
// We need to flush. The query itself is not required to execute in a
// transaction; if there is no transaction, the flush would throw a
// TransactionRequiredException which would potentially break existing
// apps, so we only do the flush if a transaction is in progress.
if ( shouldFlush() ) {
getSession().flush();
}
// Reset the callback before every execution
callback = null;
}
// Otherwise, the application specified query spaces via the Hibernate
// SynchronizeableQuery and so the query will already perform a partial
// flush according to the defined query spaces - no need for a full flush.
}
private boolean shouldFlush() {
if ( getSession().isTransactionInProgress() ) {
final var flushMode = getQueryOptions().getFlushMode();
return switch ( flushMode == null ? getSession().getHibernateFlushMode() : flushMode ) {
// The JPA spec requires that we auto-flush before native queries
case AUTO -> getSessionFactory().getSessionFactoryOptions().isJpaBootstrap();
case ALWAYS -> true;
default -> false;
};
}
else {
return false;
}
}
@Override
protected List<R> doList() {
return resolveSelectQueryPlan().performList( this );
}
@Override
public long getResultCount() {
final var context = new DelegatingDomainQueryExecutionContext(this) {
@Override
public QueryOptions getQueryOptions() {
return QueryOptions.NONE;
}
};
return createCountQueryPlan().executeQuery( context, SingleResultConsumer.instance() );
}
@Override
public KeyedResultList<R> getKeyedResultList(KeyedPage<R> page) {
throw new UnsupportedOperationException("native queries do not support key-based pagination");
}
protected SelectQueryPlan<R> resolveSelectQueryPlan() {
final var mapping = resultSetMapping();
checkResultType( resultType, mapping );
final int parameterStartPosition = parameterStartPosition();
if ( isCacheableQuery() ) {
return getInterpretationCache().resolveSelectQueryPlan(
selectInterpretationsKey( mapping, parameterStartPosition ),
key -> createQueryPlan( key.getResultSetMapping(), key.getStartPosition() )
);
}
else {
return createQueryPlan( mapping, parameterStartPosition );
}
}
private int parameterStartPosition() {
final var jdbcServices = getSessionFactory().getJdbcServices();
if ( !isStandardRenderer( jdbcServices.getParameterMarkerStrategy() )
&& hasLimit( getQueryOptions().getLimit() ) ) {
final var limitHandler = jdbcServices.getDialect().getLimitHandler();
if ( limitHandler.processSqlMutatesState() ) {
limitHandler.processSql( sqlString, -1, null, getQueryOptions() );
}
// A non-standard parameter marker strategy is in use, and the limit handler wants to bind parameters
// before the main parameters. This requires recording the start position in the cache key
// because the generated SQL depends on this information
return limitHandler.getParameterPositionStart( getQueryOptions().getLimit() );
}
else {
return 1;
}
}
private ResultSetMapping resultSetMapping() {
if ( resultType != null
&& resultSetMapping.isDynamic()
&& resultSetMapping.getNumberOfResultBuilders() == 0 ) {
final var sessionFactory = getSessionFactory();
final var mapping = resolveResultSetMapping( originalSqlString, true, sessionFactory );
final var metamodel = getMappingMetamodel();
if ( metamodel.isEntityClass( resultType ) ) {
mapping.addResultBuilder(
Builders.entityCalculated( unqualify( resultType.getName() ), resultType.getName(),
LockMode.READ, sessionFactory ) );
}
else if ( !isResultTypeAlwaysAllowed( resultType )
&& (!isClass( resultType ) || hasJavaTypeDescriptor( resultType )) ) {
mapping.addResultBuilder( Builders.resultClassBuilder( resultType, metamodel ) );
}
return mapping;
}
else {
return resultSetMapping;
}
}
private NativeSelectQueryPlan<R> createQueryPlan(ResultSetMapping resultSetMapping, int parameterStartPosition) {
final NativeSelectQueryDefinition<R> queryDefinition = new NativeSelectQueryDefinition<>() {
final String sqlString = expandParameterLists( parameterStartPosition );
@Override
public String getSqlString() {
return sqlString;
}
@Override
public boolean isCallable() {
return false;
}
@Override
public List<ParameterOccurrence> getQueryParameterOccurrences() {
return parameterOccurrences;
}
@Override
public ResultSetMapping getResultSetMapping() {
return resultSetMapping;
}
@Override
public Set<String> getAffectedTableNames() {
return querySpaces;
}
};
return getNativeQueryInterpreter().createQueryPlan( queryDefinition, getSessionFactory() );
}
/*
* Used by Hibernate Reactive
*/
protected NativeSelectQueryPlan<Long> createCountQueryPlan() {
final NativeSelectQueryDefinition<Long> queryDefinition = new NativeSelectQueryDefinition<>() {
final BasicType<Long> longType = getTypeConfiguration().getBasicTypeForJavaType(Long.class);
final String sqlString = expandParameterLists( 1 );
@Override
public String getSqlString() {
return "select count(*) from (" + sqlString + ") a_";
}
@Override
public boolean isCallable() {
return false;
}
@Override
public List<ParameterOccurrence> getQueryParameterOccurrences() {
return parameterOccurrences;
}
@Override
public ResultSetMapping getResultSetMapping() {
final ResultSetMappingImpl mapping = new ResultSetMappingImpl( "", true );
mapping.addResultBuilder( new DynamicResultBuilderBasicStandard( 1, longType ) );
return mapping;
}
@Override
public Set<String> getAffectedTableNames() {
return querySpaces;
}
};
return getNativeQueryInterpreter().createQueryPlan( queryDefinition, getSessionFactory() );
}
private NativeQueryInterpreter getNativeQueryInterpreter() {
return getSessionFactory().getQueryEngine().getNativeQueryInterpreter();
}
protected String expandParameterLists(int parameterStartPosition) {
if ( parameterOccurrences == null || parameterOccurrences.isEmpty() ) {
return sqlString;
}
// HHH-1123
// Some DBs limit number of IN expressions. For now, warn...
final var factory = getSessionFactory();
final var dialect = factory.getJdbcServices().getDialect();
final boolean paddingEnabled = factory.getSessionFactoryOptions().inClauseParameterPaddingEnabled();
final int inExprLimit = dialect.getInExpressionCountLimit();
final var parameterMarkerStrategy = factory.getJdbcServices().getParameterMarkerStrategy();
final boolean needsMarker = !isStandardRenderer( parameterMarkerStrategy );
var sql =
needsMarker
? new StringBuilder( sqlString.length() + parameterOccurrences.size() * 10 )
.append( sqlString )
: null;
// Handle parameter lists
int offset = 0;
int parameterPosition = parameterStartPosition;
for ( var occurrence : parameterOccurrences ) {
final var queryParameter = occurrence.parameter();
final var binding = parameterBindings.getBinding( queryParameter );
if ( binding.isMultiValued() ) {
final int bindValueCount = binding.getBindValues().size();
logTooManyExpressions( inExprLimit, bindValueCount, dialect, queryParameter );
final int sourcePosition = occurrence.sourcePosition();
if ( sourcePosition >= 0 ) {
// check if placeholder is already immediately enclosed in parentheses
// (ignoring whitespace)
final boolean isEnclosedInParens = isEnclosedInParens( sourcePosition );
// short-circuit for performance when only 1 value and the
// placeholder is already enclosed in parentheses...
if ( bindValueCount != 1 || !isEnclosedInParens ) {
if ( sql == null ) {
sql = new StringBuilder( sqlString.length() + 20 )
.append( sqlString );
}
final int bindValueMaxCount =
determineBindValueMaxCount( paddingEnabled, inExprLimit, bindValueCount );
final String expansionListAsString = expandList(
bindValueMaxCount,
isEnclosedInParens,
parameterPosition,
parameterMarkerStrategy,
needsMarker
);
final int start = sourcePosition + offset;
final int end = start + 1;
sql.replace( start, end, expansionListAsString );
offset += expansionListAsString.length() - 1;
parameterPosition += bindValueMaxCount;
}
else if ( needsMarker ) {
final int start = sourcePosition + offset;
final int end = start + 1;
final String parameterMarker = parameterMarkerStrategy.createMarker( parameterPosition, null );
sql.replace( start, end, parameterMarker );
offset += parameterMarker.length() - 1;
parameterPosition++;
}
}
}
else if ( needsMarker ) {
final int sourcePosition = occurrence.sourcePosition();
final int start = sourcePosition + offset;
final int end = start + 1;
final String parameterMarker = parameterMarkerStrategy.createMarker( parameterPosition, null );
sql.replace( start, end, parameterMarker );
offset += parameterMarker.length() - 1;
parameterPosition++;
}
}
return sql == null ? sqlString : sql.toString();
}
private static void logTooManyExpressions(
int inExprLimit, int bindValueCount,
Dialect dialect, QueryParameterImplementor<?> queryParameter) {
if ( inExprLimit > 0 && bindValueCount > inExprLimit ) {
CORE_LOGGER.tooManyInExpressions(
dialect.getClass().getName(),
inExprLimit,
queryParameter.getName() == null
? queryParameter.getPosition().toString()
: queryParameter.getName(),
bindValueCount
);
}
}
private static String expandList(int bindValueMaxCount, boolean isEnclosedInParens, int parameterPosition, ParameterMarkerStrategy parameterMarkerStrategy, boolean needsMarker) {
// HHH-8901
if ( bindValueMaxCount == 0 ) {
return isEnclosedInParens ? "null" : "(null)";
}
else if ( needsMarker ) {
final StringBuilder sb = new StringBuilder( bindValueMaxCount * 4 );
if ( !isEnclosedInParens ) {
sb.append( '(' );
}
for ( int i = 0; i < bindValueMaxCount; i++ ) {
sb.append( parameterMarkerStrategy.createMarker( parameterPosition + i, null ) );
sb.append( ',' );
}
sb.setLength( sb.length() - 1 );
if ( !isEnclosedInParens ) {
sb.append( ')' );
}
return sb.toString();
}
else {
// Shift 1 bit instead of multiplication by 2
final char[] chars;
if ( isEnclosedInParens ) {
chars = new char[(bindValueMaxCount << 1) - 1];
chars[0] = '?';
for ( int i = 1; i < bindValueMaxCount; i++ ) {
final int index = i << 1;
chars[index - 1] = ',';
chars[index] = '?';
}
}
else {
chars = new char[(bindValueMaxCount << 1) + 1];
chars[0] = '(';
chars[1] = '?';
for ( int i = 1; i < bindValueMaxCount; i++ ) {
final int index = i << 1;
chars[index] = ',';
chars[index + 1] = '?';
}
chars[chars.length - 1] = ')';
}
return new String( chars );
}
}
private boolean isEnclosedInParens(int sourcePosition) {
boolean isEnclosedInParens = true;
for ( int i = sourcePosition - 1; i >= 0; i-- ) {
final char ch = sqlString.charAt( i );
if ( !isWhitespace( ch ) ) {
isEnclosedInParens = ch == '(';
break;
}
}
if ( isEnclosedInParens ) {
for ( int i = sourcePosition + 1; i < sqlString.length(); i++ ) {
final char ch = sqlString.charAt( i );
if ( !isWhitespace( ch ) ) {
isEnclosedInParens = ch == ')';
break;
}
}
}
return isEnclosedInParens;
}
public static int determineBindValueMaxCount(boolean paddingEnabled, int inExprLimit, int bindValueCount) {
int bindValueMaxCount = bindValueCount;
final boolean inClauseParameterPaddingEnabled = paddingEnabled && bindValueCount > 2;
if ( inClauseParameterPaddingEnabled ) {
int bindValuePaddingCount = MathHelper.ceilingPowerOfTwo( bindValueCount );
if ( inExprLimit > 0 && bindValuePaddingCount > inExprLimit ) {
bindValuePaddingCount = inExprLimit;
}
if ( bindValueCount < bindValuePaddingCount ) {
bindValueMaxCount = bindValuePaddingCount;
}
}
return bindValueMaxCount;
}
private SelectInterpretationsKey selectInterpretationsKey(ResultSetMapping resultSetMapping, int parameterStartPosition) {
return new SelectInterpretationsKey(
getQueryString(),
resultSetMapping,
getSynchronizedQuerySpaces(),
parameterStartPosition
);
}
private boolean isCacheableQuery() {
// todo (6.0): unless we move the limit rendering from DeferredResultSetAccess to NativeSelectQueryPlanImpl
// we don't need to consider the limit here at all because that is applied on demand.
// It certainly is better for performance to include the limit early, but then we might trash the cache
// if ( hasLimit( query.getQueryOptions().getLimit() ) ) {
// return false;
// }
// For now, don't cache plans that have parameter lists
return !parameterBindings.hasAnyMultiValuedBindings();
}
private boolean hasLimit(Limit limit) {
return limit != null && !limit.isEmpty();
}
@Override
protected ScrollableResultsImplementor<R> doScroll(ScrollMode scrollMode) {
	// Scrolling always goes through the (possibly cached) select plan
	return resolveSelectQueryPlan().performScroll( scrollMode, this );
}

protected int doExecuteUpdate() {
	// Mutation statements (insert/update/delete) use the non-select plan
	return resolveNonSelectQueryPlan().executeUpdate( this );
}

// Convenience accessor for the type configuration's basic-type registry
private BasicTypeRegistry getBasicTypeRegistry() {
	return getTypeConfiguration().getBasicTypeRegistry();
}

// The session-scoped query interpretation (plan) cache
protected QueryInterpretationCache getInterpretationCache() {
	return getInterpretationCache( getSession() );
}
/**
 * Resolves the plan for executing this query as a mutation statement,
 * consulting the interpretation cache when the query is plan-cacheable.
 */
private NonSelectQueryPlan resolveNonSelectQueryPlan() {
	NonSelectQueryPlan queryPlan = null;
	// A null key means "not cacheable" — skip both lookup and store
	final var cacheKey = generateNonSelectInterpretationsKey();
	if ( cacheKey != null ) {
		queryPlan = getInterpretationCache().getNonSelectQueryPlan( cacheKey );
	}
	if ( queryPlan == null ) {
		// Multi-valued parameter lists are expanded into individual JDBC parameters
		// before the plan is built (parameter numbering starts at 1)
		final String sqlString = expandParameterLists( 1 );
		queryPlan = new NativeNonSelectQueryPlanImpl( sqlString, querySpaces, parameterOccurrences );
		if ( cacheKey != null ) {
			getInterpretationCache().cacheNonSelectQueryPlan( cacheKey, queryPlan );
		}
	}
	return queryPlan;
}

/**
 * Builds the cache key for the non-select plan, or {@code null} when this
 * query is not plan-cacheable.
 */
protected NonSelectInterpretationsKey generateNonSelectInterpretationsKey() {
	// todo (6.0) - should this account for query spaces in determining "cacheable"?
	return isCacheableQuery()
			? new NonSelectInterpretationsKey( getQueryString(), getSynchronizedQuerySpaces() )
			: null;
}
@Override
public void addResultTypeClass(Class<?> resultClass) {
	// Only legal before any other result builders have been registered
	assert resultSetMapping.getNumberOfResultBuilders() == 0;
	registerBuilder( Builders.resultClassBuilder( resultClass, getSessionFactory().getMappingMetamodel() ) );
}

@Override
public NativeQueryImplementor<R> addScalar(String columnAlias) {
	// Untyped scalar: the value type is inferred from JDBC metadata at execution
	return registerBuilder( Builders.scalar( columnAlias ) );
}

// Positional scalar variant, resolving the basic type from the given class
public NativeQueryImplementor<R> addScalar(int position, Class<?> type) {
	return registerBuilder( Builders.scalar( position, getBasicTypeRegistry().getRegisteredType( type ) ) );
}

/**
 * Registers the given result builder with this query's result-set mapping
 * and returns {@code this} for fluent chaining.
 */
protected NativeQueryImplementor<R> registerBuilder(ResultBuilder builder) {
	resultSetMapping.addResultBuilder( builder );
	return this;
}
// ~ addScalar overloads: register a scalar result for a column alias, with
//   increasing amounts of type / conversion information supplied by the caller ~

@Override
public NativeQuery<R> addScalar(String columnAlias, @SuppressWarnings("rawtypes") BasicTypeReference type) {
	return registerBuilder( Builders.scalar( columnAlias,
			getBasicTypeRegistry().resolve( (BasicTypeReference<?>) type ) ) );
}

@Override
public NativeQueryImplementor<R> addScalar(String columnAlias, @SuppressWarnings("rawtypes") BasicDomainType type) {
	return registerBuilder( Builders.scalar( columnAlias, (BasicType<?>) type ) );
}

@Override
public NativeQueryImplementor<R> addScalar(String columnAlias, @SuppressWarnings("rawtypes") Class javaType) {
	@SuppressWarnings("unchecked")
	final BasicType<?> basicType = getBasicTypeRegistry().getRegisteredType( javaType );
	// Fall back to a JavaType-based builder when no basic type is registered for the class
	return basicType != null
			? registerBuilder( Builders.scalar( columnAlias, basicType ) )
			: registerBuilder( Builders.scalar( columnAlias, javaType, getSessionFactory() ) );
}

// Scalar converted via an AttributeConverter instance (JDBC-side type known)
@Override
public <C> NativeQueryImplementor<R> addScalar(
		String columnAlias,
		Class<C> jdbcJavaType,
		AttributeConverter<?, C> converter) {
	return registerBuilder( Builders.converted( columnAlias, jdbcJavaType, converter, getSessionFactory() ) );
}

// Scalar converted via an AttributeConverter instance (both sides known)
@Override
public <O, J> NativeQueryImplementor<R> addScalar(
		String columnAlias,
		Class<O> domainJavaType,
		Class<J> jdbcJavaType,
		AttributeConverter<O, J> converter) {
	return registerBuilder( Builders.converted( columnAlias, domainJavaType, jdbcJavaType, converter, getSessionFactory() ) );
}

// Scalar converted via an AttributeConverter class (instantiated by Hibernate)
@Override
public <C> NativeQueryImplementor<R> addScalar(
		String columnAlias,
		Class<C> relationalJavaType,
		Class<? extends AttributeConverter<?, C>> converter) {
	return registerBuilder( Builders.converted( columnAlias, relationalJavaType, converter, getSessionFactory() ) );
}

// Scalar converted via an AttributeConverter class (both sides known)
@Override
public <O, J> NativeQueryImplementor<R> addScalar(
		String columnAlias,
		Class<O> domainJavaType,
		Class<J> jdbcJavaType,
		Class<? extends AttributeConverter<O, J>> converterJavaType) {
	return registerBuilder( Builders.converted( columnAlias, domainJavaType, jdbcJavaType, converterJavaType, getSessionFactory() ) );
}
@Override
public <J> InstantiationResultNode<J> addInstantiation(Class<J> targetJavaType) {
	// The returned node lets the caller keep configuring constructor arguments
	final DynamicResultBuilderInstantiation<J> builder =
			Builders.instantiation( targetJavaType, getSessionFactory() );
	registerBuilder( builder );
	return builder;
}

@Override
public NativeQueryImplementor<R> addAttributeResult(
		String columnAlias,
		@SuppressWarnings("rawtypes") Class entityJavaType,
		String attributePath) {
	// Delegates to the entity-name variant
	return addAttributeResult( columnAlias, entityJavaType.getName(), attributePath );
}

@Override
public NativeQueryImplementor<R> addAttributeResult(
		String columnAlias,
		String entityName,
		String attributePath) {
	registerBuilder( Builders.attributeResult( columnAlias, entityName, attributePath, getSessionFactory() ) );
	return this;
}

@Override
public NativeQueryImplementor<R> addAttributeResult(
		String columnAlias,
		@SuppressWarnings("rawtypes") SingularAttribute attribute) {
	registerBuilder( Builders.attributeResult( columnAlias, attribute, getSessionFactory() ) );
	return this;
}
@Override
public DynamicResultBuilderEntityStandard addRoot(String tableAlias, String entityName) {
	final var resultBuilder = Builders.entity( tableAlias, entityName, getSessionFactory() );
	resultSetMapping.addResultBuilder( resultBuilder );
	// Remember the entity mapping so later addFetch/addJoin calls can resolve sub-parts
	entityMappingTypeByTableAlias.put( tableAlias, resultBuilder.getEntityMapping() );
	return resultBuilder;
}

@Override
public DynamicResultBuilderEntityStandard addRoot(String tableAlias, @SuppressWarnings("rawtypes") Class entityType) {
	return addRoot( tableAlias, entityType.getName() );
}

@Override
public NativeQueryImplementor<R> addEntity(String entityName) {
	// Default table alias is the unqualified entity name
	return addEntity( unqualify( entityName ), entityName );
}

@Override
public NativeQueryImplementor<R> addEntity(String tableAlias, String entityName) {
	final var builder = Builders.entityCalculated( tableAlias, entityName, getSessionFactory() );
	entityMappingTypeByTableAlias.put( tableAlias, builder.getEntityMapping() );
	registerBuilder( builder );
	return this;
}

@Override
public NativeQueryImplementor<R> addEntity(String tableAlias, String entityName, LockMode lockMode) {
	// Same as above, additionally carrying an explicit lock mode
	final var builder = Builders.entityCalculated( tableAlias, entityName, lockMode, getSessionFactory() );
	entityMappingTypeByTableAlias.put( tableAlias, builder.getEntityMapping() );
	registerBuilder( builder );
	return this;
}

@Override
public NativeQueryImplementor<R> addEntity(@SuppressWarnings("rawtypes") Class entityType) {
	return addEntity( entityType.getName() );
}

@Override
public NativeQueryImplementor<R> addEntity(Class<R> entityType, LockMode lockMode) {
	return addEntity( unqualify( entityType.getName() ), entityType.getName(), lockMode);
}

@Override
public NativeQueryImplementor<R> addEntity(String tableAlias, @SuppressWarnings("rawtypes") Class entityClass) {
	return addEntity( tableAlias, entityClass.getName() );
}

@Override
public NativeQueryImplementor<R> addEntity(String tableAlias, @SuppressWarnings("rawtypes") Class entityClass, LockMode lockMode) {
	return addEntity( tableAlias, entityClass.getName(), lockMode );
}

@Override
public FetchReturn addFetch(String tableAlias, String ownerTableAlias, String joinPropertyName) {
	// Resolve the joined attribute from the owner's previously-registered entity mapping.
	// NOTE(review): NPEs with an unhelpful message if ownerTableAlias was never registered
	// via addRoot/addEntity — presumably considered a caller error.
	final var subPart = entityMappingTypeByTableAlias.get( ownerTableAlias ).findSubPart( joinPropertyName );
	addEntityMappingType( tableAlias, subPart );
	final var fetchBuilder = Builders.fetch( tableAlias, ownerTableAlias, (Fetchable) subPart );
	resultSetMapping.addLegacyFetchBuilder( fetchBuilder );
	return fetchBuilder;
}
/**
 * Registers the entity mapping reachable through the given model part under
 * {@code tableAlias}, so subsequent fetches can be chained off that alias.
 * Embedded attributes are rejected; other part kinds are silently ignored.
 */
private void addEntityMappingType(String tableAlias, ModelPart part) {
	if ( part instanceof PluralAttributeMapping pluralAttributeMapping ) {
		// For collections the alias maps to the element's entity type, when it is one
		final var partMappingType = pluralAttributeMapping.getElementDescriptor().getPartMappingType();
		if ( partMappingType instanceof EntityMappingType entityMappingType ) {
			entityMappingTypeByTableAlias.put( tableAlias, entityMappingType );
		}
	}
	else if ( part instanceof EntityAssociationMapping entityAssociationMapping ) {
		entityMappingTypeByTableAlias.put( tableAlias, entityAssociationMapping.asEntityMappingType() );
	}
	else if ( part instanceof EmbeddedAttributeMapping ) {
		// Fetch-chaining through embeddables is not supported
		throw new UnsupportedOperationException();
	}
}
@Override
public NativeQueryImplementor<R> addJoin(String tableAlias, String path) {
	createFetchJoin( tableAlias, path );
	return this;
}

/**
 * Parses a legacy {@code "ownerAlias.property"} path and registers the
 * corresponding fetch. The split happens at the first dot, so everything
 * after it is treated as the joined property name.
 *
 * @throws PathException if the path contains no {@code '.'} separator
 */
private FetchReturn createFetchJoin(String tableAlias, String path) {
	final int loc = path.indexOf( '.' );
	if ( loc < 0 ) {
		throw new PathException( "Not a property path '" + path + "'" );
	}
	final String ownerTableAlias = path.substring( 0, loc );
	final String joinedPropertyName = path.substring( loc + 1 );
	return addFetch( tableAlias, ownerTableAlias, joinedPropertyName );
}

@Override
public NativeQueryImplementor<R> addJoin(String tableAlias, String ownerTableAlias, String joinPropertyName) {
	addFetch( tableAlias, ownerTableAlias, joinPropertyName );
	return this;
}

@Override
public NativeQueryImplementor<R> addJoin(String tableAlias, String path, LockMode lockMode) {
	// Same as addJoin(alias, path), additionally applying the lock mode to the fetch
	createFetchJoin( tableAlias, path ).setLockMode( lockMode );
	return this;
}
@Override
public Collection<String> getSynchronizedQuerySpaces() {
	// May be null when no query spaces have been registered yet
	return querySpaces;
}

@Override
public NativeQueryImplementor<R> addSynchronizedQuerySpace(String querySpace) {
	addQuerySpaces( querySpace );
	return this;
}
/**
 * Adds the given table spaces to the set of query spaces this query
 * synchronizes with, lazily creating the backing set.
 */
protected void addQuerySpaces(String... spaces) {
	if ( spaces != null ) {
		if ( querySpaces == null ) {
			querySpaces = new HashSet<>();
		}
		addAll( querySpaces, spaces );
	}
}

/**
 * Variant accepting the legacy {@code Serializable[]} query-space form.
 * <p>
 * Fix: the previous implementation cast the whole array via
 * {@code (String[]) spaces}, which throws {@link ClassCastException} whenever
 * the runtime array type is {@code Serializable[]} (the normal case for this
 * overload, e.g. when individual {@code Serializable} values are passed as
 * varargs) — even if every element is actually a {@code String}. Elements are
 * therefore cast individually instead; a genuinely non-String element still
 * fails fast with a {@code ClassCastException}.
 */
protected void addQuerySpaces(Serializable... spaces) {
	if ( spaces != null ) {
		if ( querySpaces == null ) {
			querySpaces = new HashSet<>();
		}
		for ( Serializable space : spaces ) {
			querySpaces.add( (String) space );
		}
	}
}
@Override
public NativeQueryImplementor<R> addSynchronizedEntityName(String entityName) {
	// Register every table space the named entity maps to
	addQuerySpaces( getMappingMetamodel().getEntityDescriptor( entityName ).getQuerySpaces() );
	return this;
}

@Override
public NativeQueryImplementor<R> addSynchronizedEntityClass(@SuppressWarnings("rawtypes") Class entityClass) {
	// Same, resolving the descriptor by entity class instead of name
	addQuerySpaces( getMappingMetamodel().getEntityDescriptor( entityClass ).getQuerySpaces() );
	return this;
}
// ~ covariant-return overrides: delegate to the superclass and narrow the
//   return type to NativeQueryImplementor<R> for fluent chaining ~

@Override
public NativeQueryImplementor<R> setHibernateFlushMode(FlushMode flushMode) {
	super.setHibernateFlushMode( flushMode );
	return this;
}

@Override
public NativeQueryImplementor<R> setQueryFlushMode(QueryFlushMode queryFlushMode) {
	super.setQueryFlushMode(queryFlushMode);
	return this;
}

@Override
public NativeQueryImplementor<R> setFlushMode(FlushModeType flushModeType) {
	super.setFlushMode( flushModeType );
	return this;
}

@Override
public NativeQueryImplementor<R> setCacheMode(CacheMode cacheMode) {
	super.setCacheMode( cacheMode );
	return this;
}

@Override
public NativeQueryImplementor<R> setCacheRetrieveMode(CacheRetrieveMode cacheRetrieveMode) {
	super.setCacheRetrieveMode( cacheRetrieveMode );
	return this;
}

@Override
public NativeQueryImplementor<R> setCacheStoreMode(CacheStoreMode cacheStoreMode) {
	super.setCacheStoreMode( cacheStoreMode );
	return this;
}
/**
 * JPA-style timeout setter. A {@code null} timeout means "no timeout" and is
 * mapped to the {@code -1} sentinel understood by the {@code int} overload,
 * avoiding an NPE on unboxing.
 */
@Override
public TypedQuery<R> setTimeout(Integer timeout) {
	// Single expression instead of parameter reassignment + redundant (int) cast
	super.setTimeout( timeout == null ? -1 : timeout );
	return this;
}
// ~ more covariant-return delegators for cache / fetch / read-only options ~

@Override
public NativeQueryImplementor<R> setCacheable(boolean cacheable) {
	super.setCacheable( cacheable );
	return this;
}

@Override
public NativeQueryImplementor<R> setCacheRegion(String cacheRegion) {
	super.setCacheRegion( cacheRegion );
	return this;
}

@Override
public NativeQueryImplementor<R> setQueryPlanCacheable(boolean queryPlanCacheable) {
	super.setQueryPlanCacheable( queryPlanCacheable );
	return this;
}

@Override
public NativeQueryImplementor<R> setTimeout(int timeout) {
	super.setTimeout( timeout );
	return this;
}

@Override
public NativeQueryImplementor<R> setFetchSize(int fetchSize) {
	super.setFetchSize( fetchSize );
	return this;
}

@Override
public NativeQueryImplementor<R> setReadOnly(boolean readOnly) {
	super.setReadOnly( readOnly );
	return this;
}
/**
 * Unwraps this query to one of its internal facets: the query itself, its
 * parameter metadata, parameter bindings, query options, applied entity graph,
 * or the owning session — checked in that order.
 *
 * @throws PersistenceException if the requested type matches none of the facets
 */
@Override
public <T> T unwrap(Class<T> type) {
	final Object[] unwrappables = {
			this,
			parameterMetadata,
			parameterBindings,
			getQueryOptions(),
			getQueryOptions().getAppliedGraph(),
			getSession()
	};
	for ( Object unwrappable : unwrappables ) {
		if ( type.isInstance( unwrappable ) ) {
			return type.cast( unwrappable );
		}
	}
	throw new PersistenceException( "Unrecognized unwrap type [" + type.getName() + "]" );
}
@Override
public NativeQueryImplementor<R> setComment(String comment) {
	super.setComment( comment );
	return this;
}

@Override
public NativeQueryImplementor<R> addQueryHint(String hint) {
	super.addQueryHint( hint );
	return this;
}

@Override
protected void collectHints(Map<String, Object> hints) {
	super.collectHints( hints );
	// Additionally expose the native lock mode (when set) as a hint
	putIfNotNull( hints, HINT_NATIVE_LOCK_MODE, getLockOptions().getLockMode() );
}
protected void applySynchronizeSpacesHint(Object value) {
	applySynchronizeSpace( value );
}

/**
 * Recursively interprets a synchronize-spaces hint value: a table-space name
 * ({@code String}), an entity class, or an array/iterable of either.
 * Unrecognized value types are silently ignored.
 */
protected void applySynchronizeSpace(Object value) {
	if ( value instanceof String string ) {
		addSynchronizedQuerySpace( string );
	}
	else if ( value instanceof Class<?> clazz ) {
		addSynchronizedEntityClass( clazz );
	}
	else if ( value instanceof Object[] array ) {
		for ( Object element : array ) {
			applySynchronizeSpace( element );
		}
	}
	else if ( value instanceof Iterable<?> iterable ) {
		for ( Object element : iterable ) {
			applySynchronizeSpace( element );
		}
	}
}
// ~ setParameter overrides: delegate to the superclass and narrow the return
//   type for fluent chaining. Variants taking an explicit TemporalType follow
//   the deprecated JPA temporal-parameter API. ~

@Override
public NativeQueryImplementor<R> setParameter(String name, Object value) {
	super.setParameter( name, value );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(String name, P value, Class<P> javaTypeClass) {
	super.setParameter( name, value, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(String name, P value, Type<P> type) {
	super.setParameter( name, value, type );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(String name, Calendar value, TemporalType temporalType) {
	super.setParameter( name, value, temporalType );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(String name, Instant value, TemporalType temporalType) {
	super.setParameter( name, value, temporalType );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(String name, Date value, TemporalType temporalType) {
	super.setParameter( name, value, temporalType );
	return this;
}

@Override
public NativeQueryImplementor<R> setParameter(int position, Object value) {
	super.setParameter( position, value );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(int position, P value, Class<P> javaTypeClass) {
	super.setParameter( position, value, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(int position, P value, Type<P> type) {
	super.setParameter( position, value, type );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(int position, Instant value, TemporalType temporalType) {
	super.setParameter( position, value, temporalType );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(int position, Calendar value, TemporalType temporalType) {
	super.setParameter( position, value, temporalType );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(int position, Date value, TemporalType temporalType) {
	super.setParameter( position, value, temporalType );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(QueryParameter<P> parameter, P value) {
	super.setParameter( parameter, value );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(QueryParameter<P> parameter, P value, Class<P> javaTypeClass) {
	super.setParameter( parameter, value, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(QueryParameter<P> parameter, P value, Type<P> type) {
	super.setParameter( parameter, value, type );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameter(Parameter<P> parameter, P value) {
	super.setParameter( parameter, value );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(Parameter<Calendar> param, Calendar value, TemporalType temporalType) {
	super.setParameter( param, value, temporalType );
	return this;
}

@Override @Deprecated
public NativeQueryImplementor<R> setParameter(Parameter<Date> param, Date value, TemporalType temporalType) {
	super.setParameter( param, value, temporalType );
	return this;
}
// ~ setParameterList overrides: multi-valued parameter binding by name,
//   position, or QueryParameter handle — all delegating with a narrowed
//   return type for fluent chaining ~

@Override
public NativeQueryImplementor<R> setParameterList(String name, @SuppressWarnings("rawtypes") Collection values) {
	super.setParameterList( name, values );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(String name, Collection<? extends P> values, Class<P> javaTypeClass) {
	super.setParameterList( name, values, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(String name, Collection<? extends P> values, Type<P> type) {
	super.setParameterList( name, values, type );
	return this;
}

@Override
public NativeQueryImplementor<R> setParameterList(String name, Object[] values) {
	super.setParameterList( name, values );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(String name, P[] values, Class<P> javaTypeClass) {
	super.setParameterList( name, values, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(String name, P[] values, Type<P> type) {
	super.setParameterList( name, values, type );
	return this;
}

@Override
public NativeQueryImplementor<R> setParameterList(int position, @SuppressWarnings("rawtypes") Collection values) {
	super.setParameterList( position, values );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(int position, Collection<? extends P> values, Class<P> javaTypeClass) {
	super.setParameterList( position, values, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(int position, Collection<? extends P> values, Type<P> type) {
	super.setParameterList( position, values, type );
	return this;
}

@Override
public NativeQueryImplementor<R> setParameterList(int position, Object[] values) {
	super.setParameterList( position, values );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(int position, P[] values, Class<P> javaTypeClass) {
	super.setParameterList( position, values, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(int position, P[] values, Type<P> type) {
	super.setParameterList( position, values, type );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values) {
	super.setParameterList( parameter, values );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Class<P> javaTypeClass) {
	super.setParameterList( parameter, values, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Type<P> type) {
	super.setParameterList( parameter, values, type );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(QueryParameter<P> parameter, P[] values) {
	super.setParameterList( parameter, values );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(QueryParameter<P> parameter, P[] values, Class<P> javaTypeClass) {
	super.setParameterList( parameter, values, javaTypeClass );
	return this;
}

@Override
public <P> NativeQueryImplementor<R> setParameterList(QueryParameter<P> parameter, P[] values, Type<P> type) {
	super.setParameterList( parameter, values, type );
	return this;
}
@Override
public NativeQueryImplementor<R> setProperties(@SuppressWarnings("rawtypes") Map map) {
	super.setProperties( map );
	return this;
}

@Override
public NativeQueryImplementor<R> setProperties(Object bean) {
	super.setProperties( bean );
	return this;
}

@Override @Deprecated @SuppressWarnings("deprecation")
public <S> NativeQueryImplementor<S> setResultTransformer(ResultTransformer<S> transformer) {
	// Legacy API: a ResultTransformer acts as both tuple and result-list transformer
	return setTupleTransformer( transformer ).setResultListTransformer( transformer );
}

@Override
public NativeQueryImplementor<R> setMaxResults(int maxResults) {
	super.setMaxResults( maxResults );
	return this;
}

@Override
public NativeQueryImplementor<R> setFirstResult(int startPosition) {
	super.setFirstResult( startPosition );
	return this;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Hints

@Override
public NativeQueryImplementor<R> setHint(String hintName, Object value) {
	// Delegates hint interpretation to the superclass (see collectHints above)
	super.setHint( hintName, value );
	return this;
}
private static | ResultSetMappingHandler |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 1719,
"end": 1862
} | class ____ {
/**
* Inclusivity literals for {@link #hierarchy(Class, Interfaces)}.
*
* @since 3.2
*/
public | ClassUtils |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_hasSize_Test.java | {
"start": 834,
"end": 1171
} | class ____ extends AtomicReferenceArrayAssertBaseTest {
@Override
protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
return assertions.hasSize(6);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasSize(info(), internalArray(), 6);
}
}
| AtomicReferenceArrayAssert_hasSize_Test |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/testutil/runner/CompilerTestEnabledOnJreCondition.java | {
"start": 648,
"end": 1584
} | class ____ implements ExecutionCondition {
static final ConditionEvaluationResult ENABLED_ON_CURRENT_JRE =
ConditionEvaluationResult.enabled( "Enabled on JRE version: " + System.getProperty( "java.version" ) );
static final ConditionEvaluationResult DISABLED_ON_CURRENT_JRE =
ConditionEvaluationResult.disabled( "Disabled on JRE version: " + System.getProperty( "java.version" ) );
protected final Compiler compiler;
public CompilerTestEnabledOnJreCondition(Compiler compiler) {
this.compiler = compiler;
}
@Override
public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) {
// If the max JRE is greater or equal to the current version the test is enabled
return compiler.latestSupportedJre().compareTo( JRE.currentVersion() ) >= 0 ? ENABLED_ON_CURRENT_JRE :
DISABLED_ON_CURRENT_JRE;
}
}
| CompilerTestEnabledOnJreCondition |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/BeanUtilsTests.java | {
"start": 30344,
"end": 30589
} | class ____ extends GenericBaseModel<Integer> {
private String address;
public User() {
super();
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
}
}
| User |
java | apache__camel | components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/MessageMediaEndpointConfiguration.java | {
"start": 1529,
"end": 2803
} | class ____ extends TwilioConfiguration {
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "reader")})
private String pathAccountSid;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "reader"), @ApiMethod(methodName = "reader")})
private String pathMessageSid;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "fetcher")})
private String pathSid;
public String getPathAccountSid() {
return pathAccountSid;
}
public void setPathAccountSid(String pathAccountSid) {
this.pathAccountSid = pathAccountSid;
}
public String getPathMessageSid() {
return pathMessageSid;
}
public void setPathMessageSid(String pathMessageSid) {
this.pathMessageSid = pathMessageSid;
}
public String getPathSid() {
return pathSid;
}
public void setPathSid(String pathSid) {
this.pathSid = pathSid;
}
}
| MessageMediaEndpointConfiguration |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 30546,
"end": 30722
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return definesFunction( dialect, "json_insert" );
}
}
public static | SupportsJsonInsert |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SerializationUtils.java | {
"start": 2360,
"end": 3395
} | class ____ extends ObjectInputStream {
private final ClassLoader classLoader;
/**
* Constructs a new instance.
* @param in The {@link InputStream}.
* @param classLoader classloader to use
* @throws IOException if an I/O error occurs while reading stream header.
* @see java.io.ObjectInputStream
*/
ClassLoaderAwareObjectInputStream(final InputStream in, final ClassLoader classLoader) throws IOException {
super(in);
this.classLoader = classLoader;
}
/**
* Overridden version that uses the parameterized {@link ClassLoader} or the {@link ClassLoader}
* of the current {@link Thread} to resolve the class.
* @param desc An instance of class {@link ObjectStreamClass}.
* @return A {@link Class} object corresponding to {@code desc}.
* @throws IOException Any of the usual Input/Output exceptions.
* @throws ClassNotFoundException If | ClassLoaderAwareObjectInputStream |
java | google__guice | core/test/com/google/inject/ProvisionListenerTest.java | {
"start": 18897,
"end": 19016
} | class ____ implements Provider<Foo> {
@Override
public Foo get() {
return new Foo();
}
}
static | FooP |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/CreateExtension.java | {
"start": 5518,
"end": 26146
} | enum ____ {
OTHER_PLATFORM,
QUARKUS_CORE,
QUARKIVERSE,
STANDALONE
}
public static final String DEFAULT_BOM_GROUP_ID = "io.quarkus";
public static final String DEFAULT_BOM_ARTIFACT_ID = "quarkus-bom";
public static final String DEFAULT_BOM_VERSION = "${quarkus.version}";
public static final String DEFAULT_VERSION = "1.0.0-SNAPSHOT";
public static final String DEFAULT_QUARKIVERSE_VERSION = "999-SNAPSHOT";
public static final String DEFAULT_CORE_NAMESPACE_ID = "quarkus-";
public static final String DEFAULT_EXTERNAL_NAMESPACE_ID = "";
public static final String DEFAULT_QUARKIVERSE_PARENT_GROUP_ID = "io.quarkiverse";
public static final String DEFAULT_QUARKIVERSE_PARENT_ARTIFACT_ID = "quarkiverse-parent";
public static final String DEFAULT_QUARKIVERSE_PARENT_VERSION = "20";
public static final String DEFAULT_QUARKIVERSE_NAMESPACE_ID = "quarkus-";
public static final String DEFAULT_QUARKIVERSE_GUIDE_URL = "https://docs.quarkiverse.io/%s/dev/";
private static final String DEFAULT_SUREFIRE_PLUGIN_VERSION = "3.5.4";
private static final String DEFAULT_COMPILER_PLUGIN_VERSION = "3.14.1";
private final QuarkusExtensionCodestartProjectInputBuilder builder = QuarkusExtensionCodestartProjectInput.builder();
private final Path baseDir;
private final EnhancedDataMap data = new EnhancedDataMap();
private MessageWriter log = MessageWriter.info();
private String extensionId;
private String itTestRelativeDir = "integration-tests";
private String bomRelativeDir = "bom/application";
private String extensionsRelativeDir = "extensions";
private boolean withCodestart;
private String javaVersion;
private boolean hasIntegrationTestsModule;
public CreateExtension(final Path baseDir) {
this.baseDir = requireNonNull(baseDir, "extensionDirPath is required");
}
public CreateExtension groupId(String groupId) {
data.putIfNonEmptyString(GROUP_ID, groupId);
return this;
}
public CreateExtension extensionId(String extensionId) {
if (!StringUtils.isEmpty(extensionId)) {
this.extensionId = extensionId;
}
return this;
}
public CreateExtension extensionName(String name) {
data.putIfNonEmptyString(EXTENSION_NAME, name);
return this;
}
public CreateExtension extensionDescription(String description) {
data.putIfNonEmptyString(EXTENSION_DESCRIPTION, description);
return this;
}
public CreateExtension version(String version) {
data.putIfNonEmptyString(VERSION, version);
return this;
}
public CreateExtension packageName(String packageName) {
data.putIfNonEmptyString(PACKAGE_NAME, packageName);
return this;
}
public CreateExtension classNameBase(String classNameBase) {
data.putIfNonEmptyString(CLASS_NAME_BASE, classNameBase);
return this;
}
public CreateExtension namespaceId(String extensionArtifactIdPrefix) {
data.putIfNonNull(NAMESPACE_ID, extensionArtifactIdPrefix);
return this;
}
public CreateExtension namespaceName(String namespaceName) {
data.putIfNonNull(NAMESPACE_NAME, namespaceName);
return this;
}
public CreateExtension parentGroupId(String groupId) {
data.putIfNonEmptyString(PARENT_GROUP_ID, groupId);
return this;
}
public CreateExtension parentArtifactId(String artifactId) {
data.putIfNonEmptyString(PARENT_ARTIFACT_ID, artifactId);
return this;
}
public CreateExtension parentVersion(String version) {
data.putIfNonEmptyString(PARENT_VERSION, version);
return this;
}
public CreateExtension parentRelativePath(String parentRelativePath) {
data.putIfNonEmptyString(PARENT_RELATIVE_PATH, parentRelativePath);
return this;
}
public CreateExtension quarkusVersion(String quarkusVersion) {
data.putIfNonEmptyString(QUARKUS_VERSION, quarkusVersion);
return this;
}
public CreateExtension quarkusBomGroupId(String quarkusBomGroupId) {
data.putIfNonEmptyString(QUARKUS_BOM_GROUP_ID, quarkusBomGroupId);
return this;
}
public CreateExtension quarkusBomArtifactId(String quarkusBomArtifactId) {
data.putIfNonEmptyString(QUARKUS_BOM_ARTIFACT_ID, quarkusBomArtifactId);
return this;
}
public CreateExtension quarkusBomVersion(String quarkusBomVersion) {
data.putIfNonEmptyString(QUARKUS_BOM_VERSION, quarkusBomVersion);
return this;
}
public CreateExtension javaVersion(String javaVersion) {
this.javaVersion = javaVersion;
return this;
}
public CreateExtension withCodestart(boolean withCodestart) {
this.withCodestart = withCodestart;
return this;
}
public CreateExtension withoutUnitTest(boolean withoutUnitTest) {
this.builder.withoutUnitTest(withoutUnitTest);
return this;
}
public CreateExtension withoutDevModeTest(boolean withoutDevModeTest) {
this.builder.withoutDevModeTest(withoutDevModeTest);
return this;
}
public CreateExtension withoutIntegrationTests(boolean withoutIntegrationTest) {
hasIntegrationTestsModule = !withoutIntegrationTest;
this.builder.withoutIntegrationTests(withoutIntegrationTest);
return this;
}
public CreateExtension itTestRelativeDir(String itTestRelativeDir) {
if (!isEmpty(itTestRelativeDir)) {
this.itTestRelativeDir = itTestRelativeDir;
}
return this;
}
public CreateExtension bomRelativeDir(String bomRelativeDir) {
if (!isEmpty(bomRelativeDir)) {
this.bomRelativeDir = bomRelativeDir;
}
return this;
}
public CreateExtension extensionsRelativeDir(String extensionsRelativeDir) {
if (!isEmpty(extensionsRelativeDir)) {
this.extensionsRelativeDir = extensionsRelativeDir;
}
return this;
}
public CreateExtension messageWriter(MessageWriter log) {
this.log = log;
return this;
}
public CreateExtensionCommandHandler prepare() throws QuarkusCommandException {
final Path workingDir = resolveWorkingDir(baseDir);
final Model baseModel = resolveModel(baseDir);
final LayoutType layoutType = detectLayoutType(baseModel, data.getStringValue(GROUP_ID).orElse(null));
String namespaceId = getDefaultNamespaceId(layoutType);
data.putIfAbsent(NAMESPACE_ID, namespaceId);
String namespaceName = computeDefaultNamespaceName(data.getRequiredStringValue(NAMESPACE_ID));
String resolvedExtensionId = resolveExtensionId();
ensureRequiredStringData(EXTENSION_ID, resolvedExtensionId);
data.putIfAbsent(EXTENSION_NAME, capitalize(extensionId));
data.putIfAbsent(NAMESPACE_NAME, namespaceName);
data.putIfAbsent(CLASS_NAME_BASE, toCapCamelCase(extensionId));
data.put(EXTENSION_FULL_NAME,
data.getRequiredStringValue(NAMESPACE_NAME) + data.getRequiredStringValue(EXTENSION_NAME));
data.put(HAS_INTEGRATION_TESTS_MODULE, hasIntegrationTestsModule);
// for now, we only support Java extensions
data.put(JAVA_VERSION, javaVersion == null ? JavaVersion.DEFAULT_JAVA_VERSION_FOR_EXTENSION
: computeJavaVersion(SourceType.JAVA, javaVersion));
final String runtimeArtifactId = getRuntimeArtifactIdFromData();
ensureRequiredStringData(GROUP_ID, resolveGroupId(baseModel));
ensureRequiredStringData(PACKAGE_NAME,
resolveExtensionPackage(data.getRequiredStringValue(GROUP_ID), extensionId));
final String groupId = data.getRequiredStringValue(GROUP_ID);
final String defaultVersion;
final Model itTestModel;
String extensionDirName = runtimeArtifactId;
switch (layoutType) {
case QUARKUS_CORE:
case OTHER_PLATFORM:
defaultVersion = DEFAULT_VERSION;
extensionDirName = extensionId;
final Model extensionsParentModel = readPom(workingDir.resolve(extensionsRelativeDir));
data.putIfAbsent(PROPERTIES_FROM_PARENT, true);
ensureRequiredStringData(PARENT_GROUP_ID, resolveGroupId(extensionsParentModel));
ensureRequiredStringData(PARENT_ARTIFACT_ID, resolveArtifactId(extensionsParentModel));
ensureRequiredStringData(PARENT_VERSION, resolveVersion(extensionsParentModel, defaultVersion));
data.putIfAbsent(PARENT_RELATIVE_PATH, "../pom.xml");
itTestModel = readPom(workingDir.resolve(itTestRelativeDir));
if (withCodestart) {
log.warn("\nExtension Codestart is not yet available for '%s' extension (skipped).\n",
layoutType.toString().toLowerCase());
}
break;
case QUARKIVERSE:
defaultVersion = DEFAULT_QUARKIVERSE_VERSION;
data.putIfAbsent(PARENT_GROUP_ID, DEFAULT_QUARKIVERSE_PARENT_GROUP_ID);
data.putIfAbsent(PARENT_ARTIFACT_ID, DEFAULT_QUARKIVERSE_PARENT_ARTIFACT_ID);
data.putIfAbsent(PARENT_VERSION, DEFAULT_QUARKIVERSE_PARENT_VERSION);
data.putIfAbsent(QUARKUS_BOM_GROUP_ID, DEFAULT_BOM_GROUP_ID);
data.putIfAbsent(QUARKUS_BOM_ARTIFACT_ID, DEFAULT_BOM_ARTIFACT_ID);
data.putIfAbsent(QUARKUS_BOM_VERSION, DEFAULT_BOM_VERSION);
data.putIfAbsent(MAVEN_COMPILER_PLUGIN_VERSION, DEFAULT_COMPILER_PLUGIN_VERSION);
data.putIfAbsent(EXTENSION_GUIDE,
String.format(DEFAULT_QUARKIVERSE_GUIDE_URL, resolvedExtensionId));
ensureRequiredStringData(QUARKUS_VERSION);
data.putIfAbsent(HAS_DOCS_MODULE, true);
data.put(EXTENSION_FULL_NAME,
capitalize(data.getRequiredStringValue(NAMESPACE_ID)) + " "
+ data.getRequiredStringValue(EXTENSION_NAME));
// TODO: Support Quarkiverse multi extensions repo
builder.addCodestart(QuarkusExtensionCodestartCatalog.Code.QUARKIVERSE.key());
builder.addCodestart(QuarkusExtensionCodestartCatalog.Tooling.GIT.key());
if (withCodestart) {
builder.addCodestart(QuarkusExtensionCodestartCatalog.Code.EXTENSION_CODESTART.key());
}
itTestModel = getStandaloneTempModel(workingDir, runtimeArtifactId, defaultVersion);
break;
default:
defaultVersion = DEFAULT_VERSION;
data.putIfAbsent(QUARKUS_BOM_GROUP_ID, DEFAULT_BOM_GROUP_ID);
data.putIfAbsent(QUARKUS_BOM_ARTIFACT_ID, DEFAULT_BOM_ARTIFACT_ID);
data.putIfAbsent(QUARKUS_BOM_VERSION, DEFAULT_BOM_VERSION);
data.putIfAbsent(MAVEN_SUREFIRE_PLUGIN_VERSION, DEFAULT_SUREFIRE_PLUGIN_VERSION);
data.putIfAbsent(MAVEN_COMPILER_PLUGIN_VERSION, DEFAULT_COMPILER_PLUGIN_VERSION);
ensureRequiredStringData(QUARKUS_VERSION);
if (withCodestart) {
builder.addCodestart(QuarkusExtensionCodestartCatalog.Code.EXTENSION_CODESTART.key());
}
// In standalone mode, the base pom is used as parent for integration tests
itTestModel = getStandaloneTempModel(workingDir, runtimeArtifactId, defaultVersion);
break;
}
ensureRequiredStringData(VERSION, resolveVersion(baseModel, defaultVersion));
ensureRequiredStringData(IT_PARENT_GROUP_ID, resolveGroupId(itTestModel));
ensureRequiredStringData(IT_PARENT_ARTIFACT_ID, resolveArtifactId(itTestModel));
ensureRequiredStringData(IT_PARENT_VERSION, resolveVersion(itTestModel, defaultVersion));
ensureRequiredStringData(IT_PARENT_RELATIVE_PATH, "../pom.xml");
final Optional<String> quarkusVersion = data.getStringValue(QUARKUS_VERSION);
// in 2.10.0.CR1 quarkus-bootstrap-maven-plugin was deprecated in favor of quarkus-extension-maven-plugin
if (quarkusVersion.isPresent() &&
new ComparableVersion("2.10.0.CR1").compareTo(new ComparableVersion(quarkusVersion.get())) > 0) {
// the legacy bootstrap plugin, if MAVEN_QUARKUS_EXTENSION_PLUGIN isn't set, it will default to the quarkus-extension-maven-plugin
data.putIfAbsent(MAVEN_QUARKUS_EXTENSION_PLUGIN, "quarkus-bootstrap-maven-plugin");
}
builder.addData(data);
log.info("\nDetected layout type is '%s' ", layoutType.toString().toLowerCase());
log.info("Generated runtime artifactId is '%s'\n", runtimeArtifactId);
if (LayoutType.QUARKUS_CORE.equals(layoutType) || LayoutType.OTHER_PLATFORM.equals(layoutType)) {
final Path extensionsDir = workingDir.resolve(extensionsRelativeDir);
final Path itTestDir = workingDir.resolve(itTestRelativeDir);
final Path bomDir = workingDir.resolve(bomRelativeDir);
return new CreateExtensionCommandHandler(groupId, runtimeArtifactId, builder.build(),
extensionsDir.resolve(extensionDirName),
extensionsDir,
itTestDir, bomDir);
}
return new CreateExtensionCommandHandler(groupId, runtimeArtifactId, builder.build(),
workingDir.resolve(extensionDirName));
}
public QuarkusCommandOutcome execute() throws QuarkusCommandException {
return prepare().execute(log);
}
private String resolveExtensionId() {
String namespaceId = data.getRequiredStringValue(NAMESPACE_ID);
if (extensionId.startsWith(namespaceId)) {
extensionId = extensionId.substring(namespaceId.length());
}
return extensionId;
}
private String getDefaultNamespaceId(LayoutType layoutType) {
switch (layoutType) {
case QUARKIVERSE:
return DEFAULT_QUARKIVERSE_NAMESPACE_ID;
case QUARKUS_CORE:
return DEFAULT_CORE_NAMESPACE_ID;
default:
return DEFAULT_EXTERNAL_NAMESPACE_ID;
}
}
private String computeDefaultNamespaceName(String namespaceId) {
if (isEmpty(namespaceId)) {
return "";
}
return capitalize(namespaceId) + " - ";
}
public static Model resolveModel(Path dir) throws QuarkusCommandException {
final Path workingDir = resolveWorkingDir(dir);
final Path basePom = workingDir.resolve("pom.xml");
if (!Files.isRegularFile(basePom)) {
return null;
}
try {
return MojoUtils.readPom(basePom.toFile());
} catch (IOException e) {
throw new QuarkusCommandException("Error while reading base pom.xml", e);
}
}
private Model getStandaloneTempModel(Path workingDir, String runtimeArtifactId, String defaultVersion) {
final Model model = new Model();
model.setGroupId(data.getRequiredStringValue(GROUP_ID));
model.setArtifactId(runtimeArtifactId + "-parent");
model.setVersion(data.getStringValue(VERSION).orElse(defaultVersion));
model.setPomFile(workingDir.resolve("pom.xml").toFile());
return model;
}
private String getRuntimeArtifactIdFromData() {
return data.getStringValue(NAMESPACE_ID).orElse("")
+ data.getRequiredStringValue(EXTENSION_ID);
}
private static Path resolveWorkingDir(Path dir) {
return "extensions".equals(dir.getFileName().toString()) ? dir.resolve("..") : dir;
}
public static LayoutType detectLayoutType(Model basePom, String groupId) {
if (basePom != null) {
if (basePom.getArtifactId().endsWith("quarkus-parent")) {
return LayoutType.QUARKUS_CORE;
}
if (basePom.getModules().stream().anyMatch(s -> s.contains("bom"))) {
return LayoutType.OTHER_PLATFORM;
}
}
if (isQuarkiverseGroupId(groupId))
return LayoutType.QUARKIVERSE;
return LayoutType.STANDALONE;
}
public static boolean isQuarkiverseGroupId(String groupId) {
return groupId != null && groupId.contains(DEFAULT_QUARKIVERSE_PARENT_GROUP_ID);
}
public static String extractQuarkiverseExtensionId(String groupId) {
return groupId.replace(DEFAULT_QUARKIVERSE_PARENT_GROUP_ID + ".", "");
}
private void ensureRequiredStringData(QuarkusExtensionData key) throws QuarkusCommandException {
if (!data.containsNonEmptyStringForKey(key)) {
throw new QuarkusCommandException("'" + key.toString() + "' value is required.");
}
}
private void ensureRequiredStringData(QuarkusExtensionData key, String detectedValue) throws QuarkusCommandException {
if (!data.containsNonEmptyStringForKey(key)) {
if (isEmpty(detectedValue)) {
throw new QuarkusCommandException(
"You need to define '" + key.toString() + "' because it was not found in the project hierarchy.");
}
data.putIfNonEmptyString(key, detectedValue);
}
}
static String resolveExtensionPackage(String groupId, String artifactId) {
final Deque<String> segments = new ArrayDeque<>();
for (String segment : groupId.split("[.\\-]+")) {
if (segments.isEmpty() || !segments.peek().equals(segment)) {
segments.add(segment);
}
}
for (String segment : artifactId.split("[.\\-]+")) {
if (!segments.contains(segment)) {
segments.add(segment);
}
}
return segments.stream() //
.map(s -> s.toLowerCase(Locale.ROOT)) //
.map(s -> SourceVersion.isKeyword(s) ? s + "_" : s) //
.collect(Collectors.joining("."));
}
static String resolveGroupId(Model basePom) {
return basePom != null ? basePom.getGroupId() != null ? basePom.getGroupId()
: basePom.getParent() != null && basePom.getParent().getGroupId() != null
? basePom.getParent().getGroupId()
: null
: null;
}
static String resolveArtifactId(Model basePom) {
return basePom != null ? basePom.getArtifactId() != null ? basePom.getArtifactId()
: basePom.getParent() != null && basePom.getParent().getArtifactId() != null
? basePom.getParent().getArtifactId()
: null
: null;
}
static String resolveVersion(Model basePom, String defaultVersion) {
return basePom != null ? basePom.getVersion() != null ? basePom.getVersion()
: basePom.getParent() != null && basePom.getParent().getVersion() != null
? basePom.getParent().getVersion()
: defaultVersion
: defaultVersion;
}
static String toCapCamelCase(String name) {
final StringBuilder sb = new StringBuilder(name.length());
for (String segment : name.split("[.\\-]+")) {
sb.append(Character.toUpperCase(segment.charAt(0)));
if (segment.length() > 1) {
sb.append(segment.substring(1));
}
}
return sb.toString();
}
static String capitalize(String name) {
// do not capitalize if the string already contains upper case characters
if (hasUpperCaseCharacter(name)) {
return name;
}
final StringBuilder sb = new StringBuilder(name.length());
for (String segment : name.split("[.\\-]+")) {
if (sb.length() > 0) {
sb.append(' ');
}
sb.append(Character.toUpperCase(segment.charAt(0)));
if (segment.length() > 1) {
sb.append(segment.substring(1));
}
}
return sb.toString();
}
public static boolean hasUpperCaseCharacter(String s) {
for (int i = 0; i < s.length(); i++) {
if (Character.isUpperCase(s.charAt(i))) {
return true;
}
}
return false;
}
private static | LayoutType |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/view/UrlBasedViewResolverTests.java | {
"start": 4576,
"end": 4964
} | class ____ extends AbstractUrlBasedView {
public TestView() {
setRequestContextAttribute("testRequestContext");
}
@Override
public boolean checkResourceExists(Locale locale) {
return true;
}
@Override
protected Mono<Void> renderInternal(Map<String, Object> attributes, MediaType contentType,
ServerWebExchange exchange) {
return Mono.empty();
}
}
}
| TestView |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/struct/FormatFeatureAcceptSingleTest.java | {
"start": 3075,
"end": 3219
} | class ____ {
@JsonFormat(with = JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
public Role[] roles;
}
static | RolesInArray |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/server/reactive/AbstractListenerReadPublisher.java | {
"start": 1298,
"end": 1441
} | class ____ {@code Publisher} implementations that bridge between
* event-listener read APIs and Reactive Streams.
*
* <p>Specifically a base | for |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java | {
"start": 1667,
"end": 1907
} | class ____ provides direct and reverse lookup functionalities, allowing
* the querying of specific network interfaces or nameservers.
*
*
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Unstable
public | that |
java | quarkusio__quarkus | integration-tests/spring-data-jpa/src/test/java/io/quarkus/it/spring/data/jpa/PhoneCallResourceTest.java | {
"start": 239,
"end": 978
} | class ____ {
@Test
public void testFindById() {
when().get("/phonecall/1234/56789").then()
.statusCode(200)
.body(containsString("25"));
}
@Test
public void testFindAllIds() {
when().get("/phonecall/ids").then()
.statusCode(200)
.body(containsString("11111"))
.body(containsString("56789"));
}
@Test
public void testFindAllCallAgents() {
when().get("/phonecall/call-agents").then()
.statusCode(200)
.body(containsString("General")).body(containsString("Specific"))
.body(containsString("Major")).body(containsString("Minor"));
}
}
| PhoneCallResourceTest |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/secured/assertion/DefaultAssertionCreatorTest.java | {
"start": 6982,
"end": 7803
} | class ____ {
private final Time time = new MockTime();
private String algorithm = TOKEN_SIGNING_ALGORITHM_RS256;
private File privateKeyFile;
private Optional<String> passphrase = Optional.empty();
public Builder setAlgorithm(String algorithm) {
this.algorithm = algorithm;
return this;
}
public Builder setPrivateKeyFile(File privateKeyFile) {
this.privateKeyFile = privateKeyFile;
return this;
}
public Builder setPassphrase(String passphrase) {
this.passphrase = Optional.of(passphrase);
return this;
}
private DefaultAssertionCreator build() {
return new DefaultAssertionCreator(algorithm, privateKeyFile, passphrase);
}
}
}
| Builder |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerUpgradeTestSpecifications.java | {
"start": 33726,
"end": 34572
} | class ____
implements TypeSerializerUpgradeTestBase.PreUpgradeSetup<StaticSchemaPojo> {
@Override
public TypeSerializer<StaticSchemaPojo> createPriorSerializer() {
SerializerConfig serializerConfig = new SerializerConfigImpl();
TypeSerializer<StaticSchemaPojo> serializer =
TypeExtractor.createTypeInfo(StaticSchemaPojo.class)
.createSerializer(serializerConfig);
assertThat(serializer.getClass()).isSameAs(PojoSerializer.class);
return serializer;
}
@Override
public StaticSchemaPojo createTestData() {
return new StaticSchemaPojoSubclassA(
"gt", 7, StaticSchemaPojo.Color.BLUE, false, 911108);
}
}
public static final | NewRegisteredPojoSubclassSetup |
java | google__truth | core/src/main/java/com/google/common/truth/IntegerSubject.java | {
"start": 988,
"end": 1610
} | class ____ extends ComparableSubject<Integer> {
private final @Nullable Integer actual;
/**
* The constructor is for use by subclasses only. If you want to create an instance of this class
* itself, call {@link Subject#check(String, Object...) check(...)}{@code .that(actual)}.
*/
protected IntegerSubject(FailureMetadata metadata, @Nullable Integer actual) {
super(metadata, actual);
this.actual = actual;
}
/**
* A partially specified check about an approximate relationship to a {@code int} actual value
* using a tolerance.
*
* @since 1.2
*/
public static final | IntegerSubject |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/output/ArrayComplexData.java | {
"start": 1117,
"end": 2843
} | class ____ extends ComplexData {
private final List<Object> data;
public ArrayComplexData(int count) {
// RESP2 response for array data might end up returning -1 here if there are no results to process
if (count > 0) {
data = new ArrayList<>(count);
} else {
data = new ArrayList<>(0);
}
}
@Override
public void storeObject(Object value) {
data.add(value);
}
@Override
public List<Object> getDynamicList() {
return Collections.unmodifiableList(data);
}
@Override
public Set<Object> getDynamicSet() {
// RESP2 compatibility mode - assuming the caller is aware that the array really contains a set (because in RESP2 we
// lack support for this data type) we make the conversion here
Set<Object> set = new LinkedHashSet<>(data);
return Collections.unmodifiableSet(set);
}
@Override
public Map<Object, Object> getDynamicMap() {
// RESP2 compatibility mode - assuming the caller is aware that the array really contains a map (because in RESP2 we
// lack support for this data type) we make the conversion here
Map<Object, Object> map = new LinkedHashMap<>();
final Boolean[] isKey = { true };
final Object[] key = new Object[1];
data.forEach(element -> {
if (isKey[0]) {
key[0] = element;
isKey[0] = false;
} else {
map.put(key[0], element);
isKey[0] = true;
}
});
return Collections.unmodifiableMap(map);
}
@Override
public boolean isList() {
return true;
}
}
| ArrayComplexData |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/ScenariosForSpringSecurityExpressionTests.java | {
"start": 8424,
"end": 9609
} | class ____ implements MethodExecutor {
TypeConverter tc;
public HasRoleExecutor(TypeConverter typeConverter) {
this.tc = typeConverter;
}
@Override
public TypedValue execute(EvaluationContext context, Object target, Object... arguments)
throws AccessException {
try {
Method m = HasRoleExecutor.class.getMethod("hasRole", String[].class);
Object[] args = arguments;
if (args != null) {
ReflectionHelper.convertAllArguments(tc, args, m);
}
if (m.isVarArgs()) {
args = ReflectionHelper.setupArgumentsForVarargsInvocation(m.getParameterTypes(), args);
}
return new TypedValue(m.invoke(null, args), new TypeDescriptor(new MethodParameter(m,-1)));
}
catch (Exception ex) {
throw new AccessException("Problem invoking hasRole", ex);
}
}
public static boolean hasRole(String... strings) {
return true;
}
}
@Override
public MethodExecutor resolve(EvaluationContext context, Object targetObject, String name, List<TypeDescriptor> arguments) {
if (name.equals("hasRole")) {
return new HasRoleExecutor(context.getTypeConverter());
}
return null;
}
}
}
| HasRoleExecutor |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/impl/consumer/DefaultMQPullConsumerImpl.java | {
"start": 3727,
"end": 37849
} | class ____ implements MQConsumerInner {
private static final Logger log = LoggerFactory.getLogger(DefaultMQPullConsumerImpl.class);
private final DefaultMQPullConsumer defaultMQPullConsumer;
private final long consumerStartTimestamp = System.currentTimeMillis();
private final RPCHook rpcHook;
private final ArrayList<ConsumeMessageHook> consumeMessageHookList = new ArrayList<>();
private final ArrayList<FilterMessageHook> filterMessageHookList = new ArrayList<>();
private volatile ServiceState serviceState = ServiceState.CREATE_JUST;
protected MQClientInstance mQClientFactory;
private PullAPIWrapper pullAPIWrapper;
private OffsetStore offsetStore;
private RebalanceImpl rebalanceImpl = new RebalancePullImpl(this);
public DefaultMQPullConsumerImpl(final DefaultMQPullConsumer defaultMQPullConsumer, final RPCHook rpcHook) {
this.defaultMQPullConsumer = defaultMQPullConsumer;
this.rpcHook = rpcHook;
}
public void registerConsumeMessageHook(final ConsumeMessageHook hook) {
this.consumeMessageHookList.add(hook);
log.info("register consumeMessageHook Hook, {}", hook.hookName());
}
public void createTopic(String key, String newTopic, int queueNum) throws MQClientException {
createTopic(key, newTopic, queueNum, 0);
}
public void createTopic(String key, String newTopic, int queueNum, int topicSysFlag) throws MQClientException {
this.isRunning();
this.mQClientFactory.getMQAdminImpl().createTopic(key, newTopic, queueNum, topicSysFlag, null);
}
private void isRunning() throws MQClientException {
if (this.serviceState != ServiceState.RUNNING) {
throw new MQClientException("The consumer is not in running status, "
+ this.serviceState
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_SERVICE_NOT_OK),
null);
}
}
public long fetchConsumeOffset(MessageQueue mq, boolean fromStore) throws MQClientException {
this.isRunning();
return this.offsetStore.readOffset(mq, fromStore ? ReadOffsetType.READ_FROM_STORE : ReadOffsetType.MEMORY_FIRST_THEN_STORE);
}
public Set<MessageQueue> fetchMessageQueuesInBalance(String topic) throws MQClientException {
this.isRunning();
if (null == topic) {
throw new IllegalArgumentException("topic is null");
}
ConcurrentMap<MessageQueue, ProcessQueue> mqTable = this.rebalanceImpl.getProcessQueueTable();
Set<MessageQueue> mqResult = new HashSet<>();
for (MessageQueue mq : mqTable.keySet()) {
if (mq.getTopic().equals(topic)) {
mqResult.add(mq);
}
}
return parseSubscribeMessageQueues(mqResult);
}
public List<MessageQueue> fetchPublishMessageQueues(String topic) throws MQClientException {
this.isRunning();
return this.mQClientFactory.getMQAdminImpl().fetchPublishMessageQueues(topic);
}
public Set<MessageQueue> fetchSubscribeMessageQueues(String topic) throws MQClientException {
this.isRunning();
// check if has info in memory, otherwise invoke api.
Set<MessageQueue> result = this.rebalanceImpl.getTopicSubscribeInfoTable().get(topic);
if (null == result) {
result = this.mQClientFactory.getMQAdminImpl().fetchSubscribeMessageQueues(topic);
}
return parseSubscribeMessageQueues(result);
}
public Set<MessageQueue> parseSubscribeMessageQueues(Set<MessageQueue> queueSet) {
Set<MessageQueue> resultQueues = new HashSet<>();
for (MessageQueue messageQueue : queueSet) {
String userTopic = NamespaceUtil.withoutNamespace(messageQueue.getTopic(),
this.defaultMQPullConsumer.getNamespace());
resultQueues.add(new MessageQueue(userTopic, messageQueue.getBrokerName(), messageQueue.getQueueId()));
}
return resultQueues;
}
public long earliestMsgStoreTime(MessageQueue mq) throws MQClientException {
this.isRunning();
return this.mQClientFactory.getMQAdminImpl().earliestMsgStoreTime(mq);
}
public long maxOffset(MessageQueue mq) throws MQClientException {
this.isRunning();
return this.mQClientFactory.getMQAdminImpl().maxOffset(mq);
}
public long minOffset(MessageQueue mq) throws MQClientException {
this.isRunning();
return this.mQClientFactory.getMQAdminImpl().minOffset(mq);
}
public PullResult pull(MessageQueue mq, String subExpression, long offset, int maxNums)
throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
return pull(mq, subExpression, offset, maxNums, this.defaultMQPullConsumer.getConsumerPullTimeoutMillis());
}
public PullResult pull(MessageQueue mq, String subExpression, long offset, int maxNums, long timeout)
throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
SubscriptionData subscriptionData = getSubscriptionData(mq, subExpression);
return this.pullSyncImpl(mq, subscriptionData, offset, maxNums, false, timeout);
}
public PullResult pull(MessageQueue mq, MessageSelector messageSelector, long offset, int maxNums)
throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
return pull(mq, messageSelector, offset, maxNums, this.defaultMQPullConsumer.getConsumerPullTimeoutMillis());
}
public PullResult pull(MessageQueue mq, MessageSelector messageSelector, long offset, int maxNums, long timeout)
throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
SubscriptionData subscriptionData = getSubscriptionData(mq, messageSelector);
return this.pullSyncImpl(mq, subscriptionData, offset, maxNums, false, timeout);
}
private SubscriptionData getSubscriptionData(MessageQueue mq, String subExpression)
throws MQClientException {
if (null == mq) {
throw new MQClientException("mq is null", null);
}
try {
return FilterAPI.buildSubscriptionData(mq.getTopic(), subExpression);
} catch (Exception e) {
throw new MQClientException("parse subscription error", e);
}
}
private SubscriptionData getSubscriptionData(MessageQueue mq, MessageSelector messageSelector)
throws MQClientException {
if (null == mq) {
throw new MQClientException("mq is null", null);
}
try {
return FilterAPI.build(mq.getTopic(),
messageSelector.getExpression(), messageSelector.getExpressionType());
} catch (Exception e) {
throw new MQClientException("parse subscription error", e);
}
}
private PullResult pullSyncImpl(MessageQueue mq, SubscriptionData subscriptionData, long offset, int maxNums, boolean block,
long timeout)
throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
this.isRunning();
if (null == mq) {
throw new MQClientException("mq is null", null);
}
if (offset < 0) {
throw new MQClientException("offset < 0", null);
}
if (maxNums <= 0) {
throw new MQClientException("maxNums <= 0", null);
}
this.subscriptionAutomatically(mq.getTopic());
int sysFlag = PullSysFlag.buildSysFlag(false, block, true, false);
long timeoutMillis = block ? this.defaultMQPullConsumer.getConsumerTimeoutMillisWhenSuspend() : timeout;
boolean isTagType = ExpressionType.isTagType(subscriptionData.getExpressionType());
PullResult pullResult = this.pullAPIWrapper.pullKernelImpl(
mq,
subscriptionData.getSubString(),
subscriptionData.getExpressionType(),
isTagType ? 0L : subscriptionData.getSubVersion(),
offset,
maxNums,
sysFlag,
0,
this.defaultMQPullConsumer.getBrokerSuspendMaxTimeMillis(),
timeoutMillis,
CommunicationMode.SYNC,
null
);
this.pullAPIWrapper.processPullResult(mq, pullResult, subscriptionData);
//If namespace is not null , reset Topic without namespace.
this.resetTopic(pullResult.getMsgFoundList());
if (!this.consumeMessageHookList.isEmpty()) {
ConsumeMessageContext consumeMessageContext = null;
consumeMessageContext = new ConsumeMessageContext();
consumeMessageContext.setNamespace(defaultMQPullConsumer.getNamespace());
consumeMessageContext.setConsumerGroup(this.groupName());
consumeMessageContext.setMq(mq);
consumeMessageContext.setMsgList(pullResult.getMsgFoundList());
consumeMessageContext.setSuccess(false);
this.executeHookBefore(consumeMessageContext);
consumeMessageContext.setStatus(ConsumeConcurrentlyStatus.CONSUME_SUCCESS.toString());
consumeMessageContext.setSuccess(true);
consumeMessageContext.setAccessChannel(defaultMQPullConsumer.getAccessChannel());
this.executeHookAfter(consumeMessageContext);
}
return pullResult;
}
public void resetTopic(List<MessageExt> msgList) {
if (null == msgList || msgList.size() == 0) {
return;
}
//If namespace not null , reset Topic without namespace.
String namespace = this.getDefaultMQPullConsumer().getNamespace();
if (namespace != null) {
for (MessageExt messageExt : msgList) {
messageExt.setTopic(NamespaceUtil.withoutNamespace(messageExt.getTopic(), namespace));
}
}
}
public void subscriptionAutomatically(final String topic) {
if (!this.rebalanceImpl.getSubscriptionInner().containsKey(topic)) {
try {
SubscriptionData subscriptionData = FilterAPI.buildSubscriptionData(topic, SubscriptionData.SUB_ALL);
this.rebalanceImpl.subscriptionInner.putIfAbsent(topic, subscriptionData);
} catch (Exception ignore) {
}
}
}
public void unsubscribe(String topic) {
this.rebalanceImpl.getSubscriptionInner().remove(topic);
}
@Override
public String groupName() {
return this.defaultMQPullConsumer.getConsumerGroup();
}
public void executeHookBefore(final ConsumeMessageContext context) {
if (!this.consumeMessageHookList.isEmpty()) {
for (ConsumeMessageHook hook : this.consumeMessageHookList) {
try {
hook.consumeMessageBefore(context);
} catch (Throwable ignored) {
}
}
}
}
public void executeHookAfter(final ConsumeMessageContext context) {
if (!this.consumeMessageHookList.isEmpty()) {
for (ConsumeMessageHook hook : this.consumeMessageHookList) {
try {
hook.consumeMessageAfter(context);
} catch (Throwable ignored) {
}
}
}
}
@Override
public MessageModel messageModel() {
return this.defaultMQPullConsumer.getMessageModel();
}
@Override
public ConsumeType consumeType() {
return ConsumeType.CONSUME_ACTIVELY;
}
@Override
public ConsumeFromWhere consumeFromWhere() {
return ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET;
}
@Override
public Set<SubscriptionData> subscriptions() {
Set<SubscriptionData> registerSubscriptions = defaultMQPullConsumer.getRegisterSubscriptions();
if (registerSubscriptions != null && !registerSubscriptions.isEmpty()) {
return registerSubscriptions;
}
Set<SubscriptionData> result = new HashSet<>();
Set<String> topics = this.defaultMQPullConsumer.getRegisterTopics();
if (topics != null) {
synchronized (topics) {
for (String t : topics) {
SubscriptionData ms = null;
try {
ms = FilterAPI.buildSubscriptionData(t, SubscriptionData.SUB_ALL);
} catch (Exception e) {
log.error("parse subscription error", e);
}
if (ms != null) {
ms.setSubVersion(0L);
result.add(ms);
}
}
}
}
return result;
}
@Override
public void doRebalance() {
if (!defaultMQPullConsumer.isEnableRebalance()) {
return;
}
if (this.rebalanceImpl != null) {
this.rebalanceImpl.doRebalance(false);
}
}
@Override
public boolean tryRebalance() {
if (!defaultMQPullConsumer.isEnableRebalance()) {
return true;
}
if (this.rebalanceImpl != null) {
return this.rebalanceImpl.doRebalance(false);
}
return false;
}
@Override
public void persistConsumerOffset() {
try {
this.isRunning();
Set<MessageQueue> mqs = new HashSet<>();
Set<MessageQueue> allocateMq = this.rebalanceImpl.getProcessQueueTable().keySet();
mqs.addAll(allocateMq);
this.offsetStore.persistAll(mqs);
} catch (Exception e) {
log.error("group: " + this.defaultMQPullConsumer.getConsumerGroup() + " persistConsumerOffset exception", e);
}
}
@Override
public void updateTopicSubscribeInfo(String topic, Set<MessageQueue> info) {
Map<String, SubscriptionData> subTable = this.rebalanceImpl.getSubscriptionInner();
if (subTable != null) {
if (subTable.containsKey(topic)) {
this.rebalanceImpl.getTopicSubscribeInfoTable().put(topic, info);
}
}
}
@Override
public boolean isSubscribeTopicNeedUpdate(String topic) {
Map<String, SubscriptionData> subTable = this.rebalanceImpl.getSubscriptionInner();
if (subTable != null) {
if (subTable.containsKey(topic)) {
return !this.rebalanceImpl.topicSubscribeInfoTable.containsKey(topic);
}
}
return false;
}
@Override
public boolean isUnitMode() {
return this.defaultMQPullConsumer.isUnitMode();
}
@Override
public ConsumerRunningInfo consumerRunningInfo() {
ConsumerRunningInfo info = new ConsumerRunningInfo();
Properties prop = MixAll.object2Properties(this.defaultMQPullConsumer);
prop.put(ConsumerRunningInfo.PROP_CONSUMER_START_TIMESTAMP, String.valueOf(this.consumerStartTimestamp));
info.setProperties(prop);
info.getSubscriptionSet().addAll(this.subscriptions());
return info;
}
public void pull(MessageQueue mq, String subExpression, long offset, int maxNums, PullCallback pullCallback)
throws MQClientException, RemotingException, InterruptedException {
pull(mq, subExpression, offset, maxNums, pullCallback, this.defaultMQPullConsumer.getConsumerPullTimeoutMillis());
}
public void pull(MessageQueue mq, String subExpression, long offset, int maxNums, PullCallback pullCallback,
long timeout)
throws MQClientException, RemotingException, InterruptedException {
SubscriptionData subscriptionData = getSubscriptionData(mq, subExpression);
this.pullAsyncImpl(mq, subscriptionData, offset, maxNums, pullCallback, false, timeout);
}
public void pull(MessageQueue mq, String subExpression, long offset, int maxNums, int maxSize, PullCallback pullCallback,
long timeout)
throws MQClientException, RemotingException, InterruptedException {
SubscriptionData subscriptionData = getSubscriptionData(mq, subExpression);
this.pullAsyncImpl(mq, subscriptionData, offset, maxNums, maxSize, pullCallback, false, timeout);
}
public void pull(MessageQueue mq, MessageSelector messageSelector, long offset, int maxNums,
PullCallback pullCallback)
throws MQClientException, RemotingException, InterruptedException {
pull(mq, messageSelector, offset, maxNums, pullCallback, this.defaultMQPullConsumer.getConsumerPullTimeoutMillis());
}
public void pull(MessageQueue mq, MessageSelector messageSelector, long offset, int maxNums,
PullCallback pullCallback,
long timeout)
throws MQClientException, RemotingException, InterruptedException {
SubscriptionData subscriptionData = getSubscriptionData(mq, messageSelector);
this.pullAsyncImpl(mq, subscriptionData, offset, maxNums, pullCallback, false, timeout);
}
/**
 * Core asynchronous pull implementation shared by every public async pull()
 * variant.
 *
 * Validates arguments, auto-registers the topic subscription, then issues an
 * async kernel pull. The raw broker result is post-processed (client-side
 * filtering against the subscription data) before the user's callback fires.
 *
 * @param mq               target queue; must not be null
 * @param subscriptionData resolved subscription (tag or other expression type)
 * @param offset           starting consume offset; must be &gt;= 0
 * @param maxNums          max message count per pull; must be &gt; 0
 * @param maxSizeInBytes   max total message size per pull; must be &gt; 0
 * @param pullCallback     user callback; must not be null
 * @param block            if true, long-polling: the broker-suspend timeout is
 *                         used as the RPC deadline instead of {@code timeout}
 * @param timeout          RPC timeout in ms when not blocking
 * @throws MQClientException on invalid arguments, when the consumer is not
 *                           running, or if the broker reports an error
 */
private void pullAsyncImpl(
final MessageQueue mq,
final SubscriptionData subscriptionData,
final long offset,
final int maxNums,
final int maxSizeInBytes,
final PullCallback pullCallback,
final boolean block,
final long timeout) throws MQClientException, RemotingException, InterruptedException {
// Reject calls unless the service state is RUNNING.
this.isRunning();
if (null == mq) {
throw new MQClientException("mq is null", null);
}
if (offset < 0) {
throw new MQClientException("offset < 0", null);
}
if (maxNums <= 0) {
throw new MQClientException("maxNums <= 0", null);
}
if (maxSizeInBytes <= 0) {
throw new MQClientException("maxSizeInBytes <= 0", null);
}
if (null == pullCallback) {
throw new MQClientException("pullCallback is null", null);
}
// Ensure the topic is present in the local subscription table before pulling.
this.subscriptionAutomatically(mq.getTopic());
try {
// Flags: commitOffset=false, suspend=block, subscription=true, classFilter=false.
int sysFlag = PullSysFlag.buildSysFlag(false, block, true, false);
// Long polling waits up to the configured suspend timeout; otherwise use the caller's timeout.
long timeoutMillis = block ? this.defaultMQPullConsumer.getConsumerTimeoutMillisWhenSuspend() : timeout;
boolean isTagType = ExpressionType.isTagType(subscriptionData.getExpressionType());
this.pullAPIWrapper.pullKernelImpl(
mq,
subscriptionData.getSubString(),
subscriptionData.getExpressionType(),
// Tag subscriptions send version 0; other expression types send the real sub version.
isTagType ? 0L : subscriptionData.getSubVersion(),
offset,
maxNums,
maxSizeInBytes,
sysFlag,
0,
this.defaultMQPullConsumer.getBrokerSuspendMaxTimeMillis(),
timeoutMillis,
CommunicationMode.ASYNC,
new PullCallback() {
@Override
public void onSuccess(PullResult pullResult) {
// Apply client-side filtering/decoding before exposing the result to the user.
PullResult userPullResult = DefaultMQPullConsumerImpl.this.pullAPIWrapper.processPullResult(mq, pullResult, subscriptionData);
// resetTopic: presumably restores/strips the namespace on returned messages - defined elsewhere in this class.
resetTopic(userPullResult.getMsgFoundList());
pullCallback.onSuccess(userPullResult);
}
@Override
public void onException(Throwable e) {
pullCallback.onException(e);
}
});
} catch (MQBrokerException e) {
// Broker errors surfacing synchronously during dispatch are rewrapped as client exceptions.
throw new MQClientException("pullAsync unknown exception", e);
}
}
/**
 * Async pull without a byte-size cap: delegates to the full implementation
 * using {@link Integer#MAX_VALUE} as the size limit.
 *
 * @param mq               target queue
 * @param subscriptionData resolved subscription data
 * @param offset           starting consume offset
 * @param maxNums          max message count per pull
 * @param pullCallback     user callback
 * @param block            whether to long-poll when no message is found
 * @param timeout          RPC timeout in ms when not blocking
 */
private void pullAsyncImpl(
    final MessageQueue mq,
    final SubscriptionData subscriptionData,
    final long offset,
    final int maxNums,
    final PullCallback pullCallback,
    final boolean block,
    final long timeout) throws MQClientException, RemotingException, InterruptedException {
    this.pullAsyncImpl(mq, subscriptionData, offset, maxNums, Integer.MAX_VALUE, pullCallback, block, timeout);
}
/**
 * Synchronously pulls messages, long-polling on the broker when the queue has
 * no new messages, using the configured default pull timeout.
 *
 * @param mq            queue to pull from
 * @param subExpression tag-based subscription filter expression
 * @param offset        consume-queue offset to start at
 * @param maxNums       maximum number of messages per pull
 * @return the pull result returned by the broker
 */
public PullResult pullBlockIfNotFound(MessageQueue mq, String subExpression, long offset, int maxNums)
    throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
    final SubscriptionData subscriptionData = getSubscriptionData(mq, subExpression);
    final long timeout = this.getDefaultMQPullConsumer().getConsumerPullTimeoutMillis();
    // block=true enables broker-side long polling.
    return this.pullSyncImpl(mq, subscriptionData, offset, maxNums, true, timeout);
}
/**
 * @return the user-facing consumer facade this implementation backs
 */
public DefaultMQPullConsumer getDefaultMQPullConsumer() {
    return this.defaultMQPullConsumer;
}
/**
 * Asynchronously pulls messages, long-polling on the broker when the queue
 * has no new messages, using the configured default pull timeout.
 *
 * @param mq            queue to pull from
 * @param subExpression tag-based subscription filter expression
 * @param offset        consume-queue offset to start at
 * @param maxNums       maximum number of messages per pull
 * @param pullCallback  invoked with the result or the failure
 */
public void pullBlockIfNotFound(MessageQueue mq, String subExpression, long offset, int maxNums,
    PullCallback pullCallback)
    throws MQClientException, RemotingException, InterruptedException {
    final SubscriptionData subscriptionData = getSubscriptionData(mq, subExpression);
    final long timeout = this.getDefaultMQPullConsumer().getConsumerPullTimeoutMillis();
    // block=true enables broker-side long polling.
    this.pullAsyncImpl(mq, subscriptionData, offset, maxNums, pullCallback, true, timeout);
}
/**
 * Asynchronously pulls messages with a {@link MessageSelector} filter,
 * long-polling on the broker when the queue has no new messages.
 *
 * @param mq              queue to pull from
 * @param messageSelector selector describing the filter type and expression
 * @param offset          consume-queue offset to start at
 * @param maxNums         maximum number of messages per pull
 * @param pullCallback    invoked with the result or the failure
 */
public void pullBlockIfNotFoundWithMessageSelector(MessageQueue mq, MessageSelector messageSelector, long offset, int maxNums,
    PullCallback pullCallback)
    throws MQClientException, RemotingException, InterruptedException {
    final SubscriptionData subscriptionData = getSubscriptionData(mq, messageSelector);
    final long timeout = this.getDefaultMQPullConsumer().getConsumerPullTimeoutMillis();
    // block=true enables broker-side long polling.
    this.pullAsyncImpl(mq, subscriptionData, offset, maxNums, pullCallback, true, timeout);
}
/**
 * Synchronously pulls messages with a {@link MessageSelector} filter,
 * long-polling on the broker when the queue has no new messages.
 *
 * @param mq              queue to pull from
 * @param messageSelector selector describing the filter type and expression
 * @param offset          consume-queue offset to start at
 * @param maxNums         maximum number of messages per pull
 * @return the pull result returned by the broker
 */
public PullResult pullBlockIfNotFoundWithMessageSelector(MessageQueue mq, MessageSelector messageSelector, long offset, int maxNums)
    throws MQClientException, RemotingException, InterruptedException, MQBrokerException {
    final SubscriptionData subscriptionData = getSubscriptionData(mq, messageSelector);
    final long timeout = this.getDefaultMQPullConsumer().getConsumerPullTimeoutMillis();
    // block=true enables broker-side long polling.
    return this.pullSyncImpl(mq, subscriptionData, offset, maxNums, true, timeout);
}
/**
 * Queries messages by key within a time window, delegating to the shared
 * admin implementation.
 *
 * @param topic  topic to search
 * @param key    message key to match
 * @param maxNum maximum number of messages to return
 * @param begin  start of the time window (ms since epoch)
 * @param end    end of the time window (ms since epoch)
 * @return the matching messages
 */
public QueryResult queryMessage(String topic, String key, int maxNum, long begin, long end)
    throws MQClientException, InterruptedException {
    // Only valid on a RUNNING consumer.
    this.isRunning();
    final MQAdminImpl admin = this.mQClientFactory.getMQAdminImpl();
    return admin.queryMessage(topic, key, maxNum, begin, end);
}
/**
 * Looks up a single message by its unique key, delegating to the shared
 * admin implementation.
 *
 * @param topic   topic the message belongs to
 * @param uniqKey the message's unique key
 * @return the matching message
 */
public MessageExt queryMessageByUniqKey(String topic, String uniqKey)
    throws MQClientException, InterruptedException {
    // Only valid on a RUNNING consumer.
    this.isRunning();
    final MQAdminImpl admin = this.mQClientFactory.getMQAdminImpl();
    return admin.queryMessageByUniqKey(topic, uniqKey);
}
/**
 * Resolves the queue offset closest to the given timestamp, delegating to the
 * shared admin implementation.
 *
 * @param mq        queue to search
 * @param timestamp target time (ms since epoch)
 * @return the offset nearest to {@code timestamp}
 */
public long searchOffset(MessageQueue mq, long timestamp) throws MQClientException {
    // Only valid on a RUNNING consumer.
    this.isRunning();
    final MQAdminImpl admin = this.mQClientFactory.getMQAdminImpl();
    return admin.searchOffset(mq, timestamp);
}
/**
 * Sends a message back to the broker for delayed re-consumption, using this
 * consumer's own group.
 *
 * @param msg        message to send back
 * @param delayLevel broker delay level for the retry
 * @param brokerName broker to send to; may be null
 */
public void sendMessageBack(MessageExt msg, int delayLevel, final String brokerName)
    throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
    final String group = this.defaultMQPullConsumer.getConsumerGroup();
    this.sendMessageBack(msg, delayLevel, brokerName, group);
}
/**
 * Pushes a consume offset directly to the broker via the offset store.
 *
 * @param mq       queue whose offset is being reported
 * @param offset   new consume offset
 * @param isOneway if true, fire-and-forget; no broker acknowledgement awaited
 */
public void updateConsumeOffsetToBroker(MessageQueue mq, long offset, boolean isOneway) throws RemotingException,
    MQBrokerException, InterruptedException, MQClientException {
    this.offsetStore.updateConsumeOffsetToBroker(mq, offset, isOneway);
}
/**
 * Sends a message back to the broker for delayed re-consumption under the
 * given consumer group. If the direct send-back RPC fails for any reason, the
 * message is re-published to the group's retry topic via the shared producer
 * as a fallback. In all cases the message's topic has its namespace prefix
 * stripped before returning.
 *
 * @param msg           message to send back
 * @param delayLevel    broker delay level for the retry
 * @param brokerName    broker to target; null means resolve from the message's store host
 * @param consumerGroup group the retry is attributed to; blank falls back to this consumer's group
 * @deprecated retained for compatibility; see the non-group overload
 */
@Deprecated
public void sendMessageBack(MessageExt msg, int delayLevel, final String brokerName, String consumerGroup)
    throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
try {
String destBrokerName = brokerName;
// Logical-queue mock broker names must be remapped to a real broker before addressing.
if (destBrokerName != null && destBrokerName.startsWith(MixAll.LOGICAL_QUEUE_MOCK_BROKER_PREFIX)) {
destBrokerName = this.mQClientFactory.getBrokerNameFromMessageQueue(this.defaultMQPullConsumer.queueWithNamespace(new MessageQueue(msg.getTopic(), msg.getBrokerName(), msg.getQueueId())));
}
// Prefer the resolved broker's publish address; otherwise fall back to the message's store host.
String brokerAddr = (null != destBrokerName) ? this.mQClientFactory.findBrokerAddressInPublish(destBrokerName)
: RemotingHelper.parseSocketAddressAddr(msg.getStoreHost());
if (UtilAll.isBlank(brokerAddr)) {
throw new MQClientException("Broker[" + destBrokerName + "] master node does not exist", null);
}
if (UtilAll.isBlank(consumerGroup)) {
consumerGroup = this.defaultMQPullConsumer.getConsumerGroup();
}
// NOTE(review): the original (possibly mock) brokerName is passed here while the
// address was resolved from destBrokerName - confirm this asymmetry is intentional.
this.mQClientFactory.getMQClientAPIImpl().consumerSendMessageBack(brokerAddr, brokerName, msg, consumerGroup,
delayLevel, 3000, this.defaultMQPullConsumer.getMaxReconsumeTimes());
} catch (Exception e) {
log.error("sendMessageBack Exception, " + this.defaultMQPullConsumer.getConsumerGroup(), e);
// Fallback: publish a copy to the group's retry topic, preserving the original
// message id, flags, properties and bumping the reconsume counter.
Message newMsg = new Message(MixAll.getRetryTopic(this.defaultMQPullConsumer.getConsumerGroup()), msg.getBody());
String originMsgId = MessageAccessor.getOriginMessageId(msg);
MessageAccessor.setOriginMessageId(newMsg, UtilAll.isBlank(originMsgId) ? msg.getMsgId() : originMsgId);
newMsg.setFlag(msg.getFlag());
MessageAccessor.setProperties(newMsg, msg.getProperties());
MessageAccessor.putProperty(newMsg, MessageConst.PROPERTY_RETRY_TOPIC, msg.getTopic());
MessageAccessor.setReconsumeTime(newMsg, String.valueOf(msg.getReconsumeTimes() + 1));
MessageAccessor.setMaxReconsumeTimes(newMsg, String.valueOf(this.defaultMQPullConsumer.getMaxReconsumeTimes()));
// Delay grows with the number of reconsume attempts (base level 3).
newMsg.setDelayTimeLevel(3 + msg.getReconsumeTimes());
this.mQClientFactory.getDefaultMQProducer().send(newMsg);
} finally {
// Always hand the message back to the caller without the namespace prefix.
msg.setTopic(NamespaceUtil.withoutNamespace(msg.getTopic(), this.defaultMQPullConsumer.getNamespace()));
}
}
/**
 * Shuts this consumer down at most once. Only a RUNNING consumer performs
 * real teardown (offset persistence, client-factory unregistration and
 * shutdown); every other service state is a no-op.
 */
public synchronized void shutdown() {
    if (this.serviceState != ServiceState.RUNNING) {
        // CREATE_JUST, SHUTDOWN_ALREADY or any other state: nothing to release.
        return;
    }
    // Persist offsets before tearing down so progress is not lost.
    this.persistConsumerOffset();
    final String group = this.defaultMQPullConsumer.getConsumerGroup();
    this.mQClientFactory.unregisterConsumer(group);
    this.mQClientFactory.shutdown();
    log.info("the consumer [{}] shutdown OK", group);
    this.serviceState = ServiceState.SHUTDOWN_ALREADY;
}
/**
 * Starts the consumer. Start is allowed exactly once: the service-state
 * machine moves CREATE_JUST -&gt; START_FAILED -&gt; RUNNING, and any attempt to
 * start from RUNNING, START_FAILED or SHUTDOWN_ALREADY throws.
 *
 * Startup steps: validate configuration, seed subscriptions, obtain the
 * shared client instance, wire the rebalancer and pull API wrapper, choose
 * and load an offset store, register the consumer group (must be unique per
 * client instance), then start the client factory.
 *
 * @throws MQClientException on invalid configuration, duplicate group
 *                           registration, or starting from a non-initial state
 */
public synchronized void start() throws MQClientException {
switch (this.serviceState) {
case CREATE_JUST:
// Pessimistically mark as failed; flipped to RUNNING only after every step succeeds.
this.serviceState = ServiceState.START_FAILED;
this.checkConfig();
// Seed the rebalancer's subscription table from the registered topics.
this.copySubscription();
// Clustering consumers get a PID-based instance name to avoid collisions on one host.
if (this.defaultMQPullConsumer.getMessageModel() == MessageModel.CLUSTERING) {
this.defaultMQPullConsumer.changeInstanceNameToPID();
}
this.mQClientFactory = MQClientManager.getInstance().getOrCreateMQClientInstance(this.defaultMQPullConsumer, this.rpcHook);
this.rebalanceImpl.setConsumerGroup(this.defaultMQPullConsumer.getConsumerGroup());
this.rebalanceImpl.setMessageModel(this.defaultMQPullConsumer.getMessageModel());
this.rebalanceImpl.setAllocateMessageQueueStrategy(this.defaultMQPullConsumer.getAllocateMessageQueueStrategy());
this.rebalanceImpl.setmQClientFactory(this.mQClientFactory);
this.pullAPIWrapper = new PullAPIWrapper(
mQClientFactory,
this.defaultMQPullConsumer.getConsumerGroup(), isUnitMode());
this.pullAPIWrapper.registerFilterMessageHook(filterMessageHookList);
// A user-supplied offset store wins; otherwise pick by message model:
// BROADCASTING stores offsets locally, CLUSTERING stores them on the broker.
if (this.defaultMQPullConsumer.getOffsetStore() != null) {
this.offsetStore = this.defaultMQPullConsumer.getOffsetStore();
} else {
switch (this.defaultMQPullConsumer.getMessageModel()) {
case BROADCASTING:
this.offsetStore = new LocalFileOffsetStore(this.mQClientFactory, this.defaultMQPullConsumer.getConsumerGroup());
break;
case CLUSTERING:
this.offsetStore = new RemoteBrokerOffsetStore(this.mQClientFactory, this.defaultMQPullConsumer.getConsumerGroup());
break;
default:
break;
}
this.defaultMQPullConsumer.setOffsetStore(this.offsetStore);
}
this.offsetStore.load();
// Each group may be registered once per client instance.
boolean registerOK = mQClientFactory.registerConsumer(this.defaultMQPullConsumer.getConsumerGroup(), this);
if (!registerOK) {
// Roll back so a corrected configuration can retry start().
this.serviceState = ServiceState.CREATE_JUST;
throw new MQClientException("The consumer group[" + this.defaultMQPullConsumer.getConsumerGroup()
+ "] has been created before, specify another name please." + FAQUrl.suggestTodo(FAQUrl.GROUP_NAME_DUPLICATE_URL),
null);
}
mQClientFactory.start();
log.info("the consumer [{}] start OK", this.defaultMQPullConsumer.getConsumerGroup());
this.serviceState = ServiceState.RUNNING;
break;
case RUNNING:
case START_FAILED:
case SHUTDOWN_ALREADY:
throw new MQClientException("The PullConsumer service state not OK, maybe started once, "
+ this.serviceState
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_SERVICE_NOT_OK),
null);
default:
break;
}
}
/**
 * Validates the user-supplied consumer configuration before startup.
 *
 * @throws MQClientException if the consumer group, message model or queue
 *                           allocation strategy is missing/invalid, or the
 *                           long-polling timeouts are inconsistent
 */
private void checkConfig() throws MQClientException {
// Group name must satisfy the shared naming rules.
Validators.checkGroup(this.defaultMQPullConsumer.getConsumerGroup());
// consumerGroup must be set.
// NOTE(review): Validators.checkGroup above presumably rejects null already,
// which would make this branch unreachable - confirm before relying on it.
if (null == this.defaultMQPullConsumer.getConsumerGroup()) {
throw new MQClientException(
"consumerGroup is null"
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
null);
}
// The reserved default group name may not be used directly.
if (this.defaultMQPullConsumer.getConsumerGroup().equals(MixAll.DEFAULT_CONSUMER_GROUP)) {
throw new MQClientException(
"consumerGroup can not equal "
+ MixAll.DEFAULT_CONSUMER_GROUP
+ ", please specify another one."
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
null);
}
// messageModel must be set.
if (null == this.defaultMQPullConsumer.getMessageModel()) {
throw new MQClientException(
"messageModel is null"
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
null);
}
// allocateMessageQueueStrategy must be set.
if (null == this.defaultMQPullConsumer.getAllocateMessageQueueStrategy()) {
throw new MQClientException(
"allocateMessageQueueStrategy is null"
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
null);
}
// Long polling: the client-side suspend timeout must cover the broker-side
// suspend window, otherwise the RPC would time out before the broker replies.
if (this.defaultMQPullConsumer.getConsumerTimeoutMillisWhenSuspend() < this.defaultMQPullConsumer.getBrokerSuspendMaxTimeMillis()) {
throw new MQClientException(
"Long polling mode, the consumer consumerTimeoutMillisWhenSuspend must greater than brokerSuspendMaxTimeMillis"
+ FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
null);
}
}
/**
 * Seeds the rebalancer's subscription table from the consumer's registered
 * topics, subscribing each one with the match-all expression.
 *
 * @throws MQClientException if building any subscription fails
 */
private void copySubscription() throws MQClientException {
    try {
        Set<String> registerTopics = this.defaultMQPullConsumer.getRegisterTopics();
        if (null == registerTopics) {
            return;
        }
        for (String topic : registerTopics) {
            // SUB_ALL: no tag filtering for pull-mode subscriptions.
            this.rebalanceImpl.getSubscriptionInner().put(
                topic, FilterAPI.buildSubscriptionData(topic, SubscriptionData.SUB_ALL));
        }
    } catch (Exception e) {
        throw new MQClientException("subscription exception", e);
    }
}
/**
 * Records a new local consume offset for the queue (not flushed to the
 * broker immediately).
 *
 * @param mq     queue whose offset is updated
 * @param offset new consume offset
 */
public void updateConsumeOffset(MessageQueue mq, long offset) throws MQClientException {
    // Only valid on a RUNNING consumer.
    this.isRunning();
    // increaseOnly=false: the offset may move backwards as well as forwards.
    this.offsetStore.updateOffset(mq, offset, false);
}
/**
 * Fetches a single message by id, delegating to the shared admin
 * implementation.
 *
 * @param topic topic the message belongs to
 * @param msgId message id to look up
 * @return the message
 */
public MessageExt viewMessage(String topic, String msgId)
    throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
    // Only valid on a RUNNING consumer.
    this.isRunning();
    final MQAdminImpl admin = this.mQClientFactory.getMQAdminImpl();
    return admin.viewMessage(topic, msgId);
}
/**
 * Registers a hook that can filter messages after they are pulled.
 *
 * @param hook the hook to add
 */
public void registerFilterMessageHook(final FilterMessageHook hook) {
    this.filterMessageHookList.add(hook);
    log.info("register FilterMessageHook Hook, {}", hook.hookName());
}
/**
 * @return the offset store backing this consumer
 */
public OffsetStore getOffsetStore() {
    return this.offsetStore;
}
/**
 * Replaces the offset store backing this consumer.
 *
 * @param offsetStore the store to use
 */
public void setOffsetStore(OffsetStore offsetStore) {
    this.offsetStore = offsetStore;
}
/**
 * @return the low-level pull API wrapper used by this consumer
 */
public PullAPIWrapper getPullAPIWrapper() {
    return this.pullAPIWrapper;
}
/**
 * Replaces the low-level pull API wrapper.
 *
 * @param pullAPIWrapper the wrapper to use
 */
public void setPullAPIWrapper(PullAPIWrapper pullAPIWrapper) {
    this.pullAPIWrapper = pullAPIWrapper;
}
/**
 * @return the current lifecycle state of this consumer
 */
public ServiceState getServiceState() {
    return this.serviceState;
}
/**
 * Forces the lifecycle state. Do not use: bypassing the start/shutdown state
 * machine can leave the consumer inconsistent.
 *
 * @param serviceState state to force
 * @deprecated scheduled for removal; the state is managed internally
 */
@Deprecated
public void setServiceState(ServiceState serviceState) {
    this.serviceState = serviceState;
}
/**
 * @return the timestamp recorded when this consumer instance started
 */
public long getConsumerStartTimestamp() {
    return this.consumerStartTimestamp;
}
/**
 * @return the rebalance implementation that assigns queues to this consumer
 */
public RebalanceImpl getRebalanceImpl() {
    return this.rebalanceImpl;
}
}
| DefaultMQPullConsumerImpl |
java | apache__camel | components/camel-huawei/camel-huaweicloud-dms/src/main/java/org/apache/camel/component/huaweicloud/dms/DmsClient.java | {
"start": 1772,
"end": 2823
} | class ____ {
protected HcClient hcClient;
public DmsClient(HcClient hcClient) {
this.hcClient = hcClient;
}
public static ClientBuilder<DmsClient> newBuilder() {
return new ClientBuilder<>(DmsClient::new);
}
public CreateInstanceResponse createInstance(CreateInstanceRequest request) {
return hcClient.syncInvokeHttp(request, DmsMeta.CREATE_INSTANCE);
}
public DeleteInstanceResponse deleteInstance(DeleteInstanceRequest request) {
return hcClient.syncInvokeHttp(request, DmsMeta.DELETE_INSTANCE);
}
public ListInstancesResponse listInstances(ListInstancesRequest request) {
return hcClient.syncInvokeHttp(request, DmsMeta.LIST_INSTANCES);
}
public DmsInstance queryInstance(QueryInstanceRequest request) {
return hcClient.syncInvokeHttp(request, DmsMeta.QUERY_INSTANCE);
}
public UpdateInstanceResponse updateInstance(UpdateInstanceRequest request) {
return hcClient.syncInvokeHttp(request, DmsMeta.UPDATE_INSTANCE);
}
}
| DmsClient |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/logging/log4j2/Log4J2RuntimeHints.java | {
"start": 1020,
"end": 2110
} | class ____ implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
if (ClassUtils.isPresent(Log4J2LoggingSystem.Factory.LOG4J_CORE_CONTEXT_FACTORY, classLoader)) {
registerLog4j2Hints(hints, classLoader);
}
}
private void registerLog4j2Hints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
hints.reflection().registerTypeIfPresent(classLoader, Log4J2LoggingSystem.Factory.LOG4J_CORE_CONTEXT_FACTORY);
// Register default Log4j2 configuration files
hints.resources().registerPattern("org/springframework/boot/logging/log4j2/log4j2.xml");
hints.resources().registerPattern("org/springframework/boot/logging/log4j2/log4j2-file.xml");
hints.resources().registerPattern("log4j2.springboot");
// Declares the types that Log4j2LoggingSystem checks for existence reflectively.
hints.reflection().registerTypeIfPresent(classLoader, Log4J2LoggingSystem.LOG4J_BRIDGE_HANDLER);
hints.reflection().registerTypeIfPresent(classLoader, Log4J2LoggingSystem.LOG4J_LOG_MANAGER);
}
}
| Log4J2RuntimeHints |
java | apache__camel | components/camel-webhook/src/test/java/org/apache/camel/component/webhook/WebhookBasePathTest.java | {
"start": 1289,
"end": 3859
} | class ____ extends WebhookTestBase {
@Test
public void testComponentPath() {
String result = template.requestBody("netty-http:http://localhost:" + port + "/base/uri0", "", String.class);
assertEquals("msg: webhook", result);
}
@Test
public void testUriPath() {
String result = template.requestBody("netty-http:http://localhost:" + port + "/base/uri", "", String.class);
assertEquals("uri: webhook", result);
}
@Test
public void testAutoPath() {
String result = template.requestBody("netty-http:http://localhost:" + port + "/base"
+ WebhookConfiguration.computeDefaultPath("wb-delegate://auto"),
"", String.class);
assertEquals("auto: webhook", result);
}
@Test
public void testRootPathError() {
assertThrows(CamelExecutionException.class,
() -> template.requestBody("netty-http:http://localhost:" + port, "", String.class));
}
@Test
public void testRootBasePathError() {
assertThrows(CamelExecutionException.class,
() -> template.requestBody("netty-http:http://localhost:" + port + "/base/", "", String.class));
}
@Override
public CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
WebhookComponent comp = (WebhookComponent) context.getComponent("webhook");
comp.getConfiguration().setWebhookBasePath("/base");
return context;
}
@Override
protected void bindToRegistry(Registry registry) {
registry.bind("wb-delegate-component", new TestComponent(endpoint -> {
endpoint.setWebhookHandler(proc -> ex -> {
ex.getMessage().setBody("webhook");
proc.process(ex);
});
}));
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
restConfiguration()
.host("0.0.0.0")
.port(port);
from("webhook:wb-delegate://xx?webhookPath=uri0")
.transform(body().prepend("msg: "));
from("webhook:wb-delegate://xx?webhookPath=/uri")
.transform(body().prepend("uri: "));
from("webhook:wb-delegate://auto")
.transform(body().prepend("auto: "));
}
};
}
}
| WebhookBasePathTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JsonPatchEndpointBuilderFactory.java | {
"start": 6171,
"end": 8672
} | interface ____
extends
EndpointProducerBuilder {
default JsonPatchEndpointBuilder basic() {
return (JsonPatchEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedJsonPatchEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedJsonPatchEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
public | AdvancedJsonPatchEndpointBuilder |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/bean/types/GenericBeanTypesTest.java | {
"start": 689,
"end": 6296
} | class ____ {
@RegisterExtension
ArcTestContainer container = new ArcTestContainer(MyBean.class, Producer.class);
@Test
public void recursiveGeneric() {
InjectableBean<Object> bean = Arc.container().instance("myBean").getBean();
Set<Type> types = bean.getTypes();
assertEquals(3, types.size());
assertTrue(types.contains(Object.class));
for (Type type : types) {
if (type instanceof ParameterizedType) {
Type genericClass = ((ParameterizedType) type).getRawType();
assertTrue(MyBean.class.equals(genericClass) || Iterable.class.equals(genericClass));
assertEquals(1, ((ParameterizedType) type).getActualTypeArguments().length);
Type typeArg = ((ParameterizedType) type).getActualTypeArguments()[0];
assertTrue(typeArg instanceof TypeVariable);
assertEquals("T", ((TypeVariable<?>) typeArg).getName());
assertEquals(1, ((TypeVariable<?>) typeArg).getBounds().length);
Type bound = ((TypeVariable<?>) typeArg).getBounds()[0];
assertTrue(bound instanceof ParameterizedType);
assertEquals(Comparable.class, ((ParameterizedType) bound).getRawType());
assertEquals(1, ((ParameterizedType) bound).getActualTypeArguments().length);
Type boundTypeArg = ((ParameterizedType) bound).getActualTypeArguments()[0];
assertTrue(boundTypeArg instanceof TypeVariable);
assertEquals("T", ((TypeVariable<?>) boundTypeArg).getName());
// recursive
}
}
}
@Test
public void duplicateRecursiveGeneric() {
InjectableBean<Object> bean = Arc.container().instance("foobar").getBean();
Set<Type> types = bean.getTypes();
assertEquals(2, types.size());
assertTrue(types.contains(Object.class));
for (Type type : types) {
if (type instanceof ParameterizedType) {
Type genericClass = ((ParameterizedType) type).getRawType();
assertEquals(FooBar.class, genericClass);
assertEquals(2, ((ParameterizedType) type).getActualTypeArguments().length);
Type typeArg = ((ParameterizedType) type).getActualTypeArguments()[0];
assertTrue(typeArg instanceof TypeVariable);
assertEquals("T", ((TypeVariable<?>) typeArg).getName());
assertEquals(1, ((TypeVariable<?>) typeArg).getBounds().length);
Type bound = ((TypeVariable<?>) typeArg).getBounds()[0];
assertTrue(bound instanceof ParameterizedType);
assertEquals(FooBar.class, ((ParameterizedType) bound).getRawType());
typeArg = ((ParameterizedType) type).getActualTypeArguments()[1];
assertTrue(typeArg instanceof TypeVariable);
assertEquals("U", ((TypeVariable<?>) typeArg).getName());
assertEquals(1, ((TypeVariable<?>) typeArg).getBounds().length);
bound = ((TypeVariable<?>) typeArg).getBounds()[0];
assertTrue(bound instanceof ParameterizedType);
assertEquals(Comparable.class, ((ParameterizedType) bound).getRawType());
}
}
}
@Test
public void mutuallyRecursiveGeneric() {
InjectableBean<Object> bean = Arc.container().instance("graph").getBean();
Set<Type> types = bean.getTypes();
System.out.println(types);
assertEquals(2, types.size());
assertTrue(types.contains(Object.class));
for (Type type : types) {
if (type instanceof ParameterizedType) {
Type genericClass = ((ParameterizedType) type).getRawType();
assertEquals(Graph.class, genericClass);
assertEquals(3, ((ParameterizedType) type).getActualTypeArguments().length);
Type typeArg = ((ParameterizedType) type).getActualTypeArguments()[0];
assertTrue(typeArg instanceof TypeVariable);
assertEquals("G", ((TypeVariable<?>) typeArg).getName());
assertEquals(1, ((TypeVariable<?>) typeArg).getBounds().length);
Type bound = ((TypeVariable<?>) typeArg).getBounds()[0];
assertTrue(bound instanceof ParameterizedType);
assertEquals(Graph.class, ((ParameterizedType) bound).getRawType());
typeArg = ((ParameterizedType) type).getActualTypeArguments()[1];
assertTrue(typeArg instanceof TypeVariable);
assertEquals("E", ((TypeVariable<?>) typeArg).getName());
assertEquals(1, ((TypeVariable<?>) typeArg).getBounds().length);
bound = ((TypeVariable<?>) typeArg).getBounds()[0];
assertTrue(bound instanceof ParameterizedType);
assertEquals(Edge.class, ((ParameterizedType) bound).getRawType());
typeArg = ((ParameterizedType) type).getActualTypeArguments()[2];
assertTrue(typeArg instanceof TypeVariable);
assertEquals("N", ((TypeVariable<?>) typeArg).getName());
assertEquals(1, ((TypeVariable<?>) typeArg).getBounds().length);
bound = ((TypeVariable<?>) typeArg).getBounds()[0];
assertTrue(bound instanceof ParameterizedType);
assertEquals(Node.class, ((ParameterizedType) bound).getRawType());
}
}
}
@Dependent
@Named("myBean")
static | GenericBeanTypesTest |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/impl/Log4jLogEventNanoTimeTest.java | {
"start": 1809,
"end": 3965
} | class ____ {
@BeforeAll
static void beforeClass() {
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, "NanoTimeToFileTest.xml");
}
@AfterAll
static void afterClass() {
System.setProperty(Constants.LOG4J_CONTEXT_SELECTOR, Strings.EMPTY);
}
@Test
void testLog4jLogEventUsesNanoTimeClock() throws Exception {
final File file = new File("target", "NanoTimeToFileTest.log");
// System.out.println(f.getAbsolutePath());
file.delete();
final Logger log = LogManager.getLogger("com.foo.Bar");
final long before = System.nanoTime();
log.info("Use actual System.nanoTime()");
assertInstanceOf(SystemNanoClock.class, Log4jLogEvent.getNanoClock(), "using SystemNanoClock");
final long DUMMYNANOTIME = 123;
Log4jLogEvent.setNanoClock(new DummyNanoClock(DUMMYNANOTIME));
log.info("Use dummy nano clock");
assertInstanceOf(DummyNanoClock.class, Log4jLogEvent.getNanoClock(), "using SystemNanoClock");
CoreLoggerContexts.stopLoggerContext(file); // stop async thread
String line1;
String line2;
try (final BufferedReader reader = new BufferedReader(new FileReader(file))) {
line1 = reader.readLine();
line2 = reader.readLine();
// System.out.println(line1);
// System.out.println(line2);
}
file.delete();
assertNotNull(line1, "line1");
assertNotNull(line2, "line2");
final String[] line1Parts = line1.split(" AND ");
assertEquals("Use actual System.nanoTime()", line1Parts[2]);
assertEquals(line1Parts[0], line1Parts[1]);
final long loggedNanoTime = Long.parseLong(line1Parts[0]);
assertTrue(loggedNanoTime - before < TimeUnit.SECONDS.toNanos(1), "used system nano time");
final String[] line2Parts = line2.split(" AND ");
assertEquals("Use dummy nano clock", line2Parts[2]);
assertEquals(String.valueOf(DUMMYNANOTIME), line2Parts[0]);
assertEquals(String.valueOf(DUMMYNANOTIME), line2Parts[1]);
}
}
| Log4jLogEventNanoTimeTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/secured/ClientCredentialsRequestFormatter.java | {
"start": 1336,
"end": 3681
} | class ____ implements HttpRequestFormatter {
public static final String GRANT_TYPE = "client_credentials";
private final String clientId;
private final String clientSecret;
private final String scope;
public ClientCredentialsRequestFormatter(String clientId, String clientSecret, String scope, boolean urlencode) {
if (Utils.isBlank(clientId))
throw new ConfigException(SASL_OAUTHBEARER_CLIENT_CREDENTIALS_CLIENT_ID, clientId);
if (Utils.isBlank(clientSecret))
throw new ConfigException(SASL_OAUTHBEARER_CLIENT_CREDENTIALS_CLIENT_SECRET, clientId);
clientId = clientId.trim();
clientSecret = clientSecret.trim();
scope = Utils.isBlank(scope) ? null : scope.trim();
// according to RFC-6749 clientId & clientSecret must be urlencoded, see https://tools.ietf.org/html/rfc6749#section-2.3.1
if (urlencode) {
clientId = URLEncoder.encode(clientId, StandardCharsets.UTF_8);
clientSecret = URLEncoder.encode(clientSecret, StandardCharsets.UTF_8);
if (scope != null)
scope = URLEncoder.encode(scope, StandardCharsets.UTF_8);
}
this.clientId = clientId;
this.clientSecret = clientSecret;
this.scope = scope;
}
@Override
public Map<String, String> formatHeaders() {
String s = String.format("%s:%s", clientId, clientSecret);
// Per RFC-7617, we need to use the *non-URL safe* base64 encoder. See KAFKA-14496.
String encoded = Base64.getEncoder().encodeToString(Utils.utf8(s));
String authorizationHeader = String.format("Basic %s", encoded);
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
headers.put("Authorization", authorizationHeader);
headers.put("Cache-Control", "no-cache");
headers.put("Content-Type", "application/x-www-form-urlencoded");
return headers;
}
@Override
public String formatBody() {
StringBuilder requestParameters = new StringBuilder();
requestParameters.append("grant_type=").append(GRANT_TYPE);
if (scope != null)
requestParameters.append("&scope=").append(scope);
return requestParameters.toString();
}
}
| ClientCredentialsRequestFormatter |
java | apache__camel | components/camel-aws/camel-aws-config/src/main/java/org/apache/camel/component/aws/config/AWSConfigConstants.java | {
"start": 950,
"end": 2162
} | interface ____ {
@Metadata(description = "The operation we want to perform", javaType = "String")
String OPERATION = "CamelAwsConfigOperation";
@Metadata(description = "The Managed rule source identifier", javaType = "String")
String RULE_SOURCE_IDENTIFIER = "CamelAwsConfigRuleSourceIdentifier";
@Metadata(description = "The source object for the rule. The owner of the rule could be AWS, CUSTOM_LAMBDA or CUSTOM_POLICY",
javaType = "String")
String SOURCE = "CamelAwsConfigRuleSource";
@Metadata(description = "The Managed rule name", javaType = "String")
String RULE_NAME = "CamelAwsConfigRuleName";
@Metadata(description = "The Conformance pack name", javaType = "String")
String CONFORMACE_PACK_NAME = "CamelAwsConformancePackName";
@Metadata(description = "The location of the file containing the template body in S3", javaType = "String")
String CONFORMACE_PACK_S3_TEMPLATE_URI = "CamelAwsConfigConformacePackS3TemplateURI";
@Metadata(description = "A string containing the full conformance pack template body", javaType = "String")
String CONFORMACE_PACK_TEMPLATE_BODY = "CamelAwsConfigConformacePackTemplateBody";
}
| AWSConfigConstants |
java | google__guava | android/guava-tests/test/com/google/common/collect/IterablesTest.java | {
"start": 9801,
"end": 29034
} | class ____ extends TypeA implements TypeB {}
@GwtIncompatible // Iterables.filter(Iterable, Class)
public void testFilterByType_iterator() throws Exception {
HasBoth hasBoth = new HasBoth();
Iterable<TypeA> alist = Lists.newArrayList(new TypeA(), new TypeA(), hasBoth, new TypeA());
Iterable<TypeB> blist = filter(alist, TypeB.class);
assertThat(blist).containsExactly(hasBoth).inOrder();
}
public void testTransform_iterator() {
List<String> input = asList("1", "2", "3");
Iterable<Integer> result =
Iterables.transform(
input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
List<Integer> actual = newArrayList(result);
List<Integer> expected = asList(1, 2, 3);
assertEquals(expected, actual);
assertCanIterateAgain(result);
assertEquals("[1, 2, 3]", result.toString());
}
public void testPoorlyBehavedTransform() {
List<String> input = asList("1", "not a number", "3");
Iterable<Integer> result =
Iterables.transform(
input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
Iterator<Integer> resultIterator = result.iterator();
resultIterator.next();
assertThrows(NumberFormatException.class, () -> resultIterator.next());
}
public void testNullFriendlyTransform() {
List<@Nullable Integer> input = asList(1, 2, null, 3);
Iterable<String> result =
Iterables.transform(
input,
new Function<@Nullable Integer, String>() {
@Override
public String apply(@Nullable Integer from) {
return String.valueOf(from);
}
});
List<String> actual = newArrayList(result);
List<String> expected = asList("1", "2", "null", "3");
assertEquals(expected, actual);
}
// Far less exhaustive than the tests in IteratorsTest
public void testCycle() {
Iterable<String> cycle = Iterables.cycle("a", "b");
int howManyChecked = 0;
for (String string : cycle) {
String expected = (howManyChecked % 2 == 0) ? "a" : "b";
assertEquals(expected, string);
if (howManyChecked++ == 5) {
break;
}
}
// We left the last iterator pointing to "b". But a new iterator should
// always point to "a".
for (String string : cycle) {
assertEquals("a", string);
break;
}
assertEquals("[a, b] (cycled)", cycle.toString());
}
// Again, the exhaustive tests are in IteratorsTest
public void testConcatIterable() {
List<Integer> list1 = newArrayList(1);
List<Integer> list2 = newArrayList(4);
List<List<Integer>> input = newArrayList(list1, list2);
Iterable<Integer> result = Iterables.concat(input);
assertEquals(asList(1, 4), newArrayList(result));
// Now change the inputs and see result dynamically change as well
list1.add(2);
List<Integer> list3 = newArrayList(3);
input.add(1, list3);
assertEquals(asList(1, 2, 3, 4), newArrayList(result));
assertEquals("[1, 2, 3, 4]", result.toString());
}
public void testConcatVarargs() {
List<Integer> list1 = newArrayList(1);
List<Integer> list2 = newArrayList(4);
List<Integer> list3 = newArrayList(7, 8);
List<Integer> list4 = newArrayList(9);
List<Integer> list5 = newArrayList(10);
Iterable<Integer> result = Iterables.concat(list1, list2, list3, list4, list5);
assertEquals(asList(1, 4, 7, 8, 9, 10), newArrayList(result));
assertEquals("[1, 4, 7, 8, 9, 10]", result.toString());
}
public void testConcatNullPointerException() {
List<Integer> list1 = newArrayList(1);
List<Integer> list2 = newArrayList(4);
assertThrows(NullPointerException.class, () -> Iterables.concat(list1, null, list2));
}
public void testConcatPeformingFiniteCycle() {
Iterable<Integer> iterable = asList(1, 2, 3);
int n = 4;
Iterable<Integer> repeated = Iterables.concat(nCopies(n, iterable));
assertThat(repeated).containsExactly(1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3).inOrder();
}
public void testPartition_badSize() {
Iterable<Integer> source = singleton(1);
assertThrows(IllegalArgumentException.class, () -> Iterables.partition(source, 0));
}
public void testPartition_empty() {
Iterable<Integer> source = emptySet();
Iterable<List<Integer>> partitions = Iterables.partition(source, 1);
assertTrue(Iterables.isEmpty(partitions));
}
public void testPartition_singleton1() {
Iterable<Integer> source = singleton(1);
Iterable<List<Integer>> partitions = Iterables.partition(source, 1);
assertEquals(1, Iterables.size(partitions));
assertEquals(singletonList(1), partitions.iterator().next());
}
public void testPartition_view() {
List<Integer> list = asList(1, 2);
Iterable<List<Integer>> partitions = Iterables.partition(list, 2);
// Changes before the partition is retrieved are reflected
list.set(0, 3);
Iterator<List<Integer>> iterator = partitions.iterator();
// Changes before the partition is retrieved are reflected
list.set(1, 4);
List<Integer> first = iterator.next();
// Changes after are not
list.set(0, 5);
assertEquals(ImmutableList.of(3, 4), first);
}
@J2ktIncompatible // Arrays.asList(...).subList() doesn't implement RandomAccess in J2KT.
@GwtIncompatible // Arrays.asList(...).subList doesn't implement RandomAccess in GWT
public void testPartitionRandomAccessInput() {
Iterable<Integer> source = asList(1, 2, 3);
Iterable<List<Integer>> partitions = Iterables.partition(source, 2);
Iterator<List<Integer>> iterator = partitions.iterator();
assertTrue(iterator.next() instanceof RandomAccess);
assertTrue(iterator.next() instanceof RandomAccess);
}
@J2ktIncompatible // Arrays.asList(...).subList() doesn't implement RandomAccess in J2KT.
@GwtIncompatible // Arrays.asList(...).subList() doesn't implement RandomAccess in GWT
public void testPartitionNonRandomAccessInput() {
Iterable<Integer> source = new LinkedList<>(asList(1, 2, 3));
Iterable<List<Integer>> partitions = Iterables.partition(source, 2);
Iterator<List<Integer>> iterator = partitions.iterator();
// Even though the input list doesn't implement RandomAccess, the output
// lists do.
assertTrue(iterator.next() instanceof RandomAccess);
assertTrue(iterator.next() instanceof RandomAccess);
}
public void testPaddedPartition_basic() {
List<Integer> list = asList(1, 2, 3, 4, 5);
Iterable<List<@Nullable Integer>> partitions = Iterables.paddedPartition(list, 2);
assertEquals(3, Iterables.size(partitions));
assertEquals(Arrays.<@Nullable Integer>asList(5, null), Iterables.getLast(partitions));
}
public void testPaddedPartitionRandomAccessInput() {
Iterable<Integer> source = asList(1, 2, 3);
Iterable<List<Integer>> partitions = Iterables.paddedPartition(source, 2);
Iterator<List<Integer>> iterator = partitions.iterator();
assertTrue(iterator.next() instanceof RandomAccess);
assertTrue(iterator.next() instanceof RandomAccess);
}
public void testPaddedPartitionNonRandomAccessInput() {
Iterable<Integer> source = new LinkedList<>(asList(1, 2, 3));
Iterable<List<Integer>> partitions = Iterables.paddedPartition(source, 2);
Iterator<List<Integer>> iterator = partitions.iterator();
// Even though the input list doesn't implement RandomAccess, the output
// lists do.
assertTrue(iterator.next() instanceof RandomAccess);
assertTrue(iterator.next() instanceof RandomAccess);
}
// More tests in IteratorsTest
public void testAddAllToList() {
List<String> alreadyThere = newArrayList("already", "there");
List<String> freshlyAdded = newArrayList("freshly", "added");
boolean changed = Iterables.addAll(alreadyThere, freshlyAdded);
assertThat(alreadyThere).containsExactly("already", "there", "freshly", "added").inOrder();
assertTrue(changed);
}
private static void assertCanIterateAgain(Iterable<?> iterable) {
for (@SuppressWarnings("unused") Object obj : iterable) {}
}
@J2ktIncompatible
@GwtIncompatible // NullPointerTester
public void testNullPointerExceptions() {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicStaticMethods(Iterables.class);
}
// More exhaustive tests are in IteratorsTest.
public void testElementsEqual() throws Exception {
Iterable<?> a;
Iterable<?> b;
// A few elements.
a = asList(4, 8, 15, 16, 23, 42);
b = asList(4, 8, 15, 16, 23, 42);
assertTrue(elementsEqual(a, b));
// An element differs.
a = asList(4, 8, 15, 12, 23, 42);
b = asList(4, 8, 15, 16, 23, 42);
assertFalse(elementsEqual(a, b));
// null versus non-null.
a = Arrays.<@Nullable Integer>asList(4, 8, 15, null, 23, 42);
b = asList(4, 8, 15, 16, 23, 42);
assertFalse(elementsEqual(a, b));
assertFalse(elementsEqual(b, a));
// Different lengths.
a = asList(4, 8, 15, 16, 23);
b = asList(4, 8, 15, 16, 23, 42);
assertFalse(elementsEqual(a, b));
assertFalse(elementsEqual(b, a));
}
public void testToString() {
List<String> list = emptyList();
assertEquals("[]", Iterables.toString(list));
list = newArrayList("yam", "bam", "jam", "ham");
assertEquals("[yam, bam, jam, ham]", Iterables.toString(list));
}
public void testLimit() {
Iterable<String> iterable = newArrayList("foo", "bar", "baz");
Iterable<String> limited = Iterables.limit(iterable, 2);
List<String> expected = ImmutableList.of("foo", "bar");
List<String> actual = newArrayList(limited);
assertEquals(expected, actual);
assertCanIterateAgain(limited);
assertEquals("[foo, bar]", limited.toString());
}
public void testLimit_illegalArgument() {
List<String> list = newArrayList("a", "b", "c");
assertThrows(IllegalArgumentException.class, () -> Iterables.limit(list, -1));
}
public void testIsEmpty() {
Iterable<String> emptyList = emptyList();
assertTrue(Iterables.isEmpty(emptyList));
Iterable<String> singletonList = singletonList("foo");
assertFalse(Iterables.isEmpty(singletonList));
}
public void testSkip_simple() {
Collection<String> set = ImmutableSet.of("a", "b", "c", "d", "e");
assertEquals(newArrayList("c", "d", "e"), newArrayList(skip(set, 2)));
assertEquals("[c, d, e]", skip(set, 2).toString());
}
public void testSkip_simpleList() {
Collection<String> list = newArrayList("a", "b", "c", "d", "e");
assertEquals(newArrayList("c", "d", "e"), newArrayList(skip(list, 2)));
assertEquals("[c, d, e]", skip(list, 2).toString());
}
public void testSkip_pastEnd() {
Collection<String> set = ImmutableSet.of("a", "b");
assertEquals(emptyList(), newArrayList(skip(set, 20)));
}
public void testSkip_pastEndList() {
Collection<String> list = newArrayList("a", "b");
assertEquals(emptyList(), newArrayList(skip(list, 20)));
}
public void testSkip_skipNone() {
Collection<String> set = ImmutableSet.of("a", "b");
assertEquals(newArrayList("a", "b"), newArrayList(skip(set, 0)));
}
public void testSkip_skipNoneList() {
Collection<String> list = newArrayList("a", "b");
assertEquals(newArrayList("a", "b"), newArrayList(skip(list, 0)));
}
public void testSkip_removal() {
Collection<String> set = newHashSet("a", "b");
Iterator<String> iterator = skip(set, 2).iterator();
try {
iterator.next();
} catch (NoSuchElementException suppressed) {
// We want remove() to fail even after a failed call to next().
}
assertThrows(IllegalStateException.class, () -> iterator.remove());
}
public void testSkip_allOfMutableList_modifiable() {
List<String> list = newArrayList("a", "b");
Iterator<String> iterator = skip(list, 2).iterator();
assertThrows(IllegalStateException.class, () -> iterator.remove());
}
public void testSkip_allOfImmutableList_modifiable() {
List<String> list = ImmutableList.of("a", "b");
Iterator<String> iterator = skip(list, 2).iterator();
assertThrows(UnsupportedOperationException.class, () -> iterator.remove());
}
@GwtIncompatible // slow (~35s)
public void testSkip_iterator() {
new IteratorTester<Integer>(
5, MODIFIABLE, newArrayList(2, 3), IteratorTester.KnownOrder.KNOWN_ORDER) {
@Override
protected Iterator<Integer> newTargetIterator() {
return skip(new LinkedHashSet<>(asList(1, 2, 3)), 1).iterator();
}
}.test();
}
@GwtIncompatible // slow (~35s)
public void testSkip_iteratorList() {
new IteratorTester<Integer>(
5, MODIFIABLE, newArrayList(2, 3), IteratorTester.KnownOrder.KNOWN_ORDER) {
@Override
protected Iterator<Integer> newTargetIterator() {
return skip(newArrayList(1, 2, 3), 1).iterator();
}
}.test();
}
public void testSkip_nonStructurallyModifiedList() throws Exception {
List<String> list = newArrayList("a", "b", "c");
Iterable<String> tail = skip(list, 1);
Iterator<String> tailIterator = tail.iterator();
list.set(2, "C");
assertEquals("b", tailIterator.next());
assertEquals("C", tailIterator.next());
assertFalse(tailIterator.hasNext());
}
public void testSkip_structurallyModifiedSkipSome() throws Exception {
Collection<String> set = new LinkedHashSet<>(asList("a", "b", "c"));
Iterable<String> tail = skip(set, 1);
set.remove("b");
set.addAll(newArrayList("A", "B", "C"));
assertThat(tail).containsExactly("c", "A", "B", "C").inOrder();
}
public void testSkip_structurallyModifiedSkipSomeList() throws Exception {
List<String> list = newArrayList("a", "b", "c");
Iterable<String> tail = skip(list, 1);
list.subList(1, 3).clear();
list.addAll(0, newArrayList("A", "B", "C"));
assertThat(tail).containsExactly("B", "C", "a").inOrder();
}
public void testSkip_structurallyModifiedSkipAll() throws Exception {
Collection<String> set = new LinkedHashSet<>(asList("a", "b", "c"));
Iterable<String> tail = skip(set, 2);
set.remove("a");
set.remove("b");
assertFalse(tail.iterator().hasNext());
}
public void testSkip_structurallyModifiedSkipAllList() throws Exception {
List<String> list = newArrayList("a", "b", "c");
Iterable<String> tail = skip(list, 2);
list.subList(0, 2).clear();
assertTrue(Iterables.isEmpty(tail));
}
public void testSkip_illegalArgument() {
List<String> list = newArrayList("a", "b", "c");
assertThrows(IllegalArgumentException.class, () -> skip(list, -1));
}
private void testGetOnAbc(Iterable<String> iterable) {
try {
Iterables.get(iterable, -1);
fail();
} catch (IndexOutOfBoundsException expected) {
}
assertEquals("a", Iterables.get(iterable, 0));
assertEquals("b", Iterables.get(iterable, 1));
assertEquals("c", Iterables.get(iterable, 2));
try {
Iterables.get(iterable, 3);
fail();
} catch (IndexOutOfBoundsException nsee) {
}
try {
Iterables.get(iterable, 4);
fail();
} catch (IndexOutOfBoundsException nsee) {
}
}
private void testGetOnEmpty(Iterable<String> iterable) {
try {
Iterables.get(iterable, 0);
fail();
} catch (IndexOutOfBoundsException expected) {
}
}
public void testGet_list() {
testGetOnAbc(newArrayList("a", "b", "c"));
}
public void testGet_emptyList() {
testGetOnEmpty(Collections.<String>emptyList());
}
public void testGet_sortedSet() {
testGetOnAbc(ImmutableSortedSet.of("b", "c", "a"));
}
public void testGet_emptySortedSet() {
testGetOnEmpty(ImmutableSortedSet.<String>of());
}
public void testGet_iterable() {
testGetOnAbc(ImmutableSet.of("a", "b", "c"));
}
public void testGet_emptyIterable() {
testGetOnEmpty(new HashSet<String>());
}
public void testGet_withDefault_negativePosition() {
assertThrows(
IndexOutOfBoundsException.class, () -> Iterables.get(newArrayList("a", "b", "c"), -1, "d"));
}
public void testGet_withDefault_simple() {
ArrayList<String> list = newArrayList("a", "b", "c");
assertEquals("b", Iterables.get(list, 1, "d"));
}
public void testGet_withDefault_iterable() {
Set<String> set = ImmutableSet.of("a", "b", "c");
assertEquals("b", Iterables.get(set, 1, "d"));
}
public void testGet_withDefault_last() {
ArrayList<String> list = newArrayList("a", "b", "c");
assertEquals("c", Iterables.get(list, 2, "d"));
}
public void testGet_withDefault_lastPlusOne() {
ArrayList<String> list = newArrayList("a", "b", "c");
assertEquals("d", Iterables.get(list, 3, "d"));
}
public void testGet_withDefault_doesntIterate() {
List<String> list = new DiesOnIteratorArrayList();
list.add("a");
assertEquals("a", Iterables.get(list, 0, "b"));
}
public void testGetFirst_withDefault_singleton() {
Iterable<String> iterable = singletonList("foo");
assertEquals("foo", Iterables.getFirst(iterable, "bar"));
}
public void testGetFirst_withDefault_empty() {
Iterable<String> iterable = emptyList();
assertEquals("bar", Iterables.getFirst(iterable, "bar"));
}
public void testGetFirst_withDefault_empty_null() {
Iterable<String> iterable = emptyList();
assertThat(Iterables.<@Nullable String>getFirst(iterable, null)).isNull();
}
public void testGetFirst_withDefault_multiple() {
Iterable<String> iterable = asList("foo", "bar");
assertEquals("foo", Iterables.getFirst(iterable, "qux"));
}
public void testGetLast_list() {
List<String> list = newArrayList("a", "b", "c");
assertEquals("c", Iterables.getLast(list));
}
public void testGetLast_emptyList() {
List<String> list = emptyList();
assertThrows(NoSuchElementException.class, () -> Iterables.getLast(list));
}
public void testGetLast_sortedSet() {
SortedSet<String> sortedSet = ImmutableSortedSet.of("b", "c", "a");
assertEquals("c", Iterables.getLast(sortedSet));
}
public void testGetLast_withDefault_singleton() {
Iterable<String> iterable = singletonList("foo");
assertEquals("foo", Iterables.getLast(iterable, "bar"));
}
public void testGetLast_withDefault_empty() {
Iterable<String> iterable = emptyList();
assertEquals("bar", Iterables.getLast(iterable, "bar"));
}
public void testGetLast_withDefault_empty_null() {
Iterable<String> iterable = emptyList();
assertThat(Iterables.<@Nullable String>getLast(iterable, null)).isNull();
}
public void testGetLast_withDefault_multiple() {
Iterable<String> iterable = asList("foo", "bar");
assertEquals("bar", Iterables.getLast(iterable, "qux"));
}
/**
* {@link ArrayList} extension that forbids the use of {@link Collection#iterator} for tests that
* need to prove that it isn't called.
*/
private static | HasBoth |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregateExpressionSizeOverrideFixedTest.java | {
"start": 1128,
"end": 2225
} | class ____ extends ContextTestSupport {
@Test
public void testAggregateExpressionSize() throws Exception {
getMockEndpoint("mock:aggregated").expectedBodiesReceived("A+B+C");
getMockEndpoint("mock:aggregated").expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "size");
Map<String, Object> headers = new HashMap<>();
headers.put("id", 123);
headers.put("mySize", 3);
template.sendBodyAndHeaders("direct:start", "A", headers);
template.sendBodyAndHeaders("direct:start", "B", headers);
template.sendBodyAndHeaders("direct:start", "C", headers);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").aggregate(header("id"), new BodyInAggregatingStrategy()).completionSize(2)
.completionSize(header("mySize")).to("mock:aggregated");
}
};
}
}
| AggregateExpressionSizeOverrideFixedTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/filter/NullSkipForCollections4309Test.java | {
"start": 741,
"end": 1019
} | enum ____ {
ONE, TWO
}
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", include = JsonTypeInfo.As.EXISTING_PROPERTY, visible = true)
@JsonSubTypes(value = { @JsonSubTypes.Type(value = DataType1.class, names = { "TYPE1" }) })
static abstract | Type |
java | apache__camel | components/camel-amqp/src/generated/java/org/apache/camel/component/amqp/AMQPEndpointUriFactory.java | {
"start": 514,
"end": 6735
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":destinationType:destinationName";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(104);
props.add("acceptMessagesWhileStopping");
props.add("acknowledgementModeName");
props.add("allowAdditionalHeaders");
props.add("allowNullBody");
props.add("allowReplyManagerQuickStop");
props.add("allowSerializedHeaders");
props.add("alwaysCopyMessage");
props.add("artemisConsumerPriority");
props.add("artemisStreamingEnabled");
props.add("asyncConsumer");
props.add("asyncStartListener");
props.add("asyncStopListener");
props.add("autoStartup");
props.add("browseLimit");
props.add("cacheLevel");
props.add("cacheLevelName");
props.add("clientId");
props.add("concurrentConsumers");
props.add("connectionFactory");
props.add("consumerType");
props.add("correlationProperty");
props.add("defaultTaskExecutorType");
props.add("deliveryDelay");
props.add("deliveryMode");
props.add("deliveryPersistent");
props.add("destinationName");
props.add("destinationResolver");
props.add("destinationType");
props.add("disableReplyTo");
props.add("disableTimeToLive");
props.add("durableSubscriptionName");
props.add("eagerLoadingOfProperties");
props.add("eagerPoisonBody");
props.add("errorHandler");
props.add("errorHandlerLogStackTrace");
props.add("errorHandlerLoggingLevel");
props.add("exceptionHandler");
props.add("exceptionListener");
props.add("exchangePattern");
props.add("explicitQosEnabled");
props.add("exposeListenerSession");
props.add("forceSendOriginalMessage");
props.add("formatDateHeadersToIso8601");
props.add("headerFilterStrategy");
props.add("idleConsumerLimit");
props.add("idleReceivesPerTaskLimit");
props.add("idleTaskExecutionLimit");
props.add("includeAllJMSXProperties");
props.add("includeSentJMSMessageID");
props.add("jmsKeyFormatStrategy");
props.add("jmsMessageType");
props.add("lazyCreateTransactionManager");
props.add("lazyStartProducer");
props.add("mapJmsMessage");
props.add("maxConcurrentConsumers");
props.add("maxMessagesPerTask");
props.add("messageConverter");
props.add("messageCreatedStrategy");
props.add("messageIdEnabled");
props.add("messageListenerContainerFactory");
props.add("messageTimestampEnabled");
props.add("password");
props.add("preserveMessageQos");
props.add("priority");
props.add("pubSubNoLocal");
props.add("receiveTimeout");
props.add("recoveryInterval");
props.add("replyCorrelationProperty");
props.add("replyTo");
props.add("replyToCacheLevelName");
props.add("replyToConcurrentConsumers");
props.add("replyToConsumerType");
props.add("replyToDeliveryPersistent");
props.add("replyToDestinationSelectorName");
props.add("replyToMaxConcurrentConsumers");
props.add("replyToOnTimeoutMaxConcurrentConsumers");
props.add("replyToOverride");
props.add("replyToSameDestinationAllowed");
props.add("replyToType");
props.add("requestTimeout");
props.add("requestTimeoutCheckerInterval");
props.add("selector");
props.add("streamMessageTypeEnabled");
props.add("subscriptionDurable");
props.add("subscriptionName");
props.add("subscriptionShared");
props.add("synchronous");
props.add("taskExecutor");
props.add("temporaryQueueResolver");
props.add("testConnectionOnStartup");
props.add("timeToLive");
props.add("transacted");
props.add("transactedInOut");
props.add("transactionManager");
props.add("transactionName");
props.add("transactionTimeout");
props.add("transferException");
props.add("transferExchange");
props.add("useMessageIDAsCorrelationID");
props.add("username");
props.add("waitForProvisionCorrelationToBeUpdatedCounter");
props.add("waitForProvisionCorrelationToBeUpdatedThreadSleepingTime");
props.add("waitForTemporaryReplyToToBeUpdatedCounter");
props.add("waitForTemporaryReplyToToBeUpdatedThreadSleepingTime");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(2);
secretProps.add("password");
secretProps.add("username");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "amqp".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "destinationType", "queue", false, copy);
uri = buildPathParameter(syntax, uri, "destinationName", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| AMQPEndpointUriFactory |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetomany/BasicSet.java | {
"start": 987,
"end": 6210
} | class ____ {
private Integer ed1_id;
private Integer ed2_id;
private Integer ing1_id;
private Integer ing2_id;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
SetRefEdEntity ed1 = new SetRefEdEntity( 1, "data_ed_1" );
SetRefEdEntity ed2 = new SetRefEdEntity( 2, "data_ed_2" );
em.persist( ed1 );
em.persist( ed2 );
ed1_id = ed1.getId();
ed2_id = ed2.getId();
} );
scope.inTransaction( em -> {
SetRefEdEntity ed1 = em.find( SetRefEdEntity.class, ed1_id );
SetRefIngEntity ing1 = new SetRefIngEntity( 3, "data_ing_1" );
SetRefIngEntity ing2 = new SetRefIngEntity( 4, "data_ing_2" );
ing1.setReference( ed1 );
ing2.setReference( ed1 );
em.persist( ing1 );
em.persist( ing2 );
ing1_id = ing1.getId();
ing2_id = ing2.getId();
} );
scope.inTransaction( em -> {
SetRefIngEntity ing1 = em.find( SetRefIngEntity.class, ing1_id );
SetRefEdEntity ed2 = em.find( SetRefEdEntity.class, ed2_id );
ing1.setReference( ed2 );
} );
scope.inTransaction( em -> {
SetRefIngEntity ing2 = em.find( SetRefIngEntity.class, ing2_id );
SetRefEdEntity ed2 = em.find( SetRefEdEntity.class, ed2_id );
ing2.setReference( ed2 );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2, 3, 4 ), auditReader.getRevisions( SetRefEdEntity.class, ed1_id ) );
assertEquals( Arrays.asList( 1, 3, 4 ), auditReader.getRevisions( SetRefEdEntity.class, ed2_id ) );
assertEquals( Arrays.asList( 2, 3 ), auditReader.getRevisions( SetRefIngEntity.class, ing1_id ) );
assertEquals( Arrays.asList( 2, 4 ), auditReader.getRevisions( SetRefIngEntity.class, ing2_id ) );
} );
}
@Test
public void testHistoryOfEdId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
SetRefIngEntity ing1 = em.find( SetRefIngEntity.class, ing1_id );
SetRefIngEntity ing2 = em.find( SetRefIngEntity.class, ing2_id );
SetRefEdEntity rev1 = auditReader.find( SetRefEdEntity.class, ed1_id, 1 );
SetRefEdEntity rev2 = auditReader.find( SetRefEdEntity.class, ed1_id, 2 );
SetRefEdEntity rev3 = auditReader.find( SetRefEdEntity.class, ed1_id, 3 );
SetRefEdEntity rev4 = auditReader.find( SetRefEdEntity.class, ed1_id, 4 );
assertEquals( Collections.EMPTY_SET, rev1.getReffering() );
assertEquals( TestTools.makeSet( ing1, ing2 ), rev2.getReffering() );
assertEquals( TestTools.makeSet( ing2 ), rev3.getReffering() );
assertEquals( Collections.EMPTY_SET, rev4.getReffering() );
} );
}
@Test
public void testHistoryOfEdId2(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
SetRefIngEntity ing1 = em.find( SetRefIngEntity.class, ing1_id );
SetRefIngEntity ing2 = em.find( SetRefIngEntity.class, ing2_id );
SetRefEdEntity rev1 = auditReader.find( SetRefEdEntity.class, ed2_id, 1 );
SetRefEdEntity rev2 = auditReader.find( SetRefEdEntity.class, ed2_id, 2 );
SetRefEdEntity rev3 = auditReader.find( SetRefEdEntity.class, ed2_id, 3 );
SetRefEdEntity rev4 = auditReader.find( SetRefEdEntity.class, ed2_id, 4 );
assertEquals( Collections.EMPTY_SET, rev1.getReffering() );
assertEquals( Collections.EMPTY_SET, rev2.getReffering() );
assertEquals( TestTools.makeSet( ing1 ), rev3.getReffering() );
assertEquals( TestTools.makeSet( ing1, ing2 ), rev4.getReffering() );
} );
}
@Test
public void testHistoryOfEdIng1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
SetRefEdEntity ed1 = em.find( SetRefEdEntity.class, ed1_id );
SetRefEdEntity ed2 = em.find( SetRefEdEntity.class, ed2_id );
SetRefIngEntity rev1 = auditReader.find( SetRefIngEntity.class, ing1_id, 1 );
SetRefIngEntity rev2 = auditReader.find( SetRefIngEntity.class, ing1_id, 2 );
SetRefIngEntity rev3 = auditReader.find( SetRefIngEntity.class, ing1_id, 3 );
SetRefIngEntity rev4 = auditReader.find( SetRefIngEntity.class, ing1_id, 4 );
assertNull( rev1 );
assertEquals( ed1, rev2.getReference() );
assertEquals( ed2, rev3.getReference() );
assertEquals( ed2, rev4.getReference() );
} );
}
@Test
public void testHistoryOfEdIng2(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
SetRefEdEntity ed1 = em.find( SetRefEdEntity.class, ed1_id );
SetRefEdEntity ed2 = em.find( SetRefEdEntity.class, ed2_id );
SetRefIngEntity rev1 = auditReader.find( SetRefIngEntity.class, ing2_id, 1 );
SetRefIngEntity rev2 = auditReader.find( SetRefIngEntity.class, ing2_id, 2 );
SetRefIngEntity rev3 = auditReader.find( SetRefIngEntity.class, ing2_id, 3 );
SetRefIngEntity rev4 = auditReader.find( SetRefIngEntity.class, ing2_id, 4 );
assertNull( rev1 );
assertEquals( ed1, rev2.getReference() );
assertEquals( ed1, rev3.getReference() );
assertEquals( ed2, rev4.getReference() );
} );
}
}
| BasicSet |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/DB2AggregateSupport.java | {
"start": 16628,
"end": 19374
} | class ____ implements AggregateWriteExpression {
private final LinkedHashMap<String, AggregateWriteExpression> subExpressions = new LinkedHashMap<>();
protected final EmbeddableMappingType embeddableMappingType;
protected final String structTypeName;
protected final boolean nullable;
public AggregateStructWriteExpression(SelectableMapping selectableMapping) {
final DB2StructJdbcType structJdbcType = (DB2StructJdbcType) selectableMapping.getJdbcMapping().getJdbcType();
this.embeddableMappingType = structJdbcType.getEmbeddableMappingType();
this.structTypeName = structJdbcType.getStructTypeName();
this.nullable = selectableMapping.isNullable();
}
protected void initializeSubExpressions(SelectableMapping[] columns, TypeConfiguration typeConfiguration) {
for ( SelectableMapping column : columns ) {
final SelectablePath selectablePath = column.getSelectablePath();
final SelectablePath[] parts = selectablePath.getParts();
final String typeName = determineTypeName( column, typeConfiguration );
AggregateStructWriteExpression currentAggregate = this;
EmbeddableMappingType currentMappingType = embeddableMappingType;
for ( int i = 1; i < parts.length - 1; i++ ) {
final SelectableMapping selectableMapping = currentMappingType.getJdbcValueSelectable(
currentMappingType.getSelectableIndex( parts[i].getSelectableName() )
);
currentAggregate = (AggregateStructWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
parts[i].getSelectableName(),
k -> new AggregateStructWriteExpression( selectableMapping )
);
currentMappingType = currentAggregate.embeddableMappingType;
}
final String customWriteExpression = column.getWriteExpression();
currentAggregate.subExpressions.put(
parts[parts.length - 1].getSelectableName(),
new BasicStructWriteExpression(
column,
typeName,
customWriteExpression
)
);
}
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
if ( nullable ) {
sb.append( "coalesce(" );
sb.append( path );
sb.append( "," );
sb.append( structTypeName );
sb.append( "())" );
}
else {
sb.append( path );
}
for ( Map.Entry<String, AggregateWriteExpression> entry : subExpressions.entrySet() ) {
final String column = entry.getKey();
final AggregateWriteExpression value = entry.getValue();
sb.append( ".." );
sb.append( column );
sb.append( '(' );
value.append( sb, path + ".." + column, translator, expression );
sb.append( ')' );
}
}
}
private static | AggregateStructWriteExpression |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoOperator.java | {
"start": 1089,
"end": 1702
} | class ____<I, O> extends Mono<O> implements Scannable {
protected final Mono<? extends I> source;
/**
* Build a {@link MonoOperator} wrapper around the passed parent {@link Publisher}
*
* @param source the {@link Publisher} to decorate
*/
protected MonoOperator(Mono<? extends I> source) {
this.source = Objects.requireNonNull(source);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PREFETCH) return Integer.MAX_VALUE;
if (key == Attr.PARENT) return source;
if (key == InternalProducerAttr.INSTANCE) return false; // public class!
return null;
}
}
| MonoOperator |
java | spring-projects__spring-boot | module/spring-boot-batch/src/test/java/org/springframework/boot/batch/autoconfigure/BatchJobLauncherAutoConfigurationTests.java | {
"start": 1926,
"end": 5710
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner().withConfiguration(
AutoConfigurations.of(BatchAutoConfiguration.class, BatchJobLauncherAutoConfiguration.class));
@Test
void testDefinesAndLaunchesJob() {
this.contextRunner.withUserConfiguration(JobConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(JobOperator.class);
context.getBean(JobLauncherApplicationRunner.class).run(new DefaultApplicationArguments("jobParam=test"));
JobParameters jobParameters = new JobParametersBuilder().addString("jobParam", "test").toJobParameters();
assertThat(context.getBean(JobRepository.class).getLastJobExecution("job", jobParameters)).isNotNull();
});
}
@Test
void testDefinesAndLaunchesJobIgnoreOptionArguments() {
this.contextRunner.withUserConfiguration(JobConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(JobOperator.class);
context.getBean(JobLauncherApplicationRunner.class)
.run(new DefaultApplicationArguments("--spring.property=value", "jobParam=test"));
JobParameters jobParameters = new JobParametersBuilder().addString("jobParam", "test").toJobParameters();
assertThat(context.getBean(JobRepository.class).getLastJobExecution("job", jobParameters)).isNotNull();
});
}
@Test
void testRegisteredAndLocalJob() {
this.contextRunner.withUserConfiguration(NamedJobConfigurationWithRegisteredAndLocalJob.class)
.withPropertyValues("spring.batch.job.name:discreteRegisteredJob")
.run((context) -> {
assertThat(context).hasSingleBean(JobOperator.class);
context.getBean(JobLauncherApplicationRunner.class).run();
JobExecution lastJobExecution = context.getBean(JobRepository.class)
.getLastJobExecution("discreteRegisteredJob", new JobParameters());
assertThat(lastJobExecution).isNotNull();
assertThat(lastJobExecution.getStatus()).isEqualTo(BatchStatus.COMPLETED);
});
}
@Test
void testDefinesAndLaunchesLocalJob() {
this.contextRunner.withUserConfiguration(NamedJobConfigurationWithLocalJob.class)
.withPropertyValues("spring.batch.job.name:discreteLocalJob")
.run((context) -> {
assertThat(context).hasSingleBean(JobOperator.class);
context.getBean(JobLauncherApplicationRunner.class).run();
assertThat(context.getBean(JobRepository.class)
.getLastJobExecution("discreteLocalJob", new JobParameters())).isNotNull();
});
}
@Test
void testMultipleJobsAndNoJobName() {
this.contextRunner.withUserConfiguration(MultipleJobConfiguration.class).run((context) -> {
assertThat(context).hasFailed();
Throwable startupFailure = context.getStartupFailure();
assertThat(startupFailure).isNotNull();
Throwable cause = startupFailure.getCause();
assertThat(cause).isNotNull();
assertThat(cause.getMessage()).contains("Job name must be specified in case of multiple jobs");
});
}
@Test
void testMultipleJobsAndJobName() {
this.contextRunner.withUserConfiguration(MultipleJobConfiguration.class)
.withPropertyValues("spring.batch.job.name:discreteLocalJob")
.run((context) -> {
assertThat(context).hasSingleBean(JobOperator.class);
context.getBean(JobLauncherApplicationRunner.class).run();
assertThat(context.getBean(JobRepository.class)
.getLastJobExecution("discreteLocalJob", new JobParameters())).isNotNull();
});
}
@Test
void testDisableLaunchesJob() {
this.contextRunner.withUserConfiguration(JobConfiguration.class)
.withPropertyValues("spring.batch.job.enabled:false")
.run((context) -> {
assertThat(context).hasSingleBean(JobOperator.class);
assertThat(context).doesNotHaveBean(CommandLineRunner.class);
});
}
@Configuration(proxyBeanMethods = false)
static | BatchJobLauncherAutoConfigurationTests |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFlatMapSingle.java | {
"start": 2095,
"end": 10380
} | class ____<T, R>
extends AtomicInteger
implements FlowableSubscriber<T>, Subscription {
private static final long serialVersionUID = 8600231336733376951L;
final Subscriber<? super R> downstream;
final boolean delayErrors;
final int maxConcurrency;
final AtomicLong requested;
final CompositeDisposable set;
final AtomicInteger active;
final AtomicThrowable errors;
final Function<? super T, ? extends SingleSource<? extends R>> mapper;
final AtomicReference<SpscLinkedArrayQueue<R>> queue;
Subscription upstream;
volatile boolean cancelled;
FlatMapSingleSubscriber(Subscriber<? super R> actual,
Function<? super T, ? extends SingleSource<? extends R>> mapper, boolean delayErrors, int maxConcurrency) {
this.downstream = actual;
this.mapper = mapper;
this.delayErrors = delayErrors;
this.maxConcurrency = maxConcurrency;
this.requested = new AtomicLong();
this.set = new CompositeDisposable();
this.errors = new AtomicThrowable();
this.active = new AtomicInteger(1);
this.queue = new AtomicReference<>();
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
int m = maxConcurrency;
if (m == Integer.MAX_VALUE) {
s.request(Long.MAX_VALUE);
} else {
s.request(maxConcurrency);
}
}
}
@Override
public void onNext(T t) {
SingleSource<? extends R> ms;
try {
ms = Objects.requireNonNull(mapper.apply(t), "The mapper returned a null SingleSource");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
upstream.cancel();
onError(ex);
return;
}
active.getAndIncrement();
InnerObserver inner = new InnerObserver();
if (!cancelled && set.add(inner)) {
ms.subscribe(inner);
}
}
@Override
public void onError(Throwable t) {
active.decrementAndGet();
if (errors.tryAddThrowableOrReport(t)) {
if (!delayErrors) {
set.dispose();
}
drain();
}
}
@Override
public void onComplete() {
active.decrementAndGet();
drain();
}
@Override
public void cancel() {
cancelled = true;
upstream.cancel();
set.dispose();
errors.tryTerminateAndReport();
}
@Override
public void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
drain();
}
}
void innerSuccess(InnerObserver inner, R value) {
set.delete(inner);
if (get() == 0 && compareAndSet(0, 1)) {
boolean d = active.decrementAndGet() == 0;
if (requested.get() != 0) {
downstream.onNext(value);
SpscLinkedArrayQueue<R> q = queue.get();
if (d && (q == null || q.isEmpty())) {
errors.tryTerminateConsumer(downstream);
return;
}
BackpressureHelper.produced(requested, 1);
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(1);
}
} else {
SpscLinkedArrayQueue<R> q = getOrCreateQueue();
synchronized (q) {
q.offer(value);
}
}
if (decrementAndGet() == 0) {
return;
}
} else {
SpscLinkedArrayQueue<R> q = getOrCreateQueue();
synchronized (q) {
q.offer(value);
}
active.decrementAndGet();
if (getAndIncrement() != 0) {
return;
}
}
drainLoop();
}
SpscLinkedArrayQueue<R> getOrCreateQueue() {
SpscLinkedArrayQueue<R> current = queue.get();
if (current != null) {
return current;
}
current = new SpscLinkedArrayQueue<>(Flowable.bufferSize());
if (queue.compareAndSet(null, current)) {
return current;
}
return queue.get();
}
void innerError(InnerObserver inner, Throwable e) {
set.delete(inner);
if (errors.tryAddThrowableOrReport(e)) {
if (!delayErrors) {
upstream.cancel();
set.dispose();
} else {
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(1);
}
}
active.decrementAndGet();
drain();
}
}
void drain() {
if (getAndIncrement() == 0) {
drainLoop();
}
}
void clear() {
SpscLinkedArrayQueue<R> q = queue.get();
if (q != null) {
q.clear();
}
}
void drainLoop() {
int missed = 1;
Subscriber<? super R> a = downstream;
AtomicInteger n = active;
AtomicReference<SpscLinkedArrayQueue<R>> qr = queue;
for (;;) {
long r = requested.get();
long e = 0L;
while (e != r) {
if (cancelled) {
clear();
return;
}
if (!delayErrors) {
Throwable ex = errors.get();
if (ex != null) {
clear();
errors.tryTerminateConsumer(downstream);
return;
}
}
boolean d = n.get() == 0;
SpscLinkedArrayQueue<R> q = qr.get();
R v = q != null ? q.poll() : null;
boolean empty = v == null;
if (d && empty) {
errors.tryTerminateConsumer(a);
return;
}
if (empty) {
break;
}
a.onNext(v);
e++;
}
if (e == r) {
if (cancelled) {
clear();
return;
}
if (!delayErrors) {
Throwable ex = errors.get();
if (ex != null) {
clear();
errors.tryTerminateConsumer(a);
return;
}
}
boolean d = n.get() == 0;
SpscLinkedArrayQueue<R> q = qr.get();
boolean empty = q == null || q.isEmpty();
if (d && empty) {
errors.tryTerminateConsumer(a);
return;
}
}
if (e != 0L) {
BackpressureHelper.produced(requested, e);
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(e);
}
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
final | FlatMapSingleSubscriber |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/event/fire/EventFireTest.java | {
"start": 256,
"end": 766
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer();
@Test
public <T> void testEventFireThrowsExceptionIfEventTypeHasTypeVariable() {
Assertions.assertThrows(IllegalArgumentException.class,
() -> Arc.container().beanManager().getEvent().select(BarInterface.class).fire(new Foo<T>()),
"Event#fire should throw IllegalArgumentException if the payload contains unresolved type variable");
}
public | EventFireTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsOnly_Test.java | {
"start": 1855,
"end": 7622
} | class ____ extends IterablesBaseTest {
@Test
void should_pass_if_actual_contains_given_values_only() {
iterables.assertContainsOnly(someInfo(), actual, array("Luke", "Yoda", "Leia"));
}
@Test
void should_pass_if_actual_contains_given_values_only_with_null_elements() {
actual.add(null);
actual.add(null);
iterables.assertContainsOnly(someInfo(), actual, array("Luke", null, "Yoda", "Leia", null));
}
@Test
void should_pass_if_actual_contains_given_values_only_in_different_order() {
iterables.assertContainsOnly(someInfo(), actual, array("Leia", "Yoda", "Luke"));
}
@Test
void should_pass_if_actual_contains_given_values_only_more_than_once() {
actual.addAll(list("Luke", "Luke"));
iterables.assertContainsOnly(someInfo(), actual, array("Luke", "Yoda", "Leia"));
}
@Test
void should_pass_if_actual_contains_given_values_only_even_if_duplicated() {
iterables.assertContainsOnly(someInfo(), actual, array("Luke", "Luke", "Luke", "Yoda", "Leia"));
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual.clear();
iterables.assertContainsOnly(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
// GIVEN
Object[] expected = array("Luke", "Yoda", "Leia");
actual.clear();
// WHEN
expectAssertionError(() -> iterables.assertContainsOnly(someInfo(), actual, expected));
// THEN
verify(failures).failure(info, shouldContainOnly(actual, expected, list("Luke", "Yoda", "Leia"), emptyList()));
}
@Test
void should_fail_if_actual_is_empty_and_array_of_values_to_look_for_is_not() {
// GIVEN
Object[] expected = emptyArray();
// WHEN
expectAssertionError(() -> iterables.assertContainsOnly(someInfo(), actual, expected));
// THEN
verify(failures).failure(info, shouldContainOnly(actual, expected, emptyList(), list("Luke", "Yoda", "Leia")));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null() {
// GIVEN
Object[] expected = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> iterables.assertContainsOnly(someInfo(), actual, expected));
// THEN
then(npe).hasMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
var assertionError = expectAssertionError(() -> iterables.assertContainsOnly(someInfo(), null, array("Yoda")));
// THEN
then(assertionError).hasMessage(shouldNotBeNull().create());
}
@Test
void should_fail_if_actual_does_not_contain_all_given_values() {
// GIVEN
Object[] expected = { "Luke", "Yoda", "Han" };
// WHEN
expectAssertionError(() -> iterables.assertContainsOnly(info, actual, expected));
// THEN
verify(failures).failure(info, shouldContainOnly(actual, expected, list("Han"), list("Leia")));
}
@Test
void should_fail_if_actual_contains_additional_elements() {
// GIVEN
Object[] expected = { "Luke", "Yoda" };
// WHEN
expectAssertionError(() -> iterables.assertContainsOnly(info, actual, expected));
// THEN
verify(failures).failure(info, shouldContainOnly(actual, expected, emptyList(), list("Leia")));
}
@Test
void should_fail_if_actual_contains_a_subset_of_expected_elements() {
// GIVEN
Object[] expected = { "Luke", "Yoda", "Obiwan", "Leia" };
// WHEN
expectAssertionError(() -> iterables.assertContainsOnly(info, actual, expected));
// THEN
verify(failures).failure(info, shouldContainOnly(actual, expected, list("Obiwan"), emptyList()));
}
// ------------------------------------------------------------------------------------------------------------------
// tests using a custom comparison strategy
// ------------------------------------------------------------------------------------------------------------------
@Test
void should_pass_if_actual_contains_given_values_only_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsOnly(someInfo(), actual, array("LUKE", "YODA", "Leia"));
}
@Test
void should_pass_if_actual_contains_given_values_only_in_different_order_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsOnly(someInfo(), actual, array("LEIA", "yoda", "LukE"));
}
@Test
void should_pass_if_actual_contains_given_values_only_more_than_once_according_to_custom_comparison_strategy() {
actual.addAll(list("Luke", "Luke"));
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsOnly(someInfo(), actual, array("luke", "YOda", "LeIA"));
}
@Test
void should_pass_if_actual_contains_given_values_only_even_if_duplicated_according_to_custom_comparison_strategy() {
actual.addAll(list("LUKE"));
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsOnly(someInfo(), actual,
array("LUke", "LUKE", "lukE", "YOda", "Leia"));
}
@Test
void should_fail_if_actual_does_not_contain_given_values_only_according_to_custom_comparison_strategy() {
// GIVEN
Object[] expected = { "Luke", "Yoda", "Han" };
// WHEN
expectAssertionError(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsOnly(info, actual, expected));
// THEN
verify(failures).failure(info, shouldContainOnly(actual, expected, list("Han"), list("Leia"), comparisonStrategy));
}
@Test
void should_pass_if_nonrestartable_actual_contains_only_given_values() {
iterables.assertContainsOnly(someInfo(), createSinglyIterable(actual), array("Luke", "Yoda", "Leia"));
}
}
| Iterables_assertContainsOnly_Test |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/hamcrest/ModuleDescriptorMatchers.java | {
"start": 4815,
"end": 6579
} | class ____ extends TypeSafeMatcher<Opens> {
private final String source;
private final Set<String> targets;
OpensMatcher(String source, Set<String> targets) {
this.source = source;
this.targets = Set.copyOf(targets);
}
@Override
protected boolean matchesSafely(final Opens item) {
return item != null && Objects.equals(item.source(), source) && Objects.equals(item.targets(), targets);
}
@Override
public void describeTo(final Description description) {
description.appendText(String.format(java.util.Locale.ROOT, "Opens[%s]", opensToString(source, targets)));
}
@Override
protected void describeMismatchSafely(final Opens item, final Description mismatchDescription) {
describeTo(mismatchDescription);
if (item == null) {
mismatchDescription.appendText("was null");
} else {
mismatchDescription.appendText(String.format(java.util.Locale.ROOT, ", actual Opens[%s]", opensToString(item)));
}
}
private static String opensToString(String source, Set<String> targets) {
if (targets.isEmpty()) {
return source;
} else {
return source + " to " + targets;
}
}
private static String opensToString(Opens opens) {
if (opens.targets().isEmpty()) {
return opens.source();
} else {
return opens.source() + " to " + opens.targets();
}
}
}
/**
* Matcher that matches the <i>service</i> and <i>providers</i> of a {@code Provides}.
*/
static | OpensMatcher |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedAnonymousClassTest.java | {
"start": 4381,
"end": 5052
} | class ____ {
public static void main(String[] args) throws Exception {
// BUG: Diagnostic contains:
new Callable<Void>() {
Void register;
public Void call() throws Exception {
return null;
}
};
}
}
""")
.doTest();
}
@Test
public void liveCallableViaField() {
compilationHelper
.addSourceLines(
"a/One.java",
"""
package a;
import java.util.concurrent.Callable;
import java.util.ArrayList;
public | One |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/sealed/SingletonSealedProducerTest.java | {
"start": 396,
"end": 671
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Producer.class);
@Test
public void test() {
assertNotNull(Arc.container().select(MySealed.class).get());
}
@Dependent
static | SingletonSealedProducerTest |
java | quarkusio__quarkus | extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/extensions/NumberTemplateExtensions.java | {
"start": 212,
"end": 1689
} | class ____ {
static Integer mod(Integer number, Integer mod) {
return number % mod;
}
// addition
@TemplateExtension(matchNames = { "plus", "+" })
static Integer addToInt(Integer number, String name, Integer other) {
return number + other;
}
@TemplateExtension(matchNames = { "plus", "+" })
static Long addToInt(Integer number, String name, Long other) {
return number + other;
}
@TemplateExtension(matchNames = { "plus", "+" })
static Long addToLong(Long number, String name, Integer other) {
return number + other;
}
@TemplateExtension(matchNames = { "plus", "+" })
static Long addToLong(Long number, String name, Long other) {
return number + other;
}
// subtraction
@TemplateExtension(matchNames = { "minus", "-" })
static Integer subtractFromInt(Integer number, String name, Integer other) {
return number - other;
}
@TemplateExtension(matchNames = { "minus", "-" })
static Long subtractFromInt(Integer number, String name, Long other) {
return number - other;
}
@TemplateExtension(matchNames = { "minus", "-" })
static Long subtractFromLong(Long number, String name, Integer other) {
return number - other;
}
@TemplateExtension(matchNames = { "minus", "-" })
static Long subtractFromLong(Long number, String name, Long other) {
return number - other;
}
}
| NumberTemplateExtensions |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/LinkedCaseInsensitiveMap.java | {
"start": 12616,
"end": 13252
} | class ____<T> implements Iterator<T> {
private final Iterator<Entry<String, V>> delegate;
private @Nullable Entry<String, V> last;
public EntryIterator() {
this.delegate = targetMap.entrySet().iterator();
}
protected Entry<String, V> nextEntry() {
Entry<String, V> entry = this.delegate.next();
this.last = entry;
return entry;
}
@Override
public boolean hasNext() {
return this.delegate.hasNext();
}
@Override
public void remove() {
this.delegate.remove();
if (this.last != null) {
removeCaseInsensitiveKey(this.last.getKey());
this.last = null;
}
}
}
private | EntryIterator |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/visitor/functions/Least.java | {
"start": 1036,
"end": 2671
} | class ____ implements Function {
public static final Least instance = new Least();
public Object eval(SQLEvalVisitor visitor, SQLMethodInvokeExpr x) {
Object result = null;
for (SQLExpr item : x.getArguments()) {
item.accept(visitor);
Object itemValue = item.getAttributes().get(EVAL_VALUE);
if (result == null) {
result = itemValue;
} else {
if (SQLEvalVisitorUtils.lt(itemValue, result)) {
result = itemValue;
}
}
}
return result;
}
public Object eval(SQLMethodInvokeExpr x) {
List<SQLExpr> arguments = x.getArguments();
if (arguments.size() > 0) {
SQLExpr p0 = arguments.get(0);
if (p0 instanceof SQLIntegerExpr && ((SQLIntegerExpr) p0).getNumber() instanceof Integer) {
int val = ((SQLIntegerExpr) p0).getNumber().intValue();
for (int i = 1; i < arguments.size(); i++) {
SQLExpr param = arguments.get(i);
if (param instanceof SQLIntegerExpr && ((SQLIntegerExpr) param).getNumber() instanceof Integer) {
int paramVal = ((SQLIntegerExpr) param).getNumber().intValue();
if (paramVal < val) {
val = paramVal;
}
} else {
return SQLEvalVisitor.EVAL_ERROR;
}
}
return val;
}
}
return SQLEvalVisitor.EVAL_ERROR;
}
}
| Least |
java | elastic__elasticsearch | modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java | {
"start": 1556,
"end": 5612
} | class ____ extends Plugin implements ActionPlugin, IngestPlugin, ExtensiblePlugin {
public IngestCommonPlugin() {}
@Override
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
return Map.ofEntries(
entry(AppendProcessor.TYPE, new AppendProcessor.Factory(parameters.scriptService)),
entry(BytesProcessor.TYPE, new BytesProcessor.Factory()),
entry(CommunityIdProcessor.TYPE, new CommunityIdProcessor.Factory()),
entry(ConvertProcessor.TYPE, new ConvertProcessor.Factory()),
entry(CsvProcessor.TYPE, new CsvProcessor.Factory()),
entry(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory(parameters.scriptService)),
entry(DateProcessor.TYPE, new DateProcessor.Factory(parameters.scriptService)),
entry(DissectProcessor.TYPE, new DissectProcessor.Factory()),
entry(DotExpanderProcessor.TYPE, new DotExpanderProcessor.Factory()),
entry(DropProcessor.TYPE, new DropProcessor.Factory()),
entry(FailProcessor.TYPE, new FailProcessor.Factory(parameters.scriptService)),
entry(FingerprintProcessor.TYPE, new FingerprintProcessor.Factory()),
entry(ForEachProcessor.TYPE, new ForEachProcessor.Factory(parameters.scriptService)),
entry(GrokProcessor.TYPE, new GrokProcessor.Factory(parameters.matcherWatchdog)),
entry(GsubProcessor.TYPE, new GsubProcessor.Factory()),
entry(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory()),
entry(JoinProcessor.TYPE, new JoinProcessor.Factory()),
entry(JsonProcessor.TYPE, new JsonProcessor.Factory()),
entry(KeyValueProcessor.TYPE, new KeyValueProcessor.Factory(parameters.scriptService)),
entry(LowercaseProcessor.TYPE, new LowercaseProcessor.Factory()),
entry(NetworkDirectionProcessor.TYPE, new NetworkDirectionProcessor.Factory(parameters.scriptService)),
entry(PipelineProcessor.TYPE, new PipelineProcessor.Factory(parameters.ingestService)),
entry(RegisteredDomainProcessor.TYPE, new RegisteredDomainProcessor.Factory()),
entry(RecoverFailureDocumentProcessor.TYPE, new RecoverFailureDocumentProcessor.Factory()),
entry(RemoveProcessor.TYPE, new RemoveProcessor.Factory(parameters.scriptService)),
entry(RenameProcessor.TYPE, new RenameProcessor.Factory(parameters.scriptService)),
entry(RerouteProcessor.TYPE, new RerouteProcessor.Factory()),
entry(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService)),
entry(SetProcessor.TYPE, new SetProcessor.Factory(parameters.scriptService)),
entry(SortProcessor.TYPE, new SortProcessor.Factory()),
entry(SplitProcessor.TYPE, new SplitProcessor.Factory()),
entry(TerminateProcessor.TYPE, new TerminateProcessor.Factory()),
entry(TrimProcessor.TYPE, new TrimProcessor.Factory()),
entry(URLDecodeProcessor.TYPE, new URLDecodeProcessor.Factory()),
entry(UppercaseProcessor.TYPE, new UppercaseProcessor.Factory()),
entry(UriPartsProcessor.TYPE, new UriPartsProcessor.Factory())
);
}
@Override
public List<ActionHandler> getActions() {
return List.of(new ActionHandler(GrokProcessorGetAction.INSTANCE, GrokProcessorGetAction.TransportAction.class));
}
@Override
public List<RestHandler> getRestHandlers(
Settings settings,
NamedWriteableRegistry namedWriteableRegistry,
RestController restController,
ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings,
SettingsFilter settingsFilter,
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster,
Predicate<NodeFeature> clusterSupportsFeature
) {
return List.of(new GrokProcessorGetAction.RestAction());
}
}
| IngestCommonPlugin |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TempDirectoryCleanupTests.java | {
"start": 7894,
"end": 8256
} | class ____ {
@TempDir(cleanup = ON_SUCCESS)
Path onSuccessFailingFieldDir;
@Test
void testOnSuccessFailingField() {
TempDirFieldTests.onSuccessFailingFieldDir = onSuccessFailingFieldDir;
fail();
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
static | OnSuccessFailingFieldCase |
java | elastic__elasticsearch | x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java | {
"start": 1229,
"end": 6651
} | class ____ extends AbstractNodeTestCase<BinaryStringNumericPipe, Pipe> {
@Override
protected BinaryStringNumericPipe randomInstance() {
return randomBinaryStringNumericPipe();
}
private Expression randomBinaryStringNumericExpression() {
return randomBinaryStringNumericPipe().expression();
}
private BinaryStringNumericOperation randomBinaryStringNumericOperation() {
return randomBinaryStringNumericPipe().operation();
}
public static BinaryStringNumericPipe randomBinaryStringNumericPipe() {
List<Pipe> functions = new ArrayList<>();
functions.add(new Left(randomSource(), randomStringLiteral(), randomIntLiteral()).makePipe());
functions.add(new Right(randomSource(), randomStringLiteral(), randomIntLiteral()).makePipe());
functions.add(new Repeat(randomSource(), randomStringLiteral(), randomIntLiteral()).makePipe());
return (BinaryStringNumericPipe) randomFrom(functions);
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression, operation),
// skipping the children (the two parameters of the binary function) which are tested separately
BinaryStringNumericPipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringNumericExpression());
BinaryStringNumericPipe newB = new BinaryStringNumericPipe(b1.source(), newExpression, b1.left(), b1.right(), b1.operation());
assertEquals(newB, b1.transformPropertiesOnly(Expression.class, v -> Objects.equals(v, b1.expression()) ? newExpression : v));
BinaryStringNumericPipe b2 = randomInstance();
BinaryStringNumericOperation newOp = randomValueOtherThan(b2.operation(), () -> randomBinaryStringNumericOperation());
newB = new BinaryStringNumericPipe(b2.source(), b2.expression(), b2.left(), b2.right(), newOp);
assertEquals(
newB,
b2.transformPropertiesOnly(BinaryStringNumericOperation.class, v -> Objects.equals(v, b2.operation()) ? newOp : v)
);
BinaryStringNumericPipe b3 = randomInstance();
Source newLoc = randomValueOtherThan(b3.source(), () -> randomSource());
newB = new BinaryStringNumericPipe(newLoc, b3.expression(), b3.left(), b3.right(), b3.operation());
assertEquals(newB, b3.transformPropertiesOnly(Source.class, v -> Objects.equals(v, b3.source()) ? newLoc : v));
}
@Override
public void testReplaceChildren() {
BinaryStringNumericPipe b = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral())));
Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomIntLiteral())));
BinaryStringNumericPipe newB = new BinaryStringNumericPipe(b.source(), b.expression(), b.left(), b.right(), b.operation());
BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), b.right());
transformed = newB.replaceChildren(b.left(), newRight);
assertEquals(transformed.left(), b.left());
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
transformed = newB.replaceChildren(newLeft, newRight);
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
}
@Override
protected BinaryStringNumericPipe mutate(BinaryStringNumericPipe instance) {
List<Function<BinaryStringNumericPipe, BinaryStringNumericPipe>> randoms = new ArrayList<>();
randoms.add(
f -> new BinaryStringNumericPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))),
f.right(),
f.operation()
)
);
randoms.add(
f -> new BinaryStringNumericPipe(
f.source(),
f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral()))),
f.operation()
)
);
randoms.add(
f -> new BinaryStringNumericPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))),
pipe(((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral()))),
f.operation()
)
);
return randomFrom(randoms).apply(instance);
}
@Override
protected BinaryStringNumericPipe copy(BinaryStringNumericPipe instance) {
return new BinaryStringNumericPipe(
instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.operation()
);
}
}
| BinaryStringNumericPipeTests |
java | quarkusio__quarkus | integration-tests/spring-web/src/test/java/io/quarkus/it/spring/web/TestSecurityTest.java | {
"start": 429,
"end": 1278
} | class ____ {
@Test
public void testSecuredWithDisabledAuth() {
RestAssured.when().get("/api/securedMethod").then()
.body(is("accessibleForAdminOnly"));
}
@Test
public void testPreAuthorizeWithDisabledAuth() {
RestAssured.when().get("/api/allowedForUserOrViewer").then()
.body(is("allowedForUserOrViewer"));
}
@Test
@TestSecurity(user = "dummy", roles = "viewer")
public void testWithTestSecurityAndWrongRole() {
RestAssured.when().get("/api/securedMethod").then()
.statusCode(403);
}
@Test
@TestSecurity(user = "dummy", roles = "admin")
public void testWithTestSecurityAndCorrectRole() {
RestAssured.when().get("/api/securedMethod").then()
.body(is("accessibleForAdminOnly"));
}
}
| TestSecurityTest |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/UnnecessaryAssignment.java | {
"start": 3937,
"end": 12331
} | class ____ extends BugChecker
implements AssignmentTreeMatcher, VariableTreeMatcher {
private static final ImmutableSet<String> FRAMEWORK_ANNOTATIONS =
ImmutableSet.of(
"com.google.testing.junit.testparameterinjector.TestParameter",
"com.google.inject.Inject",
"jakarta.inject.Inject",
"javax.inject.Inject");
private static final String MOCK_ANNOTATION = "org.mockito.Mock";
private static final Matcher<Tree> HAS_MOCK_ANNOTATION = symbolHasAnnotation(MOCK_ANNOTATION);
private static final Matcher<Tree> HAS_NON_MOCK_FRAMEWORK_ANNOTATION =
allOf(
anyOf(
FRAMEWORK_ANNOTATIONS.stream()
.map(Matchers::symbolHasAnnotation)
.collect(toImmutableList())),
not(UnnecessaryAssignment::isOptionalInject));
private static boolean isOptionalInject(Tree tree, VisitorState state) {
var symbol = getSymbol(tree);
var compound = symbol.attribute(INJECT.get(state));
if (compound == null) {
return false;
}
return MoreAnnotations.getValue(compound, "optional")
.map(a -> Objects.equals(a.getValue(), true))
.orElse(false);
}
private static final Supplier<Symbol> INJECT =
VisitorState.memoize(state -> state.getSymbolFromString("com.google.inject.Inject"));
private static final Matcher<ExpressionTree> MOCK_FACTORY =
staticMethod().onClass("org.mockito.Mockito").named("mock");
private static final Matcher<ExpressionTree> INITIALIZES_MOCKS =
anyOf(staticMethod().onClass("org.mockito.MockitoAnnotations").named("initMocks"));
private static final MultiMatcher<ClassTree, AnnotationTree> MOCKITO_RUNNER =
annotations(
AT_LEAST_ONE,
hasArgumentWithValue(
"value",
isJUnit4TestRunnerOfType(ImmutableList.of("org.mockito.junit.MockitoJUnitRunner"))));
private static boolean hasAnnotation(Tree tree, String annotation, VisitorState state) {
Symbol sym = getSymbol(tree);
return sym != null && ASTHelpers.hasAnnotation(sym, annotation, state);
}
private static Optional<String> hasAnnotation(
Tree tree, Collection<String> annotations, VisitorState state) {
return annotations.stream().filter(a -> hasAnnotation(tree, a, state)).findFirst();
}
@Override
public Description matchAssignment(AssignmentTree tree, VisitorState state) {
Tree variable = tree.getVariable();
Optional<String> annotation = Optional.empty();
if (hasAnnotation(variable, MOCK_ANNOTATION, state)) {
annotation = Optional.of(MOCK_ANNOTATION);
} else {
annotation =
hasAnnotation(variable, FRAMEWORK_ANNOTATIONS, state)
.filter(a -> !isOptionalInject(variable, state));
}
if (annotation.isEmpty()) {
return NO_MATCH;
}
SuggestedFix fix =
state.getPath().getParentPath().getLeaf() instanceof ExpressionStatementTree est
? SuggestedFix.delete(est)
: SuggestedFix.emptyFix();
return buildDescription(tree).addFix(fix).setMessage(buildMessage(annotation.get())).build();
}
@Override
public Description matchVariable(VariableTree tree, VisitorState state) {
// Handles field *declarations*: conflicting annotations, and initializers on fields that
// a framework will overwrite.
boolean hasMockAnnotation = HAS_MOCK_ANNOTATION.matches(tree, state);
boolean hasInjectyAnnotation = HAS_NON_MOCK_FRAMEWORK_ANNOTATION.matches(tree, state);
if (hasMockAnnotation && hasInjectyAnnotation) {
// Both Mockito and the injector would write to this field; one value silently wins.
return buildDescription(tree)
.setMessage(
"Fields shouldn't be annotated with both @Mock and another @Inject-like annotation,"
+ " because both Mockito and the injector will assign to the field, and one of"
+ " the values will overwrite the other")
.build();
}
if (tree.getInitializer() == null) {
// Declaration without an initializer: nothing to flag.
return NO_MATCH;
}
if (hasMockAnnotation) {
return buildDescription(tree)
.addFix(createMockFix(tree, state))
.setMessage(buildMessage(MOCK_ANNOTATION))
.build();
}
if (hasInjectyAnnotation) {
Description.Builder description = buildDescription(tree);
// Fix option 1: drop the initializer (only possible when the field isn't final, since a
// final field must be assigned exactly once).
if (!tree.getModifiers().getFlags().contains(Modifier.FINAL)) {
String source =
state
.getSourceCode()
.subSequence(getStartPosition(tree), getStartPosition(tree.getInitializer()))
.toString();
// Tokenize the declaration prefix to find the last '=' before the initializer, so the
// replacement starts exactly at the assignment operator.
ImmutableList<ErrorProneToken> tokens =
getTokens(source, getStartPosition(tree), state.context);
int equalsPos =
findLast(tokens.stream().filter(t -> t.kind().equals(TokenKind.EQ))).get().pos();
description.addFix(
SuggestedFix.builder()
.setShortDescription("Remove the variable's initializer")
.replace(equalsPos, state.getEndPosition(tree.getInitializer()), "")
.build());
}
// Fix option 2: drop the framework annotation instead.
// NOTE(review): the bare .get() assumes HAS_NON_MOCK_FRAMEWORK_ANNOTATION matching implies
// one of FRAMEWORK_ANNOTATIONS is present as a subtype — confirm the two stay in sync.
AnnotationTree annotationToRemove =
tree.getModifiers().getAnnotations().stream()
.filter(
anno ->
FRAMEWORK_ANNOTATIONS.stream()
.anyMatch(
fanno ->
isSubtype(getType(anno), state.getTypeFromString(fanno), state)))
.findFirst()
.get();
return description
.setMessage(
String.format(
"Fields annotated with @%s should not be manually assigned to, as they should be"
+ " initialized by a framework. Remove the assignment if a framework is"
+ " being used, or the annotation if one isn't.",
getType(annotationToRemove).tsym.getSimpleName()))
.addFix(
SuggestedFix.builder()
.setShortDescription("Remove the annotation")
.delete(annotationToRemove)
.build())
.build();
}
return NO_MATCH;
}
/**
 * Builds the fix for a @Mock-annotated field that also has an initializer. If the initializer is
 * a Mockito.mock(...) call and nothing in the class triggers Mockito annotation processing, the
 * annotation is redundant and is deleted; otherwise the initializer is removed instead.
 */
private static SuggestedFix createMockFix(VariableTree tree, VisitorState state) {
if (MOCK_FACTORY.matches(tree.getInitializer(), state)
&& !classContainsInitializer(state.findEnclosing(ClassTree.class), state)) {
AnnotationTree anno =
ASTHelpers.getAnnotationWithSimpleName(tree.getModifiers().getAnnotations(), "Mock");
return SuggestedFix.delete(anno);
}
int startPos = getStartPosition(tree);
ImmutableList<ErrorProneToken> tokens =
state.getOffsetTokens(startPos, getStartPosition(tree.getInitializer()));
// Scan backwards so we find the '=' closest to the initializer (annotations earlier in the
// declaration could themselves contain '=' tokens).
for (ErrorProneToken token : Lists.reverse(tokens)) {
if (token.kind() == TokenKind.EQ) {
return SuggestedFix.replace(token.pos(), state.getEndPosition(tree.getInitializer()), "");
}
}
return SuggestedFix.emptyFix();
}
/**
 * Returns true if the class triggers Mockito annotation processing: it (or a nested class) uses
 * the MockitoJUnitRunner, calls MockitoAnnotations.initMocks, or constructs something matched by
 * INITIALIZES_MOCKS.
 */
private static boolean classContainsInitializer(ClassTree classTree, VisitorState state) {
// AtomicBoolean used as a mutable flag capturable by the anonymous scanner below.
AtomicBoolean initialized = new AtomicBoolean(false);
new TreeScanner<Void, Void>() {
@Override
public Void visitClass(ClassTree classTree, Void unused) {
if (MOCKITO_RUNNER.matches(classTree, state)) {
initialized.set(true);
return null;
}
return super.visitClass(classTree, null);
}
@Override
public Void visitMethodInvocation(MethodInvocationTree methodInvocationTree, Void unused) {
if (INITIALIZES_MOCKS.matches(methodInvocationTree, state)) {
initialized.set(true);
return null;
}
return super.visitMethodInvocation(methodInvocationTree, null);
}
@Override
public Void visitNewClass(NewClassTree newClassTree, Void unused) {
if (INITIALIZES_MOCKS.matches(newClassTree, state)) {
initialized.set(true);
return null;
}
return super.visitNewClass(newClassTree, null);
}
}.scan(classTree, null);
return initialized.get();
}
/**
 * Builds the diagnostic message for a framework-initialized field, e.g. for
 * "org.mockito.Mock" the message names "@Mock". Works for both simple and fully-qualified
 * annotation names (lastIndexOf returns -1 for simple names, so the whole string is kept).
 */
private static String buildMessage(String annotationUsed) {
int lastDot = annotationUsed.lastIndexOf('.');
String simpleName = annotationUsed.substring(lastDot + 1);
return "Fields annotated with @"
+ simpleName
+ " should not be manually assigned to, as they should be"
+ " initialized by a framework. Remove the assignment if a framework is being"
+ " used, or the annotation if one isn't.";
}
}
| UnnecessaryAssignment |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/MlMemoryTrackerTests.java | {
"start": 2616,
"end": 20122
} | class ____ extends ESTestCase {
// Mocked collaborators wired into the tracker under test.
private JobManager jobManager;
private JobResultsProvider jobResultsProvider;
private DataFrameAnalyticsConfigProvider configProvider;
private MlMemoryTracker memoryTracker;
@Before
public void setup() {
ClusterSettings clusterSettings = new ClusterSettings(
Settings.EMPTY,
Collections.singleton(PersistentTasksClusterService.CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING)
);
ClusterService clusterService = mock(ClusterService.class);
ClusterState clusterState = ClusterState.EMPTY_STATE;
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
when(clusterService.state()).thenReturn(clusterState);
ThreadPool threadPool = mock(ThreadPool.class);
ExecutorService executorService = mock(ExecutorService.class);
// The mocked executor runs submitted tasks inline on the caller thread, keeping the
// tests single-threaded and deterministic.
doAnswer(invocation -> {
Runnable r = (Runnable) invocation.getArguments()[0];
r.run();
return null;
}).when(executorService).execute(any(Runnable.class));
when(threadPool.executor(anyString())).thenReturn(executorService);
jobManager = mock(JobManager.class);
jobResultsProvider = mock(JobResultsProvider.class);
configProvider = mock(DataFrameAnalyticsConfigProvider.class);
memoryTracker = new MlMemoryTracker(Settings.EMPTY, clusterService, threadPool, jobManager, jobResultsProvider, configProvider);
}
public void testRefreshAll() {
// A full refresh only works on the master node; on non-masters it must fail fast.
boolean isMaster = randomBoolean();
if (isMaster) {
memoryTracker.onMaster();
} else {
memoryTracker.offMaster();
}
Map<String, PersistentTasksCustomMetadata.PersistentTask<?>> tasks = new HashMap<>();
int numAnomalyDetectorJobTasks = randomIntBetween(2, 5);
for (int i = 1; i <= numAnomalyDetectorJobTasks; ++i) {
String jobId = "job" + i;
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestAnomalyDetectorTask(jobId);
tasks.put(task.getId(), task);
}
// One snapshot upgrade is for a running job, one for a job that isn't running
int numSnapshotUpgradeTasks = 2;
for (int i = numAnomalyDetectorJobTasks; i < numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks; ++i) {
String jobId = "job" + i;
String snapshotId = Long.toString(randomLongBetween(1000000000L, 9999999999L));
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestSnapshotUpgradeTask(jobId, snapshotId);
tasks.put(task.getId(), task);
}
List<String> allIds = new ArrayList<>();
int numDataFrameAnalyticsTasks = randomIntBetween(2, 5);
for (int i = 1; i <= numDataFrameAnalyticsTasks; ++i) {
String id = "analytics" + i;
allIds.add(id);
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestDataFrameAnalyticsTask(id, false);
tasks.put(task.getId(), task);
}
PersistentTasksCustomMetadata persistentTasks = new PersistentTasksCustomMetadata(
numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks + numDataFrameAnalyticsTasks,
tasks
);
// Stub established-memory lookups to invoke the callback with an arbitrary value.
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
Consumer<Long> listener = (Consumer<Long>) invocation.getArguments()[3];
listener.accept(randomLongBetween(1000, 1000000));
return null;
}).when(jobResultsProvider).getEstablishedMemoryUsage(anyString(), any(), any(), any(), any());
if (isMaster) {
memoryTracker.refresh(persistentTasks, ActionTestUtils.assertNoFailureListener(aVoid -> {}));
} else {
AtomicReference<Exception> exception = new AtomicReference<>();
memoryTracker.refresh(persistentTasks, ActionListener.wrap(e -> fail("Expected failure response"), exception::set));
assertEquals("Request to refresh anomaly detector memory requirement on non-master node", exception.get().getMessage());
}
if (isMaster) {
for (int i = 1; i < numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks; ++i) {
String jobId = "job" + i;
// This should only be called once even for the job where there's both a job task and a snapshot upgrade task
verify(jobResultsProvider, times(1)).getEstablishedMemoryUsage(eq(jobId), any(), any(), any(), any());
}
verify(configProvider, times(1)).getConfigsForJobsWithTasksLeniently(eq(new HashSet<>(allIds)), any());
} else {
verify(jobResultsProvider, never()).getEstablishedMemoryUsage(anyString(), any(), any(), any(), any());
}
}
public void testRefreshWithSkips() {
// Same scenario as testRefreshAll, but a random subset of job IDs is passed as the
// "to skip" set — those jobs must not have their memory usage looked up.
boolean isMaster = randomBoolean();
if (isMaster) {
memoryTracker.onMaster();
} else {
memoryTracker.offMaster();
}
Map<String, PersistentTasksCustomMetadata.PersistentTask<?>> tasks = new HashMap<>();
Set<String> toSkip = new HashSet<>();
int numAnomalyDetectorJobTasks = randomIntBetween(2, 5);
for (int i = 1; i <= numAnomalyDetectorJobTasks; ++i) {
String jobId = "job" + i;
if (randomBoolean()) {
toSkip.add(jobId);
}
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestAnomalyDetectorTask(jobId);
tasks.put(task.getId(), task);
}
// One snapshot upgrade is for a running job, one for a job that isn't running
int numSnapshotUpgradeTasks = 2;
for (int i = numAnomalyDetectorJobTasks; i < numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks; ++i) {
String jobId = "job" + i;
String snapshotId = Long.toString(randomLongBetween(1000000000L, 9999999999L));
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestSnapshotUpgradeTask(jobId, snapshotId);
tasks.put(task.getId(), task);
}
List<String> allIds = new ArrayList<>();
int numDataFrameAnalyticsTasks = randomIntBetween(2, 5);
for (int i = 1; i <= numDataFrameAnalyticsTasks; ++i) {
String id = "analytics" + i;
allIds.add(id);
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestDataFrameAnalyticsTask(id, false);
tasks.put(task.getId(), task);
}
PersistentTasksCustomMetadata persistentTasks = new PersistentTasksCustomMetadata(
numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks + numDataFrameAnalyticsTasks,
tasks
);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
Consumer<Long> listener = (Consumer<Long>) invocation.getArguments()[3];
listener.accept(randomLongBetween(1000, 1000000));
return null;
}).when(jobResultsProvider).getEstablishedMemoryUsage(anyString(), any(), any(), any(), any());
if (isMaster) {
memoryTracker.refresh(persistentTasks, toSkip, ActionTestUtils.assertNoFailureListener(aVoid -> {}));
} else {
AtomicReference<Exception> exception = new AtomicReference<>();
memoryTracker.refresh(persistentTasks, toSkip, ActionListener.wrap(e -> fail("Expected failure response"), exception::set));
assertEquals("Request to refresh anomaly detector memory requirement on non-master node", exception.get().getMessage());
}
if (isMaster) {
for (int i = 1; i < numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks; ++i) {
String jobId = "job" + i;
if (toSkip.contains(jobId)) {
// Skipped jobs are never queried.
verify(jobResultsProvider, never()).getEstablishedMemoryUsage(eq(jobId), any(), any(), any(), any());
} else {
// This should only be called once even for the job where there's both a job task and a snapshot upgrade task
verify(jobResultsProvider, times(1)).getEstablishedMemoryUsage(eq(jobId), any(), any(), any(), any());
}
}
verify(configProvider, times(1)).getConfigsForJobsWithTasksLeniently(eq(new HashSet<>(allIds)), any());
} else {
verify(jobResultsProvider, never()).getEstablishedMemoryUsage(anyString(), any(), any(), any(), any());
}
}
public void testRefreshAllFailure() {
// Verifies that a failed refresh propagates the error AND does not leave the tracker
// permanently wedged (a later refresh still runs and reports normally).
// NOTE(review): onMaster() is never called here, so the second refresh is expected to fail
// with the non-master message rather than succeed — confirm that is the intent.
Map<String, PersistentTasksCustomMetadata.PersistentTask<?>> tasks = new HashMap<>();
int numAnomalyDetectorJobTasks = randomIntBetween(2, 5);
for (int i = 1; i <= numAnomalyDetectorJobTasks; ++i) {
String jobId = "job" + i;
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestAnomalyDetectorTask(jobId);
tasks.put(task.getId(), task);
}
int numSnapshotUpgradeTasks = randomIntBetween(1, 3);
for (int i = numAnomalyDetectorJobTasks; i < numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks; ++i) {
String jobId = "job" + i;
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestAnomalyDetectorTask(jobId);
tasks.put(task.getId(), task);
}
int numDataFrameAnalyticsTasks = randomIntBetween(2, 5);
for (int i = 1; i <= numDataFrameAnalyticsTasks; ++i) {
String id = "analytics" + i;
PersistentTasksCustomMetadata.PersistentTask<?> task = makeTestDataFrameAnalyticsTask(id, false);
tasks.put(task.getId(), task);
}
PersistentTasksCustomMetadata persistentTasks = new PersistentTasksCustomMetadata(
numAnomalyDetectorJobTasks + numSnapshotUpgradeTasks + numDataFrameAnalyticsTasks,
tasks
);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
Consumer<Long> listener = (Consumer<Long>) invocation.getArguments()[3];
listener.accept(randomLongBetween(1000, 1000000));
return null;
}).when(jobResultsProvider).getEstablishedMemoryUsage(anyString(), any(), any(), any(), any());
// First run a refresh using a component that calls the onFailure method of the listener
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<List<DataFrameAnalyticsConfig>> listener = (ActionListener<List<DataFrameAnalyticsConfig>>) invocation
.getArguments()[1];
listener.onFailure(new IllegalArgumentException("computer says no"));
return null;
}).when(configProvider).getConfigsForJobsWithTasksLeniently(any(), any());
AtomicBoolean gotErrorResponse = new AtomicBoolean(false);
memoryTracker.refresh(
persistentTasks,
ActionListener.wrap(aVoid -> fail("Expected error response"), e -> gotErrorResponse.set(true))
);
assertTrue(gotErrorResponse.get());
// Now run another refresh using a component that calls the onResponse method of the listener - this
// proves that the ML memory tracker has not been permanently blocked up by the previous failure
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<List<DataFrameAnalyticsConfig>> listener = (ActionListener<List<DataFrameAnalyticsConfig>>) invocation
.getArguments()[1];
listener.onResponse(Collections.emptyList());
return null;
}).when(configProvider).getConfigsForJobsWithTasksLeniently(any(), any());
AtomicReference<Exception> exception = new AtomicReference<>();
memoryTracker.refresh(persistentTasks, ActionListener.wrap(e -> fail("Expected failure response"), exception::set));
assertEquals("Request to refresh anomaly detector memory requirement on non-master node", exception.get().getMessage());
}
public void testRefreshOneAnomalyDetectorJob() {
// Single-job refresh: the requirement comes from established model memory when available,
// otherwise from the job's configured (or pre-6.1 default) model memory limit.
boolean isMaster = randomBoolean();
if (isMaster) {
memoryTracker.onMaster();
} else {
memoryTracker.offMaster();
}
String jobId = "job";
boolean haveEstablishedModelMemory = randomBoolean();
long modelBytes = 1024 * 1024;
// A zero callback value means "no established memory" and forces the limit-based fallback.
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
Consumer<Long> listener = (Consumer<Long>) invocation.getArguments()[3];
listener.accept(haveEstablishedModelMemory ? modelBytes : 0L);
return null;
}).when(jobResultsProvider).getEstablishedMemoryUsage(eq(jobId), any(), any(), any(), any());
// Very old jobs have null analysis limits and fall back to the pre-6.1 default limit.
boolean simulateVeryOldJob = randomBoolean();
long recentJobModelMemoryLimitMb = 2;
Job job = mock(Job.class);
when(job.getAnalysisLimits()).thenReturn(simulateVeryOldJob ? null : new AnalysisLimits(recentJobModelMemoryLimitMb, 4L));
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<Job> listener = (ActionListener<Job>) invocation.getArguments()[1];
listener.onResponse(job);
return null;
}).when(jobManager).getJob(eq(jobId), any());
if (isMaster) {
AtomicReference<Long> refreshedMemoryRequirement = new AtomicReference<>();
memoryTracker.refreshAnomalyDetectorJobMemory(jobId, ActionTestUtils.assertNoFailureListener(refreshedMemoryRequirement::set));
if (haveEstablishedModelMemory) {
// Requirement = established model memory + fixed process overhead.
assertEquals(
Long.valueOf(modelBytes + Job.PROCESS_MEMORY_OVERHEAD.getBytes()),
memoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId)
);
} else {
long expectedModelMemoryLimit = simulateVeryOldJob
? AnalysisLimits.PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB
: recentJobModelMemoryLimitMb;
assertEquals(
Long.valueOf(ByteSizeValue.ofMb(expectedModelMemoryLimit).getBytes() + Job.PROCESS_MEMORY_OVERHEAD.getBytes()),
memoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId)
);
}
// The listener receives the same value that the tracker caches.
assertEquals(memoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId), refreshedMemoryRequirement.get());
} else {
AtomicReference<Exception> exception = new AtomicReference<>();
memoryTracker.refreshAnomalyDetectorJobMemory(
jobId,
ActionListener.wrap(e -> fail("Expected failure response"), exception::set)
);
assertEquals("Request to refresh anomaly detector memory requirement on non-master node", exception.get().getMessage());
assertNull(memoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId));
}
// Removal clears the cached requirement.
memoryTracker.removeAnomalyDetectorJob(jobId);
assertNull(memoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId));
}
public void testStop() {
// After stop(), refresh requests must be rejected with an EsRejectedExecutionException
// rather than queued or silently dropped.
memoryTracker.onMaster();
memoryTracker.stop();
AtomicReference<Exception> exception = new AtomicReference<>();
memoryTracker.refreshAnomalyDetectorJobMemory("job", ActionListener.wrap(ESTestCase::assertNull, exception::set));
assertNotNull(exception.get());
assertThat(exception.get(), instanceOf(EsRejectedExecutionException.class));
assertEquals("Couldn't run ML memory update - node is shutting down", exception.get().getMessage());
}
public void testMaxDuration() {
// max() returns the larger operand regardless of argument order, and the shared value
// when both operands are equal.
Duration oneMinute = Duration.ofMinutes(1);
Duration twoMinutes = Duration.ofMinutes(2);
Duration threeMinutes = Duration.ofMinutes(3);
Duration fourMinutes = Duration.ofMinutes(4);
Duration fiveMinutes = Duration.ofMinutes(5);
assertThat(MlMemoryTracker.max(oneMinute, twoMinutes), equalTo(twoMinutes));
assertThat(MlMemoryTracker.max(fourMinutes, threeMinutes), equalTo(fourMinutes));
assertThat(MlMemoryTracker.max(fiveMinutes, fiveMinutes), equalTo(fiveMinutes));
}
/** Builds an unassigned persistent task representing an open anomaly detector job. */
private PersistentTasksCustomMetadata.PersistentTask<OpenJobAction.JobParams> makeTestAnomalyDetectorTask(String jobId) {
return new PersistentTasksCustomMetadata.PersistentTask<>(
MlTasks.jobTaskId(jobId),
MlTasks.JOB_TASK_NAME,
new OpenJobAction.JobParams(jobId),
0,
PersistentTasksCustomMetadata.INITIAL_ASSIGNMENT
);
}
/** Builds an unassigned persistent task representing a model snapshot upgrade. */
private PersistentTasksCustomMetadata.PersistentTask<SnapshotUpgradeTaskParams> makeTestSnapshotUpgradeTask(
String jobId,
String snapshotId
) {
return new PersistentTasksCustomMetadata.PersistentTask<>(
MlTasks.snapshotUpgradeTaskId(jobId, snapshotId),
MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME,
new SnapshotUpgradeTaskParams(jobId, snapshotId),
0,
PersistentTasksCustomMetadata.INITIAL_ASSIGNMENT
);
}
/** Builds an unassigned persistent task representing a started data frame analytics job. */
private PersistentTasksCustomMetadata.PersistentTask<StartDataFrameAnalyticsAction.TaskParams> makeTestDataFrameAnalyticsTask(
String id,
boolean allowLazyStart
) {
return new PersistentTasksCustomMetadata.PersistentTask<>(
MlTasks.dataFrameAnalyticsTaskId(id),
MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME,
new StartDataFrameAnalyticsAction.TaskParams(id, MlConfigVersion.CURRENT, allowLazyStart),
0,
PersistentTasksCustomMetadata.INITIAL_ASSIGNMENT
);
}
}
| MlMemoryTrackerTests |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/setup/MockMvcFilterDecoratorTests.java | {
"start": 1406,
"end": 7686
} | class ____ {
// Mock servlet plumbing shared by every test; recreated before each test method.
private MockHttpServletRequest request;
private MockHttpServletResponse response;
private MockFilterChain filterChain;
private MockFilter delegate;
private MockMvcFilterDecorator filter;
@BeforeEach
public void setup() {
request = new MockHttpServletRequest();
// All request URIs in these tests are built as contextPath + pattern-relative path.
request.setContextPath("/context");
response = new MockHttpServletResponse();
filterChain = new MockFilterChain();
delegate = new MockFilter();
}
@Test
public void init() throws Exception {
// init(config) must be forwarded to the wrapped delegate filter.
FilterConfig config = new MockFilterConfig();
filter = new MockMvcFilterDecorator(delegate, new String[] {"/"});
filter.init(config);
assertThat(delegate.filterConfig).isEqualTo(config);
}
@Test
public void destroy() {
// destroy() must be forwarded to the wrapped delegate filter.
filter = new MockMvcFilterDecorator(delegate, new String[] {"/"});
filter.destroy();
assertThat(delegate.destroy).isTrue();
}
// URL-pattern matching cases: exact matches, path ("/prefix/*") mappings, extension
// ("*.ext") mappings, and dispatcher-type filtering.
@Test
public void matchExact() throws Exception {
assertFilterInvoked("/test", "/test");
}
@Test
public void matchExactEmpty() throws Exception {
assertFilterInvoked("", "");
}
@Test
public void matchPathMappingAllFolder() throws Exception {
// Bare "*" is treated the same as "/*".
assertFilterInvoked("/test/this", "*");
assertFilterInvoked("/test/this", "/*");
}
@Test
public void matchPathMappingAll() throws Exception {
assertFilterInvoked("/test", "*");
assertFilterInvoked("/test", "/*");
}
@Test
public void matchPathMappingAllContextRoot() throws Exception {
assertFilterInvoked("", "*");
assertFilterInvoked("", "/*");
}
@Test
public void matchPathMappingContextRootAndSlash() throws Exception {
assertFilterInvoked("/", "*");
assertFilterInvoked("/", "/*");
}
@Test
public void matchPathMappingFolderPatternWithMultiFolderPath() throws Exception {
assertFilterInvoked("/test/this/here", "/test/*");
}
@Test
public void matchPathMappingFolderPattern() throws Exception {
assertFilterInvoked("/test/this", "/test/*");
}
@Test
public void matchPathMappingNoSuffix() throws Exception {
assertFilterInvoked("/test/", "/test/*");
}
@Test
public void matchPathMappingMissingSlash() throws Exception {
// "/test" matches "/test/*" even without the trailing slash.
assertFilterInvoked("/test", "/test/*");
}
@Test
public void noMatchPathMappingMulti() throws Exception {
// The prefix must match from the start of the path, not mid-path.
assertFilterNotInvoked("/this/test/here", "/test/*");
}
@Test
public void noMatchPathMappingEnd() throws Exception {
assertFilterNotInvoked("/this/test", "/test/*");
}
@Test
public void noMatchPathMappingEndSuffix() throws Exception {
assertFilterNotInvoked("/test2/", "/test/*");
}
@Test
public void noMatchPathMappingMissingSlash() throws Exception {
assertFilterNotInvoked("/test2", "/test/*");
}
@Test
public void noMatchDispatcherType() throws Exception {
// Filter registered for REQUEST dispatches must not run on a FORWARD dispatch.
assertFilterNotInvoked(DispatcherType.FORWARD, DispatcherType.REQUEST, "/test", "/test");
}
@Test
public void matchExtensionMulti() throws Exception {
assertFilterInvoked("/test/this/here.html", "*.html");
}
@Test
public void matchExtension() throws Exception {
assertFilterInvoked("/test/this.html", "*.html");
}
@Test
public void matchExtensionNoPrefix() throws Exception {
assertFilterInvoked("/.html", "*.html");
}
@Test
public void matchExtensionNoFolder() throws Exception {
assertFilterInvoked("/test.html", "*.html");
}
@Test
public void noMatchExtensionNoSlash() throws Exception {
// Extension mapping requires the path to start with "/".
assertFilterNotInvoked(".html", "*.html");
}
@Test
public void noMatchExtensionSlashEnd() throws Exception {
assertFilterNotInvoked("/index.html/", "*.html");
}
@Test
public void noMatchExtensionPeriodEnd() throws Exception {
assertFilterNotInvoked("/index.html.", "*.html");
}
@Test
public void noMatchExtensionLarger() throws Exception {
assertFilterNotInvoked("/index.htm", "*.html");
}
@Test
public void noMatchInvalidPattern() throws Exception {
// pattern uses extension mapping but starts with / (treated as exact match)
assertFilterNotInvoked("/index.html", "/*.html");
}
/*
 * Below are tests from Table 12-1 of the Servlet Specification
 */
@Test
public void specPathMappingMultiFolderPattern() throws Exception {
assertFilterInvoked("/foo/bar/index.html", "/foo/bar/*");
}
@Test
public void specPathMappingMultiFolderPatternAlternate() throws Exception {
assertFilterInvoked("/foo/bar/index.bop", "/foo/bar/*");
}
@Test
public void specPathMappingNoSlash() throws Exception {
assertFilterInvoked("/baz", "/baz/*");
}
@Test
public void specPathMapping() throws Exception {
assertFilterInvoked("/baz/index.html", "/baz/*");
}
@Test
public void specExactMatch() throws Exception {
assertFilterInvoked("/catalog", "/catalog");
}
@Test
public void specExtensionMappingSingleFolder() throws Exception {
assertFilterInvoked("/catalog/racecar.bop", "*.bop");
}
@Test
public void specExtensionMapping() throws Exception {
assertFilterInvoked("/index.bop", "*.bop");
}
/** Convenience overload: both request and filter use the REQUEST dispatcher type. */
private void assertFilterNotInvoked(String requestUri, String pattern) throws Exception {
assertFilterNotInvoked(DispatcherType.REQUEST, DispatcherType.REQUEST, requestUri, pattern);
}
/**
 * Asserts that for the given URI/pattern/dispatcher combination the decorated filter is
 * bypassed: the delegate sees nothing and the request flows straight through to the chain.
 */
private void assertFilterNotInvoked(
DispatcherType requestDispatcherType, DispatcherType filterDispatcherType,
String requestUri, String pattern) throws Exception {
request.setDispatcherType(requestDispatcherType);
request.setRequestURI(request.getContextPath() + requestUri);
filter = new MockMvcFilterDecorator(delegate, null, null, EnumSet.of(filterDispatcherType), pattern);
filter.doFilter(request, response, filterChain);
// Delegate untouched...
assertThat(delegate.request).isNull();
assertThat(delegate.response).isNull();
assertThat(delegate.chain).isNull();
// ...while the chain still received the request/response directly.
assertThat(filterChain.getRequest()).isEqualTo(request);
assertThat(filterChain.getResponse()).isEqualTo(response);
// Fresh chain so multiple assertions in one test don't interfere.
filterChain = new MockFilterChain();
}
/**
 * Asserts that for the given URI/pattern combination the decorated filter delegates to the
 * wrapped filter with the original request, response and chain.
 */
private void assertFilterInvoked(String requestUri, String pattern) throws Exception {
request.setRequestURI(request.getContextPath() + requestUri);
filter = new MockMvcFilterDecorator(delegate, new String[] {pattern});
filter.doFilter(request, response, filterChain);
assertThat(delegate.request).isEqualTo(request);
assertThat(delegate.response).isEqualTo(response);
assertThat(delegate.chain).isEqualTo(filterChain);
// Fresh delegate so multiple assertions in one test don't interfere.
delegate = new MockFilter();
}
private static | MockMvcFilterDecoratorTests |
java | apache__flink | flink-clients/src/test/java/org/apache/flink/client/program/artifact/ArtifactFetchManagerTest.java | {
"start": 9936,
"end": 10618
} | class ____ implements HttpHandler {
// The fixture file served back verbatim on every request.
final File file;
DummyHttpDownloadHandler(File fileToDownload) {
// Fail fast at construction rather than with a confusing error mid-request.
checkArgument(fileToDownload.exists(), "The file to be download not exists!");
this.file = fileToDownload;
}
@Override
public void handle(HttpExchange exchange) throws IOException {
// Streams the fixture file back as a binary download. The exchange is closed in a
// finally block so a failed copy cannot leak the connection on the test HTTP server.
try {
exchange.getResponseHeaders().add("Content-Type", "application/octet-stream");
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, file.length());
// Fully-qualified java.nio.file.Files.copy replaces commons-io FileUtils.copyFile:
// same byte-for-byte stream copy, no third-party dependency, no new import needed.
java.nio.file.Files.copy(this.file.toPath(), exchange.getResponseBody());
} finally {
exchange.close();
}
}
}
private static | DummyHttpDownloadHandler |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeSetAssert_containsAnyRangesOf_Test.java | {
"start": 1454,
"end": 3502
} | class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
RangeSet<Integer> actual = null;
// WHEN
var error = expectAssertionError(() -> assertThat(actual).containsAnyRangesOf(asList(1, 2)));
// THEN
// A null actual yields the standard "expecting actual not to be null" message.
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_values_is_null() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
Iterable<Integer> values = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).containsAnyRangesOf(values));
// THEN
// Null values argument is a usage error (NPE), not an assertion failure.
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("values").create());
}
@Test
void should_fail_if_values_is_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 1));
Iterable<Integer> values = emptySet();
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).containsAnyRangesOf(values));
// THEN
// Empty values with a non-empty actual is rejected as an illegal argument.
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("Expecting values not to be empty");
}
@Test
void should_fail_if_actual_does_not_contain_values() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 3));
List<Integer> values = List.of(4, 5);
// WHEN
var error = expectAssertionError(() -> assertThat(actual).containsAnyRangesOf(values));
// THEN
then(error).hasMessage(shouldContainAnyOf(actual, values).create());
}
@Test
void should_pass_if_both_actual_and_values_are_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
Iterable<Integer> values = emptySet();
// WHEN/THEN
// Empty/empty is the one case where empty values is accepted.
assertThat(actual).containsAnyRangesOf(values);
}
@Test
void should_pass_if_actual_contains_any_values() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(1, 10));
Iterable<Integer> values = List.of(0, 1, 2, 12, 13);
// WHEN/THEN
// Only some of the values (1 and 2) fall inside [1,10] — "any" semantics suffice.
assertThat(actual).containsAnyRangesOf(values);
}
}
| RangeSetAssert_containsAnyRangesOf_Test |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlLoadDataInFileStatement.java | {
"start": 986,
"end": 6361
} | class ____ extends MySqlStatementImpl {
// AST fields mirroring the clauses of MySQL's
// LOAD DATA [LOW_PRIORITY | CONCURRENT] [LOCAL] INFILE 'file' ... INTO TABLE tbl ... syntax.
private boolean lowPriority;
private boolean concurrent;
private boolean local;
private SQLLiteralExpr fileName;
private boolean replicate;
private boolean ignore;
private SQLName tableName;
private String charset;
// FIELDS/COLUMNS clause options.
private SQLLiteralExpr columnsTerminatedBy;
private boolean columnsEnclosedOptionally;
private SQLLiteralExpr columnsEnclosedBy;
private SQLLiteralExpr columnsEscaped;
// LINES clause options.
private SQLLiteralExpr linesStartingBy;
private SQLLiteralExpr linesTerminatedBy;
// IGNORE n LINES clause.
private SQLExpr ignoreLinesNumber;
// SET col = expr assignments and the optional column list.
private List<SQLExpr> setList = new ArrayList<SQLExpr>();
private List<SQLExpr> columns = new ArrayList<SQLExpr>();
// Plain bean-style accessors for the clause fields above.
public boolean isLowPriority() {
return lowPriority;
}
public void setLowPriority(boolean lowPriority) {
this.lowPriority = lowPriority;
}
public boolean isConcurrent() {
return concurrent;
}
public void setConcurrent(boolean concurrent) {
this.concurrent = concurrent;
}
public boolean isLocal() {
return local;
}
public void setLocal(boolean local) {
this.local = local;
}
public SQLLiteralExpr getFileName() {
return fileName;
}
public void setFileName(SQLLiteralExpr fileName) {
this.fileName = fileName;
}
public boolean isReplicate() {
return replicate;
}
public void setReplicate(boolean replicate) {
this.replicate = replicate;
}
public boolean isIgnore() {
return ignore;
}
public void setIgnore(boolean ignore) {
this.ignore = ignore;
}
public SQLName getTableName() {
return tableName;
}
public void setTableName(SQLName tableName) {
this.tableName = tableName;
}
public String getCharset() {
return charset;
}
public void setCharset(String charset) {
this.charset = charset;
}
public SQLLiteralExpr getColumnsTerminatedBy() {
return columnsTerminatedBy;
}
public void setColumnsTerminatedBy(SQLLiteralExpr columnsTerminatedBy) {
this.columnsTerminatedBy = columnsTerminatedBy;
}
public boolean isColumnsEnclosedOptionally() {
return columnsEnclosedOptionally;
}
public void setColumnsEnclosedOptionally(boolean columnsEnclosedOptionally) {
this.columnsEnclosedOptionally = columnsEnclosedOptionally;
}
public SQLLiteralExpr getColumnsEnclosedBy() {
return columnsEnclosedBy;
}
public void setColumnsEnclosedBy(SQLLiteralExpr columnsEnclosedBy) {
this.columnsEnclosedBy = columnsEnclosedBy;
}
public SQLLiteralExpr getColumnsEscaped() {
return columnsEscaped;
}
public void setColumnsEscaped(SQLLiteralExpr columnsEscaped) {
this.columnsEscaped = columnsEscaped;
}
public SQLLiteralExpr getLinesStartingBy() {
return linesStartingBy;
}
public void setLinesStartingBy(SQLLiteralExpr linesStartingBy) {
this.linesStartingBy = linesStartingBy;
}
public SQLLiteralExpr getLinesTerminatedBy() {
return linesTerminatedBy;
}
public void setLinesTerminatedBy(SQLLiteralExpr linesTerminatedBy) {
this.linesTerminatedBy = linesTerminatedBy;
}
public SQLExpr getIgnoreLinesNumber() {
return ignoreLinesNumber;
}
public void setIgnoreLinesNumber(SQLExpr ignoreLinesNumber) {
this.ignoreLinesNumber = ignoreLinesNumber;
}
public List<SQLExpr> getSetList() {
return setList;
}
// Visitor dispatch: visit this node, then each non-null child clause in syntax order.
// NOTE(review): `columns` is not passed to acceptChild here although `setList` is —
// confirm whether skipping the column list is intentional.
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, fileName);
acceptChild(visitor, tableName);
acceptChild(visitor, columnsTerminatedBy);
acceptChild(visitor, columnsEnclosedBy);
acceptChild(visitor, columnsEscaped);
acceptChild(visitor, linesStartingBy);
acceptChild(visitor, linesTerminatedBy);
acceptChild(visitor, ignoreLinesNumber);
acceptChild(visitor, setList);
}
visitor.endVisit(this);
}
@Override
public List<SQLObject> getChildren() {
// Collects the non-null single-valued child clauses.
// NOTE(review): unlike accept0, neither setList nor columns elements are included here —
// confirm whether the list-valued children should be added for consistency.
List<SQLObject> children = new ArrayList<SQLObject>();
if (fileName != null) {
children.add(fileName);
}
if (tableName != null) {
children.add(tableName);
}
if (columnsTerminatedBy != null) {
children.add(columnsTerminatedBy);
}
if (columnsEnclosedBy != null) {
children.add(columnsEnclosedBy);
}
if (columnsEscaped != null) {
children.add(columnsEscaped);
}
if (linesStartingBy != null) {
children.add(linesStartingBy);
}
if (linesTerminatedBy != null) {
children.add(linesTerminatedBy);
}
if (ignoreLinesNumber != null) {
children.add(ignoreLinesNumber);
}
return children;
}
// Accessors for the column list and SET assignments (stored by reference, not copied).
public List<SQLExpr> getColumns() {
return columns;
}
public void setColumns(List<SQLExpr> columns) {
this.columns = columns;
}
public void setSetList(List<SQLExpr> setList) {
this.setList = setList;
}
}
| MySqlLoadDataInFileStatement |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/util/NonReusingKeyGroupedIteratorTest.java | {
"start": 1640,
"end": 38112
} | class ____ {
private MutableObjectIterator<Record> sourceIter; // the iterator that provides the input
private NonReusingKeyGroupedIterator<Record>
psi; // the grouping iterator, progressing in key steps
@BeforeEach
void setup() {
final ArrayList<IntStringPair> source = new ArrayList<IntStringPair>();
// add elements to the source
source.add(new IntStringPair(new IntValue(1), new StringValue("A")));
source.add(new IntStringPair(new IntValue(2), new StringValue("B")));
source.add(new IntStringPair(new IntValue(3), new StringValue("C")));
source.add(new IntStringPair(new IntValue(3), new StringValue("D")));
source.add(new IntStringPair(new IntValue(4), new StringValue("E")));
source.add(new IntStringPair(new IntValue(4), new StringValue("F")));
source.add(new IntStringPair(new IntValue(4), new StringValue("G")));
source.add(new IntStringPair(new IntValue(5), new StringValue("H")));
source.add(new IntStringPair(new IntValue(5), new StringValue("I")));
source.add(new IntStringPair(new IntValue(5), new StringValue("J")));
source.add(new IntStringPair(new IntValue(5), new StringValue("K")));
source.add(new IntStringPair(new IntValue(5), new StringValue("L")));
this.sourceIter =
new MutableObjectIterator<Record>() {
final Iterator<IntStringPair> it = source.iterator();
@Override
public Record next(Record reuse) throws IOException {
if (it.hasNext()) {
IntStringPair pair = it.next();
reuse.setField(0, pair.getInteger());
reuse.setField(1, pair.getString());
return reuse;
} else {
return null;
}
}
@Override
public Record next() throws IOException {
if (it.hasNext()) {
IntStringPair pair = it.next();
Record result = new Record(2);
result.setField(0, pair.getInteger());
result.setField(1, pair.getString());
return result;
} else {
return null;
}
}
};
@SuppressWarnings("unchecked")
final RecordComparator comparator =
new RecordComparator(new int[] {0}, new Class[] {IntValue.class});
this.psi = new NonReusingKeyGroupedIterator<Record>(this.sourceIter, comparator);
}
@Test
void testNextKeyOnly() throws Exception {
try {
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(1))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isOne();
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(2))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(2);
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(3))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(4))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(4);
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must not have another key.")
.isFalse();
assertThat((Iterable<? extends Record>) this.psi.getValues())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isNull();
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must not have another key.")
.isFalse();
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must not have another key.")
.isFalse();
} catch (Exception e) {
e.printStackTrace();
fail("The test encountered an unexpected exception.");
}
}
@Test
void testFullIterationThroughAllValues() throws IOException {
try {
// Key 1, Value A
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(1))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isOne();
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("A");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(1))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
// Key 2, Value B
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(2))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(2);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("B");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(2))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
// Key 3, Values C, D
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(3))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("C");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(3))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("D");
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(3))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThatThrownBy(() -> this.psi.getValues().next())
.withFailMessage(
"A new KeyGroupedIterator must not have any value available and hence throw an exception on next().")
.isInstanceOf(NoSuchElementException.class);
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
assertThatThrownBy(() -> this.psi.getValues().next())
.withFailMessage(
"A new KeyGroupedIterator must not have any value available and hence throw an exception on next().")
.isInstanceOf(NoSuchElementException.class);
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(3))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
// Key 4, Values E, F, G
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(4))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(4);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("E");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(4))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(4);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("F");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(4))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(4);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("G");
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(4))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(4);
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(4))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(4);
// Key 5, Values H, I, J, K, L
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("H");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("I");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("J");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("K");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("L");
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThatThrownBy(() -> this.psi.getValues().next())
.withFailMessage(
"A new KeyGroupedIterator must not have any value available and hence throw an exception on next().")
.isInstanceOf(NoSuchElementException.class);
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
assertThat(
this.psi
.getComparatorWithCurrentReference()
.equalToReference(new Record(new IntValue(5))))
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThatThrownBy(() -> this.psi.getValues().next())
.withFailMessage(
"A new KeyGroupedIterator must not have any value available and hence throw an exception on next().")
.isInstanceOf(NoSuchElementException.class);
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must not have another value.")
.isFalse();
} catch (Exception e) {
e.printStackTrace();
fail("The test encountered an unexpected exception.");
}
}
@Test
void testMixedProgress() throws Exception {
try {
// Progression only via nextKey() and hasNext() - Key 1, Value A
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
// Progression only through nextKey() - Key 2, Value B
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
// Progression first though haNext() and next(), then through hasNext() - Key 3, Values
// C, D
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("C");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
// Progression first via next() only, then hasNext() only Key 4, Values E, F, G
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("E");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
// Key 5, Values H, I, J, K, L
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("H");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getCurrent().getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(5);
assertThat(this.psi.getValues().next().getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("I");
assertThat(this.psi.getValues().hasNext())
.withFailMessage("KeyGroupedIterator must have another value.")
.isTrue();
// end
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must not have another key.")
.isFalse();
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must not have another key.")
.isFalse();
} catch (Exception e) {
e.printStackTrace();
fail("The test encountered an unexpected exception.");
}
}
@Test
void testHasNextDoesNotOverwriteCurrentRecord() throws Exception {
try {
Iterator<Record> valsIter = null;
Record rec = null;
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
valsIter = this.psi.getValues();
assertThat(valsIter).withFailMessage("Returned Iterator must not be null").isNotNull();
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator's value iterator must have another value.")
.hasNext();
rec = valsIter.next();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isOne();
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("A");
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator must not have another value.")
.isExhausted();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isOne();
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("A");
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
valsIter = this.psi.getValues();
assertThat(valsIter).withFailMessage("Returned Iterator must not be null").isNotNull();
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator's value iterator must have another value.")
.hasNext();
rec = valsIter.next();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(2);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("B");
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator must not have another value.")
.isExhausted();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(2);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("B");
assertThat(this.psi.nextKey())
.withFailMessage("KeyGroupedIterator must have another key.")
.isTrue();
valsIter = this.psi.getValues();
assertThat(valsIter).withFailMessage("Returned Iterator must not be null").isNotNull();
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator's value iterator must have another value.")
.hasNext();
rec = valsIter.next();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("C");
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator's value iterator must have another value.")
.hasNext();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("C");
rec = valsIter.next();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("D");
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator must not have another value.")
.isExhausted();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("D");
assertThat(valsIter)
.withFailMessage("KeyGroupedIterator must not have another value.")
.isExhausted();
assertThat(rec.getField(0, IntValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong key.")
.isEqualTo(3);
assertThat(rec.getField(1, StringValue.class).getValue())
.withFailMessage("KeyGroupedIterator returned a wrong value.")
.isEqualTo("D");
} catch (Exception e) {
e.printStackTrace();
fail("The test encountered an unexpected exception.");
}
}
private static final | NonReusingKeyGroupedIteratorTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/RouteDirectSuspendResumeTest.java | {
"start": 1202,
"end": 2785
} | class ____ extends ContextTestSupport {
@Test
public void testSuspendResume() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A");
template.sendBody("direct:foo", "A");
assertMockEndpointsSatisfied();
log.info("Suspending");
// now suspend and dont expect a message to be routed
resetMocks();
mock.expectedMessageCount(0);
context.getRouteController().suspendRoute("foo");
// direct consumer supports suspension
assertEquals("Suspended", context.getRouteController().getRouteStatus("foo").name());
assertThrows(Exception.class, () -> template.sendBody("direct:foo", "B"),
"Should have thrown an exception");
log.info("Resuming");
// now resume and expect the previous message to be routed
resetMocks();
mock.expectedBodiesReceived("B");
context.getRouteController().resumeRoute("foo");
template.sendBody("direct:foo", "B");
assertMockEndpointsSatisfied();
assertEquals("Started", context.getRouteController().getRouteStatus("foo").name());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.getComponent("direct", DirectComponent.class).setBlock(false);
from("direct:foo").routeId("foo").to("log:foo").to("mock:result");
}
};
}
}
| RouteDirectSuspendResumeTest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/environment/StreamExecutionEnvironmentTest.java | {
"start": 3287,
"end": 26380
} | class ____ {
@Test
void fromElementsWithBaseTypeTest1() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.fromData(ParentClass.class, new SubClass(1, "Java"), new ParentClass(1, "hello"));
}
@Test
void fromElementsWithBaseTypeTest2() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
assertThatThrownBy(
() ->
env.fromData(
SubClass.class,
new SubClass(1, "Java"),
new ParentClass(1, "hello")))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testFromElementsDeducedType() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> source = env.fromData("a", "b");
DataGeneratorSource<String> generatorSource = getSourceFromStream(source);
assertThat(generatorSource.getProducedType()).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
@Test
void testFromElementsPostConstructionType() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> source = env.fromData("a", "b");
TypeInformation<String> customType = new GenericTypeInfo<>(String.class);
source.returns(customType);
DataGeneratorSource<String> generatorSource = getSourceFromStream(source);
source.sinkTo(new DiscardingSink<>());
env.getStreamGraph();
assertThat(generatorSource.getProducedType()).isNotEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(generatorSource.getProducedType()).isEqualTo(customType);
}
@Test
@SuppressWarnings({"unchecked", "rawtypes"})
void testFromElementsPostConstructionTypeIncompatible() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> source = env.fromData("a", "b");
source.returns((TypeInformation) BasicTypeInfo.INT_TYPE_INFO);
source.sinkTo(new DiscardingSink<>());
assertThatThrownBy(env::getStreamGraph)
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("not all subclasses of java.lang.Integer");
}
@Test
void testFromElementsNullElement() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
assertThatThrownBy(() -> env.fromData("a", null, "c"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("contains a null element");
}
@Test
@SuppressWarnings("unchecked")
void testFromCollectionParallelism() {
try {
TypeInformation<Integer> typeInfo = BasicTypeInfo.INT_TYPE_INFO;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<Integer> dataStream2 =
env.fromParallelCollection(new DummySplittableIterator<Integer>(), typeInfo)
.setParallelism(4);
dataStream2.sinkTo(new DiscardingSink<>());
final StreamGraph streamGraph = env.getStreamGraph();
streamGraph.getStreamingPlanAsJSON();
assertThat(streamGraph.getStreamNode(dataStream2.getId()).getParallelism())
.as("Parallelism of parallel collection source must be 4.")
.isEqualTo(4);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
void testSources() {
// TODO: remove this test when SourceFunction API gets removed together with the deprecated
// StreamExecutionEnvironment generateSequence() and fromCollection() methods
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
SourceFunction<Integer> srcFun =
new SourceFunction<Integer>() {
private static final long serialVersionUID = 1L;
@Override
public void run(SourceContext<Integer> ctx) throws Exception {}
@Override
public void cancel() {}
};
DataStreamSource<Integer> src1 = env.addSource(srcFun);
src1.sinkTo(new DiscardingSink<>());
assertThat(getFunctionFromDataSource(src1)).isEqualTo(srcFun);
List<Long> list = Arrays.asList(0L, 1L, 2L);
DataStreamSource<Long> src2 = env.fromSequence(0, 2);
Object generatorSource = getSourceFromStream(src2);
assertThat(generatorSource).isInstanceOf(NumberSequenceSource.class);
DataStreamSource<Long> src3 = env.fromData(0L, 1L, 2L);
assertThat(getSourceFromDataSourceTyped(src3)).isInstanceOf(DataGeneratorSource.class);
DataStreamSource<Long> src4 = env.fromCollection(list);
assertThat(getFunctionFromDataSource(src4)).isInstanceOf(FromElementsFunction.class);
}
/** Verifies that the API method doesn't throw and creates a source of the expected type. */
@Test
void testFromSequence() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<Long> src = env.fromSequence(0, 2);
assertThat(src.getType()).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
}
    /**
     * Verifies the valid range of (max) parallelism values: settable values lie within
     * [1, 1 &lt;&lt; 15], out-of-range values are rejected with {@link IllegalArgumentException},
     * and the environment-wide max parallelism only takes effect on the transformation once the
     * job graph is generated — unless an operator-level value was set, which wins.
     */
    @Test
    void testParallelismBounds() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // No-op source: only the transformation's parallelism settings matter in this test.
        SourceFunction<Integer> srcFun =
                new SourceFunction<Integer>() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public void run(SourceContext<Integer> ctx) throws Exception {}

                    @Override
                    public void cancel() {}
                };
        SingleOutputStreamOperator<Object> operator =
                env.addSource(srcFun)
                        .flatMap(
                                new FlatMapFunction<Integer, Object>() {
                                    private static final long serialVersionUID = 1L;

                                    @Override
                                    public void flatMap(Integer value, Collector<Object> out)
                                            throws Exception {}
                                });

        // default value for max parallelism (-1 means "not set")
        assertThat(operator.getTransformation().getMaxParallelism()).isEqualTo(-1);

        // bounds for parallelism 1: zero is invalid
        assertThatThrownBy(() -> operator.setParallelism(0))
                .isInstanceOf(IllegalArgumentException.class);

        // bounds for parallelism 2: lower bound is 1
        operator.setParallelism(1);
        assertThat(operator.getParallelism()).isOne();

        // bounds for parallelism 3: upper bound is 1 << 15
        operator.setParallelism(1 << 15);
        assertThat(operator.getParallelism()).isEqualTo(1 << 15);

        // default value after generating: generating the job graph alone does not set it
        env.getStreamGraph(false).getJobGraph();
        assertThat(operator.getTransformation().getMaxParallelism()).isEqualTo(-1);

        // configured value after generating: the environment-wide setting is applied
        env.setMaxParallelism(42);
        env.getStreamGraph(false).getJobGraph();
        assertThat(operator.getTransformation().getMaxParallelism()).isEqualTo(42);

        // bounds configured parallelism 1: zero is invalid on the environment
        assertThatThrownBy(() -> env.setMaxParallelism(0))
                .isInstanceOf(IllegalArgumentException.class);

        // bounds configured parallelism 2: values above 1 << 15 are invalid
        assertThatThrownBy(() -> env.setMaxParallelism(1 + (1 << 15)))
                .isInstanceOf(IllegalArgumentException.class);

        // bounds for max parallelism 1: zero is invalid on the operator as well
        assertThatThrownBy(() -> operator.setMaxParallelism(0))
                .isInstanceOf(IllegalArgumentException.class);

        // bounds for max parallelism 2: values above 1 << 15 are invalid
        assertThatThrownBy(() -> operator.setMaxParallelism(1 + (1 << 15)))
                .isInstanceOf(IllegalArgumentException.class);

        // bounds for max parallelism 3: lower bound is 1
        operator.setMaxParallelism(1);
        assertThat(operator.getTransformation().getMaxParallelism()).isOne();

        // bounds for max parallelism 4: upper bound is 1 << 15
        operator.setMaxParallelism(1 << 15);
        assertThat(operator.getTransformation().getMaxParallelism()).isEqualTo(1 << 15);

        // override config: the operator-level value wins over the environment-wide 42
        env.getStreamGraph(false).getJobGraph();
        assertThat(operator.getTransformation().getMaxParallelism()).isEqualTo(1 << 15);
    }
@Test
void testRegisterSlotSharingGroup() {
final SlotSharingGroup ssg1 =
SlotSharingGroup.newBuilder("ssg1").setCpuCores(1).setTaskHeapMemoryMB(100).build();
final SlotSharingGroup ssg2 =
SlotSharingGroup.newBuilder("ssg2").setCpuCores(2).setTaskHeapMemoryMB(200).build();
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.registerSlotSharingGroup(ssg1);
env.registerSlotSharingGroup(ssg2);
env.registerSlotSharingGroup(SlotSharingGroup.newBuilder("ssg3").build());
final DataStream<Integer> source = env.fromData(1).slotSharingGroup("ssg1");
source.map(value -> value).slotSharingGroup(ssg2).sinkTo(new DiscardingSink<>());
final StreamGraph streamGraph = env.getStreamGraph();
assertThat(streamGraph.getSlotSharingGroupResource("ssg1").get())
.isEqualTo(ResourceProfile.fromResources(1, 100));
assertThat(streamGraph.getSlotSharingGroupResource("ssg2").get())
.isEqualTo(ResourceProfile.fromResources(2, 200));
assertThat(streamGraph.getSlotSharingGroupResource("ssg3")).isNotPresent();
}
@Test
void testRegisterSlotSharingGroupConflict() {
final SlotSharingGroup ssg =
SlotSharingGroup.newBuilder("ssg1").setCpuCores(1).setTaskHeapMemoryMB(100).build();
final SlotSharingGroup ssgConflict =
SlotSharingGroup.newBuilder("ssg1").setCpuCores(2).setTaskHeapMemoryMB(200).build();
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.registerSlotSharingGroup(ssg);
final DataStream<Integer> source = env.fromData(1).slotSharingGroup("ssg1");
source.map(value -> value).slotSharingGroup(ssgConflict).sinkTo(new DiscardingSink<>());
assertThatThrownBy(env::getStreamGraph).isInstanceOf(IllegalArgumentException.class);
}
@Test
void testGetStreamGraph() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<Integer> dataStream1 = env.fromData(1, 2, 3);
dataStream1.sinkTo(new DiscardingSink<>());
assertThat(env.getStreamGraph().getStreamNodes().size()).isEqualTo(2);
DataStreamSource<Integer> dataStream2 = env.fromData(1, 2, 3);
dataStream2.sinkTo(new DiscardingSink<>());
// Previous getStreamGraph() call cleaned dataStream1 transformations
assertThat(env.getStreamGraph().getStreamNodes().size()).isEqualTo(2);
DataStreamSource<Integer> dataStream3 = env.fromData(1, 2, 3);
dataStream3.sinkTo(new DiscardingSink<>());
// Does not clear the transformations.
env.getExecutionPlan();
DataStreamSource<Integer> dataStream4 = env.fromData(1, 2, 3);
dataStream4.sinkTo(new DiscardingSink<>());
// dataStream3 are preserved
assertThat(env.getStreamGraph().getStreamNodes().size()).isEqualTo(4);
}
@Test
void testDefaultJobName() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
testJobName(StreamGraphGenerator.DEFAULT_STREAMING_JOB_NAME, env);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
testJobName(StreamGraphGenerator.DEFAULT_BATCH_JOB_NAME, env);
}
@Test
void testUserDefinedJobName() {
String jobName = "MyTestJob";
Configuration config = new Configuration();
config.set(PipelineOptions.NAME, jobName);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
testJobName(jobName, env);
}
@Test
void testUserDefinedJobNameWithConfigure() {
String jobName = "MyTestJob";
Configuration config = new Configuration();
config.set(PipelineOptions.NAME, jobName);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.configure(config, this.getClass().getClassLoader());
testJobName(jobName, env);
}
private void testJobName(String expectedJobName, StreamExecutionEnvironment env) {
env.fromData(1, 2, 3).print();
StreamGraph streamGraph = env.getStreamGraph();
assertThat(streamGraph.getJobName()).isEqualTo(expectedJobName);
}
@Test
void testAddSourceWithUserDefinedTypeInfo() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<Row> source1 =
env.addSource(new RowSourceFunction(), Types.ROW(Types.STRING));
// the source type information should be the user defined type
assertThat(source1.getType()).isEqualTo(Types.ROW(Types.STRING));
DataStreamSource<Row> source2 = env.addSource(new RowSourceFunction());
// the source type information should be derived from RowSourceFunction#getProducedType
assertThat(source2.getType()).isEqualTo(new GenericTypeInfo<>(Row.class));
}
@Test
void testPeriodicMaterializeEnabled() {
Configuration config = new Configuration();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.configure(config, this.getClass().getClassLoader());
assertThat(env.getConfig().isPeriodicMaterializeEnabled())
.isEqualTo(StateChangelogOptions.PERIODIC_MATERIALIZATION_ENABLED.defaultValue());
config.set(StateChangelogOptions.PERIODIC_MATERIALIZATION_ENABLED, false);
env.configure(config, this.getClass().getClassLoader());
assertThat(env.getConfig().isPeriodicMaterializeEnabled()).isFalse();
}
    /**
     * Verifies the periodic materialization interval: it falls back to the option's default,
     * picks up an explicitly configured duration (converted to milliseconds), and rejects
     * negative durations with an {@link IllegalArgumentException}.
     */
    @Test
    void testPeriodicMaterializeInterval() {
        Configuration config = new Configuration();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Empty configuration: the option's default value applies.
        env.configure(config, this.getClass().getClassLoader());
        assertThat(env.getConfig().getPeriodicMaterializeIntervalMillis())
                .isEqualTo(
                        StateChangelogOptions.PERIODIC_MATERIALIZATION_INTERVAL
                                .defaultValue()
                                .toMillis());

        // An explicitly configured duration string is parsed and exposed in milliseconds.
        config.setString(StateChangelogOptions.PERIODIC_MATERIALIZATION_INTERVAL.key(), "60s");
        env.configure(config, this.getClass().getClassLoader());
        assertThat(env.getConfig().getPeriodicMaterializeIntervalMillis()).isEqualTo(60 * 1000);

        // Negative intervals are rejected when the configuration is applied.
        assertThatThrownBy(
                        () -> {
                            config.setString(
                                    StateChangelogOptions.PERIODIC_MATERIALIZATION_INTERVAL.key(),
                                    "-1ms");
                            env.configure(config, this.getClass().getClassLoader());
                        })
                .isInstanceOf(IllegalArgumentException.class);
    }
@Test
void testBufferTimeoutByDefault() {
Configuration config = new Configuration();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
testBufferTimeout(config, env);
}
@Test
void testBufferTimeoutEnabled() {
Configuration config = new Configuration();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
config.set(ExecutionOptions.BUFFER_TIMEOUT_ENABLED, true);
testBufferTimeout(config, env);
}
@Test
void testBufferTimeoutDisabled() {
Configuration config = new Configuration();
config.set(ExecutionOptions.BUFFER_TIMEOUT_ENABLED, false);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// The execution.buffer-timeout's default value 100ms will not take effect.
env.configure(config, this.getClass().getClassLoader());
assertThat(env.getBufferTimeout())
.isEqualTo(ExecutionOptions.DISABLED_NETWORK_BUFFER_TIMEOUT);
// Setting execution.buffer-timeout's to 0ms will not take effect.
env.setBufferTimeout(0);
assertThat(env.getBufferTimeout())
.isEqualTo(ExecutionOptions.DISABLED_NETWORK_BUFFER_TIMEOUT);
// Setting execution.buffer-timeout's to -1ms will not take effect.
env.setBufferTimeout(-1);
assertThat(env.getBufferTimeout())
.isEqualTo(ExecutionOptions.DISABLED_NETWORK_BUFFER_TIMEOUT);
}
    /**
     * Verifies the async state buffer options: defaults apply with an empty configuration,
     * values set via {@link Configuration} are picked up by configure(), and the fluent setters
     * on the execution config override previously configured values.
     */
    @Test
    void testAsyncExecutionConfigurations() {
        Configuration config = new Configuration();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Phase 1: empty configuration — all three options report their defaults.
        env.configure(config, this.getClass().getClassLoader());
        assertThat(env.getConfig().getAsyncStateTotalBufferSize())
                .isEqualTo(ExecutionOptions.ASYNC_STATE_TOTAL_BUFFER_SIZE.defaultValue());
        assertThat(env.getConfig().getAsyncStateActiveBufferSize())
                .isEqualTo(ExecutionOptions.ASYNC_STATE_ACTIVE_BUFFER_SIZE.defaultValue());
        assertThat(env.getConfig().getAsyncStateActiveBufferTimeout())
                .isEqualTo(ExecutionOptions.ASYNC_STATE_ACTIVE_BUFFER_TIMEOUT.defaultValue());

        // Phase 2: values set through the configuration are applied by configure().
        config.set(ExecutionOptions.ASYNC_STATE_TOTAL_BUFFER_SIZE, 3);
        config.set(ExecutionOptions.ASYNC_STATE_ACTIVE_BUFFER_SIZE, 2);
        config.set(ExecutionOptions.ASYNC_STATE_ACTIVE_BUFFER_TIMEOUT, 1L);
        env.configure(config, this.getClass().getClassLoader());
        assertThat(env.getConfig().getAsyncStateTotalBufferSize()).isEqualTo(3);
        assertThat(env.getConfig().getAsyncStateActiveBufferSize()).isEqualTo(2);
        assertThat(env.getConfig().getAsyncStateActiveBufferTimeout()).isEqualTo(1);

        // Phase 3: the fluent setters override the previously configured values.
        env.getConfig()
                .setAsyncStateTotalBufferSize(6)
                .setAsyncStateActiveBufferSize(5)
                .setAsyncStateActiveBufferTimeout(4);
        assertThat(env.getConfig().getAsyncStateTotalBufferSize()).isEqualTo(6);
        assertThat(env.getConfig().getAsyncStateActiveBufferSize()).isEqualTo(5);
        assertThat(env.getConfig().getAsyncStateActiveBufferTimeout()).isEqualTo(4);
    }
    /**
     * Asserts the buffer timeout behavior when buffering is enabled: the default applies for an
     * empty configuration, an explicit 0ms is accepted, and negative values are rejected.
     */
    private void testBufferTimeout(Configuration config, StreamExecutionEnvironment env) {
        env.configure(config, this.getClass().getClassLoader());
        assertThat(env.getBufferTimeout())
                .isEqualTo(ExecutionOptions.BUFFER_TIMEOUT.defaultValue().toMillis());

        // Zero means "flush after every record" and is a valid setting.
        config.setString(ExecutionOptions.BUFFER_TIMEOUT.key(), "0ms");
        env.configure(config, this.getClass().getClassLoader());
        assertThat(env.getBufferTimeout()).isZero();

        // Negative timeouts are rejected; getBufferTimeout() forces the validation.
        assertThatThrownBy(
                        () -> {
                            config.setString(ExecutionOptions.BUFFER_TIMEOUT.key(), "-1ms");
                            env.configure(config, this.getClass().getClassLoader());
                            env.getBufferTimeout();
                        })
                .isInstanceOf(IllegalArgumentException.class);
    }
    /**
     * Verifies that the context environment is isolated per thread: each of the concurrently
     * running threads must observe exactly the environment it installed itself, even when all
     * of them resolve the context at the same time.
     */
    @Test
    void testConcurrentSetContext() throws Exception {
        int numThreads = 20;
        // Counted down once per thread so the main thread knows all workers are ready.
        final CountDownLatch waitingThreadCount = new CountDownLatch(numThreads);
        // Released once to start all workers at (approximately) the same time.
        final OneShotLatch latch = new OneShotLatch();
        final List<CheckedThread> threads = new ArrayList<>();
        for (int x = 0; x < numThreads; x++) {
            final CheckedThread thread =
                    new CheckedThread() {
                        @Override
                        public void go() {
                            final StreamExecutionEnvironment preparedEnvironment =
                                    new StreamExecutionEnvironment();
                            StreamExecutionEnvironment.initializeContextEnvironment(
                                    configuration -> preparedEnvironment);
                            try {
                                waitingThreadCount.countDown();
                                latch.awaitQuietly();
                                // Must see this thread's own environment, not another thread's.
                                assertThat(StreamExecutionEnvironment.getExecutionEnvironment())
                                        .isSameAs(preparedEnvironment);
                            } finally {
                                // Always clean up so no context leaks into other tests.
                                StreamExecutionEnvironment.resetContextEnvironment();
                            }
                        }
                    };
            thread.start();
            threads.add(thread);
        }

        // wait for all threads to be ready and trigger the job submissions at the same time
        waitingThreadCount.await();
        latch.trigger();

        // sync() rethrows any assertion error raised inside a worker thread.
        for (CheckedThread thread : threads) {
            thread.sync();
        }
    }
/////////////////////////////////////////////////////////////
// Utilities
/////////////////////////////////////////////////////////////
private static StreamOperator<?> getOperatorFromDataStream(DataStream<?> dataStream) {
StreamExecutionEnvironment env = dataStream.getExecutionEnvironment();
StreamGraph streamGraph = env.getStreamGraph();
return streamGraph.getStreamNode(dataStream.getId()).getOperator();
}
@SuppressWarnings("unchecked")
private static <T> SourceFunction<T> getFunctionFromDataSource(
DataStreamSource<T> dataStreamSource) {
dataStreamSource.sinkTo(new DiscardingSink<>());
AbstractUdfStreamOperator<?, ?> operator =
(AbstractUdfStreamOperator<?, ?>) getOperatorFromDataStream(dataStreamSource);
return (SourceFunction<T>) operator.getUserFunction();
}
@SuppressWarnings("unchecked")
private static <T, S extends Source<T, ?, ?>> S getSourceFromStream(DataStream<T> stream) {
return (S) ((SourceTransformation<T, ?, ?>) stream.getTransformation()).getSource();
}
private static <T> Source<T, ?, ?> getSourceFromDataSourceTyped(
DataStreamSource<T> dataStreamSource) {
dataStreamSource.sinkTo(new DiscardingSink<>());
dataStreamSource.getExecutionEnvironment().getStreamGraph();
return ((SourceTransformation<T, ?, ?>) dataStreamSource.getTransformation()).getSource();
}
private static | StreamExecutionEnvironmentTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java | {
"start": 1514,
"end": 1767
} | class ____ or JVM. When an application
* deserializes a ThrowableProxy, the throwable may not be set, but the throwable's information is preserved in other
* fields of the proxy like the message and stack trace.
* </p>
*
* <p>
* TODO: Move this | loader |
java | apache__camel | components/camel-huawei/camel-huaweicloud-functiongraph/src/main/java/org/apache/camel/FunctionGraphProducer.java | {
"start": 1588,
"end": 8001
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(FunctionGraphProducer.class);
private FunctionGraphEndpoint endpoint;
private FunctionGraphClient functionGraphClient;
public FunctionGraphProducer(FunctionGraphEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
public void process(Exchange exchange) throws Exception {
ClientConfigurations clientConfigurations = new ClientConfigurations(endpoint);
if (functionGraphClient == null) {
LOG.debug("Initializing SDK client");
this.functionGraphClient = endpoint.initClient();
LOG.debug("Successfully initialized SDK client");
}
updateClientConfigs(exchange, clientConfigurations);
switch (clientConfigurations.getOperation()) {
case FunctionGraphOperations.INVOKE_FUNCTION:
invokeFunction(exchange, clientConfigurations);
break;
default:
throw new UnsupportedOperationException(
String.format("%s is not a supported operation", clientConfigurations.getOperation()));
}
}
/**
* Perform invoke function operation and map return object to exchange body
*
* @param exchange
* @param clientConfigurations
*/
private void invokeFunction(Exchange exchange, ClientConfigurations clientConfigurations) {
// convert exchange body to Map object
Object body = exchange.getMessage().getBody();
Map request;
if (body instanceof Map) {
request = exchange.getMessage().getBody(Map.class);
} else if (body instanceof String) {
String strBody = exchange.getMessage().getBody(String.class);
try {
request = new ObjectMapper().readValue(strBody, HashMap.class);
} catch (JsonProcessingException e) {
throw new IllegalArgumentException("Request body must be a JSON or a HashMap");
}
} else {
throw new IllegalArgumentException("Exchange body is mandatory and should be a valid Map or JSON string");
}
// checking for function name and function package
if (ObjectHelper.isEmpty(clientConfigurations.getFunctionName())) {
throw new IllegalArgumentException("Function name is mandatory for invokeFunction.");
}
// invoke the function
InvokeFunctionRequest invokeFunctionRequest = new InvokeFunctionRequest()
.withBody(request)
.withFunctionUrn(FunctionGraphUtils.composeUrn(FunctionGraphConstants.URN_FORMAT, clientConfigurations))
.withXCFFRequestVersion(FunctionGraphConstants.REQUEST_VERSION);
if (ObjectHelper.isNotEmpty(clientConfigurations.getXCffLogType())) {
invokeFunctionRequest.withXCffLogType(clientConfigurations.getXCffLogType());
}
InvokeFunctionResponse response = functionGraphClient.invokeFunction(invokeFunctionRequest);
String responseBody
= FunctionGraphUtils.extractJsonFieldAsString(response.getResult(), FunctionGraphConstants.RESPONSE_BODY);
exchange.getMessage().setBody(responseBody);
if (ObjectHelper.isNotEmpty(clientConfigurations.getXCffLogType())) {
exchange.setProperty(FunctionGraphProperties.XCFFLOGS, response.getLog());
}
LOG.debug("Invoke Function results: {}", response);
}
/**
* Update dynamic client configurations. Some endpoint parameters (operation, function name, package, and
* XCFFLogType) can also be passed via exchange properties, so they can be updated between each transaction. Since
* they can change, we must clear the previous transaction and update these parameters with their new values
*
* @param exchange
* @param clientConfigurations
*/
private void updateClientConfigs(Exchange exchange, ClientConfigurations clientConfigurations) {
// checking for required operation
if (ObjectHelper.isEmpty(exchange.getProperty(FunctionGraphProperties.OPERATION))
&& ObjectHelper.isEmpty(endpoint.getOperation())) {
throw new IllegalArgumentException("Operation name not found");
} else {
clientConfigurations.setOperation(
ObjectHelper.isNotEmpty(exchange.getProperty(FunctionGraphProperties.OPERATION))
? (String) exchange.getProperty(FunctionGraphProperties.OPERATION)
: endpoint.getOperation());
}
// checking for required function name (exchange overrides endpoint function name)
if (ObjectHelper.isEmpty(exchange.getProperty(FunctionGraphProperties.FUNCTION_NAME))
&& ObjectHelper.isEmpty(endpoint.getFunctionName())) {
throw new IllegalArgumentException("Function name not found");
} else {
clientConfigurations.setFunctionName(
ObjectHelper.isNotEmpty(exchange.getProperty(FunctionGraphProperties.FUNCTION_NAME))
? (String) exchange.getProperty(FunctionGraphProperties.FUNCTION_NAME)
: endpoint.getFunctionName());
}
// checking for optional function package (exchange overrides endpoint function package)
if (ObjectHelper.isEmpty(exchange.getProperty(FunctionGraphProperties.FUNCTION_PACKAGE))
&& ObjectHelper.isEmpty(endpoint.getFunctionPackage())) {
clientConfigurations.setFunctionPackage(FunctionGraphConstants.DEFAULT_FUNCTION_PACKAGE);
} else {
clientConfigurations.setFunctionPackage(
ObjectHelper.isNotEmpty(exchange.getProperty(FunctionGraphProperties.FUNCTION_PACKAGE))
? (String) exchange.getProperty(FunctionGraphProperties.FUNCTION_PACKAGE)
: endpoint.getFunctionPackage());
}
// checking for optional XCffLogType
if (ObjectHelper.isEmpty(exchange.getProperty(FunctionGraphProperties.XCFFLOGTYPE))) {
LOG.warn("No XCffLogType given");
} else {
clientConfigurations.setXCffLogType((String) exchange.getProperty(FunctionGraphProperties.XCFFLOGTYPE));
}
}
}
| FunctionGraphProducer |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/messaging/jms/receiving/custom/MyBean.java | {
"start": 820,
"end": 970
} | class ____ {
@JmsListener(destination = "someQueue", containerFactory = "myFactory")
public void processMessage(String content) {
// ...
}
}
| MyBean |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/profile/UnlessBuildProfileAllAnyTest.java | {
"start": 1048,
"end": 1706
} | interface ____ {
String profile();
}
@Test
void unlessBuildProfile() {
assertTrue(config.getProfiles().contains("test"));
assertTrue(config.getProfiles().contains("build"));
Set<String> unlessProfiles = unlessBuildProfiles.stream().map(UnlessBuildProfileBean::profile).collect(toSet());
assertEquals(2, unlessProfiles.size());
assertTrue(unlessProfiles.contains("dev"));
assertTrue(unlessProfiles.contains("allOf-dev,allOf-test,allOf-build"));
}
// Active, the "dev" profile is not active
@ApplicationScoped
@UnlessBuildProfile("dev")
public static | UnlessBuildProfileBean |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java | {
"start": 2317,
"end": 10645
} | class ____ implements Callable<Boolean> {
// rwxr-xr-x
static final FsPermission DIRECTORY_PERMISSION =
new FsPermission((short)00755);
// r-xr-xr-x
static final FsPermission FILE_PERMISSION =
new FsPermission((short)00555);
private static final Logger LOG =
LoggerFactory.getLogger(SharedCacheUploader.class);
private final LocalResource resource;
private final Path localPath;
private final String user;
private final Configuration conf;
private final SCMUploaderProtocol scmClient;
private final FileSystem fs;
private final FileSystem localFs;
private final String sharedCacheRootDir;
private final int nestedLevel;
private final SharedCacheChecksum checksum;
private final RecordFactory recordFactory;
public SharedCacheUploader(LocalResource resource, Path localPath,
String user, Configuration conf, SCMUploaderProtocol scmClient)
throws IOException {
this(resource, localPath, user, conf, scmClient,
FileSystem.get(conf), localPath.getFileSystem(conf));
}
/**
* @param resource the local resource that contains the original remote path
* @param localPath the path in the local filesystem where the resource is
* localized
* @param fs the filesystem of the shared cache
* @param localFs the local filesystem
*/
public SharedCacheUploader(LocalResource resource, Path localPath,
String user, Configuration conf, SCMUploaderProtocol scmClient,
FileSystem fs, FileSystem localFs) {
this.resource = resource;
this.localPath = localPath;
this.user = user;
this.conf = conf;
this.scmClient = scmClient;
this.fs = fs;
this.sharedCacheRootDir =
conf.get(YarnConfiguration.SHARED_CACHE_ROOT,
YarnConfiguration.DEFAULT_SHARED_CACHE_ROOT);
this.nestedLevel = SharedCacheUtil.getCacheDepth(conf);
this.checksum = SharedCacheChecksumFactory.getChecksum(conf);
this.localFs = localFs;
this.recordFactory = RecordFactoryProvider.getRecordFactory(null);
}
/**
* Uploads the file under the shared cache, and notifies the shared cache
* manager. If it is unable to upload the file because it already exists, it
* returns false.
*/
@Override
public Boolean call() throws Exception {
Path tempPath = null;
try {
if (!verifyAccess()) {
LOG.warn("User " + user + " is not authorized to upload file " +
localPath.getName());
return false;
}
// first determine the actual local path that will be used for upload
Path actualPath = getActualPath();
// compute the checksum
String checksumVal = computeChecksum(actualPath);
// create the directory (if it doesn't exist)
Path directoryPath =
new Path(SharedCacheUtil.getCacheEntryPath(nestedLevel,
sharedCacheRootDir, checksumVal));
// let's not check if the directory already exists: in the vast majority
// of the cases, the directory does not exist; as long as mkdirs does not
// error out if it exists, we should be fine
fs.mkdirs(directoryPath, DIRECTORY_PERMISSION);
// create the temporary file
tempPath = new Path(directoryPath, getTemporaryFileName(actualPath));
if (!uploadFile(actualPath, tempPath)) {
LOG.warn("Could not copy the file to the shared cache at " + tempPath);
return false;
}
// set the permission so that it is readable but not writable
fs.setPermission(tempPath, FILE_PERMISSION);
// rename it to the final filename
Path finalPath = new Path(directoryPath, actualPath.getName());
if (!fs.rename(tempPath, finalPath)) {
LOG.warn("The file already exists under " + finalPath +
". Ignoring this attempt.");
deleteTempFile(tempPath);
return false;
}
// notify the SCM
if (!notifySharedCacheManager(checksumVal, actualPath.getName())) {
// the shared cache manager rejected the upload (as it is likely
// uploaded under a different name
// clean up this file and exit
fs.delete(finalPath, false);
return false;
}
// set the replication factor
short replication =
(short)conf.getInt(YarnConfiguration.SHARED_CACHE_NM_UPLOADER_REPLICATION_FACTOR,
YarnConfiguration.DEFAULT_SHARED_CACHE_NM_UPLOADER_REPLICATION_FACTOR);
fs.setReplication(finalPath, replication);
LOG.info("File " + actualPath.getName() +
" was uploaded to the shared cache at " + finalPath);
return true;
} catch (IOException e) {
LOG.warn("Exception while uploading the file " + localPath.getName(), e);
// in case an exception is thrown, delete the temp file
deleteTempFile(tempPath);
throw e;
}
}
@VisibleForTesting
Path getActualPath() throws IOException {
Path path = localPath;
FileStatus status = localFs.getFileStatus(path);
if (status != null && status.isDirectory()) {
// for certain types of resources that get unpacked, the original file may
// be found under the directory with the same name (see
// FSDownload.unpack); check if the path is a directory and if so look
// under it
path = new Path(path, path.getName());
}
return path;
}
private void deleteTempFile(Path tempPath) {
try {
if (tempPath != null) {
fs.delete(tempPath, false);
}
} catch (IOException ioe) {
LOG.debug("Exception received while deleting temp files", ioe);
}
}
/**
* Checks that the (original) remote file is either owned by the user who
* started the app or public.
*/
@VisibleForTesting
boolean verifyAccess() throws IOException {
// if it is in the public cache, it's trivially OK
if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
return true;
}
final Path remotePath;
try {
remotePath = resource.getResource().toPath();
} catch (URISyntaxException e) {
throw new IOException("Invalid resource", e);
}
// get the file status of the HDFS file
FileSystem remoteFs = remotePath.getFileSystem(conf);
FileStatus status = remoteFs.getFileStatus(remotePath);
// check to see if the file has been modified in any way
if (status.getModificationTime() != resource.getTimestamp()) {
LOG.warn("The remote file " + remotePath +
" has changed since it's localized; will not consider it for upload");
return false;
}
// check for the user ownership
if (status.getOwner().equals(user)) {
return true; // the user owns the file
}
// check if the file is publicly readable otherwise
return fileIsPublic(remotePath, remoteFs, status);
}
@VisibleForTesting
boolean fileIsPublic(final Path remotePath, FileSystem remoteFs,
FileStatus status) throws IOException {
return FSDownload.isPublic(remoteFs, remotePath, status, null);
}
/**
* Uploads the file to the shared cache under a temporary name, and returns
* the result.
*/
@VisibleForTesting
boolean uploadFile(Path sourcePath, Path tempPath) throws IOException {
return FileUtil.copy(localFs, sourcePath, fs, tempPath, false, conf);
}
@VisibleForTesting
String computeChecksum(Path path) throws IOException {
InputStream is = localFs.open(path);
try {
return checksum.computeChecksum(is);
} finally {
try { is.close(); } catch (IOException ignore) {}
}
}
private String getTemporaryFileName(Path path) {
return path.getName() + "-" + ThreadLocalRandom.current().nextLong();
}
@VisibleForTesting
boolean notifySharedCacheManager(String checksumVal, String fileName)
throws IOException {
try {
SCMUploaderNotifyRequest request =
recordFactory.newRecordInstance(SCMUploaderNotifyRequest.class);
request.setResourceKey(checksumVal);
request.setFilename(fileName);
return scmClient.notify(request).getAccepted();
} catch (YarnException e) {
throw new IOException(e);
} catch (UndeclaredThrowableException e) {
// retrieve the cause of the exception and throw it as an IOException
throw new IOException(e.getCause() == null ? e : e.getCause());
}
}
}
| SharedCacheUploader |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java | {
"start": 527,
"end": 1524
} | class ____ extends AbstractBWCWireSerializationTestCase<
UpdateConnectorLastSeenAction.Request> {
@Override
protected Writeable.Reader<UpdateConnectorLastSeenAction.Request> instanceReader() {
return UpdateConnectorLastSeenAction.Request::new;
}
@Override
protected UpdateConnectorLastSeenAction.Request createTestInstance() {
return new UpdateConnectorLastSeenAction.Request(randomUUID());
}
@Override
protected UpdateConnectorLastSeenAction.Request mutateInstance(UpdateConnectorLastSeenAction.Request instance) throws IOException {
return new UpdateConnectorLastSeenAction.Request(randomValueOtherThan(instance.getConnectorId(), () -> randomUUID()));
}
@Override
protected UpdateConnectorLastSeenAction.Request mutateInstanceForVersion(
UpdateConnectorLastSeenAction.Request instance,
TransportVersion version
) {
return instance;
}
}
| UpdateConnectorLastSeenActionRequestBWCSerializingTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/IORequest.java | {
"start": 1004,
"end": 1411
} | interface ____ {
/**
* Method that is called by the target I/O thread after the request has been processed.
*
* @param ioex The exception that occurred while processing the I/O request. Is <tt>null</tt> if
* everything was fine.
*/
public void requestDone(IOException ioex);
}
/** Interface for I/O requests that are handled by the IOManager's reading thread. */
| IORequest |
java | apache__dubbo | dubbo-spring-boot-project/dubbo-spring-boot-actuator/src/main/java/org/apache/dubbo/spring/boot/actuate/endpoint/condition/CompatibleOnEnabledEndpointCondition.java | {
"start": 1775,
"end": 3182
} | class ____ implements Condition {
private static final ErrorTypeAwareLogger LOGGER =
LoggerFactory.getErrorTypeAwareLogger(CompatibleOnEnabledEndpointCondition.class);
// Spring Boot [2.0.0 , 2.2.x]
static String CONDITION_CLASS_NAME_OLD =
"org.springframework.boot.actuate.autoconfigure.endpoint.condition.OnEnabledEndpointCondition";
// Spring Boot 2.2.0 +
static String CONDITION_CLASS_NAME_NEW =
"org.springframework.boot.actuate.autoconfigure.endpoint.condition.OnAvailableEndpointCondition";
@Override
public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
ClassLoader classLoader = context.getClassLoader();
if (ClassUtils.isPresent(CONDITION_CLASS_NAME_OLD, classLoader)) {
Class<?> cls = ClassUtils.resolveClassName(CONDITION_CLASS_NAME_OLD, classLoader);
if (Condition.class.isAssignableFrom(cls)) {
Condition condition = Condition.class.cast(BeanUtils.instantiateClass(cls));
return condition.matches(context, metadata);
}
}
// Check by org.springframework.boot.actuate.autoconfigure.endpoint.condition.ConditionalOnAvailableEndpoint
if (ClassUtils.isPresent(CONDITION_CLASS_NAME_NEW, classLoader)) {
return true;
}
// No condition | CompatibleOnEnabledEndpointCondition |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AlreadyCheckedTest.java | {
"start": 12024,
"end": 12310
} | class ____ {
public void test(Foo a, Foo b) {
if (a.bar().equals(E.A)) {
// BUG: Diagnostic contains:
if (a.bar().equals(E.A)) {}
}
}
@AutoValue
abstract static | Test |
java | processing__processing4 | app/src/processing/app/ui/SketchbookFrame.java | {
"start": 1284,
"end": 7108
} | class ____ extends JFrame {
protected Base base;
public SketchbookFrame(final Base base) {
super(Language.text("sketchbook"));
this.base = base;
final ActionListener listener = e -> setVisible(false);
Toolkit.registerWindowCloseKeys(getRootPane(), listener);
Toolkit.setIcon(this);
Container pane = getContentPane();
pane.setLayout(new BorderLayout());
updateCenterPanel();
Container buttons = Box.createHorizontalBox();
JButton addButton = new JButton("Show Folder");
addButton.addActionListener(e -> Platform.openFolder(Base.getSketchbookFolder()));
buttons.add(Box.createHorizontalGlue());
buttons.add(addButton, BorderLayout.WEST);
JButton refreshButton = new JButton("Refresh");
refreshButton.addActionListener(e -> base.rebuildSketchbook());
buttons.add(refreshButton, BorderLayout.EAST);
buttons.add(Box.createHorizontalGlue());
final int high = addButton.getPreferredSize().height;
final int wide = 4 * Toolkit.getButtonWidth() / 3;
addButton.setPreferredSize(new Dimension(wide, high));
refreshButton.setPreferredSize(new Dimension(wide, high));
JPanel buttonPanel = new JPanel(); // adds extra border
// wasn't necessary to set a border b/c JPanel adds plenty
//buttonPanel.setBorder(new EmptyBorder(3, 0, 3, 0));
buttonPanel.add(buttons);
pane.add(buttonPanel, BorderLayout.SOUTH);
pack();
}
private JTree rebuildTree() {
final JTree tree = new JTree(base.buildSketchbookTree());
tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
tree.setShowsRootHandles(true);
tree.expandRow(0);
tree.setRootVisible(false);
tree.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent e) {
if (e.getClickCount() == 2) {
DefaultMutableTreeNode node =
(DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
int selRow = tree.getRowForLocation(e.getX(), e.getY());
//TreePath selPath = tree.getPathForLocation(e.getX(), e.getY());
//if (node != null && node.isLeaf() && node.getPath().equals(selPath)) {
if (node != null && node.isLeaf() && selRow != -1) {
SketchReference sketch = (SketchReference) node.getUserObject();
base.handleOpen(sketch.getPath());
}
}
}
});
tree.addKeyListener(new KeyAdapter() {
// ESC doesn't fire keyTyped(), so we have to catch it on keyPressed
public void keyPressed(KeyEvent e) {
if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
setVisible(false);
}
}
public void keyTyped(KeyEvent e) {
if (e.getKeyChar() == KeyEvent.VK_ENTER) {
DefaultMutableTreeNode node =
(DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
if (node != null && node.isLeaf()) {
SketchReference sketch = (SketchReference) node.getUserObject();
base.handleOpen(sketch.getPath());
}
}
}
});
final int border = Toolkit.zoom(5);
tree.setBorder(new EmptyBorder(border, border, border, border));
if (Platform.isMacOS()) {
tree.setToggleClickCount(2);
} else {
tree.setToggleClickCount(1);
}
// Special cell renderer that takes the UI zoom into account
tree.setCellRenderer(new ZoomTreeCellRenderer());
return tree;
}
private void updateCenterPanel() {
JTree tree = rebuildTree();
Container panel;
// Check whether sketchbook is empty or not
TreeModel treeModel = tree.getModel();
if (treeModel.getChildCount(treeModel.getRoot()) != 0) {
JScrollPane treePane = new JScrollPane(tree,
ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED,
ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
treePane.setPreferredSize(Toolkit.zoom(250, 450));
treePane.setBorder(new EmptyBorder(0, 0, 0, 0));
//getContentPane().add(treePane);
panel = treePane;
} else {
JPanel emptyPanel = new JPanel();
emptyPanel.setBackground(Color.WHITE);
emptyPanel.setPreferredSize(Toolkit.zoom(250,450));
JLabel emptyLabel = new JLabel("Empty Sketchbook");
emptyLabel.setForeground(Color.GRAY);
emptyPanel.add(emptyLabel);
//setContentPane(emptyPanel);
panel = emptyPanel;
}
Container pane = getContentPane();
//pane.setLayout(new BorderLayout());
BorderLayout layout = (BorderLayout) pane.getLayout();
Component comp = layout.getLayoutComponent(BorderLayout.CENTER);
if (comp != null) {
pane.remove(comp);
}
pane.add(panel, BorderLayout.CENTER);
}
public void rebuild() {
updateCenterPanel();
// After replacing the tree/panel, this calls the layout manager,
// otherwise it'll just leave a frozen-looking component until
// the window is closed and re-opened.
getContentPane().validate();
}
public void setVisible() {
// TODO The ExamplesFrame code doesn't invokeLater(), is it necessary?
// Either one of them is wrong, or this is hiding a bug [fry 150811]
EventQueue.invokeLater(() -> {
// Space for the editor plus a li'l gap
int roughWidth = getWidth() + 20;
// If no window open, or the editor is at the edge of the screen
Editor editor = base.getActiveEditor();
if (editor == null) {
setLocationRelativeTo(null);
} else {
Point p = editor.getLocation();
if (p.x < roughWidth) {
// Center the window on the screen
setLocationRelativeTo(null);
} else {
// Open the window relative to the editor
setLocation(p.x - roughWidth, p.y);
}
}
setVisible(true);
});
}
}
| SketchbookFrame |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/function/server/EntityResponse.java | {
"start": 2529,
"end": 3430
} | class ____ elements contained in the publisher
* @return the created builder
* @since 5.2
*/
static <T> Builder<T> fromProducer(T producer, Class<?> elementClass) {
return new DefaultEntityResponseBuilder<>(producer,
BodyInserters.fromProducer(producer, elementClass));
}
/**
* Create a builder with the given producer.
* @param producer the producer that represents the body of the response
* @param typeReference the type of elements contained in the producer
* @return the created builder
* @since 5.2
*/
static <T> Builder<T> fromProducer(T producer, ParameterizedTypeReference<?> typeReference) {
return new DefaultEntityResponseBuilder<>(producer,
BodyInserters.fromProducer(producer, typeReference));
}
/**
* Create a builder with the given publisher.
* @param publisher the publisher that represents the body of the response
* @param elementClass the | of |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/RetryWhileSimpleExpressionIssueTest.java | {
"start": 1043,
"end": 2013
} | class ____ extends ContextTestSupport {
@Test
public void testRetryWhileSimple() throws Exception {
getMockEndpoint("mock:error").expectedMessageCount(1);
getMockEndpoint("mock:error").message(0).body().isInstanceOf(MyCoolDude.class);
MyCoolDude dude = new MyCoolDude();
template.sendBody("direct:start", dude);
assertMockEndpointsSatisfied();
assertEquals(3 + 1, dude.getCounter());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(IllegalArgumentException.class).retryWhile(simple("${body.areWeCool} == 'no'")).redeliveryDelay(0)
.handled(true).to("mock:error");
from("direct:start").throwException(new IllegalArgumentException("Forced"));
}
};
}
public static | RetryWhileSimpleExpressionIssueTest |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java | {
"start": 717,
"end": 975
} | interface ____ {
/**
* Configuration which applies to building a JAR file for the project.
*/
JarConfig jar();
/**
* The entry point of the application. This can either be a fully qualified name of a standard Java
* | PackageConfig |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/TestCreators.java | {
"start": 5111,
"end": 5333
} | class ____ {
BigInteger _value;
public BigIntegerWrapper() { }
public BigIntegerWrapper(final BigInteger value) { _value = value; }
}
// [databind#2215]
protected static | BigIntegerWrapper |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java | {
"start": 63970,
"end": 67663
} | class ____ implements Runnable {
MRAppMaster appMaster;
MRAppMasterShutdownHook(MRAppMaster appMaster) {
this.appMaster = appMaster;
}
public void run() {
LOG.info("MRAppMaster received a signal. Signaling RMCommunicator and "
+ "JobHistoryEventHandler.");
// Notify the JHEH and RMCommunicator that a SIGTERM has been received so
// that they don't take too long in shutting down
if(appMaster.containerAllocator instanceof ContainerAllocatorRouter) {
((ContainerAllocatorRouter) appMaster.containerAllocator)
.setSignalled(true);
}
appMaster.notifyIsLastAMRetry(appMaster.isLastAMRetry);
appMaster.stop();
try {
JobContext jobContext = appMaster
.getJobContextFromConf(appMaster.getConfig());
appMaster.committer.abortJob(jobContext, State.KILLED);
} catch (FileNotFoundException e) {
System.out.println("Previous job temporary " +
"files do not exist, no clean up was necessary.");
} catch (Exception e) {
// the clean up of a previous attempt is not critical to the success
// of this job - only logging the error
System.err.println("Error while trying to " +
"clean up previous job's temporary files" + e);
}
}
}
public void notifyIsLastAMRetry(boolean isLastAMRetry){
if(containerAllocator instanceof ContainerAllocatorRouter) {
LOG.info("Notify RMCommunicator isAMLastRetry: " + isLastAMRetry);
((ContainerAllocatorRouter) containerAllocator)
.setShouldUnregister(isLastAMRetry);
}
if(jobHistoryEventHandler != null) {
LOG.info("Notify JHEH isAMLastRetry: " + isLastAMRetry);
jobHistoryEventHandler.setForcejobCompletion(isLastAMRetry);
}
}
protected static void initAndStartAppMaster(final MRAppMaster appMaster,
final JobConf conf, String jobUserName) throws IOException,
InterruptedException {
UserGroupInformation.setConfiguration(conf);
// MAPREDUCE-6565: need to set configuration for SecurityUtil.
SecurityUtil.setConfiguration(conf);
// Security framework already loaded the tokens into current UGI, just use
// them
Credentials credentials =
UserGroupInformation.getCurrentUser().getCredentials();
LOG.info("Executing with tokens: {}", credentials.getAllTokens());
UserGroupInformation appMasterUgi = UserGroupInformation
.createRemoteUser(jobUserName);
appMasterUgi.addCredentials(credentials);
// Now remove the AM->RM token so tasks don't have it
Iterator<Token<?>> iter = credentials.getAllTokens().iterator();
while (iter.hasNext()) {
Token<?> token = iter.next();
if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
iter.remove();
}
}
conf.getCredentials().addAll(credentials);
appMasterUgi.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
appMaster.init(conf);
appMaster.start();
if(appMaster.errorHappenedShutDown) {
throw new IOException("Was asked to shut down.");
}
return null;
}
});
}
/**
* Creates a job classloader based on the configuration if the job classloader
* is enabled. It is a no-op if the job classloader is not enabled.
*/
private void createJobClassLoader(Configuration conf) throws IOException {
jobClassLoader = MRApps.createJobClassLoader(conf);
}
/**
* Executes the given action with the job classloader set as the configuration
* classloader as well as the thread context | MRAppMasterShutdownHook |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/text/StrBuilder.java | {
"start": 5812,
"end": 5883
} | class ____ allow StrBuilder to operate as a tokenizer.
*/
final | to |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/broadcast/BroadcastVariableKey.java | {
"start": 1079,
"end": 2602
} | class ____ {
private final JobVertexID vertexId;
private final String name;
private final int superstep;
public BroadcastVariableKey(JobVertexID vertexId, String name, int superstep) {
if (vertexId == null || name == null || superstep <= 0) {
throw new IllegalArgumentException();
}
this.vertexId = vertexId;
this.name = name;
this.superstep = superstep;
}
// ---------------------------------------------------------------------------------------------
public JobVertexID getVertexId() {
return vertexId;
}
public String getName() {
return name;
}
public int getSuperstep() {
return superstep;
}
// ---------------------------------------------------------------------------------------------
@Override
public int hashCode() {
return 31 * superstep + 47 * name.hashCode() + 83 * vertexId.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj != null && obj.getClass() == BroadcastVariableKey.class) {
BroadcastVariableKey other = (BroadcastVariableKey) obj;
return this.superstep == other.superstep
&& this.name.equals(other.name)
&& this.vertexId.equals(other.vertexId);
} else {
return false;
}
}
@Override
public String toString() {
return vertexId + " \"" + name + "\" (" + superstep + ')';
}
}
| BroadcastVariableKey |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/concurrent/FutureUtils.java | {
"start": 18401,
"end": 31127
} | class ____ extends Exception {
private static final long serialVersionUID = 3613470781274141862L;
public RetryException(String message) {
super(message);
}
public RetryException(String message, Throwable cause) {
super(message, cause);
}
public RetryException(Throwable cause) {
super(cause);
}
}
/**
* Times the given future out after the timeout.
*
* @param future to time out
* @param timeout after which the given future is timed out
* @param timeUnit time unit of the timeout
* @param timeoutMsg timeout message for exception
* @param <T> type of the given future
* @return The timeout enriched future
*/
public static <T> CompletableFuture<T> orTimeout(
CompletableFuture<T> future,
long timeout,
TimeUnit timeUnit,
@Nullable String timeoutMsg) {
return orTimeout(future, timeout, timeUnit, Executors.directExecutor(), timeoutMsg);
}
/**
* Times the given future out after the timeout.
*
* @param future to time out
* @param timeout after which the given future is timed out
* @param timeUnit time unit of the timeout
* @param timeoutFailExecutor executor that will complete the future exceptionally after the
* timeout is reached
* @param timeoutMsg timeout message for exception
* @param <T> type of the given future
* @return The timeout enriched future
*/
public static <T> CompletableFuture<T> orTimeout(
CompletableFuture<T> future,
long timeout,
TimeUnit timeUnit,
Executor timeoutFailExecutor,
@Nullable String timeoutMsg) {
if (!future.isDone()) {
final ScheduledFuture<?> timeoutFuture =
Delayer.delay(
() -> timeoutFailExecutor.execute(new Timeout(future, timeoutMsg)),
timeout,
timeUnit);
future.whenComplete(
(T value, Throwable throwable) -> {
if (!timeoutFuture.isDone()) {
timeoutFuture.cancel(false);
}
});
}
return future;
}
// ------------------------------------------------------------------------
// Delayed completion
// ------------------------------------------------------------------------
/**
* Asynchronously completes the future after a certain delay.
*
* @param future The future to complete.
* @param success The element to complete the future with.
* @param delay The delay after which the future should be completed.
*/
public static <T> void completeDelayed(CompletableFuture<T> future, T success, Duration delay) {
Delayer.delay(() -> future.complete(success), delay.toMillis(), TimeUnit.MILLISECONDS);
}
// ------------------------------------------------------------------------
// Future actions
// ------------------------------------------------------------------------
/**
* Run the given {@code RunnableFuture} if it is not done, and then retrieves its result.
*
* @param future to run if not done and get
* @param <T> type of the result
* @return the result after running the future
* @throws ExecutionException if a problem occurred
* @throws InterruptedException if the current thread has been interrupted
*/
public static <T> T runIfNotDoneAndGet(RunnableFuture<T> future)
throws ExecutionException, InterruptedException {
if (null == future) {
return null;
}
if (!future.isDone()) {
future.run();
}
return future.get();
}
/**
* Run the given action after the completion of the given future. The given future can be
* completed normally or exceptionally. In case of an exceptional completion the, the action's
* exception will be added to the initial exception.
*
* @param future to wait for its completion
* @param runnable action which is triggered after the future's completion
* @return Future which is completed after the action has completed. This future can contain an
* exception, if an error occurred in the given future or action.
*/
public static CompletableFuture<Void> runAfterwards(
CompletableFuture<?> future, RunnableWithException runnable) {
return runAfterwardsAsync(future, runnable, Executors.directExecutor());
}
/**
* Run the given action after the completion of the given future. The given future can be
* completed normally or exceptionally. In case of an exceptional completion the, the action's
* exception will be added to the initial exception.
*
* @param future to wait for its completion
* @param runnable action which is triggered after the future's completion
* @return Future which is completed after the action has completed. This future can contain an
* exception, if an error occurred in the given future or action.
*/
public static CompletableFuture<Void> runAfterwardsAsync(
CompletableFuture<?> future, RunnableWithException runnable) {
return runAfterwardsAsync(future, runnable, ForkJoinPool.commonPool());
}
/**
* Run the given action after the completion of the given future. The given future can be
* completed normally or exceptionally. In case of an exceptional completion the action's
* exception will be added to the initial exception.
*
* @param future to wait for its completion
* @param runnable action which is triggered after the future's completion
* @param executor to run the given action
* @return Future which is completed after the action has completed. This future can contain an
* exception, if an error occurred in the given future or action.
*/
public static CompletableFuture<Void> runAfterwardsAsync(
CompletableFuture<?> future, RunnableWithException runnable, Executor executor) {
final CompletableFuture<Void> resultFuture = new CompletableFuture<>();
future.whenCompleteAsync(
(Object ignored, Throwable throwable) -> {
try {
runnable.run();
} catch (Throwable e) {
throwable = ExceptionUtils.firstOrSuppressed(e, throwable);
}
if (throwable != null) {
resultFuture.completeExceptionally(throwable);
} else {
resultFuture.complete(null);
}
},
executor);
return resultFuture;
}
/**
* Run the given asynchronous action after the completion of the given future. The given future
* can be completed normally or exceptionally. In case of an exceptional completion, the
* asynchronous action's exception will be added to the initial exception.
*
* @param future to wait for its completion
* @param composedAction asynchronous action which is triggered after the future's completion
* @return Future which is completed after the asynchronous action has completed. This future
* can contain an exception if an error occurred in the given future or asynchronous action.
*/
public static CompletableFuture<Void> composeAfterwards(
CompletableFuture<?> future, Supplier<CompletableFuture<?>> composedAction) {
return composeAfterwardsInternal(future, composedAction, CompletableFuture::whenComplete);
}
/**
* Run the given asynchronous action after the completion of the given future. The given future
* can be completed normally or exceptionally. In case of an exceptional completion, the
* asynchronous action's exception will be added to the initial exception.
*
* @param future to wait for its completion
* @param composedAction asynchronous action which is triggered after the future's completion
* @return Future which is completed on the passed {@link Executor} after the asynchronous
* action has completed. This future can contain an exception if an error occurred in the
* given future or asynchronous action.
*/
public static CompletableFuture<Void> composeAfterwardsAsync(
CompletableFuture<?> future,
Supplier<CompletableFuture<?>> composedAction,
Executor executor) {
return composeAfterwardsInternal(
future,
composedAction,
(composedActionFuture, resultFutureCompletion) ->
composedActionFuture.whenCompleteAsync(resultFutureCompletion, executor));
}
private static CompletableFuture<Void> composeAfterwardsInternal(
CompletableFuture<?> future,
Supplier<CompletableFuture<?>> composedAction,
BiConsumer<CompletableFuture<?>, BiConsumer<Object, Throwable>> forwardAction) {
final CompletableFuture<Void> resultFuture = new CompletableFuture<>();
future.whenComplete(
(Object outerIgnored, Throwable outerThrowable) -> {
forwardAction.accept(
composedAction.get(),
(Object innerIgnored, Throwable innerThrowable) -> {
if (innerThrowable != null) {
resultFuture.completeExceptionally(
ExceptionUtils.firstOrSuppressed(
innerThrowable, outerThrowable));
} else if (outerThrowable != null) {
resultFuture.completeExceptionally(outerThrowable);
} else {
resultFuture.complete(null);
}
});
});
return resultFuture;
}
// ------------------------------------------------------------------------
// composing futures
// ------------------------------------------------------------------------
/**
* Creates a future that is complete once multiple other futures completed. The future fails
* (completes exceptionally) once one of the futures in the conjunction fails. Upon successful
* completion, the future returns the collection of the futures' results.
*
* <p>The ConjunctFuture gives access to how many Futures in the conjunction have already
* completed successfully, via {@link ConjunctFuture#getNumFuturesCompleted()}.
*
* @param futures The futures that make up the conjunction. No null entries are allowed.
* @return The ConjunctFuture that completes once all given futures are complete (or one fails).
*/
public static <T> ConjunctFuture<Collection<T>> combineAll(
Collection<? extends CompletableFuture<? extends T>> futures) {
checkNotNull(futures, "futures");
return new ResultConjunctFuture<>(futures);
}
/**
* Creates a future that is complete once all of the given futures have completed. The future
* fails (completes exceptionally) once one of the given futures fails.
*
* <p>The ConjunctFuture gives access to how many Futures have already completed successfully,
* via {@link ConjunctFuture#getNumFuturesCompleted()}.
*
* @param futures The futures to wait on. No null entries are allowed.
* @return The WaitingFuture that completes once all given futures are complete (or one fails).
*/
public static ConjunctFuture<Void> waitForAll(
Collection<? extends CompletableFuture<?>> futures) {
checkNotNull(futures, "futures");
return new WaitingConjunctFuture(futures);
}
/**
* A future that is complete once multiple other futures completed. The futures are not
* necessarily of the same type. The ConjunctFuture fails (completes exceptionally) once one of
* the Futures in the conjunction fails.
*
* <p>The advantage of using the ConjunctFuture over chaining all the futures (such as via
* {@link CompletableFuture#thenCombine(CompletionStage, BiFunction)}) is that ConjunctFuture
* also tracks how many of the Futures are already complete.
*/
public abstract static | RetryException |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AggregateFutureState.java | {
"start": 1639,
"end": 6646
} | class ____<OutputT extends @Nullable Object>
extends AbstractFuture.TrustedFuture<OutputT> {
/*
* The following fields are package-private, even though we intend never to use them outside this
* file. For discussion, see AbstractFutureState.
*/
// Lazily initialized the first time we see an exception; not released until all the input futures
// have completed and we have processed them all.
volatile @Nullable Set<Throwable> seenExceptionsField = null;
volatile int remainingField;
private static final AtomicHelper ATOMIC_HELPER;
private static final LazyLogger log = new LazyLogger(AggregateFutureState.class);
static {
AtomicHelper helper;
Throwable thrownReflectionFailure = null;
try {
helper = new SafeAtomicHelper();
} catch (Throwable reflectionFailure) { // sneaky checked exception
// Some Android 5.0.x Samsung devices have bugs in JDK reflection APIs that cause
// getDeclaredField to throw a NoSuchFieldException when the field is definitely there.
// For these users fallback to a suboptimal implementation, based on synchronized. This will
// be a definite performance hit to those users.
thrownReflectionFailure = reflectionFailure;
helper = new SynchronizedAtomicHelper();
}
ATOMIC_HELPER = helper;
// Log after all static init is finished; if an installed logger uses any Futures methods, it
// shouldn't break in cases where reflection is missing/broken.
if (thrownReflectionFailure != null) {
log.get().log(Level.SEVERE, "SafeAtomicHelper is broken!", thrownReflectionFailure);
}
}
AggregateFutureState(int remainingFutures) {
this.remainingField = remainingFutures;
}
final Set<Throwable> getOrInitSeenExceptions() {
/*
* The initialization of seenExceptionsField has to be more complicated than we'd like. The
* simple approach would be for each caller CAS it from null to a Set populated with its
* exception. But there's another race: If the first thread fails with an exception and a second
* thread immediately fails with the same exception:
*
* Thread1: calls setException(), which returns true, context switch before it can CAS
* seenExceptionsField to its exception
*
* Thread2: calls setException(), which returns false, CASes seenExceptionsField to its
* exception, and wrongly believes that its exception is new (leading it to logging it when it
* shouldn't)
*
* Our solution is for threads to CAS seenExceptionsField from null to a Set populated with _the
* initial exception_, no matter which thread does the work. This ensures that
* seenExceptionsField always contains not just the current thread's exception but also the
* initial thread's.
*/
Set<Throwable> seenExceptionsLocal = seenExceptionsField;
if (seenExceptionsLocal == null) {
// TODO(cpovirk): Should we use a simpler (presumably cheaper) data structure?
/*
* Using weak references here could let us release exceptions earlier, but:
*
* 1. On Android, querying a WeakReference blocks if the GC is doing an otherwise-concurrent
* pass.
*
* 2. We would probably choose to compare exceptions using == instead of equals() (for
* consistency with how weak references are cleared). That's a behavior change -- arguably the
* removal of a feature.
*
* Fortunately, exceptions rarely contain references to expensive resources.
*/
//
seenExceptionsLocal = newConcurrentHashSet();
/*
* Other handleException() callers may see this as soon as we publish it. We need to populate
* it with the initial failure before we do, or else they may think that the initial failure
* has never been seen before.
*/
addInitialException(seenExceptionsLocal);
ATOMIC_HELPER.compareAndSetSeenExceptions(this, null, seenExceptionsLocal);
/*
* If another handleException() caller created the set, we need to use that copy in case yet
* other callers have added to it.
*
* This read is guaranteed to get us the right value because we only set this once (here).
*
* requireNonNull is safe because either our compareAndSet succeeded or it failed because
* another thread did it for us.
*/
seenExceptionsLocal = requireNonNull(seenExceptionsField);
}
return seenExceptionsLocal;
}
/** Populates {@code seen} with the exception that was passed to {@code setException}. */
abstract void addInitialException(Set<Throwable> seen);
final int decrementRemainingAndGet() {
return ATOMIC_HELPER.decrementAndGetRemainingCount(this);
}
final void clearSeenExceptions() {
seenExceptionsField = null;
}
@VisibleForTesting
static String atomicHelperTypeForTest() {
return ATOMIC_HELPER.atomicHelperTypeForTest();
}
private abstract static | AggregateFutureState |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoPeekTest.java | {
"start": 1218,
"end": 2919
} | class ____ {
@Test
public void onMonoRejectedDoOnTerminate() {
Mono<String> mp = Mono.error(new Exception("test"));
AtomicInteger invoked = new AtomicInteger();
mp.doOnTerminate(invoked::incrementAndGet)
.subscribe();
assertThat(invoked).hasValue(1);
}
@Test
public void onMonoSuccessDoOnTerminate() {
Mono<String> mp = Mono.just("test");
AtomicInteger invoked = new AtomicInteger();
mp.doOnTerminate(invoked::incrementAndGet)
.subscribe();
assertThat(invoked).hasValue(1);
}
@Test
public void onMonoSuccessDoOnSuccess() {
Mono<String> mp = Mono.just("test");
AtomicReference<String> ref = new AtomicReference<>();
mp.doOnSuccess(ref::set)
.subscribe();
assertThat(ref.get()).isEqualToIgnoringCase("test");
}
@Test
public void onMonoDoOnRequest() {
Mono<String> mp = Mono.just("test");
AtomicReference<Long> ref = new AtomicReference<>();
StepVerifier.create(mp.doOnRequest(ref::set), 0)
.thenAwait()
.thenRequest(123)
.expectNext("test")
.verifyComplete();
assertThat(ref).hasValue(123L);
}
@Test
public void onMonoDoOnSubscribe() {
Mono<String> mp = Mono.just("test");
AtomicReference<Subscription> ref = new AtomicReference<>();
StepVerifier.create(mp.doOnSubscribe(ref::set))
.expectNext("test")
.verifyComplete();
assertThat(ref.get()).isNotNull();
}
@Test
public void onMonoRejectedDoOnError() {
Mono<String> mp = Mono.error(new Exception("test"));
AtomicReference<Throwable> ref = new AtomicReference<>();
mp.doOnError(ref::set)
.subscribe();
assertThat(ref.get()).hasMessage("test");
}
final static | MonoPeekTest |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/support/JpaMetamodelEntityInformation.java | {
"start": 9692,
"end": 11714
} | class ____<T> implements Iterable<SingularAttribute<? super T, ?>> {
private final IdentifiableType<T> type;
private final Set<SingularAttribute<? super T, ?>> idClassAttributes;
private final Set<SingularAttribute<? super T, ?>> attributes;
private @Nullable Class<?> idType;
IdMetadata(IdentifiableType<T> source, PersistenceProvider persistenceProvider) {
this.type = source;
this.idClassAttributes = persistenceProvider.getIdClassAttributes(source);
this.attributes = source.hasSingleIdAttribute()
? Collections.singleton(source.getId(source.getIdType().getJavaType()))
: source.getIdClassAttributes();
}
boolean hasSimpleId() {
return idClassAttributes.isEmpty() && attributes.size() == 1;
}
public Class<?> getType() {
if (idType != null) {
return idType;
}
// lazy initialization of idType field with tolerable benign data-race
this.idType = tryExtractIdTypeWithFallbackToIdTypeLookup();
if (this.idType == null) {
throw new IllegalStateException("Cannot resolve Id type from " + type);
}
return this.idType;
}
private @Nullable Class<?> tryExtractIdTypeWithFallbackToIdTypeLookup() {
try {
Class<?> idClassType = lookupIdClass(type);
if (idClassType != null) {
return idClassType;
}
Type<?> idType = type.getIdType();
return idType == null ? null : idType.getJavaType();
} catch (IllegalStateException e) {
// see https://hibernate.onjira.com/browse/HHH-6951
return null;
}
}
private static @Nullable Class<?> lookupIdClass(IdentifiableType<?> type) {
IdClass annotation = type.getJavaType() != null
? AnnotationUtils.findAnnotation(type.getJavaType(), IdClass.class)
: null;
return annotation == null ? null : annotation.value();
}
SingularAttribute<? super T, ?> getSimpleIdAttribute() {
return attributes.iterator().next();
}
@Override
public Iterator<SingularAttribute<? super T, ?>> iterator() {
return attributes.iterator();
}
}
}
| IdMetadata |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java | {
"start": 3277,
"end": 7433
} | class ____ implements AuditTrail {
@Override
public String name() {
return "noop";
}
@Override
public void authenticationSuccess(RestRequest request) {}
@Override
public void authenticationSuccess(
String requestId,
Authentication authentication,
String action,
TransportRequest transportRequest
) {}
@Override
public void anonymousAccessDenied(String requestId, String action, TransportRequest transportRequest) {}
@Override
public void anonymousAccessDenied(String requestId, HttpPreRequest request) {}
@Override
public void authenticationFailed(String requestId, HttpPreRequest request) {}
@Override
public void authenticationFailed(String requestId, String action, TransportRequest transportRequest) {}
@Override
public void authenticationFailed(String requestId, AuthenticationToken token, String action, TransportRequest transportRequest) {}
@Override
public void authenticationFailed(String requestId, AuthenticationToken token, HttpPreRequest request) {}
@Override
public void authenticationFailed(
String requestId,
String realm,
AuthenticationToken token,
String action,
TransportRequest transportRequest
) {}
@Override
public void authenticationFailed(String requestId, String realm, AuthenticationToken token, HttpPreRequest request) {}
@Override
public void accessGranted(
String requestId,
Authentication authentication,
String action,
TransportRequest transportRequest,
AuthorizationInfo authorizationInfo
) {}
@Override
public void accessDenied(
String requestId,
Authentication authentication,
String action,
TransportRequest transportRequest,
AuthorizationInfo authorizationInfo
) {}
@Override
public void tamperedRequest(String requestId, HttpPreRequest request) {}
@Override
public void tamperedRequest(String requestId, String action, TransportRequest transportRequest) {}
@Override
public void tamperedRequest(String requestId, Authentication authentication, String action, TransportRequest transportRequest) {}
@Override
public void connectionGranted(InetSocketAddress inetAddress, String profile, SecurityIpFilterRule rule) {}
@Override
public void connectionDenied(InetSocketAddress inetAddress, String profile, SecurityIpFilterRule rule) {}
@Override
public void runAsGranted(
String requestId,
Authentication authentication,
String action,
TransportRequest transportRequest,
AuthorizationInfo authorizationInfo
) {}
@Override
public void runAsDenied(
String requestId,
Authentication authentication,
String action,
TransportRequest transportRequest,
AuthorizationInfo authorizationInfo
) {}
@Override
public void runAsDenied(
String requestId,
Authentication authentication,
HttpPreRequest request,
AuthorizationInfo authorizationInfo
) {}
@Override
public void explicitIndexAccessEvent(
String requestId,
AuditLevel eventType,
Authentication authentication,
String action,
String[] indices,
String requestName,
InetSocketAddress remoteAddress,
AuthorizationInfo authorizationInfo
) {}
@Override
public void coordinatingActionResponse(
String requestId,
Authentication authentication,
String action,
TransportRequest transportRequest,
TransportResponse transportResponse
) {}
}
}
| NoopAuditTrail |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/filters/NameFilterTest_short.java | {
"start": 329,
"end": 1956
} | class ____ extends TestCase {
public void test_namefilter() throws Exception {
NameFilter filter = new NameFilter() {
public String process(Object source, String name, Object value) {
if (value != null) {
Assert.assertTrue(value instanceof Short);
}
if (name.equals("id")) {
return "ID";
}
return name;
}
};
SerializeWriter out = new SerializeWriter();
JSONSerializer serializer = new JSONSerializer(out);
serializer.getNameFilters().add(filter);
Bean a = new Bean();
serializer.write(a);
String text = out.toString();
Assert.assertEquals("{\"ID\":0}", text);
}
public void test_namefilter_1() throws Exception {
NameFilter filter = new NameFilter() {
public String process(Object source, String name, Object value) {
if (name.equals("id")) {
Assert.assertTrue(value instanceof Short);
return "ID";
}
return name;
}
};
SerializeWriter out = new SerializeWriter();
JSONSerializer serializer = new JSONSerializer(out);
serializer.getNameFilters().add(filter);
Map<String, Object> map = new HashMap<String, Object>();
map.put("id", (short) 0);
serializer.write(map);
String text = out.toString();
Assert.assertEquals("{\"ID\":0}", text);
}
public static | NameFilterTest_short |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.